Prepare list of pandas DataFrames to be used as input to pd.concat.
Ensure any columns of type 'category' have the same categories across each
dataframe.
Parameters
----------
df_list : list
List of dataframes with same columns.
inplace : bool
True if input list can be modified. Default is False.
Returns
-------
concatenated : df
Dataframe of concatenated list. | def categorical_df_concat(df_list, inplace=False):
"""
Prepare list of pandas DataFrames to be used as input to pd.concat.
Ensure any columns of type 'category' have the same categories across each
dataframe.
Parameters
----------
df_list : list
List of dataframes with same columns.
inplace : bool
True if input list can be modified. Default is False.
Returns
-------
concatenated : df
Dataframe of concatenated list.
"""
if not inplace:
df_list = deepcopy(df_list)
# Assert each dataframe has the same columns/dtypes
df = df_list[0]
if not all([(df.dtypes.equals(df_i.dtypes)) for df_i in df_list[1:]]):
raise ValueError("Input DataFrames must have the same columns/dtypes.")
categorical_columns = df.columns[df.dtypes == 'category']
for col in categorical_columns:
new_categories = _sort_set_none_first(
_union_all(frame[col].cat.categories for frame in df_list)
)
with ignore_pandas_nan_categorical_warning():
for df in df_list:
df[col].cat.set_categories(new_categories, inplace=True)
return pd.concat(df_list) |
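A minimal usage sketch (not from the original source): two frames whose category columns disagree are aligned before concatenation. It assumes the pandas versions zipline targeted, where category dtypes compare equal regardless of their category values and ``Series.cat.set_categories`` still accepts ``inplace=True``.

import pandas as pd

# Illustrative data, not from the original code base.
df_a = pd.DataFrame({'rating': pd.Categorical(['low', 'high'])})
df_b = pd.DataFrame({'rating': pd.Categorical(['medium', 'low'])})

# A plain pd.concat would fall back to object dtype because the category
# sets differ; categorical_df_concat unifies the categories first.
combined = categorical_df_concat([df_a, df_b])
print(combined['rating'].cat.categories)  # Index(['high', 'low', 'medium'])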
Union entries in ``iterables`` into a set.
| def _union_all(iterables):
"""Union entries in ``iterables`` into a set.
"""
return set().union(*iterables) |
Sort a set, sorting ``None`` before other elements, if present.
| def _sort_set_none_first(set_):
"""Sort a set, sorting ``None`` before other elements, if present.
"""
if None in set_:
set_.remove(None)
out = [None]
out.extend(sorted(set_))
set_.add(None)
return out
else:
return sorted(set_) |
Create an empty dataframe with columns of particular types.
Parameters
----------
*columns
The (column_name, column_dtype) pairs.
Returns
-------
typed_dataframe : pd.DataFrame
The empty typed dataframe.
Examples
--------
>>> df = empty_dataframe(
... ('a', 'int64'),
... ('b', 'float64'),
... ('c', 'datetime64[ns]'),
... )
>>> df
Empty DataFrame
Columns: [a, b, c]
Index: []
>>> df.dtypes
a int64
b float64
c datetime64[ns]
dtype: object | def empty_dataframe(*columns):
"""Create an empty dataframe with columns of particular types.
Parameters
----------
*columns
The (column_name, column_dtype) pairs.
Returns
-------
typed_dataframe : pd.DataFrame
The empty typed dataframe.
Examples
--------
>>> df = empty_dataframe(
... ('a', 'int64'),
... ('b', 'float64'),
... ('c', 'datetime64[ns]'),
... )
>>> df
Empty DataFrame
Columns: [a, b, c]
Index: []
>>> df.dtypes
a int64
b float64
c datetime64[ns]
dtype: object
"""
return pd.DataFrame(np.array([], dtype=list(columns))) |
Check that a list of Index objects are all equal.
Parameters
----------
indexes : iterable[pd.Index]
Iterable of indexes to check.
Raises
------
ValueError
If the indexes are not all the same. | def check_indexes_all_same(indexes, message="Indexes are not equal."):
"""Check that a list of Index objects are all equal.
Parameters
----------
indexes : iterable[pd.Index]
Iterable of indexes to check.
Raises
------
ValueError
If the indexes are not all the same.
"""
iterator = iter(indexes)
first = next(iterator)
for other in iterator:
same = (first == other)
if not same.all():
bad_loc = np.flatnonzero(~same)[0]
raise ValueError(
"{}\nFirst difference is at index {}: "
"{} != {}".format(
message, bad_loc, first[bad_loc], other[bad_loc]
),
) |
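A brief usage sketch (assumed, not from the source) showing the error raised at the first mismatch:

import pandas as pd

check_indexes_all_same([pd.Index([1, 2, 3]), pd.Index([1, 2, 3])])  # passes silently

try:
    check_indexes_all_same(
        [pd.Index([1, 2, 3]), pd.Index([1, 9, 3])],
        message="Indexes differ.",
    )
except ValueError as e:
    print(e)  # Indexes differ.
              # First difference is at index 1: 2 != 9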
Check if a path is hidden.
Parameters
----------
path : str
A filepath. | def hidden(path):
"""Check if a path is hidden.
Parameters
----------
path : str
A filepath.
"""
return os.path.split(path)[1].startswith('.') |
Ensure that a directory named "path" exists. | def ensure_directory(path):
"""
Ensure that a directory named "path" exists.
"""
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == EEXIST and os.path.isdir(path):
return
raise |
Ensure that the directory containing `path` exists.
This is just a convenience wrapper for doing::
ensure_directory(os.path.dirname(path)) | def ensure_directory_containing(path):
"""
Ensure that the directory containing `path` exists.
This is just a convenience wrapper for doing::
ensure_directory(os.path.dirname(path))
"""
ensure_directory(os.path.dirname(path)) |
Ensure that a file exists. This will create any parent directories needed
and create an empty file if it does not exist.
Parameters
----------
path : str
The file path to ensure exists. | def ensure_file(path):
"""
Ensure that a file exists. This will create any parent directories needed
and create an empty file if it does not exist.
Parameters
----------
path : str
The file path to ensure exists.
"""
ensure_directory_containing(path)
open(path, 'a+').close() |
Updates the modified time of an existing file. This will create any
parent directories needed and create an empty file if it does not exist.
Parameters
----------
path : str
The file path to update.
times : tuple
A tuple of size two; access time and modified time | def update_modified_time(path, times=None):
"""
Updates the modified time of an existing file. This will create any
parent directories needed and create an empty file if it does not exist.
Parameters
----------
path : str
The file path to update.
times : tuple
A tuple of size two; access time and modified time
"""
ensure_directory_containing(path)
os.utime(path, times) |
Get the last modified time of path as a Timestamp. | def last_modified_time(path):
"""
Get the last modified time of path as a Timestamp.
"""
return pd.Timestamp(os.path.getmtime(path), unit='s', tz='UTC') |
Check whether `path` was modified since `dt`.
Returns False if path doesn't exist.
Parameters
----------
path : str
Path to the file to be checked.
dt : pd.Timestamp
The date against which to compare last_modified_time(path).
Returns
-------
was_modified : bool
Will be ``False`` if path doesn't exist, or if its last modified date
is earlier than or equal to `dt`. | def modified_since(path, dt):
"""
Check whether `path` was modified since `dt`.
Returns False if path doesn't exist.
Parameters
----------
path : str
Path to the file to be checked.
dt : pd.Timestamp
The date against which to compare last_modified_time(path).
Returns
-------
was_modified : bool
Will be ``False`` if path doesn't exist, or if its last modified date
is earlier than or equal to `dt`.
"""
return exists(path) and last_modified_time(path) > dt |
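An assumed usage sketch for the modification-time helpers; the file path is illustrative:

import pandas as pd

cutoff = pd.Timestamp('2020-01-01', tz='UTC')
# True only if the file exists and was modified strictly after the cutoff.
if modified_since('/tmp/example_cache.csv', cutoff):
    print('file changed after', cutoff, 'at', last_modified_time('/tmp/example_cache.csv'))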
Get the root directory for all zipline-managed files.
For testing purposes, this accepts a dictionary to interpret as the os
environment.
Parameters
----------
environ : dict, optional
A dict to interpret as the os environment.
Returns
-------
root : string
Path to the zipline root dir. | def zipline_root(environ=None):
"""
Get the root directory for all zipline-managed files.
For testing purposes, this accepts a dictionary to interpret as the os
environment.
Parameters
----------
environ : dict, optional
A dict to interpret as the os environment.
Returns
-------
root : string
Path to the zipline root dir.
"""
if environ is None:
environ = os.environ
root = environ.get('ZIPLINE_ROOT', None)
if root is None:
root = expanduser('~/.zipline')
return root |
Get a path relative to the zipline root.
Parameters
----------
paths : list[str]
List of requested path pieces.
environ : dict, optional
An environment dict to forward to zipline_root.
Returns
-------
newpath : str
The requested path joined with the zipline root. | def zipline_path(paths, environ=None):
"""
Get a path relative to the zipline root.
Parameters
----------
paths : list[str]
List of requested path pieces.
environ : dict, optional
An environment dict to forward to zipline_root.
Returns
-------
newpath : str
The requested path joined with the zipline root.
"""
return join(zipline_root(environ=environ), *paths) |
Get the path to the default zipline extension file.
Parameters
----------
environ : dict, optional
An environment dict to forward to zipline_root.
Returns
-------
default_extension_path : str
The file path to the default zipline extension file. | def default_extension(environ=None):
"""
Get the path to the default zipline extension file.
Parameters
----------
environ : dict, optional
An environment dict to forward to zipline_root.
Returns
-------
default_extension_path : str
The file path to the default zipline extension file.
"""
return zipline_path(['extension.py'], environ=environ) |
The root directory for zipline data files.
Parameters
----------
environ : dict, optional
An environment dict to forward to zipline_root.
Returns
-------
data_root : str
The zipline data root. | def data_root(environ=None):
"""
The root directory for zipline data files.
Parameters
----------
environ : dict, optional
An environment dict to forward to zipline_root.
Returns
-------
data_root : str
The zipline data root.
"""
return zipline_path(['data'], environ=environ) |
Ensure that the data root exists. | def ensure_data_root(environ=None):
"""
Ensure that the data root exists.
"""
ensure_directory(data_root(environ=environ)) |
Get a path relative to the zipline data directory.
Parameters
----------
paths : iterable[str]
List of requested path pieces.
environ : dict, optional
An environment dict to forward to zipline_root.
Returns
-------
newpath : str
The requested path joined with the zipline data root. | def data_path(paths, environ=None):
"""
Get a path relative to the zipline data directory.
Parameters
----------
paths : iterable[str]
List of requested path pieces.
environ : dict, optional
An environment dict to forward to zipline_root.
Returns
-------
newpath : str
The requested path joined with the zipline data root.
"""
return zipline_path(['data'] + list(paths), environ=environ) |
The root directory for zipline cache files.
Parameters
----------
environ : dict, optional
An environment dict to forward to zipline_root.
Returns
-------
cache_root : str
The zipline cache root. | def cache_root(environ=None):
"""
The root directory for zipline cache files.
Parameters
----------
environ : dict, optional
An environment dict to forward to zipline_root.
Returns
-------
cache_root : str
The zipline cache root.
"""
return zipline_path(['cache'], environ=environ) |
Ensure that the cache root exists. | def ensure_cache_root(environ=None):
"""
Ensure that the cache root exists.
"""
ensure_directory(cache_root(environ=environ)) |
Get a path relative to the zipline cache directory.
Parameters
----------
paths : iterable[str]
List of requested path pieces.
environ : dict, optional
An environment dict to forward to zipline_root.
Returns
-------
newpath : str
The requested path joined with the zipline cache root. | def cache_path(paths, environ=None):
"""
Get a path relative to the zipline cache directory.
Parameters
----------
paths : iterable[str]
List of requested path pieces.
environ : dict, optional
An environment dict to forward to zipline_root.
Returns
-------
newpath : str
The requested path joined with the zipline cache root.
"""
return zipline_path(['cache'] + list(paths), environ=environ) |
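The path helpers above all compose through zipline_path: data_path and cache_path simply prepend their subdirectory before joining with the zipline root. A sketch of that composition (the path pieces are illustrative), with ZIPLINE_ROOT unset so the ~/.zipline default applies:

environ = {}  # empty environment, so the '~/.zipline' default is used
print(zipline_root(environ=environ))                          # ~/.zipline (with ~ expanded)
print(default_extension(environ=environ))                     # ~/.zipline/extension.py
print(data_path(['quandl', 'daily.bcolz'], environ=environ))  # ~/.zipline/data/quandl/daily.bcolz
print(cache_path(['tmp.csv'], environ=environ))               # ~/.zipline/cache/tmp.csv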
Decorator that applies pre-processors to the arguments of a function before
calling the function.
Parameters
----------
**processors : dict
Map from argument name -> processor function.
A processor function takes three arguments: (func, argname, argvalue).
`func` is the function for which we're processing args.
`argname` is the name of the argument we're processing.
`argvalue` is the value of the argument we're processing.
Examples
--------
>>> def _ensure_tuple(func, argname, arg):
... if isinstance(arg, tuple):
... return arg
... try:
... return tuple(arg)
... except TypeError:
... raise TypeError(
... "%s() expected argument '%s' to"
... " be iterable, but got %s instead." % (
... func.__name__, argname, arg,
... )
... )
...
>>> @preprocess(arg=_ensure_tuple)
... def foo(arg):
... return arg
...
>>> foo([1, 2, 3])
(1, 2, 3)
>>> foo("a")
('a',)
>>> foo(2)
Traceback (most recent call last):
...
TypeError: foo() expected argument 'arg' to be iterable, but got 2 instead. | def preprocess(*_unused, **processors):
"""
Decorator that applies pre-processors to the arguments of a function before
calling the function.
Parameters
----------
**processors : dict
Map from argument name -> processor function.
A processor function takes three arguments: (func, argname, argvalue).
`func` is the function for which we're processing args.
`argname` is the name of the argument we're processing.
`argvalue` is the value of the argument we're processing.
Examples
--------
>>> def _ensure_tuple(func, argname, arg):
... if isinstance(arg, tuple):
... return arg
... try:
... return tuple(arg)
... except TypeError:
... raise TypeError(
... "%s() expected argument '%s' to"
... " be iterable, but got %s instead." % (
... func.__name__, argname, arg,
... )
... )
...
>>> @preprocess(arg=_ensure_tuple)
... def foo(arg):
... return arg
...
>>> foo([1, 2, 3])
(1, 2, 3)
>>> foo("a")
('a',)
>>> foo(2)
Traceback (most recent call last):
...
TypeError: foo() expected argument 'arg' to be iterable, but got 2 instead.
"""
if _unused:
raise TypeError("preprocess() doesn't accept positional arguments")
def _decorator(f):
args, varargs, varkw, defaults = argspec = getargspec(f)
if defaults is None:
defaults = ()
no_defaults = (NO_DEFAULT,) * (len(args) - len(defaults))
args_defaults = list(zip(args, no_defaults + defaults))
if varargs:
args_defaults.append((varargs, NO_DEFAULT))
if varkw:
args_defaults.append((varkw, NO_DEFAULT))
argset = set(args) | {varargs, varkw} - {None}
# Arguments can be declared as tuples in Python 2.
if not all(isinstance(arg, str) for arg in args):
raise TypeError(
"Can't validate functions using tuple unpacking: %s" %
(argspec,)
)
# Ensure that all processors map to valid names.
bad_names = viewkeys(processors) - argset
if bad_names:
raise TypeError(
"Got processors for unknown arguments: %s." % bad_names
)
return _build_preprocessed_function(
f, processors, args_defaults, varargs, varkw,
)
return _decorator |
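A hedged sketch of preprocess with more than one processor; note that default values are also run through their processor, because the generated wrapper applies the assignments on every call. The processors below are illustrative, not part of the original module.

def _ensure_int(func, argname, arg):
    return int(arg)

def _ensure_str(func, argname, arg):
    return str(arg)

@preprocess(label=_ensure_str, count=_ensure_int)
def repeat(label, count=2):
    return label * count

print(repeat('ab', '3'))  # 'ababab' -- both arguments coerced
print(repeat(10))         # '1010'   -- label coerced to str, default count=2 processed too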
Wrap a function in a processor that calls `f` on the argument before
passing it along.
Useful for creating simple arguments to the `@preprocess` decorator.
Parameters
----------
f : function
Function accepting a single argument and returning a replacement.
Examples
--------
>>> @preprocess(x=call(lambda x: x + 1))
... def foo(x):
... return x
...
>>> foo(1)
2 | def call(f):
"""
Wrap a function in a processor that calls `f` on the argument before
passing it along.
Useful for creating simple arguments to the `@preprocess` decorator.
Parameters
----------
f : function
Function accepting a single argument and returning a replacement.
Examples
--------
>>> @preprocess(x=call(lambda x: x + 1))
... def foo(x):
... return x
...
>>> foo(1)
2
"""
@wraps(f)
def processor(func, argname, arg):
return f(arg)
return processor |
Build a preprocessed function with the same signature as `func`.
Uses `exec` internally to build a function that actually has the same
signature as `func`. | def _build_preprocessed_function(func,
processors,
args_defaults,
varargs,
varkw):
"""
Build a preprocessed function with the same signature as `func`.
Uses `exec` internally to build a function that actually has the same
signature as `func`.
"""
format_kwargs = {'func_name': func.__name__}
def mangle(name):
return 'a' + uuid4().hex + name
format_kwargs['mangled_func'] = mangled_funcname = mangle(func.__name__)
def make_processor_assignment(arg, processor_name):
template = "{arg} = {processor}({func}, '{arg}', {arg})"
return template.format(
arg=arg,
processor=processor_name,
func=mangled_funcname,
)
exec_globals = {mangled_funcname: func, 'wraps': wraps}
defaults_seen = 0
default_name_template = 'a' + uuid4().hex + '_%d'
signature = []
call_args = []
assignments = []
star_map = {
varargs: '*',
varkw: '**',
}
def name_as_arg(arg):
return star_map.get(arg, '') + arg
for arg, default in args_defaults:
if default is NO_DEFAULT:
signature.append(name_as_arg(arg))
else:
default_name = default_name_template % defaults_seen
exec_globals[default_name] = default
signature.append('='.join([name_as_arg(arg), default_name]))
defaults_seen += 1
if arg in processors:
procname = mangle('_processor_' + arg)
exec_globals[procname] = processors[arg]
assignments.append(make_processor_assignment(arg, procname))
call_args.append(name_as_arg(arg))
exec_str = dedent(
"""\
@wraps({wrapped_funcname})
def {func_name}({signature}):
{assignments}
return {wrapped_funcname}({call_args})
"""
).format(
func_name=func.__name__,
signature=', '.join(signature),
assignments='\n '.join(assignments),
wrapped_funcname=mangled_funcname,
call_args=', '.join(call_args),
)
compiled = compile(
exec_str,
func.__code__.co_filename,
mode='exec',
)
exec_locals = {}
exec_(compiled, exec_globals, exec_locals)
new_func = exec_locals[func.__name__]
code = new_func.__code__
args = {
attr: getattr(code, attr)
for attr in dir(code)
if attr.startswith('co_')
}
# Copy the firstlineno out of the underlying function so that exceptions
# get raised with the correct traceback.
# This also makes dynamic source inspection (like IPython `??` operator)
# work as intended.
try:
# Try to get the pycode object from the underlying function.
original_code = func.__code__
except AttributeError:
try:
# The underlying callable was not a function, try to grab the
# `__func__.__code__` which exists on method objects.
original_code = func.__func__.__code__
except AttributeError:
# The underlying callable does not have a `__code__`. There is
# nothing for us to correct.
return new_func
args['co_firstlineno'] = original_code.co_firstlineno
new_func.__code__ = CodeType(*map(getitem(args), _code_argorder))
return new_func |
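One property worth illustrating (a hedged sketch, not from the source): because the wrapper is exec-compiled from a template that mirrors the original parameter list, introspection still reports the real signature instead of a generic *args/**kwargs wrapper.

from inspect import signature

@preprocess(x=call(float))
def scale(x, factor=2.0):
    return x * factor

print(signature(scale))  # (x, factor=2.0) -- the original parameters survive
print(scale('3'))        # 6.0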
Convert a tuple into a range with error handling.
Parameters
----------
tup : tuple (len 2 or 3)
The tuple to turn into a range.
Returns
-------
range : range
The range from the tuple.
Raises
------
ValueError
Raised when the tuple length is not 2 or 3. | def from_tuple(tup):
"""Convert a tuple into a range with error handling.
Parameters
----------
tup : tuple (len 2 or 3)
The tuple to turn into a range.
Returns
-------
range : range
The range from the tuple.
Raises
------
ValueError
Raised when the tuple length is not 2 or 3.
"""
if len(tup) not in (2, 3):
raise ValueError(
'tuple must contain 2 or 3 elements, not: %d (%r)' % (
len(tup),
tup,
),
)
return range(*tup) |
Convert a tuple into a range but pass ranges through silently.
This is useful to ensure that input is a range so that attributes may
be accessed with `.start`, `.stop` or so that containment checks are
constant time.
Parameters
----------
tup_or_range : tuple or range
A tuple to pass to from_tuple or a range to return.
Returns
-------
range : range
The input to convert to a range.
Raises
------
ValueError
Raised when the input is not a tuple or a range. ValueError is also
raised if the input is a tuple whose length is not 2 or 3. | def maybe_from_tuple(tup_or_range):
"""Convert a tuple into a range but pass ranges through silently.
This is useful to ensure that input is a range so that attributes may
be accessed with `.start`, `.stop` or so that containment checks are
constant time.
Parameters
----------
tup_or_range : tuple or range
A tuple to pass to from_tuple or a range to return.
Returns
-------
range : range
The input to convert to a range.
Raises
------
ValueError
Raised when the input is not a tuple or a range. ValueError is also
raised if the input is a tuple whose length is not 2 or 3.
"""
if isinstance(tup_or_range, tuple):
return from_tuple(tup_or_range)
elif isinstance(tup_or_range, range):
return tup_or_range
raise ValueError(
'maybe_from_tuple expects a tuple or range, got %r: %r' % (
type(tup_or_range).__name__,
tup_or_range,
),
) |
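An assumed usage sketch for the tuple-to-range helpers:

print(from_tuple((2, 5)))          # range(2, 5)
print(from_tuple((0, 10, 2)))      # range(0, 10, 2)
print(maybe_from_tuple(range(3)))  # range(0, 3) -- ranges pass through untouched
print(maybe_from_tuple((1, 4)))    # range(1, 4)
try:
    from_tuple((1,))
except ValueError as e:
    print(e)  # tuple must contain 2 or 3 elements, not: 1 ((1,))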
Check that the steps of ``a`` and ``b`` are both 1.
Parameters
----------
a : range
The first range to check.
b : range
The second range to check.
Raises
------
ValueError
Raised when either step is not 1. | def _check_steps(a, b):
"""Check that the steps of ``a`` and ``b`` are both 1.
Parameters
----------
a : range
The first range to check.
b : range
The second range to check.
Raises
------
ValueError
Raised when either step is not 1.
"""
if a.step != 1:
raise ValueError('a.step must be equal to 1, got: %s' % a.step)
if b.step != 1:
raise ValueError('b.step must be equal to 1, got: %s' % b.step) |
Check if two ranges overlap.
Parameters
----------
a : range
The first range.
b : range
The second range.
Returns
-------
overlaps : bool
Do these ranges overlap.
Notes
-----
This function does not support ranges with step != 1. | def overlap(a, b):
"""Check if two ranges overlap.
Parameters
----------
a : range
The first range.
b : range
The second range.
Returns
-------
overlaps : bool
Do these ranges overlap.
Notes
-----
This function does not support ranges with step != 1.
"""
_check_steps(a, b)
return a.stop >= b.start and b.stop >= a.start |
Merge two ranges with step == 1.
Parameters
----------
a : range
The first range.
b : range
The second range. | def merge(a, b):
"""Merge two ranges with step == 1.
Parameters
----------
a : range
The first range.
b : range
The second range.
"""
_check_steps(a, b)
return range(min(a.start, b.start), max(a.stop, b.stop)) |
helper for ``group_ranges``
| def _combine(n, rs):
"""helper for ``_group_ranges``
"""
try:
r, rs = peek(rs)
except StopIteration:
yield n
return
if overlap(n, r):
yield merge(n, r)
next(rs)
for r in rs:
yield r
else:
yield n
for r in rs:
yield r |
Group any overlapping ranges into a single range.
Parameters
----------
ranges : iterable[ranges]
A sorted sequence of ranges to group.
Returns
-------
grouped : iterable[ranges]
A sorted sequence of ranges with overlapping ranges merged together. | def group_ranges(ranges):
"""Group any overlapping ranges into a single range.
Parameters
----------
ranges : iterable[ranges]
A sorted sequence of ranges to group.
Returns
-------
grouped : iterable[ranges]
A sorted sequence of ranges with overlapping ranges merged together.
"""
return foldr(_combine, ranges, ()) |
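A hedged behaviour sketch for the range helpers above (inputs must use step 1, and group_ranges expects them sorted by start). Note that adjacency counts as overlap because the comparison uses >=:

print(overlap(range(0, 5), range(5, 8)))   # True -- touching ranges count as overlapping
print(merge(range(0, 5), range(3, 8)))     # range(0, 8)
print(list(group_ranges([range(0, 3), range(2, 6), range(10, 12)])))
# [range(0, 6), range(10, 12)]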
Return any ranges that intersect.
Parameters
----------
ranges : iterable[ranges]
A sequence of ranges to check for intersections.
Returns
-------
intersections : iterable[ranges]
A sequence of all of the ranges that intersected in ``ranges``.
Examples
--------
>>> ranges = [range(0, 1), range(2, 5), range(4, 7)]
>>> list(intersecting_ranges(ranges))
[range(2, 5), range(4, 7)]
>>> ranges = [range(0, 1), range(2, 3)]
>>> list(intersecting_ranges(ranges))
[]
>>> ranges = [range(0, 1), range(1, 2)]
>>> list(intersecting_ranges(ranges))
[range(0, 1), range(1, 2)] | def intersecting_ranges(ranges):
"""Return any ranges that intersect.
Parameters
----------
ranges : iterable[ranges]
A sequence of ranges to check for intersections.
Returns
-------
intersections : iterable[ranges]
A sequence of all of the ranges that intersected in ``ranges``.
Examples
--------
>>> ranges = [range(0, 1), range(2, 5), range(4, 7)]
>>> list(intersecting_ranges(ranges))
[range(2, 5), range(4, 7)]
>>> ranges = [range(0, 1), range(2, 3)]
>>> list(intersecting_ranges(ranges))
[]
>>> ranges = [range(0, 1), range(1, 2)]
>>> list(intersecting_ranges(ranges))
[range(0, 1), range(1, 2)]
"""
ranges = sorted(ranges, key=op.attrgetter('start'))
return sorted_diff(ranges, group_ranges(ranges)) |
Run a backtest for the given algorithm.
This is shared between the cli and :func:`zipline.run_algo`. | def _run(handle_data,
initialize,
before_trading_start,
analyze,
algofile,
algotext,
defines,
data_frequency,
capital_base,
bundle,
bundle_timestamp,
start,
end,
output,
trading_calendar,
print_algo,
metrics_set,
local_namespace,
environ,
blotter,
benchmark_spec):
"""Run a backtest for the given algorithm.
This is shared between the cli and :func:`zipline.run_algo`.
"""
bundle_data = bundles.load(
bundle,
environ,
bundle_timestamp,
)
if trading_calendar is None:
trading_calendar = get_calendar('XNYS')
# date parameter validation
if trading_calendar.session_distance(start, end) < 1:
raise _RunAlgoError(
'There are no trading days between %s and %s' % (
start.date(),
end.date(),
),
)
benchmark_sid, benchmark_returns = benchmark_spec.resolve(
asset_finder=bundle_data.asset_finder,
start_date=start,
end_date=end,
)
if algotext is not None:
if local_namespace:
ip = get_ipython() # noqa
namespace = ip.user_ns
else:
namespace = {}
for assign in defines:
try:
name, value = assign.split('=', 2)
except ValueError:
raise ValueError(
'invalid define %r, should be of the form name=value' %
assign,
)
try:
# evaluate in the same namespace so names may refer to
# each other
namespace[name] = eval(value, namespace)
except Exception as e:
raise ValueError(
'failed to execute definition for name %r: %s' % (name, e),
)
elif defines:
raise _RunAlgoError(
'cannot pass define without `algotext`',
"cannot pass '-D' / '--define' without '-t' / '--algotext'",
)
else:
namespace = {}
if algofile is not None:
algotext = algofile.read()
if print_algo:
if PYGMENTS:
highlight(
algotext,
PythonLexer(),
TerminalFormatter(),
outfile=sys.stdout,
)
else:
click.echo(algotext)
first_trading_day = \
bundle_data.equity_minute_bar_reader.first_trading_day
data = DataPortal(
bundle_data.asset_finder,
trading_calendar=trading_calendar,
first_trading_day=first_trading_day,
equity_minute_reader=bundle_data.equity_minute_bar_reader,
equity_daily_reader=bundle_data.equity_daily_bar_reader,
adjustment_reader=bundle_data.adjustment_reader,
)
pipeline_loader = USEquityPricingLoader.without_fx(
bundle_data.equity_daily_bar_reader,
bundle_data.adjustment_reader,
)
def choose_loader(column):
if column in USEquityPricing.columns:
return pipeline_loader
raise ValueError(
"No PipelineLoader registered for column %s." % column
)
if isinstance(metrics_set, six.string_types):
try:
metrics_set = metrics.load(metrics_set)
except ValueError as e:
raise _RunAlgoError(str(e))
if isinstance(blotter, six.string_types):
try:
blotter = load(Blotter, blotter)
except ValueError as e:
raise _RunAlgoError(str(e))
try:
perf = TradingAlgorithm(
namespace=namespace,
data_portal=data,
get_pipeline_loader=choose_loader,
trading_calendar=trading_calendar,
sim_params=SimulationParameters(
start_session=start,
end_session=end,
trading_calendar=trading_calendar,
capital_base=capital_base,
data_frequency=data_frequency,
),
metrics_set=metrics_set,
blotter=blotter,
benchmark_returns=benchmark_returns,
benchmark_sid=benchmark_sid,
**{
'initialize': initialize,
'handle_data': handle_data,
'before_trading_start': before_trading_start,
'analyze': analyze,
} if algotext is None else {
'algo_filename': getattr(algofile, 'name', '<algorithm>'),
'script': algotext,
}
).run()
except NoBenchmark:
raise _RunAlgoError(
(
'No ``benchmark_spec`` was provided, and'
' ``zipline.api.set_benchmark`` was not called in'
' ``initialize``.'
),
(
"Neither '--benchmark-symbol' nor '--benchmark-sid' was"
" provided, and ``zipline.api.set_benchmark`` was not called"
" in ``initialize``. Did you mean to pass '--no-benchmark'?"
),
)
if output == '-':
click.echo(str(perf))
elif output != os.devnull: # make the zipline magic not write any data
perf.to_pickle(output)
return perf |
Load all of the given extensions. This should be called by run_algo
or the cli.
Parameters
----------
default : bool
Load the default extension (~/.zipline/extension.py)?
extensions : iterable[str]
The paths to the extensions to load. If the path ends in ``.py`` it is
treated as a script and executed. If it does not end in ``.py`` it is
treated as a module to be imported.
strict : bool
Whether failure to load an extension should raise an error. If False, a
warning is issued instead.
environ : mapping
The environment to use to find the default extension path.
reload : bool, optional
Reload any extensions that have already been loaded. | def load_extensions(default, extensions, strict, environ, reload=False):
"""Load all of the given extensions. This should be called by run_algo
or the cli.
Parameters
----------
default : bool
Load the default extension (~/.zipline/extension.py)?
extensions : iterable[str]
The paths to the extensions to load. If the path ends in ``.py`` it is
treated as a script and executed. If it does not end in ``.py`` it is
treated as a module to be imported.
strict : bool
Whether failure to load an extension should raise an error. If False, a
warning is issued instead.
environ : mapping
The environment to use to find the default extension path.
reload : bool, optional
Reload any extensions that have already been loaded.
"""
if default:
default_extension_path = pth.default_extension(environ=environ)
pth.ensure_file(default_extension_path)
# put the default extension first so other extensions can depend on
# the order they are loaded
extensions = concatv([default_extension_path], extensions)
for ext in extensions:
if ext in _loaded_extensions and not reload:
continue
try:
# load all of the zipline extensions
if ext.endswith('.py'):
with open(ext) as f:
ns = {}
six.exec_(compile(f.read(), ext, 'exec'), ns, ns)
else:
__import__(ext)
except Exception as e:
if strict:
# if `strict` we should raise the actual exception and fail
raise
# without `strict` we should just log the failure
warnings.warn(
'Failed to load extension: %r\n%s' % (ext, e),
stacklevel=2
)
else:
_loaded_extensions.add(ext) |
Run a trading algorithm.
Parameters
----------
start : datetime
The start date of the backtest.
end : datetime
The end date of the backtest.
initialize : callable[context -> None]
The initialize function to use for the algorithm. This is called once
at the very beginning of the backtest and should be used to set up
any state needed by the algorithm.
capital_base : float
The starting capital for the backtest.
handle_data : callable[(context, BarData) -> None], optional
The handle_data function to use for the algorithm. This is called
every minute when ``data_frequency == 'minute'`` or every day
when ``data_frequency == 'daily'``.
before_trading_start : callable[(context, BarData) -> None], optional
The before_trading_start function for the algorithm. This is called
once before each trading day (after initialize on the first day).
analyze : callable[(context, pd.DataFrame) -> None], optional
The analyze function to use for the algorithm. This function is called
once at the end of the backtest and is passed the context and the
performance data.
data_frequency : {'daily', 'minute'}, optional
The data frequency to run the algorithm at.
bundle : str, optional
The name of the data bundle to use to load the data to run the backtest
with. This defaults to 'quantopian-quandl'.
bundle_timestamp : datetime, optional
The datetime to lookup the bundle data for. This defaults to the
current time.
trading_calendar : TradingCalendar, optional
The trading calendar to use for your backtest.
metrics_set : iterable[Metric] or str, optional
The set of metrics to compute in the simulation. If a string is passed,
resolve the set with :func:`zipline.finance.metrics.load`.
benchmark_returns : pd.Series, optional
Series of returns to use as the benchmark.
default_extension : bool, optional
Should the default zipline extension be loaded. This is found at
``$ZIPLINE_ROOT/extension.py``
extensions : iterable[str], optional
The names of any other extensions to load. Each element may either be
a dotted module path like ``a.b.c`` or a path to a python file ending
in ``.py`` like ``a/b/c.py``.
strict_extensions : bool, optional
Should the run fail if any extensions fail to load. If this is false,
a warning will be raised instead.
environ : mapping[str -> str], optional
The os environment to use. Many extensions use this to get parameters.
This defaults to ``os.environ``.
blotter : str or zipline.finance.blotter.Blotter, optional
Blotter to use with this algorithm. If passed as a string, we look for
a blotter construction function registered with
``zipline.extensions.register`` and call it with no parameters.
Default is a :class:`zipline.finance.blotter.SimulationBlotter` that
never cancels orders.
Returns
-------
perf : pd.DataFrame
The daily performance of the algorithm.
See Also
--------
zipline.data.bundles.bundles : The available data bundles. | def run_algorithm(start,
end,
initialize,
capital_base,
handle_data=None,
before_trading_start=None,
analyze=None,
data_frequency='daily',
bundle='quantopian-quandl',
bundle_timestamp=None,
trading_calendar=None,
metrics_set='default',
benchmark_returns=None,
default_extension=True,
extensions=(),
strict_extensions=True,
environ=os.environ,
blotter='default'):
"""
Run a trading algorithm.
Parameters
----------
start : datetime
The start date of the backtest.
end : datetime
The end date of the backtest.
initialize : callable[context -> None]
The initialize function to use for the algorithm. This is called once
at the very beginning of the backtest and should be used to set up
any state needed by the algorithm.
capital_base : float
The starting capital for the backtest.
handle_data : callable[(context, BarData) -> None], optional
The handle_data function to use for the algorithm. This is called
every minute when ``data_frequency == 'minute'`` or every day
when ``data_frequency == 'daily'``.
before_trading_start : callable[(context, BarData) -> None], optional
The before_trading_start function for the algorithm. This is called
once before each trading day (after initialize on the first day).
analyze : callable[(context, pd.DataFrame) -> None], optional
The analyze function to use for the algorithm. This function is called
once at the end of the backtest and is passed the context and the
performance data.
data_frequency : {'daily', 'minute'}, optional
The data frequency to run the algorithm at.
bundle : str, optional
The name of the data bundle to use to load the data to run the backtest
with. This defaults to 'quantopian-quandl'.
bundle_timestamp : datetime, optional
The datetime to lookup the bundle data for. This defaults to the
current time.
trading_calendar : TradingCalendar, optional
The trading calendar to use for your backtest.
metrics_set : iterable[Metric] or str, optional
The set of metrics to compute in the simulation. If a string is passed,
resolve the set with :func:`zipline.finance.metrics.load`.
benchmark_returns : pd.Series, optional
Series of returns to use as the benchmark.
default_extension : bool, optional
Should the default zipline extension be loaded. This is found at
``$ZIPLINE_ROOT/extension.py``
extensions : iterable[str], optional
The names of any other extensions to load. Each element may either be
a dotted module path like ``a.b.c`` or a path to a python file ending
in ``.py`` like ``a/b/c.py``.
strict_extensions : bool, optional
Should the run fail if any extensions fail to load. If this is false,
a warning will be raised instead.
environ : mapping[str -> str], optional
The os environment to use. Many extensions use this to get parameters.
This defaults to ``os.environ``.
blotter : str or zipline.finance.blotter.Blotter, optional
Blotter to use with this algorithm. If passed as a string, we look for
a blotter construction function registered with
``zipline.extensions.register`` and call it with no parameters.
Default is a :class:`zipline.finance.blotter.SimulationBlotter` that
never cancels orders.
Returns
-------
perf : pd.DataFrame
The daily performance of the algorithm.
See Also
--------
zipline.data.bundles.bundles : The available data bundles.
"""
load_extensions(default_extension, extensions, strict_extensions, environ)
benchmark_spec = BenchmarkSpec.from_returns(benchmark_returns)
return _run(
handle_data=handle_data,
initialize=initialize,
before_trading_start=before_trading_start,
analyze=analyze,
algofile=None,
algotext=None,
defines=(),
data_frequency=data_frequency,
capital_base=capital_base,
bundle=bundle,
bundle_timestamp=bundle_timestamp,
start=start,
end=end,
output=os.devnull,
trading_calendar=trading_calendar,
print_algo=False,
metrics_set=metrics_set,
local_namespace=False,
environ=environ,
blotter=blotter,
benchmark_spec=benchmark_spec,
) |
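A hedged end-to-end sketch of run_algorithm: a daily buy-and-hold strategy. The ticker is illustrative, the bundle must already have been ingested, and set_benchmark is called in initialize to avoid the NoBenchmark error described above.

import pandas as pd
from zipline.api import order_target_percent, set_benchmark, symbol

def initialize(context):
    context.asset = symbol('AAPL')  # illustrative ticker
    set_benchmark(context.asset)    # avoids requiring a separate benchmark series

def handle_data(context, data):
    order_target_percent(context.asset, 1.0)  # hold 100% of the portfolio in the asset

perf = run_algorithm(
    start=pd.Timestamp('2016-01-04', tz='utc'),
    end=pd.Timestamp('2016-12-30', tz='utc'),
    initialize=initialize,
    handle_data=handle_data,
    capital_base=100000,
    data_frequency='daily',
    bundle='quantopian-quandl',
)
print(perf['portfolio_value'].tail())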
To resolve the symbol in the LEVERAGED_ETF list,
the date on which the symbol was in effect is needed.
Furthermore, to maintain a point in time record of our own maintenance
of the restricted list, we need a knowledge date. Thus, restricted lists
are dictionaries of datetime->symbol lists.
New symbols should be entered as a new knowledge date entry.
This method assumes a directory structure of:
SECURITY_LISTS_DIR/listname/knowledge_date/lookup_date/add.txt
SECURITY_LISTS_DIR/listname/knowledge_date/lookup_date/delete.txt
The return value is a dictionary with:
knowledge_date -> lookup_date ->
{'add': [symbol list], 'delete': [symbol list]} | def load_from_directory(list_name):
"""
To resolve the symbol in the LEVERAGED_ETF list,
the date on which the symbol was in effect is needed.
Furthermore, to maintain a point in time record of our own maintenance
of the restricted list, we need a knowledge date. Thus, restricted lists
are dictionaries of datetime->symbol lists.
New symbols should be entered as a new knowledge date entry.
This method assumes a directory structure of:
SECURITY_LISTS_DIR/listname/knowledge_date/lookup_date/add.txt
SECURITY_LISTS_DIR/listname/knowledge_date/lookup_date/delete.txt
The return value is a dictionary with:
knowledge_date -> lookup_date ->
{'add': [symbol list], 'delete': [symbol list]}
"""
data = {}
dir_path = os.path.join(SECURITY_LISTS_DIR, list_name)
for kd_name in listdir(dir_path):
kd = datetime.strptime(kd_name, DATE_FORMAT).replace(
tzinfo=pytz.utc)
data[kd] = {}
kd_path = os.path.join(dir_path, kd_name)
for ld_name in listdir(kd_path):
ld = datetime.strptime(ld_name, DATE_FORMAT).replace(
tzinfo=pytz.utc)
data[kd][ld] = {}
ld_path = os.path.join(kd_path, ld_name)
for fname in listdir(ld_path):
fpath = os.path.join(ld_path, fname)
with open(fpath) as f:
symbols = f.read().splitlines()
data[kd][ld][fname] = symbols
return data |
Apply a prefix to each line in s after the first. | def pad_lines_after_first(prefix, s):
"""Apply a prefix to each line in s after the first."""
return ('\n' + prefix).join(s.splitlines()) |
Template ``formatters`` into ``docstring``.
Parameters
----------
owner_name : str
The name of the function or class whose docstring is being templated.
Only used for error messages.
docstring : str
The docstring to template.
formatters : dict[str -> str]
Parameters for a str.format() call on ``docstring``.
Multi-line values in ``formatters`` will have leading whitespace padded
to match the leading whitespace of the substitution string. | def format_docstring(owner_name, docstring, formatters):
"""
Template ``formatters`` into ``docstring``.
Parameters
----------
owner_name : str
The name of the function or class whose docstring is being templated.
Only used for error messages.
docstring : str
The docstring to template.
formatters : dict[str -> str]
Parameters for a str.format() call on ``docstring``.
Multi-line values in ``formatters`` will have leading whitespace padded
to match the leading whitespace of the substitution string.
"""
# Build a dict of parameters to a vanilla format() call by searching for
# each entry in **formatters and applying any leading whitespace to each
# line in the desired substitution.
format_params = {}
for target, doc_for_target in iteritems(formatters):
# Search for '{name}', with optional leading whitespace.
regex = re.compile(r'^(\s*)' + '({' + target + '})$', re.MULTILINE)
matches = regex.findall(docstring)
if not matches:
raise ValueError(
"Couldn't find template for parameter {!r} in docstring "
"for {}."
"\nParameter name must be alone on a line surrounded by "
"braces.".format(target, owner_name),
)
elif len(matches) > 1:
raise ValueError(
"Couldn't found multiple templates for parameter {!r}"
"in docstring for {}."
"\nParameter should only appear once.".format(
target, owner_name
)
)
(leading_whitespace, _) = matches[0]
format_params[target] = pad_lines_after_first(
leading_whitespace,
doc_for_target,
)
return docstring.format(**format_params) |
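A small assumed example of the templating contract: the placeholder must sit alone on its own line, and multi-line substitutions inherit its leading whitespace.

doc = '''Do something useful.

    Parameters
    ----------
    {common_params}
    '''
print(format_docstring(
    owner_name='do_something',
    docstring=doc,
    formatters={'common_params': 'x : int\n    The first input.'},
))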
Decorator allowing the use of templated docstrings.
Examples
--------
>>> @templated_docstring(foo='bar')
... def my_func(self, foo):
... '''{foo}'''
...
>>> my_func.__doc__
'bar' | def templated_docstring(**docs):
"""
Decorator allowing the use of templated docstrings.
Examples
--------
>>> @templated_docstring(foo='bar')
... def my_func(self, foo):
... '''{foo}'''
...
>>> my_func.__doc__
'bar'
"""
def decorator(f):
f.__doc__ = format_docstring(f.__name__, f.__doc__, docs)
return f
return decorator |
Copies the docstring from one function to another.
Parameters
----------
from_ : any
The object to copy the docstring from.
to : any
The object to copy the docstring to.
Returns
-------
to : any
``to`` with the docstring from ``from_`` | def copydoc(from_, to):
"""Copies the docstring from one function to another.
Parameters
----------
from_ : any
The object to copy the docstring from.
to : any
The object to copy the docstring to.
Returns
-------
to : any
``to`` with the docstring from ``from_``
"""
to.__doc__ = from_.__doc__
return to |
Format a bulleted list of values.
| def bulleted_list(items, max_count=None, indent=2):
"""Format a bulleted list of values.
"""
if max_count is not None and len(items) > max_count:
item_list = list(items)
items = item_list[:max_count - 1]
items.append('...')
items.append(item_list[-1])
line_template = (" " * indent) + "- {}"
return "\n".join(map(line_template.format, items)) |
Because Zulip uses management commands in production, `manage.py
help` is a form of documentation for users. Here we exclude from
that documentation built-in commands that are not constructive for
end users or even Zulip developers to run.
Ideally, we'd do further customization to display management
commands with more organization in the help text, and also hide
development-focused management commands in production. | def get_filtered_commands() -> Dict[str, str]:
"""Because Zulip uses management commands in production, `manage.py
help` is a form of documentation for users. Here we exclude from
that documentation built-in commands that are not constructive for
end users or even Zulip developers to run.
Ideally, we'd do further customization to display management
commands with more organization in the help text, and also hide
development-focused management commands in production.
"""
all_commands = get_commands()
documented_commands = dict()
documented_apps = [
# "auth" removed because its commands are not applicable to Zulip.
# "contenttypes" removed because we don't use that subsystem, and
# even if we did, its commands would not be useful to run directly.
"django.core",
"analytics",
# "otp_static" removed because it's a 2FA internals detail.
# "sessions" removed because it's just a cron job with a misleading
# name, since all it does is delete expired sessions.
# "social_django" removed for similar reasons to sessions.
# "staticfiles" removed because its commands are only usefully run when
# wrapped by Zulip tooling.
# "two_factor" removed because it's a 2FA internals detail.
"zerver",
"zilencer",
]
documented_command_subsets = {
"django.core": {
"changepassword",
"dbshell",
"makemigrations",
"migrate",
"shell",
"showmigrations",
},
}
for command, app in all_commands.items():
if app not in documented_apps:
continue
if app in documented_command_subsets and command not in documented_command_subsets[app]:
continue
documented_commands[command] = app
return documented_commands |
Run a FilteredManagementUtility. | def execute_from_command_line(argv: Optional[List[str]] = None) -> None:
"""Run a FilteredManagementUtility."""
utility = FilteredManagementUtility(argv)
utility.execute() |
Generate semi-realistic looking time series data for testing analytics graphs.
days -- Number of days of data. Is the number of data points generated if
frequency is CountStat.DAY.
business_hours_base -- Average value during a business hour (or day) at beginning of
time series, if frequency is CountStat.HOUR (CountStat.DAY, respectively).
non_business_hours_base -- The above, for non-business hours/days.
growth -- Ratio between average values at end of time series and beginning of time series.
autocorrelation -- Makes neighboring data points look more like each other. At 0 each
point is unaffected by the previous point, and at 1 each point is a deterministic
function of the previous point.
spikiness -- 0 means no randomness (other than holiday_rate), higher values increase
the variance.
holiday_rate -- Fraction of days randomly set to 0, largely for testing how we handle 0s.
frequency -- Should be CountStat.HOUR or CountStat.DAY.
partial_sum -- If True, return partial sum of the series.
random_seed -- Seed for random number generator. | def generate_time_series_data(
days: int = 100,
business_hours_base: float = 10,
non_business_hours_base: float = 10,
growth: float = 1,
autocorrelation: float = 0,
spikiness: float = 1,
holiday_rate: float = 0,
frequency: str = CountStat.DAY,
partial_sum: bool = False,
random_seed: int = 26,
) -> List[int]:
"""
Generate semi-realistic looking time series data for testing analytics graphs.
days -- Number of days of data. Is the number of data points generated if
frequency is CountStat.DAY.
business_hours_base -- Average value during a business hour (or day) at beginning of
time series, if frequency is CountStat.HOUR (CountStat.DAY, respectively).
non_business_hours_base -- The above, for non-business hours/days.
growth -- Ratio between average values at end of time series and beginning of time series.
autocorrelation -- Makes neighboring data points look more like each other. At 0 each
point is unaffected by the previous point, and at 1 each point is a deterministic
function of the previous point.
spikiness -- 0 means no randomness (other than holiday_rate), higher values increase
the variance.
holiday_rate -- Fraction of days randomly set to 0, largely for testing how we handle 0s.
frequency -- Should be CountStat.HOUR or CountStat.DAY.
partial_sum -- If True, return partial sum of the series.
random_seed -- Seed for random number generator.
"""
rng = Random(random_seed)
if frequency == CountStat.HOUR:
length = days * 24
seasonality = [non_business_hours_base] * 24 * 7
for day in range(5):
for hour in range(8):
seasonality[24 * day + hour] = business_hours_base
holidays = []
for i in range(days):
holidays.extend([rng.random() < holiday_rate] * 24)
elif frequency == CountStat.DAY:
length = days
seasonality = [8 * business_hours_base + 16 * non_business_hours_base] * 5 + [
24 * non_business_hours_base
] * 2
holidays = [rng.random() < holiday_rate for i in range(days)]
else:
raise AssertionError(f"Unknown frequency: {frequency}")
if length < 2:
raise AssertionError(
f"Must be generating at least 2 data points. Currently generating {length}"
)
growth_base = growth ** (1.0 / (length - 1))
values_no_noise = [seasonality[i % len(seasonality)] * (growth_base**i) for i in range(length)]
noise_scalars = [rng.gauss(0, 1)]
for i in range(1, length):
noise_scalars.append(
noise_scalars[-1] * autocorrelation + rng.gauss(0, 1) * (1 - autocorrelation)
)
values = [
0 if holiday else int(v + sqrt(v) * noise_scalar * spikiness)
for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays)
]
if partial_sum:
for i in range(1, length):
values[i] = values[i - 1] + values[i]
return [max(v, 0) for v in values] |
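A hedged usage sketch: a 30-day daily series with growth and mild autocorrelation, plus a cumulative variant, both reproducible via the seed.

daily = generate_time_series_data(
    days=30,
    business_hours_base=20,
    non_business_hours_base=5,
    growth=2,
    autocorrelation=0.5,
    spikiness=0.5,
    random_seed=42,
)
cumulative = generate_time_series_data(days=30, partial_sum=True)
print(len(daily), daily[:5])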
This is a preparatory migration for our Analytics tables.
The backstory is that Django's unique_together indexes do not properly
handle the subgroup=None corner case (allowing duplicate rows that have a
subgroup of None), which meant that in race conditions, rather than updating
an existing row for the property/(realm, stream, user)/time with subgroup=None, Django would
create a duplicate row.
In the next migration, we'll add a proper constraint to fix this bug, but
we need to fix any existing problematic rows before we can add that constraint.
We fix this in an appropriate fashion for each type of CountStat object; mainly
this means deleting the extra rows, but for LoggingCountStat objects, we need to
additionally combine the sums. | def clear_duplicate_counts(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None:
"""This is a preparatory migration for our Analytics tables.
The backstory is that Django's unique_together indexes do not properly
handle the subgroup=None corner case (allowing duplicate rows that have a
subgroup of None), which meant that in race conditions, rather than updating
an existing row for the property/(realm, stream, user)/time with subgroup=None, Django would
create a duplicate row.
In the next migration, we'll add a proper constraint to fix this bug, but
we need to fix any existing problematic rows before we can add that constraint.
We fix this in an appropriate fashion for each type of CountStat object; mainly
this means deleting the extra rows, but for LoggingCountStat objects, we need to
additionally combine the sums.
"""
count_tables = dict(
realm=apps.get_model("analytics", "RealmCount"),
user=apps.get_model("analytics", "UserCount"),
stream=apps.get_model("analytics", "StreamCount"),
installation=apps.get_model("analytics", "InstallationCount"),
)
for name, count_table in count_tables.items():
value = [name, "property", "end_time"]
if name == "installation":
value = ["property", "end_time"]
counts = (
count_table.objects.filter(subgroup=None)
.values(*value)
.annotate(Count("id"), Sum("value"))
.filter(id__count__gt=1)
)
for count in counts:
count.pop("id__count")
total_value = count.pop("value__sum")
duplicate_counts = list(count_table.objects.filter(**count))
first_count = duplicate_counts[0]
if count["property"] in ["invites_sent::day", "active_users_log:is_bot:day"]:
# For LoggingCountStat objects, the right fix is to combine the totals;
# for other CountStat objects, we expect the duplicates to have the same value.
# And so all we need to do is delete them.
first_count.value = total_value
first_count.save()
to_cleanup = duplicate_counts[1:]
for duplicate_count in to_cleanup:
duplicate_count.delete() |
Access a confirmation object from one of the provided confirmation
types with the provided key.
The mark_object_used parameter determines whether to mark the
confirmation object as used (which generally prevents it from
being used again). It should always be False for MultiuseInvite
objects, since they are intended to be used multiple times. | def get_object_from_key(
confirmation_key: str, confirmation_types: List[int], *, mark_object_used: bool
) -> ConfirmationObjT:
"""Access a confirmation object from one of the provided confirmation
types with the provided key.
The mark_object_used parameter determines whether to mark the
confirmation object as used (which generally prevents it from
being used again). It should always be False for MultiuseInvite
objects, since they are intended to be used multiple times.
"""
# Confirmation keys used to be 40 characters
if len(confirmation_key) not in (24, 40):
raise ConfirmationKeyError(ConfirmationKeyError.WRONG_LENGTH)
try:
confirmation = Confirmation.objects.get(
confirmation_key=confirmation_key, type__in=confirmation_types
)
except Confirmation.DoesNotExist:
raise ConfirmationKeyError(ConfirmationKeyError.DOES_NOT_EXIST)
if confirmation.expiry_date is not None and timezone_now() > confirmation.expiry_date:
raise ConfirmationKeyError(ConfirmationKeyError.EXPIRED)
obj = confirmation.content_object
assert obj is not None
used_value = confirmation_settings.STATUS_USED
revoked_value = confirmation_settings.STATUS_REVOKED
if hasattr(obj, "status") and obj.status in [used_value, revoked_value]:
# Confirmations where the object has the status attribute are one-time use
# and are marked after being used (or revoked).
raise ConfirmationKeyError(ConfirmationKeyError.EXPIRED)
if mark_object_used:
# MultiuseInvite objects do not use the STATUS_USED status, since they are
# intended to be used more than once.
assert confirmation.type != Confirmation.MULTIUSE_INVITE
assert hasattr(obj, "status")
obj.status = getattr(settings, "STATUS_USED", 1)
obj.save(update_fields=["status"])
return obj |
Generate a unique link that a logged-out user can visit to unsubscribe from
Zulip e-mails without having to first log in. | def one_click_unsubscribe_link(user_profile: UserProfile, email_type: str) -> str:
"""
Generate a unique link that a logged-out user can visit to unsubscribe from
Zulip e-mails without having to first log in.
"""
return create_confirmation_link(
user_profile, Confirmation.UNSUBSCRIBE, url_args={"email_type": email_type}
) |
Get the record for this key, raising InvalidCreationKey if non-None but invalid. | def validate_key(creation_key: Optional[str]) -> Optional["RealmCreationKey"]:
"""Get the record for this key, raising InvalidCreationKey if non-None but invalid."""
if creation_key is None:
return None
try:
key_record = RealmCreationKey.objects.get(creation_key=creation_key)
except RealmCreationKey.DoesNotExist:
raise RealmCreationKey.InvalidError
time_elapsed = timezone_now() - key_record.date_created
if time_elapsed.total_seconds() > settings.REALM_CREATION_LINK_VALIDITY_DAYS * 24 * 3600:
raise RealmCreationKey.InvalidError
return key_record |
Returns all rows from a cursor as a dict | def dictfetchall(cursor: CursorWrapper) -> List[Dict[str, Any]]:
"""Returns all rows from a cursor as a dict"""
desc = cursor.description
return [dict(zip((col[0] for col in desc), row)) for row in cursor.fetchall()] |
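An assumed usage sketch with a Django database cursor; the query is illustrative.

from django.db import connection

with connection.cursor() as cursor:
    cursor.execute("SELECT id, delivery_email FROM zerver_userprofile LIMIT 3")
    rows = dictfetchall(cursor)
# rows is a list of dicts, e.g. [{'id': 1, 'delivery_email': 'user@example.com'}, ...]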
Fixed price plan offer configured via /support which the
customer is yet to buy or schedule a purchase. | def get_configured_fixed_price_plan_offer(
customer: Customer, plan_tier: int
) -> Optional[CustomerPlanOffer]:
"""
Fixed price plan offer configured via /support which the
customer is yet to buy or schedule a purchase.
"""
if plan_tier == customer.required_plan_tier:
return CustomerPlanOffer.objects.filter(
customer=customer,
tier=plan_tier,
fixed_price__isnull=False,
status=CustomerPlanOffer.CONFIGURED,
).first()
return None |
Utility function for reactivating deactivated registrations. | def do_reactivate_remote_server(remote_server: RemoteZulipServer) -> None:
"""
Utility function for reactivating deactivated registrations.
"""
if not remote_server.deactivated:
billing_logger.warning(
"Cannot reactivate remote server with ID %d, server is already active.",
remote_server.id,
)
return
remote_server.deactivated = False
remote_server.save(update_fields=["deactivated"])
RemoteZulipServerAuditLog.objects.create(
event_type=RealmAuditLog.REMOTE_SERVER_REACTIVATED,
server=remote_server,
event_time=timezone_now(),
) |
This is the endpoint accessed via the billing_access_url, generated by
the remote_realm_billing_entry endpoint. | def remote_realm_billing_finalize_login(
request: HttpRequest,
*,
signed_billing_access_token: PathOnly[str],
full_name: Optional[str] = None,
tos_consent: Literal[None, "true"] = None,
enable_major_release_emails: Literal[None, "true", "false"] = None,
enable_maintenance_release_emails: Literal[None, "true", "false"] = None,
) -> HttpResponse:
"""
This is the endpoint accessed via the billing_access_url, generated by
the remote_realm_billing_entry endpoint.
"""
if request.method not in ["GET", "POST"]:
return HttpResponseNotAllowed(["GET", "POST"])
tos_consent_given = tos_consent == "true"
# Sanity assert, because otherwise these make no sense.
assert (
REMOTE_BILLING_SIGNED_ACCESS_TOKEN_VALIDITY_IN_SECONDS
<= REMOTE_BILLING_SESSION_VALIDITY_SECONDS
)
identity_dict = get_identity_dict_from_signed_access_token(signed_billing_access_token)
# Now we want to fetch the RemoteRealmBillingUser object implied
# by the IdentityDict. We'll use this:
# (1) If the user came here via just GET, we want to show them a confirmation
# page with the relevant info details before finalizing login. If they wish
# to proceed, they'll approve the form, causing a POST, bring us to case (2).
# (2) If the user came here via POST, we finalize login, using the info from the
# IdentityDict to update the RemoteRealmBillingUser object if needed.
# Finally, if the user is logging in for the first time, we'll need to create
# their account first. This will happen by making them fill out a form to confirm
# their email first. Only after clicking the confirmation link in the email,
# they will have their account created and finally be redirected back here.
remote_realm_uuid = identity_dict["remote_realm_uuid"]
remote_server_uuid = identity_dict["remote_server_uuid"]
try:
remote_server = get_remote_server_by_uuid(remote_server_uuid)
remote_realm = RemoteRealm.objects.get(uuid=remote_realm_uuid, server=remote_server)
except ObjectDoesNotExist:
# These should definitely still exist, since the access token was signed
# pretty recently. (And we generally don't delete these at all.)
raise AssertionError
try:
handle_customer_migration_from_server_to_realm(server=remote_server)
except JsonableError:
# JsonableError should be propagated up, as they are meant to convey
# a json error response to be returned.
raise
except Exception: # nocoverage
billing_logger.exception(
"%s: Failed to migrate customer from server (id: %s) to realms",
request.path,
remote_server.id,
stack_info=True,
)
raise JsonableError(
_(
"Failed to migrate customer from server to realms. Please contact support for assistance."
)
)
# Redirect to error page if server is on an active plan
server_customer = get_customer_by_remote_server(remote_server)
if server_customer is not None:
server_plan = get_current_plan_by_customer(server_customer)
if server_plan is not None:
return render(
request,
"corporate/billing/remote_realm_login_error_for_server_on_active_plan.html",
context={
"server_plan_name": server_plan.name,
},
)
user_dict = identity_dict["user"]
user_email = user_dict["user_email"]
user_uuid = user_dict["user_uuid"]
assert (
settings.TERMS_OF_SERVICE_VERSION is not None
), "This is only run on the bouncer, which has ToS"
try:
remote_user = RemoteRealmBillingUser.objects.get(
remote_realm=remote_realm,
user_uuid=user_uuid,
)
tos_consent_needed = is_tos_consent_needed_for_user(remote_user)
except RemoteRealmBillingUser.DoesNotExist:
# This is the first time this user is logging in.
remote_user = None
tos_consent_needed = True
if request.method == "GET":
if remote_user is not None:
# Render a template where the user will just confirm their info,
# possibly accept ToS if needed, POST back here and will get fully
# authenticated.
context = {
"remote_server_uuid": remote_server_uuid,
"remote_realm_uuid": remote_realm_uuid,
"host": remote_realm.host,
"user_email": remote_user.email,
"user_full_name": remote_user.full_name,
"tos_consent_needed": tos_consent_needed,
"action_url": reverse(
remote_realm_billing_finalize_login, args=(signed_billing_access_token,)
),
}
return render(
request,
"corporate/billing/remote_billing_finalize_login_confirmation.html",
context=context,
)
else:
# This user is logging in for the first time, so we need to create their
# RemoteRealmBillingUser object. Render a template where they'll
# enter their email address - we'll send a verification email to it.
context = {
"email": user_email,
"action_url": reverse(
remote_realm_billing_confirm_email, args=(signed_billing_access_token,)
),
}
return render(
request,
"corporate/billing/remote_billing_confirm_email_form.html",
context=context,
)
assert request.method == "POST"
if remote_user is None:
# Users logging in for the first time need to be created and follow
# a different path - they should not be POSTing here. It should be impossible
# to get here with a remote_user that is None without tampering with the form
# or manually crafting a POST request.
raise JsonableError(_("User account doesn't exist yet."))
if tos_consent_needed and not tos_consent_given:
# This shouldn't be possible without tampering with the form, so we
# don't need a pretty error.
raise JsonableError(_("You must accept the Terms of Service to proceed."))
# The current approach is to update the full_name and email preferences
# only when the user first logs in.
if full_name is not None:
remote_user.full_name = full_name
remote_user.enable_major_release_emails = enable_major_release_emails == "true"
remote_user.enable_maintenance_release_emails = enable_maintenance_release_emails == "true"
remote_user.tos_version = settings.TERMS_OF_SERVICE_VERSION
remote_user.last_login = timezone_now()
remote_user.save(
update_fields=[
"full_name",
"tos_version",
"last_login",
"enable_maintenance_release_emails",
"enable_major_release_emails",
]
)
identity_dict["remote_billing_user_id"] = remote_user.id
request.session["remote_billing_identities"] = {}
request.session["remote_billing_identities"][f"remote_realm:{remote_realm_uuid}"] = (
identity_dict
)
next_page = identity_dict["next_page"]
assert next_page in VALID_NEXT_PAGES
if next_page is not None:
return HttpResponseRedirect(
reverse(f"remote_realm_{next_page}_page", args=(remote_realm_uuid,))
)
elif remote_realm.plan_type in [
RemoteRealm.PLAN_TYPE_SELF_MANAGED,
RemoteRealm.PLAN_TYPE_SELF_MANAGED_LEGACY,
]:
# If they have a scheduled upgrade, redirect to billing page.
billing_session = RemoteRealmBillingSession(remote_realm)
customer = billing_session.get_customer()
if (
customer is not None
and billing_session.get_legacy_remote_server_next_plan_name(customer) is not None
):
return HttpResponseRedirect(
reverse("remote_realm_billing_page", args=(remote_realm_uuid,))
)
return HttpResponseRedirect(reverse("remote_realm_plans_page", args=(remote_realm_uuid,)))
elif remote_realm.plan_type == RemoteRealm.PLAN_TYPE_COMMUNITY:
return HttpResponseRedirect(
reverse("remote_realm_sponsorship_page", args=(remote_realm_uuid,))
)
else:
return HttpResponseRedirect(reverse("remote_realm_billing_page", args=(remote_realm_uuid,))) |
Endpoint for users in the RemoteRealm flow who are logging in for the first time
and still need their RemoteRealmBillingUser object created.
Takes the POST from the form above asking for their email address,
and sends a confirmation email to that address
to verify it. Only the confirmation link will grant
a fully authenticated session. | def remote_realm_billing_confirm_email(
request: HttpRequest,
*,
signed_billing_access_token: PathOnly[str],
email: str,
) -> HttpResponse:
"""
Endpoint for users in the RemoteRealm flow who are logging in for the first time
and still need their RemoteRealmBillingUser object created.
Takes the POST from the form above asking for their email address,
and sends a confirmation email to that address
to verify it. Only the confirmation link will grant
a fully authenticated session.
"""
identity_dict = get_identity_dict_from_signed_access_token(signed_billing_access_token)
try:
remote_server = get_remote_server_by_uuid(identity_dict["remote_server_uuid"])
remote_realm = RemoteRealm.objects.get(
uuid=identity_dict["remote_realm_uuid"], server=remote_server
)
except ObjectDoesNotExist:
raise AssertionError
rate_limit_error_response = check_rate_limits(request, remote_server)
if rate_limit_error_response is not None:
return rate_limit_error_response
obj = PreregistrationRemoteRealmBillingUser.objects.create(
email=email,
remote_realm=remote_realm,
user_uuid=identity_dict["user"]["user_uuid"],
next_page=identity_dict["next_page"],
uri_scheme=identity_dict["uri_scheme"],
)
url = create_remote_billing_confirmation_link(
obj,
Confirmation.REMOTE_REALM_BILLING_LEGACY_LOGIN,
validity_in_minutes=LOGIN_CONFIRMATION_EMAIL_DURATION_HOURS * 60,
)
context = {
"remote_realm_host": remote_realm.host,
"confirmation_url": url,
"billing_help_link": "https://zulip.com/help/self-hosted-billing",
"billing_contact_email": BILLING_SUPPORT_EMAIL,
"validity_in_hours": LOGIN_CONFIRMATION_EMAIL_DURATION_HOURS,
}
send_email(
"zerver/emails/remote_realm_billing_confirm_login",
to_emails=[email],
from_address=FromAddress.tokenized_no_reply_address(),
language=get_language(),
context=context,
)
return render(
request,
"corporate/billing/remote_billing_email_confirmation_sent.html",
context={"email": email},
) |
The user comes here via the confirmation link they received via email.
Creates the RemoteRealmBillingUser object and redirects to
remote_realm_billing_finalize_login with a new signed access token,
where they will finally be logged in now that they have an account. | def remote_realm_billing_from_login_confirmation_link(
request: HttpRequest,
*,
confirmation_key: PathOnly[str],
) -> HttpResponse:
"""
The user comes here via the confirmation link they received via email.
Creates the RemoteRealmBillingUser object and redirects to
remote_realm_billing_finalize_login with a new signed access token,
where they will finally be logged in now that they have an account.
"""
try:
prereg_object = get_object_from_key(
confirmation_key,
[Confirmation.REMOTE_REALM_BILLING_LEGACY_LOGIN],
# These links aren't reusable. The user just clicks it
# to get their account created. Afterwards, they're not
# subject to the confirmation link part of the flow anymore.
mark_object_used=True,
)
except ConfirmationKeyError as exception:
return render_confirmation_key_error(request, exception)
assert isinstance(prereg_object, PreregistrationRemoteRealmBillingUser)
remote_realm = prereg_object.remote_realm
uri_scheme = prereg_object.uri_scheme
next_page = prereg_object.next_page
assert next_page in VALID_NEXT_PAGES
assert uri_scheme in ["http://", "https://"]
# Mypy is not satisfied by the above assert, so we need to cast.
uri_scheme = cast(Literal["http://", "https://"], uri_scheme)
remote_billing_user, created = RemoteRealmBillingUser.objects.get_or_create(
remote_realm=remote_realm,
user_uuid=prereg_object.user_uuid,
defaults={"email": prereg_object.email},
)
if not created:
billing_logger.info(
"Matching RemoteRealmBillingUser already exists for "
"PreregistrationRemoteRealmBillingUser %s",
prereg_object.id,
)
prereg_object.created_user = remote_billing_user
prereg_object.save(update_fields=["created_user"])
identity_dict = RemoteBillingIdentityDict(
user=RemoteBillingUserDict(
user_email=remote_billing_user.email,
user_uuid=str(remote_billing_user.user_uuid),
user_full_name=remote_billing_user.full_name,
),
remote_server_uuid=str(remote_realm.server.uuid),
remote_realm_uuid=str(remote_realm.uuid),
# This will be figured out by the next endpoint in the flow anyway.
remote_billing_user_id=None,
authenticated_at=datetime_to_timestamp(timezone_now()),
uri_scheme=uri_scheme,
next_page=next_page,
)
signed_identity_dict = signing.dumps(identity_dict)
return HttpResponseRedirect(
reverse(remote_realm_billing_finalize_login, args=[signed_identity_dict])
) |
Takes the POST from the form above and sends a confirmation email to the provided
email address to verify it. Only the confirmation link will grant
a fully authenticated session. | def remote_billing_legacy_server_confirm_login(
request: HttpRequest,
*,
server_uuid: PathOnly[str],
email: str,
next_page: VALID_NEXT_PAGES_TYPE = None,
) -> HttpResponse:
"""
Takes the POST from the form above and sends a confirmation email to the provided
email address to verify it. Only the confirmation link will grant
a fully authenticated session.
"""
try:
remote_server, remote_billing_user = get_remote_server_and_user_from_session(
request, server_uuid=server_uuid
)
if remote_billing_user is not None:
# This session is already fully authenticated, it doesn't make sense for
# the user to be here. Just raise an exception so it's immediately caught
# and the user is redirected to the beginning of the login flow where
# they can re-auth.
raise RemoteBillingAuthenticationError
except (RemoteBillingIdentityExpiredError, RemoteBillingAuthenticationError):
return HttpResponse(
reverse("remote_billing_legacy_server_login") + f"?next_page={next_page}"
)
rate_limit_error_response = check_rate_limits(request, remote_server)
if rate_limit_error_response is not None:
return rate_limit_error_response
obj = PreregistrationRemoteServerBillingUser.objects.create(
email=email,
remote_server=remote_server,
next_page=next_page,
)
url = create_remote_billing_confirmation_link(
obj,
Confirmation.REMOTE_SERVER_BILLING_LEGACY_LOGIN,
validity_in_minutes=LOGIN_CONFIRMATION_EMAIL_DURATION_HOURS * 60,
)
context = {
"remote_server_hostname": remote_server.hostname,
"confirmation_url": url,
"billing_help_link": "https://zulip.com/help/self-hosted-billing",
"billing_contact_email": BILLING_SUPPORT_EMAIL,
"validity_in_hours": LOGIN_CONFIRMATION_EMAIL_DURATION_HOURS,
}
send_email(
"zerver/emails/remote_billing_legacy_server_confirm_login",
to_emails=[email],
from_address=FromAddress.tokenized_no_reply_address(),
language=get_language(),
context=context,
)
return render(
request,
"corporate/billing/remote_billing_email_confirmation_sent.html",
context={"email": email, "remote_server_hostname": remote_server.hostname},
) |
The user comes here via the confirmation link they received via email. | def remote_billing_legacy_server_from_login_confirmation_link(
request: HttpRequest,
*,
confirmation_key: PathOnly[str],
full_name: Optional[str] = None,
tos_consent: Literal[None, "true"] = None,
enable_major_release_emails: Literal[None, "true", "false"] = None,
enable_maintenance_release_emails: Literal[None, "true", "false"] = None,
) -> HttpResponse:
"""
The user comes here via the confirmation link they received via email.
"""
if request.method not in ["GET", "POST"]:
return HttpResponseNotAllowed(["GET", "POST"])
try:
prereg_object = get_object_from_key(
confirmation_key,
[Confirmation.REMOTE_SERVER_BILLING_LEGACY_LOGIN],
# These links are reusable.
mark_object_used=False,
)
except ConfirmationKeyError as exception:
return render_confirmation_key_error(request, exception)
assert isinstance(prereg_object, PreregistrationRemoteServerBillingUser)
remote_server = prereg_object.remote_server
remote_server_uuid = str(remote_server.uuid)
remote_billing_user = RemoteServerBillingUser.objects.filter(
remote_server=remote_server, email=prereg_object.email
).first()
tos_consent_needed = remote_billing_user is None or is_tos_consent_needed_for_user(
remote_billing_user
)
if request.method == "GET":
context = {
"remote_server_uuid": remote_server_uuid,
"host": remote_server.hostname,
"user_full_name": getattr(remote_billing_user, "full_name", None),
"user_email": prereg_object.email,
"tos_consent_needed": tos_consent_needed,
"action_url": reverse(
remote_billing_legacy_server_from_login_confirmation_link,
args=(confirmation_key,),
),
"legacy_server_confirmation_flow": True,
"next_page": prereg_object.next_page,
}
return render(
request,
"corporate/billing/remote_billing_finalize_login_confirmation.html",
context=context,
)
assert request.method == "POST"
if tos_consent_needed and not tos_consent:
# This shouldn't be possible without tampering with the form, so we
# don't need a pretty error.
raise JsonableError(_("You must accept the Terms of Service to proceed."))
if (
has_live_plan_for_any_remote_realm_on_server(remote_server)
and prereg_object.next_page != "deactivate"
):
return render(
request,
"corporate/billing/remote_server_login_error_for_any_realm_on_active_plan.html",
)
if remote_billing_user is None:
assert full_name is not None
assert settings.TERMS_OF_SERVICE_VERSION is not None
remote_billing_user = RemoteServerBillingUser.objects.create(
full_name=full_name,
email=prereg_object.email,
remote_server=remote_server,
tos_version=settings.TERMS_OF_SERVICE_VERSION,
enable_major_release_emails=enable_major_release_emails == "true",
enable_maintenance_release_emails=enable_maintenance_release_emails == "true",
)
prereg_object.created_user = remote_billing_user
prereg_object.save(update_fields=["created_user"])
remote_billing_user.last_login = timezone_now()
remote_billing_user.save(update_fields=["last_login"])
# Refresh IdentityDict in the session. (Or create it
# if the user came here e.g. in a different browser than they
# started the login flow in.)
request.session["remote_billing_identities"] = {}
request.session["remote_billing_identities"][f"remote_server:{remote_server_uuid}"] = (
LegacyServerIdentityDict(
remote_server_uuid=remote_server_uuid,
authenticated_at=datetime_to_timestamp(timezone_now()),
# Having a remote_billing_user_id indicates the auth has been completed.
# The user will now be granted access to authenticated endpoints.
remote_billing_user_id=remote_billing_user.id,
)
)
next_page = prereg_object.next_page
assert next_page in VALID_NEXT_PAGES
if next_page is not None:
return HttpResponseRedirect(
reverse(f"remote_server_{next_page}_page", args=(remote_server_uuid,))
)
elif remote_server.plan_type in [
RemoteZulipServer.PLAN_TYPE_SELF_MANAGED,
RemoteZulipServer.PLAN_TYPE_SELF_MANAGED_LEGACY,
]:
# If they have a scheduled upgrade, redirect to billing page.
billing_session = RemoteServerBillingSession(remote_server)
customer = billing_session.get_customer()
if (
customer is not None
and billing_session.get_legacy_remote_server_next_plan_name(customer) is not None
):
return HttpResponseRedirect(
reverse("remote_server_billing_page", args=(remote_server_uuid,))
)
return HttpResponseRedirect(reverse("remote_server_plans_page", args=(remote_server_uuid,)))
elif remote_server.plan_type == RemoteZulipServer.PLAN_TYPE_COMMUNITY:
return HttpResponseRedirect(
reverse("remote_server_sponsorship_page", args=(remote_server_uuid,))
)
else:
return HttpResponseRedirect(
reverse("remote_server_billing_page", args=(remote_server_uuid,))
) |
Do a simple queue size check for queues whose workers don't publish stats files. | def check_other_queues(queue_counts_dict: Dict[str, int]) -> List[Dict[str, Any]]:
"""Do a simple queue size check for queues whose workers don't publish stats files."""
results = []
for queue, count in queue_counts_dict.items():
if queue in normal_queues:
continue
if count > CRITICAL_COUNT_THRESHOLD_DEFAULT:
results.append(dict(status=CRITICAL, name=queue, message=f"count critical: {count}"))
elif count > WARN_COUNT_THRESHOLD_DEFAULT:
results.append(dict(status=WARNING, name=queue, message=f"count warning: {count}"))
else:
results.append(dict(status=OK, name=queue, message=""))
return results |
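A sketch of the expected input/output shape (the WARNING/CRITICAL cutoffs come from the WARN_COUNT_THRESHOLD_DEFAULT and CRITICAL_COUNT_THRESHOLD_DEFAULT constants defined elsewhere in this file; the queue names below are illustrative):

queue_counts = {"some_custom_queue": 15000, "another_queue": 3}
for result in check_other_queues(queue_counts):
    # Queues listed in normal_queues are skipped; each remaining entry
    # looks like {"status": ..., "name": ..., "message": ...}.
    print(result["name"], result["status"], result["message"])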
Returns a sorted list of unique dependencies specified by the requirements file `fpath`.
Removes comments from the output and recursively visits files specified inside `fpath`.
`fpath` can be either an absolute path or a relative path. | def expand_reqs(fpath: str) -> List[str]:
"""
Returns a sorted list of unique dependencies specified by the requirements file `fpath`.
Removes comments from the output and recursively visits files specified inside `fpath`.
`fpath` can be either an absolute path or a relative path.
"""
absfpath = os.path.abspath(fpath)
output = expand_reqs_helper(absfpath)
return sorted(set(output)) |
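For example (the path is illustrative):

deps = expand_reqs("requirements/dev.txt")
# Comments are stripped, nested "-r other.txt" includes are followed, and the
# result is a sorted, de-duplicated list of requirement strings.
print("\n".join(deps))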
Returns the Python version as string 'Python major.minor.patchlevel' | def python_version() -> str:
"""
Returns the Python version as string 'Python major.minor.patchlevel'
"""
return subprocess.check_output(["/usr/bin/python3", "-VV"], text=True) |
Creates a file, called package_index, in the virtual environment
directory that contains all the PIP packages installed in the
virtual environment. This file is used to determine the packages
that can be copied to a new virtual environment. | def create_requirements_index_file(venv_path: str, requirements_file: str) -> str:
"""
Creates a file, called package_index, in the virtual environment
directory that contains all the PIP packages installed in the
virtual environment. This file is used to determine the packages
that can be copied to a new virtual environment.
"""
index_filename = get_index_filename(venv_path)
packages = get_package_names(requirements_file)
with open(index_filename, "w") as writer:
writer.write("\n".join(packages))
writer.write("\n")
return index_filename |
Returns the packages installed in the virtual environment using the
package index file. | def get_venv_packages(venv_path: str) -> Set[str]:
"""
Returns the packages installed in the virtual environment using the
package index file.
"""
with open(get_index_filename(venv_path)) as reader:
return {p.strip() for p in reader.read().split("\n") if p.strip()} |
Tries to copy packages from an old virtual environment in the cache
to the new virtual environment. The algorithm works as follows:
1. Find a virtual environment, v, from the cache that has the
highest overlap with the new requirements such that:
a. The new requirements only add to the packages of v.
b. The new requirements only upgrade packages of v.
2. Copy the contents of v to the new virtual environment using
virtualenv-clone.
3. Delete all .pyc files in the new virtual environment. | def try_to_copy_venv(venv_path: str, new_packages: Set[str]) -> bool:
"""
Tries to copy packages from an old virtual environment in the cache
to the new virtual environment. The algorithm works as follows:
1. Find a virtual environment, v, from the cache that has the
highest overlap with the new requirements such that:
a. The new requirements only add to the packages of v.
b. The new requirements only upgrade packages of v.
2. Copy the contents of v to the new virtual environment using
virtualenv-clone.
3. Delete all .pyc files in the new virtual environment.
"""
if not os.path.exists(VENV_CACHE_PATH):
return False
desired_python_version = python_version()
venv_name = os.path.basename(venv_path)
overlaps: List[Tuple[int, str, Set[str]]] = []
old_packages: Set[str] = set()
for sha1sum in os.listdir(VENV_CACHE_PATH):
curr_venv_path = os.path.join(VENV_CACHE_PATH, sha1sum, venv_name)
if curr_venv_path == venv_path or not os.path.exists(get_index_filename(curr_venv_path)):
continue
# Check the Python version in the venv matches the version we want to use.
venv_python3 = os.path.join(curr_venv_path, "bin", "python3")
if not os.path.exists(venv_python3):
continue
venv_python_version = subprocess.check_output([venv_python3, "-VV"], text=True)
if desired_python_version != venv_python_version:
continue
old_packages = get_venv_packages(curr_venv_path)
# We only consider using old virtualenvs that only
# contain packages that we want in our new virtualenv.
if not (old_packages - new_packages):
overlap = new_packages & old_packages
overlaps.append((len(overlap), curr_venv_path, overlap))
target_log = get_logfile_name(venv_path)
source_venv_path = None
if overlaps:
# Here, we select the old virtualenv with the largest overlap
overlaps = sorted(overlaps)
_, source_venv_path, copied_packages = overlaps[-1]
print(f"Copying packages from {source_venv_path}")
clone_ve = f"{source_venv_path}/bin/virtualenv-clone"
cmd = [clone_ve, source_venv_path, venv_path]
try:
# TODO: We can probably remove this in a few months, now
# that we can expect that virtualenv-clone is present in
# all of our recent virtualenvs.
run_as_root(cmd)
except subprocess.CalledProcessError:
# Virtualenv-clone is either not installed or threw an
# error. Just return False: making a new venv is safe.
logging.warning("Error cloning virtualenv %s", source_venv_path)
return False
# virtualenv-clone, unfortunately, copies the success stamp,
# which means if the upcoming `pip install` phase were to
# fail, we'd end up with a broken half-provisioned virtualenv
# that's incorrectly tagged as properly provisioned. The
# right fix is to use
# https://github.com/edwardgeorge/virtualenv-clone/pull/38,
# but this rm is almost as good.
success_stamp_path = os.path.join(venv_path, "success-stamp")
run_as_root(["rm", "-f", success_stamp_path])
run_as_root(["chown", "-R", f"{os.getuid()}:{os.getgid()}", venv_path])
source_log = get_logfile_name(source_venv_path)
copy_parent_log(source_log, target_log)
create_log_entry(
target_log, source_venv_path, copied_packages, new_packages - copied_packages
)
return True
return False |
Patches the bin/activate script so that the value of the environment variable VIRTUAL_ENV
is set to venv_path during the script's execution whenever it is sourced. | def do_patch_activate_script(venv_path: str) -> None:
"""
Patches the bin/activate script so that the value of the environment variable VIRTUAL_ENV
is set to venv_path during the script's execution whenever it is sourced.
"""
# venv_path should be what we want to have in VIRTUAL_ENV after patching
script_path = os.path.join(venv_path, "bin", "activate")
with open(script_path) as f:
lines = f.readlines()
for i, line in enumerate(lines):
if line.startswith("VIRTUAL_ENV="):
lines[i] = f'VIRTUAL_ENV="{venv_path}"\n'
with open(script_path, "w") as f:
f.write("".join(lines)) |
Warning: su_to_zulip assumes that the zulip checkout is owned by
the zulip user (or whatever normal user is running the Zulip
installation). It should never be run from the installer or other
production contexts before /home/zulip/deployments/current is
created. | def su_to_zulip(save_suid: bool = False) -> None:
"""Warning: su_to_zulip assumes that the zulip checkout is owned by
the zulip user (or whatever normal user is running the Zulip
installation). It should never be run from the installer or other
production contexts before /home/zulip/deployments/current is
created."""
pwent = get_zulip_pwent()
os.setgid(pwent.pw_gid)
if save_suid:
os.setresuid(pwent.pw_uid, pwent.pw_uid, os.getuid())
else:
os.setuid(pwent.pw_uid)
os.environ["HOME"] = pwent.pw_dir |
Example of the useful subset of the data:
{
'ID': 'ubuntu',
'VERSION_ID': '18.04',
'NAME': 'Ubuntu',
'VERSION': '18.04.3 LTS (Bionic Beaver)',
'PRETTY_NAME': 'Ubuntu 18.04.3 LTS',
}
VERSION_CODENAME (e.g. 'bionic') is nice and readable to Ubuntu
developers, but we avoid using it, as it is not available on
RHEL-based platforms. | def parse_os_release() -> Dict[str, str]:
"""
Example of the useful subset of the data:
{
'ID': 'ubuntu',
'VERSION_ID': '18.04',
'NAME': 'Ubuntu',
'VERSION': '18.04.3 LTS (Bionic Beaver)',
'PRETTY_NAME': 'Ubuntu 18.04.3 LTS',
}
VERSION_CODENAME (e.g. 'bionic') is nice and readable to Ubuntu
developers, but we avoid using it, as it is not available on
RHEL-based platforms.
"""
distro_info: Dict[str, str] = {}
with open("/etc/os-release") as fp:
for line in fp:
line = line.strip()
if not line or line.startswith("#"):
# The line may be blank or a comment, see:
# https://www.freedesktop.org/software/systemd/man/os-release.html
continue
k, v = line.split("=", 1)
[distro_info[k]] = shlex.split(v)
return distro_info |
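The shlex.split call is what strips the optional quoting from values; a quick sketch of that parsing step on a sample os-release line:

import shlex

line = 'VERSION="18.04.3 LTS (Bionic Beaver)"'
k, v = line.split("=", 1)
[value] = shlex.split(v)
# k == 'VERSION'; value == '18.04.3 LTS (Bionic Beaver)' (quotes removed)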
Known families:
debian (includes: debian, ubuntu)
ubuntu (includes: ubuntu)
fedora (includes: fedora, rhel, centos)
rhel (includes: rhel, centos)
centos (includes: centos) | def os_families() -> Set[str]:
"""
Known families:
debian (includes: debian, ubuntu)
ubuntu (includes: ubuntu)
fedora (includes: fedora, rhel, centos)
rhel (includes: rhel, centos)
centos (includes: centos)
"""
distro_info = parse_os_release()
return {distro_info["ID"], *distro_info.get("ID_LIKE", "").split()} |
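A sketch of how callers typically branch on the result:

families = os_families()
if "debian" in families:
    print("Use apt-based installation steps.")
elif "fedora" in families:
    print("Use dnf/yum-based installation steps.")
else:
    raise SystemExit("Unsupported platform")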
In order to determine if we need to run some
process, we calculate a digest of the important
files and strings whose respective contents
or values may indicate such a need.
filenames = files we should hash the contents of
extra_strings = strings we should hash directly
Grep for callers to see examples of how this is used.
To elaborate on extra_strings, they will typically
be things like:
- package versions (that we import)
- settings values (that we stringify with
json, deterministically) | def is_digest_obsolete(
hash_name: str, filenames: Sequence[str], extra_strings: Sequence[str] = []
) -> bool:
"""
In order to determine if we need to run some
process, we calculate a digest of the important
files and strings whose respective contents
or values may indicate such a need.
filenames = files we should hash the contents of
extra_strings = strings we should hash directly
Grep for callers to see examples of how this is used.
To elaborate on extra_strings, they will typically
be things like:
- package versions (that we import)
- settings values (that we stringify with
json, deterministically)
"""
last_hash_path = os.path.join(get_dev_uuid_var_path(), hash_name)
try:
with open(last_hash_path) as f:
old_hash = f.read()
except FileNotFoundError:
# This is normal for a fresh checkout--a missing
# digest is an obsolete digest.
return True
new_hash = files_and_string_digest(filenames, extra_strings)
return new_hash != old_hash |
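The intended caller pattern, roughly as follows (run_expensive_step and write_new_digest are hypothetical helpers; the latter would record the fresh digest only after the expensive step succeeds):

PROVISION_INPUTS = ["package.json", "yarn.lock"]  # illustrative file list

if is_digest_obsolete("node_modules_hash", PROVISION_INPUTS):
    run_expensive_step()  # hypothetical
    write_new_digest("node_modules_hash", PROVISION_INPUTS)  # hypothetical
else:
    print("Inputs unchanged; skipping.")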
Remove the port from a hostname:port string. Brackets on a literal
IPv6 address are included. | def deport(netloc: str) -> str:
"""Remove the port from a hostname:port string. Brackets on a literal
IPv6 address are included."""
r = SplitResult("", netloc, "", "", "")
assert r.hostname is not None
return "[" + r.hostname + "]" if ":" in r.hostname else r.hostname |
Secret key generation taken from Django's startproject.py | def generate_django_secretkey() -> str:
"""Secret key generation taken from Django's startproject.py"""
# We do in-function imports so that we only do the expensive work
# of importing cryptography modules when necessary.
#
# This helps optimize noop provision performance.
from django.utils.crypto import get_random_string
chars = "abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)"
return get_random_string(50, chars) |
Safe phrase is in lower case and doesn't contain characters which can
conflict with split boundaries. All conflicting characters are replaced
with low dash (_). | def get_safe_phrase(phrase: str) -> str:
"""
Safe phrase is in lower case and doesn't contain characters which can
conflict with split boundaries. All conflicting characters are replaced
with low dash (_).
"""
phrase = SPLIT_BOUNDARY_REGEX.sub("_", phrase)
return phrase.lower() |
The idea is to convert IGNORED_PHRASES into safe phrases, see
`get_safe_phrase()` function. The only exception is when the
IGNORED_PHRASE is at the start of the text or after a split
boundary; in this case, we change the first letter of the phrase
to upper case. | def replace_with_safe_phrase(matchobj: Match[str]) -> str:
"""
The idea is to convert IGNORED_PHRASES into safe phrases, see
`get_safe_phrase()` function. The only exception is when the
IGNORED_PHRASE is at the start of the text or after a split
boundary; in this case, we change the first letter of the phrase
to upper case.
"""
ignored_phrase = matchobj.group(0)
safe_string = get_safe_phrase(ignored_phrase)
start_index = matchobj.start()
complete_string = matchobj.string
is_string_start = start_index == 0
# We expect that there will be one space between split boundary
# and the next word.
punctuation = complete_string[max(start_index - 2, 0)]
is_after_split_boundary = punctuation in SPLIT_BOUNDARY
if is_string_start or is_after_split_boundary:
return safe_string.capitalize()
return safe_string |
This returns text which is rendered by BeautifulSoup and is in the
form that can be split easily and has all IGNORED_PHRASES processed. | def get_safe_text(text: str) -> str:
"""
This returns text which is rendered by BeautifulSoup and is in the
form that can be split easily and has all IGNORED_PHRASES processed.
"""
soup = BeautifulSoup(text, "lxml")
text = " ".join(soup.text.split()) # Remove extra whitespaces.
for phrase_regex in COMPILED_IGNORED_PHRASES:
text = phrase_regex.sub(replace_with_safe_phrase, text)
return text |
During the parsing/validation phase, it's useful to have separate
tokens for "indent" chunks, but during pretty printing, we like
to attach an `.indent` field to the substantive node, whether
it's an HTML tag or template directive or whatever. | def shift_indents_to_the_next_tokens(tokens: List[Token]) -> None:
"""
During the parsing/validation phase, it's useful to have separate
tokens for "indent" chunks, but during pretty printing, we like
to attach an `.indent` field to the substantive node, whether
it's an HTML tag or template directive or whatever.
"""
tokens[0].indent = ""
for i, token in enumerate(tokens[:-1]):
next_token = tokens[i + 1]
if token.kind == "indent":
next_token.indent = token.s
token.new_s = ""
if token.kind == "newline" and next_token.kind != "indent":
next_token.indent = "" |
Select a bash profile file to add setup code to. | def setup_bash_profile() -> None:
"""Select a bash profile file to add setup code to."""
BASH_PROFILES = [
os.path.expanduser(p) for p in ("~/.bash_profile", "~/.bash_login", "~/.profile")
]
def clear_old_profile() -> None:
# An earlier version of this script would output a fresh .bash_profile
# even though a .profile existed in the image used. As a convenience to
# existing developers (and, perhaps, future developers git-bisecting the
# provisioning scripts), check for this situation, and blow away the
# created .bash_profile if one is found.
BASH_PROFILE = BASH_PROFILES[0]
DOT_PROFILE = BASH_PROFILES[2]
OLD_PROFILE_TEXT = "source /srv/zulip-py3-venv/bin/activate\ncd /srv/zulip\n"
if os.path.exists(DOT_PROFILE):
try:
with open(BASH_PROFILE) as f:
profile_contents = f.read()
if profile_contents == OLD_PROFILE_TEXT:
os.unlink(BASH_PROFILE)
except FileNotFoundError:
pass
clear_old_profile()
for candidate_profile in BASH_PROFILES:
if os.path.exists(candidate_profile):
setup_shell_profile(candidate_profile)
break
else:
# no existing bash profile found; claim .bash_profile
setup_shell_profile(BASH_PROFILES[0]) |
Works for both partials and partial blocks. | def get_handlebars_partial(text: str, i: int) -> str:
"""Works for both partials and partial blocks."""
end = i + 10
unclosed_end = 0
while end <= len(text):
if text[end - 2 : end] == "}}":
return text[i:end]
if not unclosed_end and text[end] == "<":
unclosed_end = end
end += 1
raise TokenizationError("Unclosed partial", text[i:unclosed_end]) |
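A small usage sketch:

template = "{{> user_card }}<div>rest of the template</div>"
# Starting at the opening braces, the full partial is returned.
assert get_handlebars_partial(template, 0) == "{{> user_card }}"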
Registers --skip-provision-check argument to be used with various commands/tests in our tools. | def add_provision_check_override_param(parser: ArgumentParser) -> None:
"""
Registers --skip-provision-check argument to be used with various commands/tests in our tools.
"""
parser.add_argument(
"--skip-provision-check",
action="store_true",
help="Skip check that provision has been run; useful to save time if you know the dependency changes are not relevant to this command and will not cause it to fail",
) |
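Typical usage when building a tool's argument parser:

from argparse import ArgumentParser

parser = ArgumentParser(description="example tool")  # illustrative
add_provision_check_override_param(parser)
args = parser.parse_args(["--skip-provision-check"])
assert args.skip_provision_check is True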
Raise a RuntimeError if the server process has exited. | def assert_server_running(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> None:
"""Raise a RuntimeError if the server process has exited; poll() returns None while it is still running."""
if server.poll() is not None:
message = "Server died unexpectedly!"
if log_file:
message += f"\nSee {log_file}\n"
raise RuntimeError(message) |
Common context used for things like outgoing emails that don't
have a request. | def common_context(user: UserProfile) -> Dict[str, Any]:
"""Common context used for things like outgoing emails that don't
have a request.
"""
return {
"realm_uri": user.realm.uri,
"realm_name": user.realm.name,
"root_domain_url": settings.ROOT_DOMAIN_URI,
"external_url_scheme": settings.EXTERNAL_URI_SCHEME,
"external_host": settings.EXTERNAL_HOST,
"user_name": user.full_name,
"corporate_enabled": settings.CORPORATE_ENABLED,
} |
Accept a GET param `?nav=no` to render an isolated, navless page. | def is_isolated_page(request: HttpRequest) -> bool:
"""Accept a GET param `?nav=no` to render an isolated, navless page."""
return request.GET.get("nav") == "no" |
Context available to all Zulip Jinja2 templates that have a request
passed in. Designed to provide the long list of variables at the
bottom of this function in a wide range of situations: logged-in
or logged-out, subdomains or not, etc.
The main variable in the below is whether we know what realm the
user is trying to interact with. | def zulip_default_context(request: HttpRequest) -> Dict[str, Any]:
"""Context available to all Zulip Jinja2 templates that have a request
passed in. Designed to provide the long list of variables at the
bottom of this function in a wide range of situations: logged-in
or logged-out, subdomains or not, etc.
The main variable in the below is whether we know what realm the
user is trying to interact with.
"""
realm = get_realm_from_request(request)
if realm is None:
realm_uri = settings.ROOT_DOMAIN_URI
realm_name = None
realm_icon = None
else:
realm_uri = realm.uri
realm_name = realm.name
realm_icon = get_realm_icon_url(realm)
skip_footer = False
register_link_disabled = settings.REGISTER_LINK_DISABLED
login_link_disabled = settings.LOGIN_LINK_DISABLED
find_team_link_disabled = settings.FIND_TEAM_LINK_DISABLED
allow_search_engine_indexing = False
if (
settings.ROOT_DOMAIN_LANDING_PAGE
and get_subdomain(request) == Realm.SUBDOMAIN_FOR_ROOT_DOMAIN
):
register_link_disabled = True
login_link_disabled = True
find_team_link_disabled = False
allow_search_engine_indexing = True
elif realm is None:
register_link_disabled = True
login_link_disabled = True
find_team_link_disabled = False
skip_footer = True
apps_page_web = settings.ROOT_DOMAIN_URI + "/accounts/go/"
if settings.DEVELOPMENT:
secrets_path = "zproject/dev-secrets.conf"
settings_path = "zproject/dev_settings.py"
settings_comments_path = "zproject/prod_settings_template.py"
else:
secrets_path = "/etc/zulip/zulip-secrets.conf"
settings_path = "/etc/zulip/settings.py"
settings_comments_path = "/etc/zulip/settings.py"
# Used to remove links to Zulip docs and landing page from footer of self-hosted pages.
corporate_enabled = settings.CORPORATE_ENABLED
support_email = FromAddress.SUPPORT
support_email_html_tag = SafeString(
f'<a href="mailto:{escape(support_email)}">{escape(support_email)}</a>'
)
# Sync this with default_params_schema in base_page_params.ts.
default_page_params: Dict[str, Any] = {
**DEFAULT_PAGE_PARAMS,
"server_sentry_dsn": settings.SENTRY_FRONTEND_DSN,
"request_language": get_language(),
}
if settings.SENTRY_FRONTEND_DSN is not None:
if realm is not None:
default_page_params["realm_sentry_key"] = realm.string_id
default_page_params["server_sentry_environment"] = get_config(
"machine", "deploy_type", "development"
)
default_page_params["server_sentry_sample_rate"] = settings.SENTRY_FRONTEND_SAMPLE_RATE
default_page_params["server_sentry_trace_rate"] = settings.SENTRY_FRONTEND_TRACE_RATE
context = {
"root_domain_landing_page": settings.ROOT_DOMAIN_LANDING_PAGE,
"custom_logo_url": settings.CUSTOM_LOGO_URL,
"register_link_disabled": register_link_disabled,
"login_link_disabled": login_link_disabled,
"terms_of_service": settings.TERMS_OF_SERVICE_VERSION is not None,
"login_url": settings.HOME_NOT_LOGGED_IN,
"only_sso": settings.ONLY_SSO,
"external_host": settings.EXTERNAL_HOST,
"external_url_scheme": settings.EXTERNAL_URI_SCHEME,
"realm_uri": realm_uri,
"realm_name": realm_name,
"realm_icon": realm_icon,
"root_domain_url": settings.ROOT_DOMAIN_URI,
"apps_page_url": get_apps_page_url(),
"apps_page_web": apps_page_web,
"open_realm_creation": settings.OPEN_REALM_CREATION,
"development_environment": settings.DEVELOPMENT,
"support_email": support_email,
"support_email_html_tag": support_email_html_tag,
"find_team_link_disabled": find_team_link_disabled,
"password_min_length": settings.PASSWORD_MIN_LENGTH,
"password_min_guesses": settings.PASSWORD_MIN_GUESSES,
"zulip_version": ZULIP_VERSION,
"user_is_authenticated": request.user.is_authenticated,
"settings_path": settings_path,
"secrets_path": secrets_path,
"settings_comments_path": settings_comments_path,
"platform": RequestNotes.get_notes(request).client_name,
"allow_search_engine_indexing": allow_search_engine_indexing,
"landing_page_navbar_message": settings.LANDING_PAGE_NAVBAR_MESSAGE,
"is_isolated_page": is_isolated_page(request),
"skip_footer": skip_footer,
"default_page_params": default_page_params,
"corporate_enabled": corporate_enabled,
}
context["PAGE_METADATA_URL"] = f"{realm_uri}{request.path}"
if realm is not None and realm.icon_source == realm.ICON_UPLOADED:
context["PAGE_METADATA_IMAGE"] = urljoin(realm_uri, realm_icon)
return context |
The optional user parameter requests that a UserActivity row be
created/updated to record this request.
In particular, unauthenticated requests and those authenticated to
a non-user object like RemoteZulipServer should not pass the
`user` parameter. | def process_client(
request: HttpRequest,
user: Union[UserProfile, AnonymousUser, None] = None,
*,
is_browser_view: bool = False,
client_name: Optional[str] = None,
query: Optional[str] = None,
) -> None:
"""The optional user parameter requests that a UserActivity row be
created/updated to record this request.
In particular, unauthenticated requests and those authenticated to
a non-user object like RemoteZulipServer should not pass the
`user` parameter.
"""
request_notes = RequestNotes.get_notes(request)
if client_name is None:
client_name = request_notes.client_name
assert client_name is not None
# We could check for a browser's name being "Mozilla", but
# e.g. Opera and MobileSafari don't set that, and it seems
# more robust to just key off whether it was a browser view
if is_browser_view and not client_name.startswith("Zulip"):
# Avoid changing the client string for browsers, but let
# the Zulip desktop apps be themselves.
client_name = "website"
request_notes.client = get_client(client_name)
if user is not None and user.is_authenticated:
update_user_activity(request, user, query) |
Decorator for views that checks that the user passes the given test,
redirecting to the log-in page if necessary. The test should be a callable
that takes the HttpRequest and returns True if the user passes. | def user_passes_test(
test_func: Callable[[HttpRequest], bool],
login_url: Optional[str] = None,
redirect_field_name: str = REDIRECT_FIELD_NAME,
) -> Callable[
[Callable[Concatenate[HttpRequest, ParamT], HttpResponse]],
Callable[Concatenate[HttpRequest, ParamT], HttpResponse],
]:
"""
Decorator for views that checks that the user passes the given test,
redirecting to the log-in page if necessary. The test should be a callable
that takes the HttpRequest and returns True if the user passes.
"""
def decorator(
view_func: Callable[Concatenate[HttpRequest, ParamT], HttpResponse],
) -> Callable[Concatenate[HttpRequest, ParamT], HttpResponse]:
@wraps(view_func)
def _wrapped_view(
request: HttpRequest, /, *args: ParamT.args, **kwargs: ParamT.kwargs
) -> HttpResponse:
if test_func(request):
return view_func(request, *args, **kwargs)
return zulip_redirect_to_login(request, login_url, redirect_field_name)
return _wrapped_view
return decorator |
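A sketch of wrapping a view (the test function and view below are hypothetical; HttpRequest and HttpResponse come from django.http):

def is_authenticated_request(request: HttpRequest) -> bool:
    # Hypothetical test: require an authenticated session.
    return request.user.is_authenticated

@user_passes_test(is_authenticated_request, login_url="/accounts/login/")
def example_view(request: HttpRequest) -> HttpResponse:
    return HttpResponse("ok")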
Creates a session, logging in the user, using the Django method,
and also adds helpful data needed by our server logs. | def do_login(request: HttpRequest, user_profile: UserProfile) -> None:
"""Creates a session, logging in the user, using the Django method,
and also adds helpful data needed by our server logs.
"""
# As a hardening measure, pass the user_profile through the dummy backend,
# which does the minimal validation that the user is allowed to log in.
# This, and stronger validation, should have already been done by the
# caller, so we raise an AssertionError if this doesn't work as expected.
# This is to prevent misuse of this function, as it would pose a major
# security issue.
realm = get_valid_realm_from_request(request)
validated_user_profile = authenticate(
request=request, username=user_profile.delivery_email, realm=realm, use_dummy_backend=True
)
if validated_user_profile is None or validated_user_profile != user_profile:
raise AssertionError("do_login called for a user_profile that shouldn't be able to log in")
assert isinstance(validated_user_profile, UserProfile)
django_login(request, validated_user_profile)
RequestNotes.get_notes(
request
).requester_for_logs = validated_user_profile.format_requester_for_logs()
process_client(request, validated_user_profile, is_browser_view=True)
if settings.TWO_FACTOR_AUTHENTICATION_ENABLED:
# Log in with two factor authentication as well.
do_two_factor_login(request, validated_user_profile) |
This wrapper adds client info for unauthenticated users but
forces authenticated users to go through 2fa. | def web_public_view(
view_func: Callable[Concatenate[HttpRequest, ParamT], HttpResponse],
redirect_field_name: str = REDIRECT_FIELD_NAME,
login_url: str = settings.HOME_NOT_LOGGED_IN,
) -> Callable[Concatenate[HttpRequest, ParamT], HttpResponse]:
"""
This wrapper adds client info for unauthenticated users but
forces authenticated users to go through 2fa.
"""
actual_decorator = lambda view_func: zulip_otp_required_if_logged_in(
redirect_field_name=redirect_field_name, login_url=login_url
)(add_logging_data(view_func))
return actual_decorator(view_func) |
Extracts the role and API key as a tuple from the Authorization header
for HTTP basic authentication. | def get_basic_credentials(
request: HttpRequest, beanstalk_email_decode: bool = False
) -> Tuple[str, str]:
"""
Extracts the role and API key as a tuple from the Authorization header
for HTTP basic authentication.
"""
try:
# Grab the base64-encoded authentication string, decode it, and split it into
# the email and API key
auth_type, credentials = request.headers["Authorization"].split()
# case insensitive per RFC 1945
if auth_type.lower() != "basic":
raise JsonableError(_("This endpoint requires HTTP basic authentication."))
role, api_key = base64.b64decode(credentials).decode().split(":")
if beanstalk_email_decode:
# Beanstalk's web hook UI rejects URL with a @ in the username section
# So we ask the user to replace them with %40
role = role.replace("%40", "@")
except ValueError:
raise UnauthorizedError(_("Invalid authorization header for basic auth"))
except KeyError:
raise UnauthorizedError(_("Missing authorization header for basic auth"))
return role, api_key |
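A sketch of the header format this expects, e.g. when constructing a test request (values are illustrative):

import base64

email = "webhook-bot@example.com"
api_key = "abcd1234"
credentials = base64.b64encode(f"{email}:{api_key}".encode()).decode()
headers = {"Authorization": f"Basic {credentials}"}
# get_basic_credentials would then return (email, api_key); with
# beanstalk_email_decode=True, any "%40" in the role is turned back into "@".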
Used for situations where something running on the Zulip server
needs to make a request to the (other) Django/Tornado processes running on
the server. | def internal_api_view(
is_tornado_view: bool,
) -> Callable[
[Callable[Concatenate[HttpRequest, ParamT], HttpResponse]],
Callable[Concatenate[HttpRequest, ParamT], HttpResponse],
]:
"""Used for situations where something running on the Zulip server
needs to make a request to the (other) Django/Tornado processes running on
the server."""
def _wrapped_view_func(
view_func: Callable[Concatenate[HttpRequest, ParamT], HttpResponse],
) -> Callable[Concatenate[HttpRequest, ParamT], HttpResponse]:
@csrf_exempt
@require_post
@wraps(view_func)
def _wrapped_func_arguments(
request: HttpRequest, /, *args: ParamT.args, **kwargs: ParamT.kwargs
) -> HttpResponse:
if not authenticate_internal_api(request):
raise AccessDeniedError
request_notes = RequestNotes.get_notes(request)
is_tornado_request = request_notes.tornado_handler_id is not None
# These next 2 are not security checks; they are internal
# assertions to help us find bugs.
if is_tornado_view and not is_tornado_request:
raise RuntimeError("Tornado notify view called with no Tornado handler")
if not is_tornado_view and is_tornado_request:
raise RuntimeError("Django notify view called with Tornado handler")
request_notes.requester_for_logs = "internal"
return view_func(request, *args, **kwargs)
return _wrapped_func_arguments
return _wrapped_view_func |
The reason we need to create this function is that the stock
otp_required decorator doesn't play well with tests. We cannot
enable/disable the if_configured parameter during tests since the decorator
retains its value due to closure.
Similar to :func:`~django.contrib.auth.decorators.login_required`, but
requires the user to be :term:`verified`. By default, this redirects users
to :setting:`OTP_LOGIN_URL`. Returns True if the user is not authenticated. | def zulip_otp_required_if_logged_in(
redirect_field_name: str = "next",
login_url: str = settings.HOME_NOT_LOGGED_IN,
) -> Callable[
[Callable[Concatenate[HttpRequest, ParamT], HttpResponse]],
Callable[Concatenate[HttpRequest, ParamT], HttpResponse],
]:
"""
The reason we need to create this function is that the stock
otp_required decorator doesn't play well with tests. We cannot
enable/disable the if_configured parameter during tests since the decorator
retains its value due to closure.
Similar to :func:`~django.contrib.auth.decorators.login_required`, but
requires the user to be :term:`verified`. By default, this redirects users
to :setting:`OTP_LOGIN_URL`. Returns True if the user is not authenticated.
"""
def test(user: Union[AbstractBaseUser, AnonymousUser]) -> bool:
"""
:if_configured: If ``True``, an authenticated user with no confirmed
OTP devices will be allowed. Also, non-authenticated users will be
allowed as spectator users. Default is ``False``. If ``False``,
2FA will not do any authentication.
"""
if_configured = settings.TWO_FACTOR_AUTHENTICATION_ENABLED
if not if_configured:
return True
# This request is unauthenticated (logged-out) access; 2FA is
# not required or possible.
if not user.is_authenticated:
return True
assert isinstance(user, UserProfile)
# User has completed 2FA verification
if is_2fa_verified(user):
return True
# If the user doesn't have 2FA set up, we can't enforce 2FA.
if not user_has_device(user):
return True
# User has configured 2FA and is not verified, so the user
# fails the test (and we should redirect to the 2FA view).
return False
decorator = django_user_passes_test(
test, login_url=login_url, redirect_field_name=redirect_field_name
)
return decorator |
Prevent MIT mailing lists from signing up for Zulip | def email_is_not_mit_mailing_list(email: str) -> None:
"""Prevent MIT mailing lists from signing up for Zulip"""
address = Address(addr_spec=email)
if address.domain == "mit.edu":
# Check whether the user exists and can get mail.
try:
DNS.dnslookup(f"{address.username}.pobox.ns.athena.mit.edu", DNS.Type.TXT)
except DNS.Base.ServerError as e:
if e.rcode == DNS.Status.NXDOMAIN:
# This error is Markup only because 1. it needs to render HTML
# 2. It's not formatted with any user input.
raise ValidationError(MIT_VALIDATION_ERROR)
else:
raise AssertionError("Unexpected DNS error") |
This function verifies the request is allowed to make SCIM requests on this subdomain,
by checking the provided bearer token and ensuring it matches a scim client configured
for this subdomain in settings.SCIM_CONFIG.
Returns True if successful. | def validate_scim_bearer_token(request: HttpRequest) -> bool:
"""
This function verifies the request is allowed to make SCIM requests on this subdomain,
by checking the provided bearer token and ensuring it matches a scim client configured
for this subdomain in settings.SCIM_CONFIG.
Returns True if successful.
"""
subdomain = get_subdomain(request)
scim_config_dict = settings.SCIM_CONFIG.get(subdomain)
if not scim_config_dict:
return False
valid_bearer_token = scim_config_dict.get("bearer_token")
scim_client_name = scim_config_dict.get("scim_client_name")
# We really don't want a misconfiguration where this is unset,
# allowing free access to the SCIM API:
assert valid_bearer_token
assert scim_client_name
authorization = request.headers.get("Authorization")
if authorization is None or not constant_time_compare(
authorization, f"Bearer {valid_bearer_token}"
):
return False
request_notes = RequestNotes.get_notes(request)
assert request_notes.realm is not None
request_notes.requester_for_logs = (
f"scim-client:{scim_client_name}:realm:{request_notes.realm.id}"
)
return True |
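The shape of settings.SCIM_CONFIG implied by the lookups above (values are illustrative):

SCIM_CONFIG = {
    "subdomain-name": {
        "bearer_token": "<long random secret>",
        "scim_client_name": "okta",
    }
}
# The SCIM client must then send the header:
#   Authorization: Bearer <long random secret>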
Changing a realm's subdomain is a highly disruptive operation,
because all existing clients will need to be updated to point to
the new URL. Further, requests to fetch data from existing event
queues will fail with an authentication error when this change
happens (because the old subdomain is no longer associated with
the realm), making it hard for us to provide a graceful update
experience for clients. | def do_change_realm_subdomain(
realm: Realm,
new_subdomain: str,
*,
acting_user: Optional[UserProfile],
add_deactivated_redirect: bool = True,
) -> None:
"""Changing a realm's subdomain is a highly disruptive operation,
because all existing clients will need to be updated to point to
the new URL. Further, requests to fetch data from existing event
queues will fail with an authentication error when this change
happens (because the old subdomain is no longer associated with
the realm), making it hard for us to provide a graceful update
experience for clients.
"""
old_subdomain = realm.subdomain
old_uri = realm.uri
# If the realm had been a demo organization scheduled for
# deletion, clear that state.
realm.demo_organization_scheduled_deletion_date = None
realm.string_id = new_subdomain
with transaction.atomic():
realm.save(update_fields=["string_id", "demo_organization_scheduled_deletion_date"])
RealmAuditLog.objects.create(
realm=realm,
event_type=RealmAuditLog.REALM_SUBDOMAIN_CHANGED,
event_time=timezone_now(),
acting_user=acting_user,
extra_data={"old_subdomain": old_subdomain, "new_subdomain": new_subdomain},
)
# If a realm is being renamed multiple times, we should find all the placeholder
# realms and reset their deactivated_redirect field to point to the new realm URI.
placeholder_realms = Realm.objects.filter(deactivated_redirect=old_uri, deactivated=True)
for placeholder_realm in placeholder_realms:
do_add_deactivated_redirect(placeholder_realm, realm.uri)
# The below block isn't executed in a transaction with the earlier code due to
# the functions called below being complex and potentially sending events,
# which we don't want to do in atomic blocks.
# When we change a realm's subdomain, the realm with the old subdomain is
# effectively deactivated. We create a deactivated realm using the old
# subdomain and set its deactivated_redirect to the new realm's URI so that
# we can tell users that the realm has been moved to a new subdomain.
if add_deactivated_redirect:
placeholder_realm = do_create_realm(old_subdomain, realm.name)
do_deactivate_realm(placeholder_realm, acting_user=None)
do_add_deactivated_redirect(placeholder_realm, realm.uri) |
This function implements overrides for the default configuration
for new organizations when the administrator selected specific
organization types.
This substantially simplifies our /help/ advice for folks setting
up new organizations of these types. | def set_realm_permissions_based_on_org_type(realm: Realm) -> None:
"""This function implements overrides for the default configuration
for new organizations when the administrator selected specific
organization types.
This substantially simplifies our /help/ advice for folks setting
up new organizations of these types.
"""
# Custom configuration for educational organizations. The present
# defaults are designed for a single class, not a department or
# larger institution, since those are more common.
if realm.org_type in (
Realm.ORG_TYPES["education_nonprofit"]["id"],
Realm.ORG_TYPES["education"]["id"],
):
# Limit user creation to administrators.
realm.invite_to_realm_policy = Realm.POLICY_ADMINS_ONLY
# Restrict public stream creation to staff, but allow private
# streams (useful for study groups, etc.).
realm.create_public_stream_policy = Realm.POLICY_ADMINS_ONLY
# Don't allow members (students) to manage user groups or
# stream subscriptions.
realm.user_group_edit_policy = Realm.POLICY_MODERATORS_ONLY
realm.invite_to_stream_policy = Realm.POLICY_MODERATORS_ONLY
# Allow moderators (TAs?) to move topics between streams.
realm.move_messages_between_streams_policy = Realm.POLICY_MODERATORS_ONLY |
Create this realm's internal bots.
This function is idempotent; it does nothing for a bot that
already exists. | def setup_realm_internal_bots(realm: Realm) -> None:
"""Create this realm's internal bots.
This function is idempotent; it does nothing for a bot that
already exists.
"""
internal_bots = [
(bot["name"], bot["email_template"] % (settings.INTERNAL_BOT_DOMAIN,))
for bot in settings.REALM_INTERNAL_BOTS
]
create_users(realm, internal_bots, bot_type=UserProfile.DEFAULT_BOT)
bots = UserProfile.objects.filter(
realm=realm,
email__in=[bot_info[1] for bot_info in internal_bots],
bot_owner__isnull=True,
)
for bot in bots:
bot.bot_owner = bot
bot.save() |
Give the user some messages in their feed, so that they can learn how to
use the home view in a realistic way after finishing the tutorial.
Mark the very most recent messages as unread. | def add_new_user_history(user_profile: UserProfile, streams: Iterable[Stream]) -> None:
"""
Give the user some messages in their feed, so that they can learn how to
use the home view in a realistic way after finishing the tutorial.
Mark the very most recent messages as unread.
"""
# Find recipient ids for the user's streams, limiting to just
# those where we can access the streams' full history.
#
# TODO: This will do database queries in a loop if many private
# streams are involved.
recipient_ids = [
stream.recipient_id for stream in streams if can_access_stream_history(user_profile, stream)
]
# Start by finding recent messages matching those recipients.
cutoff_date = timezone_now() - ONBOARDING_RECENT_TIMEDELTA
recent_message_ids = set(
Message.objects.filter(
# Uses index: zerver_message_realm_recipient_id
realm_id=user_profile.realm_id,
recipient_id__in=recipient_ids,
date_sent__gt=cutoff_date,
)
.order_by("-id")
.values_list("id", flat=True)[0:MAX_NUM_ONBOARDING_MESSAGES]
)
if len(recent_message_ids) > 0:
# Handle the race condition where a message arrives between
# bulk_add_subscriptions above and the Message query just above
already_used_ids = set(
UserMessage.objects.filter(
message_id__in=recent_message_ids, user_profile=user_profile
).values_list("message_id", flat=True)
)
# Exclude the already-used ids and sort them.
backfill_message_ids = sorted(recent_message_ids - already_used_ids)
# Find which message ids we should mark as read.
# (We don't want too many unread messages.)
older_message_ids = set(backfill_message_ids[:-MAX_NUM_ONBOARDING_UNREAD_MESSAGES])
# Create UserMessage rows for the backfill.
ums_to_create = []
for message_id in backfill_message_ids:
um = UserMessage(user_profile=user_profile, message_id=message_id)
if message_id in older_message_ids:
um.flags = UserMessage.flags.read
ums_to_create.append(um)
        UserMessage.objects.bulk_create(ums_to_create)
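
def _example_onboarding_unread_split() -> None:
    # Illustrative sketch, not part of the original module: demonstrates how
    # the slicing above splits the backfilled message ids into ones marked
    # read and ones left unread. The limit below is a made-up stand-in for
    # MAX_NUM_ONBOARDING_UNREAD_MESSAGES.
    max_unread = 2
    backfill_message_ids = [101, 102, 103, 104, 105]
    older_message_ids = set(backfill_message_ids[:-max_unread])
    assert older_message_ids == {101, 102, 103}  # would get the "read" flag
    assert set(backfill_message_ids) - older_message_ids == {104, 105}  # left unread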

def do_activate_mirror_dummy_user(
    user_profile: UserProfile, *, acting_user: Optional[UserProfile]
) -> None:
    """Called to have a user "take over" a "mirror dummy" user
    (i.e. is_mirror_dummy=True) account when they sign up with the
    same email address.

    Essentially, the result should be as though we had created the
    UserProfile just now with do_create_user, except that the mirror
    dummy user may appear as the recipient or sender of messages from
    before their account was fully created.

    TODO: This function likely has bugs resulting from this being a
    parallel code path to do_create_user; e.g. it likely does not
    handle preferences or default streams properly.
    """
with transaction.atomic():
change_user_is_active(user_profile, True)
user_profile.is_mirror_dummy = False
user_profile.set_unusable_password()
user_profile.date_joined = timezone_now()
user_profile.tos_version = settings.TERMS_OF_SERVICE_VERSION
user_profile.save(
update_fields=["date_joined", "password", "is_mirror_dummy", "tos_version"]
)
event_time = user_profile.date_joined
RealmAuditLog.objects.create(
realm=user_profile.realm,
modified_user=user_profile,
acting_user=acting_user,
event_type=RealmAuditLog.USER_ACTIVATED,
event_time=event_time,
extra_data={
RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
},
)
maybe_enqueue_audit_log_upload(user_profile.realm)
do_increment_logging_stat(
user_profile.realm,
COUNT_STATS["active_users_log:is_bot:day"],
user_profile.is_bot,
event_time,
)
if settings.BILLING_ENABLED:
billing_session = RealmBillingSession(user=user_profile, realm=user_profile.realm)
billing_session.update_license_ledger_if_needed(event_time)
    notify_created_user(user_profile, [])

def do_reactivate_user(user_profile: UserProfile, *, acting_user: Optional[UserProfile]) -> None:
    """Reactivate a user that had previously been deactivated"""
if user_profile.is_mirror_dummy:
raise JsonableError(
_("Cannot activate a placeholder account; ask the user to sign up, instead.")
)
change_user_is_active(user_profile, True)
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=user_profile.realm,
modified_user=user_profile,
acting_user=acting_user,
event_type=RealmAuditLog.USER_REACTIVATED,
event_time=event_time,
extra_data={
RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
},
)
maybe_enqueue_audit_log_upload(user_profile.realm)
bot_owner_changed = False
if (
user_profile.is_bot
and user_profile.bot_owner is not None
and not user_profile.bot_owner.is_active
and acting_user is not None
):
previous_owner = user_profile.bot_owner
user_profile.bot_owner = acting_user
user_profile.save() # Can't use update_fields because of how the foreign key works.
RealmAuditLog.objects.create(
realm=user_profile.realm,
acting_user=acting_user,
modified_user=user_profile,
event_type=RealmAuditLog.USER_BOT_OWNER_CHANGED,
event_time=event_time,
)
bot_owner_changed = True
do_increment_logging_stat(
user_profile.realm,
COUNT_STATS["active_users_log:is_bot:day"],
user_profile.is_bot,
event_time,
)
if settings.BILLING_ENABLED:
billing_session = RealmBillingSession(user=user_profile, realm=user_profile.realm)
billing_session.update_license_ledger_if_needed(event_time)
event = dict(
type="realm_user", op="update", person=dict(user_id=user_profile.id, is_active=True)
)
send_event_on_commit(user_profile.realm, event, active_user_ids(user_profile.realm_id))
if user_profile.is_bot:
event = dict(
type="realm_bot",
op="update",
bot=dict(
user_id=user_profile.id,
is_active=True,
),
)
send_event_on_commit(user_profile.realm, event, bot_owner_user_ids(user_profile))
if bot_owner_changed:
from zerver.actions.bots import send_bot_owner_update_events
assert acting_user is not None
send_bot_owner_update_events(user_profile, acting_user, previous_owner)
if bot_owner_changed:
from zerver.actions.bots import remove_bot_from_inaccessible_private_streams
remove_bot_from_inaccessible_private_streams(user_profile, acting_user=acting_user)
subscribed_recipient_ids = Subscription.objects.filter(
user_profile_id=user_profile.id, active=True, recipient__type=Recipient.STREAM
).values_list("recipient__type_id", flat=True)
subscribed_streams = Stream.objects.filter(id__in=subscribed_recipient_ids, deactivated=False)
subscriber_peer_info = bulk_get_subscriber_peer_info(
realm=user_profile.realm,
streams=subscribed_streams,
)
altered_user_dict: Dict[int, Set[int]] = defaultdict(set)
for stream in subscribed_streams:
altered_user_dict[stream.id] = {user_profile.id}
stream_dict = {stream.id: stream for stream in subscribed_streams}
send_peer_subscriber_events(
op="peer_add",
realm=user_profile.realm,
altered_user_dict=altered_user_dict,
stream_dict=stream_dict,
subscriber_peer_info=subscriber_peer_info,
    )
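
def _example_peer_add_altered_user_dict() -> None:
    # Illustrative sketch, not part of the original module: the mapping passed
    # to send_peer_subscriber_events above associates each subscribed stream id
    # with the set of user ids whose subscriptions changed; here, only the
    # reactivated user. The ids below are made up.
    from collections import defaultdict
    from typing import Dict, Set

    reactivated_user_id = 10
    subscribed_stream_ids = [3, 5, 8]
    altered_user_dict: Dict[int, Set[int]] = defaultdict(set)
    for stream_id in subscribed_stream_ids:
        altered_user_dict[stream_id] = {reactivated_user_id}
    assert altered_user_dict[5] == {10}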

def do_remove_realm_custom_profile_field(realm: Realm, field: CustomProfileField) -> None:
"""
Deleting a field will also delete the user profile data
associated with it in CustomProfileFieldValue model.
"""
field.delete()
    notify_realm_custom_profile_fields(realm)

def do_send_confirmation_email(
invitee: PreregistrationUser,
referrer: UserProfile,
email_language: str,
invite_expires_in_minutes: Union[Optional[int], UnspecifiedValue] = UnspecifiedValue(),
) -> str:
"""
Send the confirmation/welcome e-mail to an invited user.
"""
activation_url = create_confirmation_link(
invitee, Confirmation.INVITATION, validity_in_minutes=invite_expires_in_minutes
)
context = {
"referrer_full_name": referrer.full_name,
"referrer_email": referrer.delivery_email,
"activate_url": activation_url,
"referrer_realm_name": referrer.realm.name,
"corporate_enabled": settings.CORPORATE_ENABLED,
}
send_email(
"zerver/emails/invitation",
to_emails=[invitee.email],
from_address=FromAddress.tokenized_no_reply_address(),
language=email_language,
context=context,
realm=referrer.realm,
)
    return activation_url
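
# Illustrative usage sketch, not part of the original module. The `invitee` and
# `referrer` objects here are hypothetical PreregistrationUser and UserProfile
# instances, and the expiry value is made up:
#
#     activation_url = do_send_confirmation_email(
#         invitee, referrer, email_language="en", invite_expires_in_minutes=1440
#     )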

def estimate_recent_invites(realms: Collection[Realm] | QuerySet[Realm], *, days: int) -> int:
"""An upper bound on the number of invites sent in the last `days` days"""
recent_invites = RealmCount.objects.filter(
realm__in=realms,
property="invites_sent::day",
end_time__gte=timezone_now() - timedelta(days=days),
).aggregate(Sum("value"))["value__sum"]
if recent_invites is None:
return 0
    return recent_invites
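
def _example_recent_invites_none_handling() -> None:
    # Illustrative sketch, not part of the original module: Django's Sum()
    # aggregate evaluates to None when no rows match, which is why the function
    # above coerces a missing sum to 0. Plain-Python stand-in for that behavior:
    from typing import List, Optional

    def sum_or_none(values: List[int]) -> Optional[int]:
        return sum(values) if values else None

    assert sum_or_none([5, 12]) == 17
    recent_invites = sum_or_none([])
    assert (0 if recent_invites is None else recent_invites) == 0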

def check_invite_limit(realm: Realm, num_invitees: int) -> None:
"""Discourage using invitation emails as a vector for carrying spam."""
msg = _(
"To protect users, Zulip limits the number of invitations you can send in one day. Because you have reached the limit, no invitations were sent."
)
if not settings.OPEN_REALM_CREATION:
return
if too_many_recent_realm_invites(realm, num_invitees):
raise InvitationError(
msg,
[],
sent_invitations=False,
daily_limit_reached=True,
)
default_max = settings.INVITES_DEFAULT_REALM_DAILY_MAX
newrealm_age = timedelta(days=settings.INVITES_NEW_REALM_DAYS)
if realm.date_created <= timezone_now() - newrealm_age:
# If this isn't a "newly-created" realm, we're done. The
# remaining code applies an aggregate limit across all
# "new" realms, to address sudden bursts of spam realms.
return
if realm.max_invites > default_max:
# If a user is on a realm where we've bumped up
# max_invites, then we exempt them from invite limits.
return
new_realms = Realm.objects.filter(
date_created__gte=timezone_now() - newrealm_age,
_max_invites__lte=default_max,
).all()
for days, count in settings.INVITES_NEW_REALM_LIMIT_DAYS:
recent_invites = estimate_recent_invites(new_realms, days=days)
if num_invitees + recent_invites > count:
raise InvitationError(
msg,
[],
sent_invitations=False,
daily_limit_reached=True,
            )
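
def _example_new_realm_invite_limit() -> None:
    # Illustrative sketch, not part of the original module: shows the aggregate
    # check applied across all recently created realms. The (days, count) pairs
    # stand in for settings.INVITES_NEW_REALM_LIMIT_DAYS and the per-window
    # invite totals are made up.
    limits = [(1, 100), (7, 500)]
    recent_invites_by_window = {1: 95, 7: 300}
    num_invitees = 10
    over_limit = any(
        num_invitees + recent_invites_by_window[days] > count for days, count in limits
    )
    assert over_limit  # 10 + 95 exceeds the 1-day limit of 100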

def do_get_invites_controlled_by_user(user_profile: UserProfile) -> List[Dict[str, Any]]:
    """
    Returns a list of dicts representing invitations that can be controlled by user_profile.
    This isn't necessarily the same as all the invitations generated by the user, as
    administrators can also control invitations that they did not themselves create.
    """
if user_profile.is_realm_admin:
prereg_users = filter_to_valid_prereg_users(
PreregistrationUser.objects.filter(referred_by__realm=user_profile.realm)
)
else:
prereg_users = filter_to_valid_prereg_users(
PreregistrationUser.objects.filter(referred_by=user_profile)
)
invites = []
for invitee in prereg_users:
assert invitee.referred_by is not None
invites.append(
dict(
email=invitee.email,
invited_by_user_id=invitee.referred_by.id,
invited=datetime_to_timestamp(invitee.invited_at),
expiry_date=get_invitation_expiry_date(invitee.confirmation.get()),
id=invitee.id,
invited_as=invitee.invited_as,
is_multiuse=False,
)
)
if user_profile.is_realm_admin:
multiuse_confirmation_objs = Confirmation.objects.filter(
realm=user_profile.realm, type=Confirmation.MULTIUSE_INVITE
).filter(Q(expiry_date__gte=timezone_now()) | Q(expiry_date=None))
else:
multiuse_invite_ids = MultiuseInvite.objects.filter(referred_by=user_profile).values_list(
"id", flat=True
)
multiuse_confirmation_objs = Confirmation.objects.filter(
type=Confirmation.MULTIUSE_INVITE,
object_id__in=multiuse_invite_ids,
).filter(Q(expiry_date__gte=timezone_now()) | Q(expiry_date=None))
for confirmation_obj in multiuse_confirmation_objs:
invite = confirmation_obj.content_object
assert invite is not None
# This should be impossible, because revoking a multiuse invite
# deletes the Confirmation object, so it couldn't have been fetched above.
assert invite.status != confirmation_settings.STATUS_REVOKED
invites.append(
dict(
invited_by_user_id=invite.referred_by.id,
invited=datetime_to_timestamp(confirmation_obj.date_sent),
expiry_date=get_invitation_expiry_date(confirmation_obj),
id=invite.id,
link_url=confirmation_url(
confirmation_obj.confirmation_key,
user_profile.realm,
Confirmation.MULTIUSE_INVITE,
),
invited_as=invite.invited_as,
is_multiuse=True,
)
)
    return invites
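
def _example_invite_dict_shape() -> None:
    # Illustrative sketch, not part of the original module: the approximate
    # shape of one email-invitation entry in the list returned above. Every
    # value here is made up, including the invited_as role code.
    example = dict(
        email="new-user@example.com",
        invited_by_user_id=7,
        invited=1700000000,
        expiry_date=1700086400,
        id=42,
        invited_as=400,
        is_multiuse=False,
    )
    assert not example["is_multiuse"]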