| Column | Dtype | Range |
|---|---|---|
| body_hash | stringlengths | 64-64 |
| body | stringlengths | 23-109k |
| docstring | stringlengths | 1-57k |
| path | stringlengths | 4-198 |
| name | stringlengths | 1-115 |
| repository_name | stringlengths | 7-111 |
| repository_stars | float64 | 0-191k |
| lang | stringclasses | 1 value |
| body_without_docstring | stringlengths | 14-108k |
| unified | stringlengths | 45-133k |
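Each record below lists these columns in order: body_hash, body, docstring, path, name, repository_name, repository_stars, lang, body_without_docstring, and unified. Judging from the rows themselves, the unified column appears to be the docstring-stripped body followed by the docstring wrapped in special markers. The sketch below only illustrates that apparent construction; the helper name and the toy inputs are made up and are not part of the dataset tooling.

```python
def build_unified(body_without_docstring: str, docstring: str) -> str:
    # Hypothetical helper: reproduces the pattern visible in the `unified`
    # column of the rows below, i.e. the code with its docstring removed,
    # then the docstring wrapped in <|docstring|> ... <|endoftext|> markers.
    return f"{body_without_docstring}<|docstring|>{docstring}<|endoftext|>"


# Toy example (not taken from the dataset):
print(build_unified("def add(a, b): return (a + b)",
                    "Returns the sum of two numbers."))
# def add(a, b): return (a + b)<|docstring|>Returns the sum of two numbers.<|endoftext|>
```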
567c4681e28f5035e4af93ddc8620806f068c2ef48f068015ffd689628726aca
def mul(value, sample_args, context=None): 'Returns random question for multiplying two numbers.' del value is_question = (context is None) if (context is None): context = composition.Context() (entropy, sample_args) = sample_args.peel() (entropy_p, entropy_q) = _entropy_for_pair(entropy) p = number.integer_or_decimal(entropy_p, True) q = number.integer_or_decimal(entropy_q, True) (p, q) = context.sample(sample_args, [p, q]) answer = (p.value * q.value) if is_question: templates = [(('{p}' + ops.MUL_SYMBOL) + '{q}'), (('{p} ' + ops.MUL_SYMBOL) + ' {q}'), (('Hitung {p}' + ops.MUL_SYMBOL) + '{q}.'), (('Kerjakan {p} ' + ops.MUL_SYMBOL) + ' {q}.'), 'Kalikan {p} dan {q}.', 'Produk dari {p} dan {q}.', 'Apa produk dari {p} dan {q}?', '{p} kali {q}', 'Berapakah {p} kali {q}?'] template = random.choice(templates) return example.Problem(question=example.question(context, template, p=p, q=q), answer=answer) else: return composition.Entity(context=context, value=answer, description='Misalkan {self} = {p} * {q}.', p=p, q=q)
Returns random question for multiplying two numbers.
mathematics_dataset/modules/arithmetic.py
mul
Wikidepia/mathematics_dataset_id
0
python
def mul(value, sample_args, context=None): del value is_question = (context is None) if (context is None): context = composition.Context() (entropy, sample_args) = sample_args.peel() (entropy_p, entropy_q) = _entropy_for_pair(entropy) p = number.integer_or_decimal(entropy_p, True) q = number.integer_or_decimal(entropy_q, True) (p, q) = context.sample(sample_args, [p, q]) answer = (p.value * q.value) if is_question: templates = [(('{p}' + ops.MUL_SYMBOL) + '{q}'), (('{p} ' + ops.MUL_SYMBOL) + ' {q}'), (('Hitung {p}' + ops.MUL_SYMBOL) + '{q}.'), (('Kerjakan {p} ' + ops.MUL_SYMBOL) + ' {q}.'), 'Kalikan {p} dan {q}.', 'Produk dari {p} dan {q}.', 'Apa produk dari {p} dan {q}?', '{p} kali {q}', 'Berapakah {p} kali {q}?'] template = random.choice(templates) return example.Problem(question=example.question(context, template, p=p, q=q), answer=answer) else: return composition.Entity(context=context, value=answer, description='Misalkan {self} = {p} * {q}.', p=p, q=q)
def mul(value, sample_args, context=None): del value is_question = (context is None) if (context is None): context = composition.Context() (entropy, sample_args) = sample_args.peel() (entropy_p, entropy_q) = _entropy_for_pair(entropy) p = number.integer_or_decimal(entropy_p, True) q = number.integer_or_decimal(entropy_q, True) (p, q) = context.sample(sample_args, [p, q]) answer = (p.value * q.value) if is_question: templates = [(('{p}' + ops.MUL_SYMBOL) + '{q}'), (('{p} ' + ops.MUL_SYMBOL) + ' {q}'), (('Hitung {p}' + ops.MUL_SYMBOL) + '{q}.'), (('Kerjakan {p} ' + ops.MUL_SYMBOL) + ' {q}.'), 'Kalikan {p} dan {q}.', 'Produk dari {p} dan {q}.', 'Apa produk dari {p} dan {q}?', '{p} kali {q}', 'Berapakah {p} kali {q}?'] template = random.choice(templates) return example.Problem(question=example.question(context, template, p=p, q=q), answer=answer) else: return composition.Entity(context=context, value=answer, description='Misalkan {self} = {p} * {q}.', p=p, q=q)<|docstring|>Returns random question for multiplying two numbers.<|endoftext|>
2e82878deff902a4b49f2bc23fd36e8e3f36af7304361d9b4277adbe7679b93b
def div(value, sample_args, context=None): 'Returns random question for dividing two numbers.' del value is_question = (context is None) if (context is None): context = composition.Context() (entropy, sample_args) = sample_args.peel() (entropy_1, entropy_q) = _entropy_for_pair(entropy) q = number.integer(entropy_q, True, min_abs=1) if random.choice([False, True]): answer = number.integer(entropy_1, True) p = (answer * q) else: p = number.integer(entropy_1, True) answer = (p / q) (p, q) = context.sample(sample_args, [p, q]) if is_question: template = random.choice(['Bagi {p} dengan {q}.', '{p} dibagi dengan {q}', 'Berapakah {p} dibagi dengan {q}?', 'Hitunglah {p} dibagi dengan {q}.']) return example.Problem(question=example.question(context, template, p=p, q=q), answer=answer) else: return composition.Entity(context=context, value=answer, description='Misalkan {self} be {p} divided by {q}.', p=p, q=q)
Returns random question for dividing two numbers.
mathematics_dataset/modules/arithmetic.py
div
Wikidepia/mathematics_dataset_id
0
python
def div(value, sample_args, context=None): del value is_question = (context is None) if (context is None): context = composition.Context() (entropy, sample_args) = sample_args.peel() (entropy_1, entropy_q) = _entropy_for_pair(entropy) q = number.integer(entropy_q, True, min_abs=1) if random.choice([False, True]): answer = number.integer(entropy_1, True) p = (answer * q) else: p = number.integer(entropy_1, True) answer = (p / q) (p, q) = context.sample(sample_args, [p, q]) if is_question: template = random.choice(['Bagi {p} dengan {q}.', '{p} dibagi dengan {q}', 'Berapakah {p} dibagi dengan {q}?', 'Hitunglah {p} dibagi dengan {q}.']) return example.Problem(question=example.question(context, template, p=p, q=q), answer=answer) else: return composition.Entity(context=context, value=answer, description='Misalkan {self} be {p} divided by {q}.', p=p, q=q)
def div(value, sample_args, context=None): del value is_question = (context is None) if (context is None): context = composition.Context() (entropy, sample_args) = sample_args.peel() (entropy_1, entropy_q) = _entropy_for_pair(entropy) q = number.integer(entropy_q, True, min_abs=1) if random.choice([False, True]): answer = number.integer(entropy_1, True) p = (answer * q) else: p = number.integer(entropy_1, True) answer = (p / q) (p, q) = context.sample(sample_args, [p, q]) if is_question: template = random.choice(['Bagi {p} dengan {q}.', '{p} dibagi dengan {q}', 'Berapakah {p} dibagi dengan {q}?', 'Hitunglah {p} dibagi dengan {q}.']) return example.Problem(question=example.question(context, template, p=p, q=q), answer=answer) else: return composition.Entity(context=context, value=answer, description='Misalkan {self} be {p} divided by {q}.', p=p, q=q)<|docstring|>Returns random question for dividing two numbers.<|endoftext|>
36dc22df6b0c3b397f0e88bc946ceeb4c27d943bc17ffe5569753b2d1c3fcdd1
def nearest_integer_root(sample_args): 'E.g., "Calculate the cube root of 35 to the nearest integer.".' context = composition.Context() if random.choice([False, True]): one_over_exponent = random.randint(2, 3) else: one_over_exponent = random.randint(2, 10) (entropy, sample_args) = sample_args.peel() value = number.integer(entropy, signed=False) answer = int(round((value ** (1 / one_over_exponent)))) templates = ['Berapa {value} pangkat 1/{one_over_exponent}, ke bilangan bulat terdekat?'] if (one_over_exponent != 2): ordinal = str() templates += ['Apa akar {ordinal} dari {value} ke bilangan bulat terdekat?'] if (one_over_exponent == 2): templates += ['Berapa akar kuadrat dari {value} ke bilangan bulat terdekat?'] elif (one_over_exponent == 3): templates += ['Berapa akar pangkat tiga dari {value} ke bilangan bulat terdekat?'] template = random.choice(templates) ordinal = display.StringOrdinal(one_over_exponent) return example.Problem(question=example.question(context, template, value=value, ordinal=ordinal, one_over_exponent=one_over_exponent), answer=answer)
E.g., "Calculate the cube root of 35 to the nearest integer.".
mathematics_dataset/modules/arithmetic.py
nearest_integer_root
Wikidepia/mathematics_dataset_id
0
python
def nearest_integer_root(sample_args): context = composition.Context() if random.choice([False, True]): one_over_exponent = random.randint(2, 3) else: one_over_exponent = random.randint(2, 10) (entropy, sample_args) = sample_args.peel() value = number.integer(entropy, signed=False) answer = int(round((value ** (1 / one_over_exponent)))) templates = ['Berapa {value} pangkat 1/{one_over_exponent}, ke bilangan bulat terdekat?'] if (one_over_exponent != 2): ordinal = str() templates += ['Apa akar {ordinal} dari {value} ke bilangan bulat terdekat?'] if (one_over_exponent == 2): templates += ['Berapa akar kuadrat dari {value} ke bilangan bulat terdekat?'] elif (one_over_exponent == 3): templates += ['Berapa akar pangkat tiga dari {value} ke bilangan bulat terdekat?'] template = random.choice(templates) ordinal = display.StringOrdinal(one_over_exponent) return example.Problem(question=example.question(context, template, value=value, ordinal=ordinal, one_over_exponent=one_over_exponent), answer=answer)
def nearest_integer_root(sample_args): context = composition.Context() if random.choice([False, True]): one_over_exponent = random.randint(2, 3) else: one_over_exponent = random.randint(2, 10) (entropy, sample_args) = sample_args.peel() value = number.integer(entropy, signed=False) answer = int(round((value ** (1 / one_over_exponent)))) templates = ['Berapa {value} pangkat 1/{one_over_exponent}, ke bilangan bulat terdekat?'] if (one_over_exponent != 2): ordinal = str() templates += ['Apa akar {ordinal} dari {value} ke bilangan bulat terdekat?'] if (one_over_exponent == 2): templates += ['Berapa akar kuadrat dari {value} ke bilangan bulat terdekat?'] elif (one_over_exponent == 3): templates += ['Berapa akar pangkat tiga dari {value} ke bilangan bulat terdekat?'] template = random.choice(templates) ordinal = display.StringOrdinal(one_over_exponent) return example.Problem(question=example.question(context, template, value=value, ordinal=ordinal, one_over_exponent=one_over_exponent), answer=answer)<|docstring|>E.g., "Calculate the cube root of 35 to the nearest integer.".<|endoftext|>
777c5b4ec53cd781b72e86db8b2ddffe232fa082955aba955856479bd25d0a0a
def _calculate(value, sample_args, context, add_sub, mul_div, length=None): 'Questions for evaluating arithmetic expressions.' is_question = (context is None) if (context is None): context = composition.Context() (entropy, sample_args) = sample_args.peel() if (value in [_INT, _INT_OR_RATIONAL]): value_entropy = max(1.0, (entropy / 4)) entropy = max(1.0, (entropy - value_entropy)) sampler = _value_sampler(value) value = sampler(value_entropy) op = arithmetic.arithmetic(value=value, entropy=entropy, add_sub=add_sub, mul_div=mul_div, length=length) context.sample_by_replacing_constants(sample_args, op) if is_question: template = random.choice(['{op}', 'Berapakah {op}?', 'Evaluasi {op}.', 'Hitung {op}.', 'Berapakah nilai dari {op}?']) return example.Problem(question=example.question(context, template, op=op), answer=value) else: return composition.Entity(context=context, value=value, expression=op, description='Misalkan {self} be {op}.', op=op)
Questions for evaluating arithmetic expressions.
mathematics_dataset/modules/arithmetic.py
_calculate
Wikidepia/mathematics_dataset_id
0
python
def _calculate(value, sample_args, context, add_sub, mul_div, length=None): is_question = (context is None) if (context is None): context = composition.Context() (entropy, sample_args) = sample_args.peel() if (value in [_INT, _INT_OR_RATIONAL]): value_entropy = max(1.0, (entropy / 4)) entropy = max(1.0, (entropy - value_entropy)) sampler = _value_sampler(value) value = sampler(value_entropy) op = arithmetic.arithmetic(value=value, entropy=entropy, add_sub=add_sub, mul_div=mul_div, length=length) context.sample_by_replacing_constants(sample_args, op) if is_question: template = random.choice(['{op}', 'Berapakah {op}?', 'Evaluasi {op}.', 'Hitung {op}.', 'Berapakah nilai dari {op}?']) return example.Problem(question=example.question(context, template, op=op), answer=value) else: return composition.Entity(context=context, value=value, expression=op, description='Misalkan {self} be {op}.', op=op)
def _calculate(value, sample_args, context, add_sub, mul_div, length=None): is_question = (context is None) if (context is None): context = composition.Context() (entropy, sample_args) = sample_args.peel() if (value in [_INT, _INT_OR_RATIONAL]): value_entropy = max(1.0, (entropy / 4)) entropy = max(1.0, (entropy - value_entropy)) sampler = _value_sampler(value) value = sampler(value_entropy) op = arithmetic.arithmetic(value=value, entropy=entropy, add_sub=add_sub, mul_div=mul_div, length=length) context.sample_by_replacing_constants(sample_args, op) if is_question: template = random.choice(['{op}', 'Berapakah {op}?', 'Evaluasi {op}.', 'Hitung {op}.', 'Berapakah nilai dari {op}?']) return example.Problem(question=example.question(context, template, op=op), answer=value) else: return composition.Entity(context=context, value=value, expression=op, description='Misalkan {self} be {op}.', op=op)<|docstring|>Questions for evaluating arithmetic expressions.<|endoftext|>
b467028f968d1cfd62647af5e45bd25601e2511c64e04013fcae426a89d8fbde
def _surd_coefficients(sympy_exp): 'Extracts coefficients a, b, where sympy_exp = a + b * sqrt(base).' sympy_exp = sympy.simplify(sympy.expand(sympy_exp)) def extract_b(b_sqrt_base): 'Returns b from expression of form b * sqrt(base).' if isinstance(b_sqrt_base, sympy.Pow): return 1 else: assert isinstance(b_sqrt_base, sympy.Mul) assert (len(b_sqrt_base.args) == 2) assert b_sqrt_base.args[0].is_rational assert isinstance(b_sqrt_base.args[1], sympy.Pow) return b_sqrt_base.args[0] if sympy_exp.is_rational: return (sympy_exp, 0) elif isinstance(sympy_exp, sympy.Add): assert (len(sympy_exp.args) == 2) assert sympy_exp.args[0].is_rational a = sympy_exp.args[0] b = extract_b(sympy_exp.args[1]) return (a, b) else: return (0, extract_b(sympy_exp))
Extracts coefficients a, b, where sympy_exp = a + b * sqrt(base).
mathematics_dataset/modules/arithmetic.py
_surd_coefficients
Wikidepia/mathematics_dataset_id
0
python
def _surd_coefficients(sympy_exp): sympy_exp = sympy.simplify(sympy.expand(sympy_exp)) def extract_b(b_sqrt_base): 'Returns b from expression of form b * sqrt(base).' if isinstance(b_sqrt_base, sympy.Pow): return 1 else: assert isinstance(b_sqrt_base, sympy.Mul) assert (len(b_sqrt_base.args) == 2) assert b_sqrt_base.args[0].is_rational assert isinstance(b_sqrt_base.args[1], sympy.Pow) return b_sqrt_base.args[0] if sympy_exp.is_rational: return (sympy_exp, 0) elif isinstance(sympy_exp, sympy.Add): assert (len(sympy_exp.args) == 2) assert sympy_exp.args[0].is_rational a = sympy_exp.args[0] b = extract_b(sympy_exp.args[1]) return (a, b) else: return (0, extract_b(sympy_exp))
def _surd_coefficients(sympy_exp): sympy_exp = sympy.simplify(sympy.expand(sympy_exp)) def extract_b(b_sqrt_base): 'Returns b from expression of form b * sqrt(base).' if isinstance(b_sqrt_base, sympy.Pow): return 1 else: assert isinstance(b_sqrt_base, sympy.Mul) assert (len(b_sqrt_base.args) == 2) assert b_sqrt_base.args[0].is_rational assert isinstance(b_sqrt_base.args[1], sympy.Pow) return b_sqrt_base.args[0] if sympy_exp.is_rational: return (sympy_exp, 0) elif isinstance(sympy_exp, sympy.Add): assert (len(sympy_exp.args) == 2) assert sympy_exp.args[0].is_rational a = sympy_exp.args[0] b = extract_b(sympy_exp.args[1]) return (a, b) else: return (0, extract_b(sympy_exp))<|docstring|>Extracts coefficients a, b, where sympy_exp = a + b * sqrt(base).<|endoftext|>
ca8ed9f1afe9456079bb8672e165698791216dd89ece3c1f9b9990a6a290aca9
def _sample_surd(base, entropy, max_power, multiples_only): 'An expression that can be reduced to a + b * sqrt(base).\n\n For example, if base=3, then the following are valid expressions:\n\n * sqrt(12) (reduces to 2 * sqrt(3))\n * sqrt(3) - 10 * sqrt(3) (reduces to -9 * sqrt(3))\n * sqrt(15) / sqrt(5) (reduces to sqrt(3)).\n * 4 * sqrt(3) / 2\n * 2 + sqrt(3)\n * 1 / (1 + sqrt(3)) (reduces to -1/2 + (-1/2) sqrt(3))\n\n However, 1 + 2 * sqrt(3) is not valid, as it does not reduce to the form\n a * sqrt(3).\n\n Args:\n base: The value inside the square root.\n entropy: Float >= 0; used for randomness.\n max_power: Integer >= 1; the max power used in expressions. If 1 then\n disables.\n multiples_only: Whether the surd should be an integer multiple of\n sqrt(base).\n\n Returns:\n Instance of `ops.Op`.\n ' if (entropy <= 0): return ops.Sqrt(base) def add_or_sub_(): (entropy_left, entropy_right) = _surd_split_entropy_two(entropy) left = _sample_surd(base, entropy_left, max_power, multiples_only) right = _sample_surd(base, entropy_right, max_power, multiples_only) op = random.choice([ops.Add, ops.Sub]) return op(left, right) def mul_by_integer(): entropy_k = min(1, entropy) left = number.integer(entropy_k, signed=True, min_abs=1) right = _sample_surd(base, (entropy - entropy_k), max_power, multiples_only) if random.choice([False, True]): (left, right) = (right, left) return ops.Mul(left, right) def div_by_sqrt_k(): 'Do sqrt(k * base) / sqrt(k).' entropy_k = min(1, entropy) k = number.integer(entropy_k, signed=False, min_abs=2) (entropy_left, entropy_right) = _surd_split_entropy_two((entropy - entropy_k)) k_base_expr = _sample_surd((k * base), entropy_left, max_power, True) while True: k_expr = _sample_surd(k, entropy_right, max_power, True) if (k_expr.sympy() != 0): break return ops.Div(k_base_expr, k_expr) def square_k(): 'Do sqrt(k * k * base).' entropy_k = min(1, entropy) k = number.integer(entropy_k, signed=False, min_abs=2) return _sample_surd(((k * k) * base), (entropy - entropy_k), max_power, multiples_only) def surd_plus_integer(): 'Do surd + integer.' entropy_k = min(1, entropy) left = number.integer(entropy_k, signed=True) assert (not multiples_only) right = _sample_surd(base, (entropy - entropy_k), max_power, False) if random.choice([True, False]): (left, right) = (right, left) return ops.Add(left, right) def power(): 'Do surd**2.' assert (not multiples_only) surd = _sample_surd(base, entropy, max_power=1, multiples_only=False) return ops.Pow(surd, 2) choices = [add_or_sub_, mul_by_integer] if (not multiples_only): choices += [surd_plus_integer] if (max_power > 1): choices += [power] if (base < 64): choices += [div_by_sqrt_k, square_k] which = random.choice(choices) return which()
An expression that can be reduced to a + b * sqrt(base). For example, if base=3, then the following are valid expressions: * sqrt(12) (reduces to 2 * sqrt(3)) * sqrt(3) - 10 * sqrt(3) (reduces to -9 * sqrt(3)) * sqrt(15) / sqrt(5) (reduces to sqrt(3)). * 4 * sqrt(3) / 2 * 2 + sqrt(3) * 1 / (1 + sqrt(3)) (reduces to -1/2 + (-1/2) sqrt(3)) However, 1 + 2 * sqrt(3) is not valid, as it does not reduce to the form a * sqrt(3). Args: base: The value inside the square root. entropy: Float >= 0; used for randomness. max_power: Integer >= 1; the max power used in expressions. If 1 then disables. multiples_only: Whether the surd should be an integer multiple of sqrt(base). Returns: Instance of `ops.Op`.
mathematics_dataset/modules/arithmetic.py
_sample_surd
Wikidepia/mathematics_dataset_id
0
python
def _sample_surd(base, entropy, max_power, multiples_only): 'An expression that can be reduced to a + b * sqrt(base).\n\n For example, if base=3, then the following are valid expressions:\n\n * sqrt(12) (reduces to 2 * sqrt(3))\n * sqrt(3) - 10 * sqrt(3) (reduces to -9 * sqrt(3))\n * sqrt(15) / sqrt(5) (reduces to sqrt(3)).\n * 4 * sqrt(3) / 2\n * 2 + sqrt(3)\n * 1 / (1 + sqrt(3)) (reduces to -1/2 + (-1/2) sqrt(3))\n\n However, 1 + 2 * sqrt(3) is not valid, as it does not reduce to the form\n a * sqrt(3).\n\n Args:\n base: The value inside the square root.\n entropy: Float >= 0; used for randomness.\n max_power: Integer >= 1; the max power used in expressions. If 1 then\n disables.\n multiples_only: Whether the surd should be an integer multiple of\n sqrt(base).\n\n Returns:\n Instance of `ops.Op`.\n ' if (entropy <= 0): return ops.Sqrt(base) def add_or_sub_(): (entropy_left, entropy_right) = _surd_split_entropy_two(entropy) left = _sample_surd(base, entropy_left, max_power, multiples_only) right = _sample_surd(base, entropy_right, max_power, multiples_only) op = random.choice([ops.Add, ops.Sub]) return op(left, right) def mul_by_integer(): entropy_k = min(1, entropy) left = number.integer(entropy_k, signed=True, min_abs=1) right = _sample_surd(base, (entropy - entropy_k), max_power, multiples_only) if random.choice([False, True]): (left, right) = (right, left) return ops.Mul(left, right) def div_by_sqrt_k(): 'Do sqrt(k * base) / sqrt(k).' entropy_k = min(1, entropy) k = number.integer(entropy_k, signed=False, min_abs=2) (entropy_left, entropy_right) = _surd_split_entropy_two((entropy - entropy_k)) k_base_expr = _sample_surd((k * base), entropy_left, max_power, True) while True: k_expr = _sample_surd(k, entropy_right, max_power, True) if (k_expr.sympy() != 0): break return ops.Div(k_base_expr, k_expr) def square_k(): 'Do sqrt(k * k * base).' entropy_k = min(1, entropy) k = number.integer(entropy_k, signed=False, min_abs=2) return _sample_surd(((k * k) * base), (entropy - entropy_k), max_power, multiples_only) def surd_plus_integer(): 'Do surd + integer.' entropy_k = min(1, entropy) left = number.integer(entropy_k, signed=True) assert (not multiples_only) right = _sample_surd(base, (entropy - entropy_k), max_power, False) if random.choice([True, False]): (left, right) = (right, left) return ops.Add(left, right) def power(): 'Do surd**2.' assert (not multiples_only) surd = _sample_surd(base, entropy, max_power=1, multiples_only=False) return ops.Pow(surd, 2) choices = [add_or_sub_, mul_by_integer] if (not multiples_only): choices += [surd_plus_integer] if (max_power > 1): choices += [power] if (base < 64): choices += [div_by_sqrt_k, square_k] which = random.choice(choices) return which()
def _sample_surd(base, entropy, max_power, multiples_only): 'An expression that can be reduced to a + b * sqrt(base).\n\n For example, if base=3, then the following are valid expressions:\n\n * sqrt(12) (reduces to 2 * sqrt(3))\n * sqrt(3) - 10 * sqrt(3) (reduces to -9 * sqrt(3))\n * sqrt(15) / sqrt(5) (reduces to sqrt(3)).\n * 4 * sqrt(3) / 2\n * 2 + sqrt(3)\n * 1 / (1 + sqrt(3)) (reduces to -1/2 + (-1/2) sqrt(3))\n\n However, 1 + 2 * sqrt(3) is not valid, as it does not reduce to the form\n a * sqrt(3).\n\n Args:\n base: The value inside the square root.\n entropy: Float >= 0; used for randomness.\n max_power: Integer >= 1; the max power used in expressions. If 1 then\n disables.\n multiples_only: Whether the surd should be an integer multiple of\n sqrt(base).\n\n Returns:\n Instance of `ops.Op`.\n ' if (entropy <= 0): return ops.Sqrt(base) def add_or_sub_(): (entropy_left, entropy_right) = _surd_split_entropy_two(entropy) left = _sample_surd(base, entropy_left, max_power, multiples_only) right = _sample_surd(base, entropy_right, max_power, multiples_only) op = random.choice([ops.Add, ops.Sub]) return op(left, right) def mul_by_integer(): entropy_k = min(1, entropy) left = number.integer(entropy_k, signed=True, min_abs=1) right = _sample_surd(base, (entropy - entropy_k), max_power, multiples_only) if random.choice([False, True]): (left, right) = (right, left) return ops.Mul(left, right) def div_by_sqrt_k(): 'Do sqrt(k * base) / sqrt(k).' entropy_k = min(1, entropy) k = number.integer(entropy_k, signed=False, min_abs=2) (entropy_left, entropy_right) = _surd_split_entropy_two((entropy - entropy_k)) k_base_expr = _sample_surd((k * base), entropy_left, max_power, True) while True: k_expr = _sample_surd(k, entropy_right, max_power, True) if (k_expr.sympy() != 0): break return ops.Div(k_base_expr, k_expr) def square_k(): 'Do sqrt(k * k * base).' entropy_k = min(1, entropy) k = number.integer(entropy_k, signed=False, min_abs=2) return _sample_surd(((k * k) * base), (entropy - entropy_k), max_power, multiples_only) def surd_plus_integer(): 'Do surd + integer.' entropy_k = min(1, entropy) left = number.integer(entropy_k, signed=True) assert (not multiples_only) right = _sample_surd(base, (entropy - entropy_k), max_power, False) if random.choice([True, False]): (left, right) = (right, left) return ops.Add(left, right) def power(): 'Do surd**2.' assert (not multiples_only) surd = _sample_surd(base, entropy, max_power=1, multiples_only=False) return ops.Pow(surd, 2) choices = [add_or_sub_, mul_by_integer] if (not multiples_only): choices += [surd_plus_integer] if (max_power > 1): choices += [power] if (base < 64): choices += [div_by_sqrt_k, square_k] which = random.choice(choices) return which()<|docstring|>An expression that can be reduced to a + b * sqrt(base). For example, if base=3, then the following are valid expressions: * sqrt(12) (reduces to 2 * sqrt(3)) * sqrt(3) - 10 * sqrt(3) (reduces to -9 * sqrt(3)) * sqrt(15) / sqrt(5) (reduces to sqrt(3)). * 4 * sqrt(3) / 2 * 2 + sqrt(3) * 1 / (1 + sqrt(3)) (reduces to -1/2 + (-1/2) sqrt(3)) However, 1 + 2 * sqrt(3) is not valid, as it does not reduce to the form a * sqrt(3). Args: base: The value inside the square root. entropy: Float >= 0; used for randomness. max_power: Integer >= 1; the max power used in expressions. If 1 then disables. multiples_only: Whether the surd should be an integer multiple of sqrt(base). Returns: Instance of `ops.Op`.<|endoftext|>
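As a quick sanity check of the reductions listed in the `_sample_surd` docstring above, the snippet below evaluates a few of them with sympy directly. Sympy is the only dependency assumed; the checks are illustrative and not part of the dataset.

```python
import sympy

sqrt = sympy.sqrt

# sqrt(12) reduces to 2 * sqrt(3); sympy extracts the square factor on construction.
assert sqrt(12) == 2 * sqrt(3)

# sqrt(3) - 10 * sqrt(3) reduces to -9 * sqrt(3); like terms combine automatically.
assert sqrt(3) - 10 * sqrt(3) == -9 * sqrt(3)

# sqrt(15) / sqrt(5) reduces to sqrt(3) after simplification.
assert sympy.simplify(sqrt(15) / sqrt(5)) == sqrt(3)

# 4 * sqrt(3) / 2 reduces to 2 * sqrt(3).
assert sympy.simplify(4 * sqrt(3) / 2) == 2 * sqrt(3)
```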
f343555b99001927fdcce05dba4e65d2aaa9c711623530b155618e4ef1b040a9
def simplify_surd(value, sample_args, context=None): 'E.g., "Simplify (2 + 5*sqrt(3))**2.".' del value if (context is None): context = composition.Context() (entropy, sample_args) = sample_args.peel() while True: base = random.randint(2, 20) if sympy.Integer(base).is_prime: break num_primes_less_than_20 = 8 entropy -= math.log10(num_primes_less_than_20) exp = _sample_surd(base, entropy, max_power=2, multiples_only=False) simplified = sympy.expand(sympy.simplify(exp)) template = random.choice(['Sederhanakan {exp}.']) return example.Problem(question=example.question(context, template, exp=exp), answer=simplified)
E.g., "Simplify (2 + 5*sqrt(3))**2.".
mathematics_dataset/modules/arithmetic.py
simplify_surd
Wikidepia/mathematics_dataset_id
0
python
def simplify_surd(value, sample_args, context=None): del value if (context is None): context = composition.Context() (entropy, sample_args) = sample_args.peel() while True: base = random.randint(2, 20) if sympy.Integer(base).is_prime: break num_primes_less_than_20 = 8 entropy -= math.log10(num_primes_less_than_20) exp = _sample_surd(base, entropy, max_power=2, multiples_only=False) simplified = sympy.expand(sympy.simplify(exp)) template = random.choice(['Sederhanakan {exp}.']) return example.Problem(question=example.question(context, template, exp=exp), answer=simplified)
def simplify_surd(value, sample_args, context=None): del value if (context is None): context = composition.Context() (entropy, sample_args) = sample_args.peel() while True: base = random.randint(2, 20) if sympy.Integer(base).is_prime: break num_primes_less_than_20 = 8 entropy -= math.log10(num_primes_less_than_20) exp = _sample_surd(base, entropy, max_power=2, multiples_only=False) simplified = sympy.expand(sympy.simplify(exp)) template = random.choice(['Sederhanakan {exp}.']) return example.Problem(question=example.question(context, template, exp=exp), answer=simplified)<|docstring|>E.g., "Simplify (2 + 5*sqrt(3))**2.".<|endoftext|>
26ab1f423352ceccf34e2f717570bbfc183b094cbec186a217eee5a14f536d53
def extract_b(b_sqrt_base): 'Returns b from expression of form b * sqrt(base).' if isinstance(b_sqrt_base, sympy.Pow): return 1 else: assert isinstance(b_sqrt_base, sympy.Mul) assert (len(b_sqrt_base.args) == 2) assert b_sqrt_base.args[0].is_rational assert isinstance(b_sqrt_base.args[1], sympy.Pow) return b_sqrt_base.args[0]
Returns b from expression of form b * sqrt(base).
mathematics_dataset/modules/arithmetic.py
extract_b
Wikidepia/mathematics_dataset_id
0
python
def extract_b(b_sqrt_base): if isinstance(b_sqrt_base, sympy.Pow): return 1 else: assert isinstance(b_sqrt_base, sympy.Mul) assert (len(b_sqrt_base.args) == 2) assert b_sqrt_base.args[0].is_rational assert isinstance(b_sqrt_base.args[1], sympy.Pow) return b_sqrt_base.args[0]
def extract_b(b_sqrt_base): if isinstance(b_sqrt_base, sympy.Pow): return 1 else: assert isinstance(b_sqrt_base, sympy.Mul) assert (len(b_sqrt_base.args) == 2) assert b_sqrt_base.args[0].is_rational assert isinstance(b_sqrt_base.args[1], sympy.Pow) return b_sqrt_base.args[0]<|docstring|>Returns b from expression of form b * sqrt(base).<|endoftext|>
9b73fee7687eaf26991415be04f58ed2f55ef0a1e36ecc41df598f42eed29e38
def div_by_sqrt_k(): 'Do sqrt(k * base) / sqrt(k).' entropy_k = min(1, entropy) k = number.integer(entropy_k, signed=False, min_abs=2) (entropy_left, entropy_right) = _surd_split_entropy_two((entropy - entropy_k)) k_base_expr = _sample_surd((k * base), entropy_left, max_power, True) while True: k_expr = _sample_surd(k, entropy_right, max_power, True) if (k_expr.sympy() != 0): break return ops.Div(k_base_expr, k_expr)
Do sqrt(k * base) / sqrt(k).
mathematics_dataset/modules/arithmetic.py
div_by_sqrt_k
Wikidepia/mathematics_dataset_id
0
python
def div_by_sqrt_k(): entropy_k = min(1, entropy) k = number.integer(entropy_k, signed=False, min_abs=2) (entropy_left, entropy_right) = _surd_split_entropy_two((entropy - entropy_k)) k_base_expr = _sample_surd((k * base), entropy_left, max_power, True) while True: k_expr = _sample_surd(k, entropy_right, max_power, True) if (k_expr.sympy() != 0): break return ops.Div(k_base_expr, k_expr)
def div_by_sqrt_k(): entropy_k = min(1, entropy) k = number.integer(entropy_k, signed=False, min_abs=2) (entropy_left, entropy_right) = _surd_split_entropy_two((entropy - entropy_k)) k_base_expr = _sample_surd((k * base), entropy_left, max_power, True) while True: k_expr = _sample_surd(k, entropy_right, max_power, True) if (k_expr.sympy() != 0): break return ops.Div(k_base_expr, k_expr)<|docstring|>Do sqrt(k * base) / sqrt(k).<|endoftext|>
0dbdeba10187afb84efbb9786dd4e45db3ce6f401a486c680042ee225b37abc9
def square_k(): 'Do sqrt(k * k * base).' entropy_k = min(1, entropy) k = number.integer(entropy_k, signed=False, min_abs=2) return _sample_surd(((k * k) * base), (entropy - entropy_k), max_power, multiples_only)
Do sqrt(k * k * base).
mathematics_dataset/modules/arithmetic.py
square_k
Wikidepia/mathematics_dataset_id
0
python
def square_k(): entropy_k = min(1, entropy) k = number.integer(entropy_k, signed=False, min_abs=2) return _sample_surd(((k * k) * base), (entropy - entropy_k), max_power, multiples_only)
def square_k(): entropy_k = min(1, entropy) k = number.integer(entropy_k, signed=False, min_abs=2) return _sample_surd(((k * k) * base), (entropy - entropy_k), max_power, multiples_only)<|docstring|>Do sqrt(k * k * base).<|endoftext|>
6227bd5dbadc9ef63fae08d96d22e3c11af774eafdbe6fbf559baacec894b5fb
def surd_plus_integer(): 'Do surd + integer.' entropy_k = min(1, entropy) left = number.integer(entropy_k, signed=True) assert (not multiples_only) right = _sample_surd(base, (entropy - entropy_k), max_power, False) if random.choice([True, False]): (left, right) = (right, left) return ops.Add(left, right)
Do surd + integer.
mathematics_dataset/modules/arithmetic.py
surd_plus_integer
Wikidepia/mathematics_dataset_id
0
python
def surd_plus_integer(): entropy_k = min(1, entropy) left = number.integer(entropy_k, signed=True) assert (not multiples_only) right = _sample_surd(base, (entropy - entropy_k), max_power, False) if random.choice([True, False]): (left, right) = (right, left) return ops.Add(left, right)
def surd_plus_integer(): entropy_k = min(1, entropy) left = number.integer(entropy_k, signed=True) assert (not multiples_only) right = _sample_surd(base, (entropy - entropy_k), max_power, False) if random.choice([True, False]): (left, right) = (right, left) return ops.Add(left, right)<|docstring|>Do surd + integer.<|endoftext|>
c1db5bf36215a11f5e616c9124f9f6a0f4ae97226e83f4f5cd5a5dfdb17fda3d
def power(): 'Do surd**2.' assert (not multiples_only) surd = _sample_surd(base, entropy, max_power=1, multiples_only=False) return ops.Pow(surd, 2)
Do surd**2.
mathematics_dataset/modules/arithmetic.py
power
Wikidepia/mathematics_dataset_id
0
python
def power(): assert (not multiples_only) surd = _sample_surd(base, entropy, max_power=1, multiples_only=False) return ops.Pow(surd, 2)
def power(): assert (not multiples_only) surd = _sample_surd(base, entropy, max_power=1, multiples_only=False) return ops.Pow(surd, 2)<|docstring|>Do surd**2.<|endoftext|>
9de65dfcfe6bc66ba4553631640f0f4ca0217c2ed7ce46df492de486a8ef0674
def php_query_builder(obj: dict[(str, Union[(str, list[str], dict[(str, str)])])]) -> str: '\n {"order": {"publishAt": "desc"}, "translatedLanguages": ["en", "jp"]}\n ->\n "order[publishAt]=desc&translatedLanguages[]=en&translatedLanguages[]=jp"\n ' fmt = [] for (key, value) in obj.items(): if isinstance(value, (str, int, bool)): fmt.append(f'{key}={value}') elif isinstance(value, list): fmt.extend((f'{key}[]={item}' for item in value)) elif isinstance(value, dict): fmt.extend((f'{key}[{subkey}]={subvalue}' for (subkey, subvalue) in value.items())) return '&'.join(fmt)
{"order": {"publishAt": "desc"}, "translatedLanguages": ["en", "jp"]} -> "order[publishAt]=desc&translatedLanguages[]=en&translatedLanguages[]=jp"
mangadex/utils.py
php_query_builder
CuteFwan/mangadex.py
0
python
def php_query_builder(obj: dict[(str, Union[(str, list[str], dict[(str, str)])])]) -> str: '\n {"order": {"publishAt": "desc"}, "translatedLanguages": ["en", "jp"]}\n ->\n "order[publishAt]=desc&translatedLanguages[]=en&translatedLanguages[]=jp"\n ' fmt = [] for (key, value) in obj.items(): if isinstance(value, (str, int, bool)): fmt.append(f'{key}={value}') elif isinstance(value, list): fmt.extend((f'{key}[]={item}' for item in value)) elif isinstance(value, dict): fmt.extend((f'{key}[{subkey}]={subvalue}' for (subkey, subvalue) in value.items())) return '&'.join(fmt)
def php_query_builder(obj: dict[(str, Union[(str, list[str], dict[(str, str)])])]) -> str: '\n {"order": {"publishAt": "desc"}, "translatedLanguages": ["en", "jp"]}\n ->\n "order[publishAt]=desc&translatedLanguages[]=en&translatedLanguages[]=jp"\n ' fmt = [] for (key, value) in obj.items(): if isinstance(value, (str, int, bool)): fmt.append(f'{key}={value}') elif isinstance(value, list): fmt.extend((f'{key}[]={item}' for item in value)) elif isinstance(value, dict): fmt.extend((f'{key}[{subkey}]={subvalue}' for (subkey, subvalue) in value.items())) return '&'.join(fmt)<|docstring|>{"order": {"publishAt": "desc"}, "translatedLanguages": ["en", "jp"]} -> "order[publishAt]=desc&translatedLanguages[]=en&translatedLanguages[]=jp"<|endoftext|>
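The `php_query_builder` row above spells out its expected input and output in its own docstring. The snippet below just reproduces that example, with the function re-indented so it runs stand-alone; the `Union` import is assumed from the original `mangadex/utils.py` module.

```python
from typing import Union


def php_query_builder(obj: dict[str, Union[str, list[str], dict[str, str]]]) -> str:
    # Same logic as the row above, re-indented for readability.
    fmt = []
    for key, value in obj.items():
        if isinstance(value, (str, int, bool)):
            fmt.append(f"{key}={value}")
        elif isinstance(value, list):
            fmt.extend(f"{key}[]={item}" for item in value)
        elif isinstance(value, dict):
            fmt.extend(f"{key}[{subkey}]={subvalue}"
                       for subkey, subvalue in value.items())
    return "&".join(fmt)


# The example given in the function's own docstring:
print(php_query_builder({"order": {"publishAt": "desc"},
                         "translatedLanguages": ["en", "jp"]}))
# order[publishAt]=desc&translatedLanguages[]=en&translatedLanguages[]=jp
```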
0f7509b6566abff928cf689e079a6c464c3a8cbe17c6684fae5d5f3ea807bf65
def assertFormfield(self, model, fieldname, widgetclass, **admin_overrides): '\n Helper to call formfield_for_dbfield for a given model and field name\n and verify that the returned formfield is appropriate.\n ' class MyModelAdmin(admin.ModelAdmin): pass for k in admin_overrides: setattr(MyModelAdmin, k, admin_overrides[k]) ma = MyModelAdmin(model, admin.site) ff = ma.formfield_for_dbfield(model._meta.get_field(fieldname), request=None) if isinstance(ff.widget, widgets.RelatedFieldWidgetWrapper): widget = ff.widget.widget else: widget = ff.widget self.assertIsInstance(widget, widgetclass) return ff
Helper to call formfield_for_dbfield for a given model and field name and verify that the returned formfield is appropriate.
tests/admin_widgets/tests.py
assertFormfield
August1996/Django
4
python
def assertFormfield(self, model, fieldname, widgetclass, **admin_overrides): '\n Helper to call formfield_for_dbfield for a given model and field name\n and verify that the returned formfield is appropriate.\n ' class MyModelAdmin(admin.ModelAdmin): pass for k in admin_overrides: setattr(MyModelAdmin, k, admin_overrides[k]) ma = MyModelAdmin(model, admin.site) ff = ma.formfield_for_dbfield(model._meta.get_field(fieldname), request=None) if isinstance(ff.widget, widgets.RelatedFieldWidgetWrapper): widget = ff.widget.widget else: widget = ff.widget self.assertIsInstance(widget, widgetclass) return ff
def assertFormfield(self, model, fieldname, widgetclass, **admin_overrides): '\n Helper to call formfield_for_dbfield for a given model and field name\n and verify that the returned formfield is appropriate.\n ' class MyModelAdmin(admin.ModelAdmin): pass for k in admin_overrides: setattr(MyModelAdmin, k, admin_overrides[k]) ma = MyModelAdmin(model, admin.site) ff = ma.formfield_for_dbfield(model._meta.get_field(fieldname), request=None) if isinstance(ff.widget, widgets.RelatedFieldWidgetWrapper): widget = ff.widget.widget else: widget = ff.widget self.assertIsInstance(widget, widgetclass) return ff<|docstring|>Helper to call formfield_for_dbfield for a given model and field name and verify that the returned formfield is appropriate.<|endoftext|>
5b7adc127d3c476394ad3535b0e5670f087b2ff5ca02dadb3423fa36c24f93dd
def test_formfield_overrides_widget_instances(self): '\n Widget instances in formfield_overrides are not shared between\n different fields. (#19423)\n ' class BandAdmin(admin.ModelAdmin): formfield_overrides = {CharField: {'widget': forms.TextInput(attrs={'size': '10'})}} ma = BandAdmin(Band, admin.site) f1 = ma.formfield_for_dbfield(Band._meta.get_field('name'), request=None) f2 = ma.formfield_for_dbfield(Band._meta.get_field('style'), request=None) self.assertNotEqual(f1.widget, f2.widget) self.assertEqual(f1.widget.attrs['maxlength'], '100') self.assertEqual(f2.widget.attrs['maxlength'], '20') self.assertEqual(f2.widget.attrs['size'], '10')
Widget instances in formfield_overrides are not shared between different fields. (#19423)
tests/admin_widgets/tests.py
test_formfield_overrides_widget_instances
August1996/Django
4
python
def test_formfield_overrides_widget_instances(self): '\n Widget instances in formfield_overrides are not shared between\n different fields. (#19423)\n ' class BandAdmin(admin.ModelAdmin): formfield_overrides = {CharField: {'widget': forms.TextInput(attrs={'size': '10'})}} ma = BandAdmin(Band, admin.site) f1 = ma.formfield_for_dbfield(Band._meta.get_field('name'), request=None) f2 = ma.formfield_for_dbfield(Band._meta.get_field('style'), request=None) self.assertNotEqual(f1.widget, f2.widget) self.assertEqual(f1.widget.attrs['maxlength'], '100') self.assertEqual(f2.widget.attrs['maxlength'], '20') self.assertEqual(f2.widget.attrs['size'], '10')
def test_formfield_overrides_widget_instances(self): '\n Widget instances in formfield_overrides are not shared between\n different fields. (#19423)\n ' class BandAdmin(admin.ModelAdmin): formfield_overrides = {CharField: {'widget': forms.TextInput(attrs={'size': '10'})}} ma = BandAdmin(Band, admin.site) f1 = ma.formfield_for_dbfield(Band._meta.get_field('name'), request=None) f2 = ma.formfield_for_dbfield(Band._meta.get_field('style'), request=None) self.assertNotEqual(f1.widget, f2.widget) self.assertEqual(f1.widget.attrs['maxlength'], '100') self.assertEqual(f2.widget.attrs['maxlength'], '20') self.assertEqual(f2.widget.attrs['size'], '10')<|docstring|>Widget instances in formfield_overrides are not shared between different fields. (#19423)<|endoftext|>
6b09121faa252c3c32d6f3d210f48f272d00230a5e9170162dba8da5c72e783a
def test_formfield_overrides_for_datetime_field(self): "\n Overriding the widget for DateTimeField doesn't overrides the default\n form_class for that field (#26449).\n " class MemberAdmin(admin.ModelAdmin): formfield_overrides = {DateTimeField: {'widget': widgets.AdminSplitDateTime}} ma = MemberAdmin(Member, admin.site) f1 = ma.formfield_for_dbfield(Member._meta.get_field('birthdate'), request=None) self.assertIsInstance(f1.widget, widgets.AdminSplitDateTime) self.assertIsInstance(f1, forms.SplitDateTimeField)
Overriding the widget for DateTimeField doesn't overrides the default form_class for that field (#26449).
tests/admin_widgets/tests.py
test_formfield_overrides_for_datetime_field
August1996/Django
4
python
def test_formfield_overrides_for_datetime_field(self): "\n Overriding the widget for DateTimeField doesn't overrides the default\n form_class for that field (#26449).\n " class MemberAdmin(admin.ModelAdmin): formfield_overrides = {DateTimeField: {'widget': widgets.AdminSplitDateTime}} ma = MemberAdmin(Member, admin.site) f1 = ma.formfield_for_dbfield(Member._meta.get_field('birthdate'), request=None) self.assertIsInstance(f1.widget, widgets.AdminSplitDateTime) self.assertIsInstance(f1, forms.SplitDateTimeField)
def test_formfield_overrides_for_datetime_field(self): "\n Overriding the widget for DateTimeField doesn't overrides the default\n form_class for that field (#26449).\n " class MemberAdmin(admin.ModelAdmin): formfield_overrides = {DateTimeField: {'widget': widgets.AdminSplitDateTime}} ma = MemberAdmin(Member, admin.site) f1 = ma.formfield_for_dbfield(Member._meta.get_field('birthdate'), request=None) self.assertIsInstance(f1.widget, widgets.AdminSplitDateTime) self.assertIsInstance(f1, forms.SplitDateTimeField)<|docstring|>Overriding the widget for DateTimeField doesn't overrides the default form_class for that field (#26449).<|endoftext|>
bb6d77d2ccc08f95dbc0d3a5fc83c860c8fddbcbbde0cc022697508d495c5e85
def test_formfield_overrides_for_custom_field(self): '\n formfield_overrides works for a custom field class.\n ' class AlbumAdmin(admin.ModelAdmin): formfield_overrides = {MyFileField: {'widget': forms.TextInput()}} ma = AlbumAdmin(Member, admin.site) f1 = ma.formfield_for_dbfield(Album._meta.get_field('backside_art'), request=None) self.assertIsInstance(f1.widget, forms.TextInput)
formfield_overrides works for a custom field class.
tests/admin_widgets/tests.py
test_formfield_overrides_for_custom_field
August1996/Django
4
python
def test_formfield_overrides_for_custom_field(self): '\n \n ' class AlbumAdmin(admin.ModelAdmin): formfield_overrides = {MyFileField: {'widget': forms.TextInput()}} ma = AlbumAdmin(Member, admin.site) f1 = ma.formfield_for_dbfield(Album._meta.get_field('backside_art'), request=None) self.assertIsInstance(f1.widget, forms.TextInput)
def test_formfield_overrides_for_custom_field(self): '\n \n ' class AlbumAdmin(admin.ModelAdmin): formfield_overrides = {MyFileField: {'widget': forms.TextInput()}} ma = AlbumAdmin(Member, admin.site) f1 = ma.formfield_for_dbfield(Album._meta.get_field('backside_art'), request=None) self.assertIsInstance(f1.widget, forms.TextInput)<|docstring|>formfield_overrides works for a custom field class.<|endoftext|>
ce01582fc2400a3fa6e9749b69c045dc656e44cc34038f8654db1df70d6df09f
def test_m2m_widgets(self): 'm2m fields help text as it applies to admin app (#9321).' class AdvisorAdmin(admin.ModelAdmin): filter_vertical = ['companies'] self.assertFormfield(Advisor, 'companies', widgets.FilteredSelectMultiple, filter_vertical=['companies']) ma = AdvisorAdmin(Advisor, admin.site) f = ma.formfield_for_dbfield(Advisor._meta.get_field('companies'), request=None) self.assertEqual(f.help_text, 'Hold down "Control", or "Command" on a Mac, to select more than one.')
m2m fields help text as it applies to admin app (#9321).
tests/admin_widgets/tests.py
test_m2m_widgets
August1996/Django
4
python
def test_m2m_widgets(self): class AdvisorAdmin(admin.ModelAdmin): filter_vertical = ['companies'] self.assertFormfield(Advisor, 'companies', widgets.FilteredSelectMultiple, filter_vertical=['companies']) ma = AdvisorAdmin(Advisor, admin.site) f = ma.formfield_for_dbfield(Advisor._meta.get_field('companies'), request=None) self.assertEqual(f.help_text, 'Hold down "Control", or "Command" on a Mac, to select more than one.')
def test_m2m_widgets(self): class AdvisorAdmin(admin.ModelAdmin): filter_vertical = ['companies'] self.assertFormfield(Advisor, 'companies', widgets.FilteredSelectMultiple, filter_vertical=['companies']) ma = AdvisorAdmin(Advisor, admin.site) f = ma.formfield_for_dbfield(Advisor._meta.get_field('companies'), request=None) self.assertEqual(f.help_text, 'Hold down "Control", or "Command" on a Mac, to select more than one.')<|docstring|>m2m fields help text as it applies to admin app (#9321).<|endoftext|>
4c2c5f1e5d72a83396caec75eb46b69b6fb69a53a275057e08ed3fec6cc770bf
def test_filter_choices_by_request_user(self): '\n Ensure the user can only see their own cars in the foreign key dropdown.\n ' self.client.force_login(self.superuser) response = self.client.get(reverse('admin:admin_widgets_cartire_add')) self.assertNotContains(response, 'BMW M3') self.assertContains(response, 'Volkswagen Passat')
Ensure the user can only see their own cars in the foreign key dropdown.
tests/admin_widgets/tests.py
test_filter_choices_by_request_user
August1996/Django
4
python
def test_filter_choices_by_request_user(self): '\n \n ' self.client.force_login(self.superuser) response = self.client.get(reverse('admin:admin_widgets_cartire_add')) self.assertNotContains(response, 'BMW M3') self.assertContains(response, 'Volkswagen Passat')
def test_filter_choices_by_request_user(self): '\n \n ' self.client.force_login(self.superuser) response = self.client.get(reverse('admin:admin_widgets_cartire_add')) self.assertNotContains(response, 'BMW M3') self.assertContains(response, 'Volkswagen Passat')<|docstring|>Ensure the user can only see their own cars in the foreign key dropdown.<|endoftext|>
d41f4f0e6ad6e78f8dbaf35ab815f21d57883cd1a8f99a6cd1b63a5d75809aae
def test_render_quoting(self): "\n WARNING: This test doesn't use assertHTMLEqual since it will get rid\n of some escapes which are tested here!\n " HREF_RE = re.compile('href="([^"]+)"') VALUE_RE = re.compile('value="([^"]+)"') TEXT_RE = re.compile('<a[^>]+>([^>]+)</a>') w = widgets.AdminURLFieldWidget() output = w.render('test', 'http://example.com/<sometag>some text</sometag>') self.assertEqual(HREF_RE.search(output).groups()[0], 'http://example.com/%3Csometag%3Esome%20text%3C/sometag%3E') self.assertEqual(TEXT_RE.search(output).groups()[0], 'http://example.com/&lt;sometag&gt;some text&lt;/sometag&gt;') self.assertEqual(VALUE_RE.search(output).groups()[0], 'http://example.com/&lt;sometag&gt;some text&lt;/sometag&gt;') output = w.render('test', 'http://example-äüö.com/<sometag>some text</sometag>') self.assertEqual(HREF_RE.search(output).groups()[0], 'http://xn--example--7za4pnc.com/%3Csometag%3Esome%20text%3C/sometag%3E') self.assertEqual(TEXT_RE.search(output).groups()[0], 'http://example-äüö.com/&lt;sometag&gt;some text&lt;/sometag&gt;') self.assertEqual(VALUE_RE.search(output).groups()[0], 'http://example-äüö.com/&lt;sometag&gt;some text&lt;/sometag&gt;') output = w.render('test', 'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"') self.assertEqual(HREF_RE.search(output).groups()[0], 'http://www.example.com/%C3%A4%22%3E%3Cscript%3Ealert(%22XSS!%22)%3C/script%3E%22') self.assertEqual(TEXT_RE.search(output).groups()[0], 'http://www.example.com/%C3%A4&quot;&gt;&lt;script&gt;alert(&quot;XSS!&quot;)&lt;/script&gt;&quot;') self.assertEqual(VALUE_RE.search(output).groups()[0], 'http://www.example.com/%C3%A4&quot;&gt;&lt;script&gt;alert(&quot;XSS!&quot;)&lt;/script&gt;&quot;')
WARNING: This test doesn't use assertHTMLEqual since it will get rid of some escapes which are tested here!
tests/admin_widgets/tests.py
test_render_quoting
August1996/Django
4
python
def test_render_quoting(self): "\n WARNING: This test doesn't use assertHTMLEqual since it will get rid\n of some escapes which are tested here!\n " HREF_RE = re.compile('href="([^"]+)"') VALUE_RE = re.compile('value="([^"]+)"') TEXT_RE = re.compile('<a[^>]+>([^>]+)</a>') w = widgets.AdminURLFieldWidget() output = w.render('test', 'http://example.com/<sometag>some text</sometag>') self.assertEqual(HREF_RE.search(output).groups()[0], 'http://example.com/%3Csometag%3Esome%20text%3C/sometag%3E') self.assertEqual(TEXT_RE.search(output).groups()[0], 'http://example.com/&lt;sometag&gt;some text&lt;/sometag&gt;') self.assertEqual(VALUE_RE.search(output).groups()[0], 'http://example.com/&lt;sometag&gt;some text&lt;/sometag&gt;') output = w.render('test', 'http://example-äüö.com/<sometag>some text</sometag>') self.assertEqual(HREF_RE.search(output).groups()[0], 'http://xn--example--7za4pnc.com/%3Csometag%3Esome%20text%3C/sometag%3E') self.assertEqual(TEXT_RE.search(output).groups()[0], 'http://example-äüö.com/&lt;sometag&gt;some text&lt;/sometag&gt;') self.assertEqual(VALUE_RE.search(output).groups()[0], 'http://example-äüö.com/&lt;sometag&gt;some text&lt;/sometag&gt;') output = w.render('test', 'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"') self.assertEqual(HREF_RE.search(output).groups()[0], 'http://www.example.com/%C3%A4%22%3E%3Cscript%3Ealert(%22XSS!%22)%3C/script%3E%22') self.assertEqual(TEXT_RE.search(output).groups()[0], 'http://www.example.com/%C3%A4&quot;&gt;&lt;script&gt;alert(&quot;XSS!&quot;)&lt;/script&gt;&quot;') self.assertEqual(VALUE_RE.search(output).groups()[0], 'http://www.example.com/%C3%A4&quot;&gt;&lt;script&gt;alert(&quot;XSS!&quot;)&lt;/script&gt;&quot;')
def test_render_quoting(self): "\n WARNING: This test doesn't use assertHTMLEqual since it will get rid\n of some escapes which are tested here!\n " HREF_RE = re.compile('href="([^"]+)"') VALUE_RE = re.compile('value="([^"]+)"') TEXT_RE = re.compile('<a[^>]+>([^>]+)</a>') w = widgets.AdminURLFieldWidget() output = w.render('test', 'http://example.com/<sometag>some text</sometag>') self.assertEqual(HREF_RE.search(output).groups()[0], 'http://example.com/%3Csometag%3Esome%20text%3C/sometag%3E') self.assertEqual(TEXT_RE.search(output).groups()[0], 'http://example.com/&lt;sometag&gt;some text&lt;/sometag&gt;') self.assertEqual(VALUE_RE.search(output).groups()[0], 'http://example.com/&lt;sometag&gt;some text&lt;/sometag&gt;') output = w.render('test', 'http://example-äüö.com/<sometag>some text</sometag>') self.assertEqual(HREF_RE.search(output).groups()[0], 'http://xn--example--7za4pnc.com/%3Csometag%3Esome%20text%3C/sometag%3E') self.assertEqual(TEXT_RE.search(output).groups()[0], 'http://example-äüö.com/&lt;sometag&gt;some text&lt;/sometag&gt;') self.assertEqual(VALUE_RE.search(output).groups()[0], 'http://example-äüö.com/&lt;sometag&gt;some text&lt;/sometag&gt;') output = w.render('test', 'http://www.example.com/%C3%A4"><script>alert("XSS!")</script>"') self.assertEqual(HREF_RE.search(output).groups()[0], 'http://www.example.com/%C3%A4%22%3E%3Cscript%3Ealert(%22XSS!%22)%3C/script%3E%22') self.assertEqual(TEXT_RE.search(output).groups()[0], 'http://www.example.com/%C3%A4&quot;&gt;&lt;script&gt;alert(&quot;XSS!&quot;)&lt;/script&gt;&quot;') self.assertEqual(VALUE_RE.search(output).groups()[0], 'http://www.example.com/%C3%A4&quot;&gt;&lt;script&gt;alert(&quot;XSS!&quot;)&lt;/script&gt;&quot;')<|docstring|>WARNING: This test doesn't use assertHTMLEqual since it will get rid of some escapes which are tested here!<|endoftext|>
98027c7853538435e70a1f628724cc463109c0f1e7d30a2c22ba64df8e5b80b1
def test_readonly_fields(self): '\n File widgets should render as a link when they\'re marked "read only."\n ' self.client.force_login(self.superuser) response = self.client.get(reverse('admin:admin_widgets_album_change', args=(self.album.id,))) self.assertContains(response, ('<div class="readonly"><a href="%(STORAGE_URL)salbums/hybrid_theory.jpg">albums\\hybrid_theory.jpg</a></div>' % {'STORAGE_URL': default_storage.url('')}), html=True) self.assertNotContains(response, '<input type="file" name="cover_art" id="id_cover_art" />', html=True) response = self.client.get(reverse('admin:admin_widgets_album_add')) self.assertContains(response, '<div class="readonly"></div>', html=True)
File widgets should render as a link when they're marked "read only."
tests/admin_widgets/tests.py
test_readonly_fields
August1996/Django
4
python
def test_readonly_fields(self): '\n File widgets should render as a link when they\'re marked "read only."\n ' self.client.force_login(self.superuser) response = self.client.get(reverse('admin:admin_widgets_album_change', args=(self.album.id,))) self.assertContains(response, ('<div class="readonly"><a href="%(STORAGE_URL)salbums/hybrid_theory.jpg">albums\\hybrid_theory.jpg</a></div>' % {'STORAGE_URL': default_storage.url()}), html=True) self.assertNotContains(response, '<input type="file" name="cover_art" id="id_cover_art" />', html=True) response = self.client.get(reverse('admin:admin_widgets_album_add')) self.assertContains(response, '<div class="readonly"></div>', html=True)
def test_readonly_fields(self): '\n File widgets should render as a link when they\'re marked "read only."\n ' self.client.force_login(self.superuser) response = self.client.get(reverse('admin:admin_widgets_album_change', args=(self.album.id,))) self.assertContains(response, ('<div class="readonly"><a href="%(STORAGE_URL)salbums/hybrid_theory.jpg">albums\\hybrid_theory.jpg</a></div>' % {'STORAGE_URL': default_storage.url()}), html=True) self.assertNotContains(response, '<input type="file" name="cover_art" id="id_cover_art" />', html=True) response = self.client.get(reverse('admin:admin_widgets_album_add')) self.assertContains(response, '<div class="readonly"></div>', html=True)<|docstring|>File widgets should render as a link when they're marked "read only."<|endoftext|>
7b27d248f3882e09d4520c9dee350885b4caf5c4a32a9a780db57c52661a223b
def test_show_hide_date_time_picker_widgets(self): '\n Pressing the ESC key or clicking on a widget value closes the date and\n time picker widgets.\n ' from selenium.webdriver.common.keys import Keys self.admin_login(username='super', password='secret', login_url='/') self.selenium.get((self.live_server_url + reverse('admin:admin_widgets_member_add'))) cal_icon = self.selenium.find_element_by_id('calendarlink0') self.assertEqual(self.get_css_value('#calendarbox0', 'display'), 'none') cal_icon.click() self.assertEqual(self.get_css_value('#calendarbox0', 'display'), 'block') self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE]) self.assertEqual(self.get_css_value('#calendarbox0', 'display'), 'none') cal_icon.click() self.selenium.find_element_by_xpath("//a[contains(text(), '15')]").click() self.assertEqual(self.get_css_value('#calendarbox0', 'display'), 'none') self.assertEqual(self.selenium.find_element_by_id('id_birthdate_0').get_attribute('value'), (datetime.today().strftime('%Y-%m-') + '15')) time_icon = self.selenium.find_element_by_id('clocklink0') self.assertEqual(self.get_css_value('#clockbox0', 'display'), 'none') time_icon.click() self.assertEqual(self.get_css_value('#clockbox0', 'display'), 'block') self.assertEqual([x.text for x in self.selenium.find_elements_by_xpath("//ul[@class='timelist']/li/a")], ['Now', 'Midnight', '6 a.m.', 'Noon', '6 p.m.']) self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE]) self.assertEqual(self.get_css_value('#clockbox0', 'display'), 'none') time_icon.click() self.selenium.find_element_by_xpath("//a[contains(text(), 'Noon')]").click() self.assertEqual(self.get_css_value('#clockbox0', 'display'), 'none') self.assertEqual(self.selenium.find_element_by_id('id_birthdate_1').get_attribute('value'), '12:00:00')
Pressing the ESC key or clicking on a widget value closes the date and time picker widgets.
tests/admin_widgets/tests.py
test_show_hide_date_time_picker_widgets
August1996/Django
4
python
def test_show_hide_date_time_picker_widgets(self): '\n Pressing the ESC key or clicking on a widget value closes the date and\n time picker widgets.\n ' from selenium.webdriver.common.keys import Keys self.admin_login(username='super', password='secret', login_url='/') self.selenium.get((self.live_server_url + reverse('admin:admin_widgets_member_add'))) cal_icon = self.selenium.find_element_by_id('calendarlink0') self.assertEqual(self.get_css_value('#calendarbox0', 'display'), 'none') cal_icon.click() self.assertEqual(self.get_css_value('#calendarbox0', 'display'), 'block') self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE]) self.assertEqual(self.get_css_value('#calendarbox0', 'display'), 'none') cal_icon.click() self.selenium.find_element_by_xpath("//a[contains(text(), '15')]").click() self.assertEqual(self.get_css_value('#calendarbox0', 'display'), 'none') self.assertEqual(self.selenium.find_element_by_id('id_birthdate_0').get_attribute('value'), (datetime.today().strftime('%Y-%m-') + '15')) time_icon = self.selenium.find_element_by_id('clocklink0') self.assertEqual(self.get_css_value('#clockbox0', 'display'), 'none') time_icon.click() self.assertEqual(self.get_css_value('#clockbox0', 'display'), 'block') self.assertEqual([x.text for x in self.selenium.find_elements_by_xpath("//ul[@class='timelist']/li/a")], ['Now', 'Midnight', '6 a.m.', 'Noon', '6 p.m.']) self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE]) self.assertEqual(self.get_css_value('#clockbox0', 'display'), 'none') time_icon.click() self.selenium.find_element_by_xpath("//a[contains(text(), 'Noon')]").click() self.assertEqual(self.get_css_value('#clockbox0', 'display'), 'none') self.assertEqual(self.selenium.find_element_by_id('id_birthdate_1').get_attribute('value'), '12:00:00')
def test_show_hide_date_time_picker_widgets(self): '\n Pressing the ESC key or clicking on a widget value closes the date and\n time picker widgets.\n ' from selenium.webdriver.common.keys import Keys self.admin_login(username='super', password='secret', login_url='/') self.selenium.get((self.live_server_url + reverse('admin:admin_widgets_member_add'))) cal_icon = self.selenium.find_element_by_id('calendarlink0') self.assertEqual(self.get_css_value('#calendarbox0', 'display'), 'none') cal_icon.click() self.assertEqual(self.get_css_value('#calendarbox0', 'display'), 'block') self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE]) self.assertEqual(self.get_css_value('#calendarbox0', 'display'), 'none') cal_icon.click() self.selenium.find_element_by_xpath("//a[contains(text(), '15')]").click() self.assertEqual(self.get_css_value('#calendarbox0', 'display'), 'none') self.assertEqual(self.selenium.find_element_by_id('id_birthdate_0').get_attribute('value'), (datetime.today().strftime('%Y-%m-') + '15')) time_icon = self.selenium.find_element_by_id('clocklink0') self.assertEqual(self.get_css_value('#clockbox0', 'display'), 'none') time_icon.click() self.assertEqual(self.get_css_value('#clockbox0', 'display'), 'block') self.assertEqual([x.text for x in self.selenium.find_elements_by_xpath("//ul[@class='timelist']/li/a")], ['Now', 'Midnight', '6 a.m.', 'Noon', '6 p.m.']) self.selenium.find_element_by_tag_name('body').send_keys([Keys.ESCAPE]) self.assertEqual(self.get_css_value('#clockbox0', 'display'), 'none') time_icon.click() self.selenium.find_element_by_xpath("//a[contains(text(), 'Noon')]").click() self.assertEqual(self.get_css_value('#clockbox0', 'display'), 'none') self.assertEqual(self.selenium.find_element_by_id('id_birthdate_1').get_attribute('value'), '12:00:00')<|docstring|>Pressing the ESC key or clicking on a widget value closes the date and time picker widgets.<|endoftext|>
678847be172986bc14fab2aa71b9d8e06c180190cac0b92c3b3c0748e2480a1e
def test_calendar_nonday_class(self): '\n Ensure cells that are not days of the month have the `nonday` CSS class.\n Refs #4574.\n ' self.admin_login(username='super', password='secret', login_url='/') self.selenium.get((self.live_server_url + reverse('admin:admin_widgets_member_add'))) self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01') self.selenium.find_element_by_id('calendarlink0').click() calendar0 = self.selenium.find_element_by_id('calendarin0') tds = calendar0.find_elements_by_tag_name('td') for td in (tds[:6] + tds[(- 6):]): self.assertEqual(td.get_attribute('class'), 'nonday')
Ensure cells that are not days of the month have the `nonday` CSS class. Refs #4574.
tests/admin_widgets/tests.py
test_calendar_nonday_class
August1996/Django
4
python
def test_calendar_nonday_class(self): '\n Ensure cells that are not days of the month have the `nonday` CSS class.\n Refs #4574.\n ' self.admin_login(username='super', password='secret', login_url='/') self.selenium.get((self.live_server_url + reverse('admin:admin_widgets_member_add'))) self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01') self.selenium.find_element_by_id('calendarlink0').click() calendar0 = self.selenium.find_element_by_id('calendarin0') tds = calendar0.find_elements_by_tag_name('td') for td in (tds[:6] + tds[(- 6):]): self.assertEqual(td.get_attribute('class'), 'nonday')
def test_calendar_nonday_class(self): '\n Ensure cells that are not days of the month have the `nonday` CSS class.\n Refs #4574.\n ' self.admin_login(username='super', password='secret', login_url='/') self.selenium.get((self.live_server_url + reverse('admin:admin_widgets_member_add'))) self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01') self.selenium.find_element_by_id('calendarlink0').click() calendar0 = self.selenium.find_element_by_id('calendarin0') tds = calendar0.find_elements_by_tag_name('td') for td in (tds[:6] + tds[(- 6):]): self.assertEqual(td.get_attribute('class'), 'nonday')<|docstring|>Ensure cells that are not days of the month have the `nonday` CSS class. Refs #4574.<|endoftext|>
86fab0277d387d71d580025e1d28f12249ad4cac96eaeeebdaada909b4815a8a
def test_calendar_selected_class(self): '\n Ensure cell for the day in the input has the `selected` CSS class.\n Refs #4574.\n ' self.admin_login(username='super', password='secret', login_url='/') self.selenium.get((self.live_server_url + reverse('admin:admin_widgets_member_add'))) self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01') self.selenium.find_element_by_id('calendarlink0').click() calendar0 = self.selenium.find_element_by_id('calendarin0') tds = calendar0.find_elements_by_tag_name('td') selected = tds[6] self.assertEqual(selected.get_attribute('class'), 'selected') self.assertEqual(selected.text, '1')
Ensure cell for the day in the input has the `selected` CSS class. Refs #4574.
tests/admin_widgets/tests.py
test_calendar_selected_class
August1996/Django
4
python
def test_calendar_selected_class(self): '\n Ensure cell for the day in the input has the `selected` CSS class.\n Refs #4574.\n ' self.admin_login(username='super', password='secret', login_url='/') self.selenium.get((self.live_server_url + reverse('admin:admin_widgets_member_add'))) self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01') self.selenium.find_element_by_id('calendarlink0').click() calendar0 = self.selenium.find_element_by_id('calendarin0') tds = calendar0.find_elements_by_tag_name('td') selected = tds[6] self.assertEqual(selected.get_attribute('class'), 'selected') self.assertEqual(selected.text, '1')
def test_calendar_selected_class(self): '\n Ensure cell for the day in the input has the `selected` CSS class.\n Refs #4574.\n ' self.admin_login(username='super', password='secret', login_url='/') self.selenium.get((self.live_server_url + reverse('admin:admin_widgets_member_add'))) self.selenium.find_element_by_id('id_birthdate_0').send_keys('2013-06-01') self.selenium.find_element_by_id('calendarlink0').click() calendar0 = self.selenium.find_element_by_id('calendarin0') tds = calendar0.find_elements_by_tag_name('td') selected = tds[6] self.assertEqual(selected.get_attribute('class'), 'selected') self.assertEqual(selected.text, '1')<|docstring|>Ensure cell for the day in the input has the `selected` CSS class. Refs #4574.<|endoftext|>
eda7ed5541b3213d699ed2879107bc6bbb30e48dccc758dd0afd7100b011e86b
def test_calendar_no_selected_class(self): '\n Ensure no cells are given the selected class when the field is empty.\n Refs #4574.\n ' self.admin_login(username='super', password='secret', login_url='/') self.selenium.get((self.live_server_url + reverse('admin:admin_widgets_member_add'))) self.selenium.find_element_by_id('calendarlink0').click() calendar0 = self.selenium.find_element_by_id('calendarin0') tds = calendar0.find_elements_by_tag_name('td') selected = [td for td in tds if (td.get_attribute('class') == 'selected')] self.assertEqual(len(selected), 0)
Ensure no cells are given the selected class when the field is empty. Refs #4574.
tests/admin_widgets/tests.py
test_calendar_no_selected_class
August1996/Django
4
python
def test_calendar_no_selected_class(self): '\n Ensure no cells are given the selected class when the field is empty.\n Refs #4574.\n ' self.admin_login(username='super', password='secret', login_url='/') self.selenium.get((self.live_server_url + reverse('admin:admin_widgets_member_add'))) self.selenium.find_element_by_id('calendarlink0').click() calendar0 = self.selenium.find_element_by_id('calendarin0') tds = calendar0.find_elements_by_tag_name('td') selected = [td for td in tds if (td.get_attribute('class') == 'selected')] self.assertEqual(len(selected), 0)
def test_calendar_no_selected_class(self): '\n Ensure no cells are given the selected class when the field is empty.\n Refs #4574.\n ' self.admin_login(username='super', password='secret', login_url='/') self.selenium.get((self.live_server_url + reverse('admin:admin_widgets_member_add'))) self.selenium.find_element_by_id('calendarlink0').click() calendar0 = self.selenium.find_element_by_id('calendarin0') tds = calendar0.find_elements_by_tag_name('td') selected = [td for td in tds if (td.get_attribute('class') == 'selected')] self.assertEqual(len(selected), 0)<|docstring|>Ensure no cells are given the selected class when the field is empty. Refs #4574.<|endoftext|>
6db7d3cfb5006a908c70f87cbc99f6bf79281d287cbe20e319e00d3248d8574a
def test_calendar_show_date_from_input(self): '\n The calendar shows the date from the input field for every locale\n supported by Django.\n ' self.admin_login(username='super', password='secret', login_url='/') member = Member.objects.create(name='Bob', birthdate=datetime(1984, 5, 15), gender='M') month_string = 'May' path = os.path.join(os.path.dirname(import_module('django.contrib.admin').__file__), 'locale') for (language_code, language_name) in settings.LANGUAGES: try: catalog = gettext.translation('djangojs', path, [language_code]) except IOError: continue if (month_string in catalog._catalog): month_name = catalog._catalog[month_string] else: month_name = month_string may_translation = month_name expected_caption = '{0:s} {1:d}'.format(may_translation.upper(), 1984) with override_settings(LANGUAGE_CODE=language_code, USE_L10N=True): url = reverse('admin:admin_widgets_member_change', args=(member.pk,)) self.selenium.get((self.live_server_url + url)) self.selenium.find_element_by_id('calendarlink0').click() self.wait_for_text('#calendarin0 caption', expected_caption)
The calendar shows the date from the input field for every locale supported by Django.
tests/admin_widgets/tests.py
test_calendar_show_date_from_input
August1996/Django
4
python
def test_calendar_show_date_from_input(self): '\n The calendar shows the date from the input field for every locale\n supported by Django.\n ' self.admin_login(username='super', password='secret', login_url='/') member = Member.objects.create(name='Bob', birthdate=datetime(1984, 5, 15), gender='M') month_string = 'May' path = os.path.join(os.path.dirname(import_module('django.contrib.admin').__file__), 'locale') for (language_code, language_name) in settings.LANGUAGES: try: catalog = gettext.translation('djangojs', path, [language_code]) except IOError: continue if (month_string in catalog._catalog): month_name = catalog._catalog[month_string] else: month_name = month_string may_translation = month_name expected_caption = '{0:s} {1:d}'.format(may_translation.upper(), 1984) with override_settings(LANGUAGE_CODE=language_code, USE_L10N=True): url = reverse('admin:admin_widgets_member_change', args=(member.pk,)) self.selenium.get((self.live_server_url + url)) self.selenium.find_element_by_id('calendarlink0').click() self.wait_for_text('#calendarin0 caption', expected_caption)
def test_calendar_show_date_from_input(self): '\n The calendar shows the date from the input field for every locale\n supported by Django.\n ' self.admin_login(username='super', password='secret', login_url='/') member = Member.objects.create(name='Bob', birthdate=datetime(1984, 5, 15), gender='M') month_string = 'May' path = os.path.join(os.path.dirname(import_module('django.contrib.admin').__file__), 'locale') for (language_code, language_name) in settings.LANGUAGES: try: catalog = gettext.translation('djangojs', path, [language_code]) except IOError: continue if (month_string in catalog._catalog): month_name = catalog._catalog[month_string] else: month_name = month_string may_translation = month_name expected_caption = '{0:s} {1:d}'.format(may_translation.upper(), 1984) with override_settings(LANGUAGE_CODE=language_code, USE_L10N=True): url = reverse('admin:admin_widgets_member_change', args=(member.pk,)) self.selenium.get((self.live_server_url + url)) self.selenium.find_element_by_id('calendarlink0').click() self.wait_for_text('#calendarin0 caption', expected_caption)<|docstring|>The calendar shows the date from the input field for every locale supported by Django.<|endoftext|>
b7395bbf757c23747eededea62af7126e46ea09874f2892e17f7e62fc131b549
def test_date_time_picker_shortcuts(self): '\n date/time/datetime picker shortcuts work in the current time zone.\n Refs #20663.\n\n This test case is fairly tricky, it relies on selenium still running the browser\n in the default time zone "America/Chicago" despite `override_settings` changing\n the time zone to "Asia/Singapore".\n ' self.admin_login(username='super', password='secret', login_url='/') error_margin = timedelta(seconds=10) tz = pytz.timezone('America/Chicago') utc_now = datetime.now(pytz.utc) tz_yesterday = (utc_now - timedelta(days=1)).astimezone(tz).tzname() tz_tomorrow = (utc_now + timedelta(days=1)).astimezone(tz).tzname() if (tz_yesterday != tz_tomorrow): error_margin += timedelta(hours=1) now = datetime.now() self.selenium.get((self.live_server_url + reverse('admin:admin_widgets_member_add'))) self.selenium.find_element_by_id('id_name').send_keys('test') shortcuts = self.selenium.find_elements_by_css_selector('.field-birthdate .datetimeshortcuts') for shortcut in shortcuts: shortcut.find_element_by_tag_name('a').click() self.selenium.find_elements_by_css_selector('.field-birthdate .timezonewarning') self.selenium.find_element_by_tag_name('form').submit() self.wait_page_loaded() member = Member.objects.get(name='test') self.assertGreater(member.birthdate, (now - error_margin)) self.assertLess(member.birthdate, (now + error_margin))
date/time/datetime picker shortcuts work in the current time zone. Refs #20663. This test case is fairly tricky, it relies on selenium still running the browser in the default time zone "America/Chicago" despite `override_settings` changing the time zone to "Asia/Singapore".
tests/admin_widgets/tests.py
test_date_time_picker_shortcuts
August1996/Django
4
python
def test_date_time_picker_shortcuts(self): '\n date/time/datetime picker shortcuts work in the current time zone.\n Refs #20663.\n\n This test case is fairly tricky, it relies on selenium still running the browser\n in the default time zone "America/Chicago" despite `override_settings` changing\n the time zone to "Asia/Singapore".\n ' self.admin_login(username='super', password='secret', login_url='/') error_margin = timedelta(seconds=10) tz = pytz.timezone('America/Chicago') utc_now = datetime.now(pytz.utc) tz_yesterday = (utc_now - timedelta(days=1)).astimezone(tz).tzname() tz_tomorrow = (utc_now + timedelta(days=1)).astimezone(tz).tzname() if (tz_yesterday != tz_tomorrow): error_margin += timedelta(hours=1) now = datetime.now() self.selenium.get((self.live_server_url + reverse('admin:admin_widgets_member_add'))) self.selenium.find_element_by_id('id_name').send_keys('test') shortcuts = self.selenium.find_elements_by_css_selector('.field-birthdate .datetimeshortcuts') for shortcut in shortcuts: shortcut.find_element_by_tag_name('a').click() self.selenium.find_elements_by_css_selector('.field-birthdate .timezonewarning') self.selenium.find_element_by_tag_name('form').submit() self.wait_page_loaded() member = Member.objects.get(name='test') self.assertGreater(member.birthdate, (now - error_margin)) self.assertLess(member.birthdate, (now + error_margin))
def test_date_time_picker_shortcuts(self): '\n date/time/datetime picker shortcuts work in the current time zone.\n Refs #20663.\n\n This test case is fairly tricky, it relies on selenium still running the browser\n in the default time zone "America/Chicago" despite `override_settings` changing\n the time zone to "Asia/Singapore".\n ' self.admin_login(username='super', password='secret', login_url='/') error_margin = timedelta(seconds=10) tz = pytz.timezone('America/Chicago') utc_now = datetime.now(pytz.utc) tz_yesterday = (utc_now - timedelta(days=1)).astimezone(tz).tzname() tz_tomorrow = (utc_now + timedelta(days=1)).astimezone(tz).tzname() if (tz_yesterday != tz_tomorrow): error_margin += timedelta(hours=1) now = datetime.now() self.selenium.get((self.live_server_url + reverse('admin:admin_widgets_member_add'))) self.selenium.find_element_by_id('id_name').send_keys('test') shortcuts = self.selenium.find_elements_by_css_selector('.field-birthdate .datetimeshortcuts') for shortcut in shortcuts: shortcut.find_element_by_tag_name('a').click() self.selenium.find_elements_by_css_selector('.field-birthdate .timezonewarning') self.selenium.find_element_by_tag_name('form').submit() self.wait_page_loaded() member = Member.objects.get(name='test') self.assertGreater(member.birthdate, (now - error_margin)) self.assertLess(member.birthdate, (now + error_margin))<|docstring|>date/time/datetime picker shortcuts work in the current time zone. Refs #20663. This test case is fairly tricky, it relies on selenium still running the browser in the default time zone "America/Chicago" despite `override_settings` changing the time zone to "Asia/Singapore".<|endoftext|>
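Note: the widened error_margin in test_date_time_picker_shortcuts above relies on tzname() differing on either side of a DST transition. A small standalone illustration (the 2013-11-03 US DST change is chosen here purely as an example date and is not part of the test):

import pytz
from datetime import datetime, timedelta

tz = pytz.timezone('America/Chicago')
# Noon UTC on the day US DST ended in 2013; the neighbouring days straddle the change.
utc_now = pytz.utc.localize(datetime(2013, 11, 3, 12))
print((utc_now - timedelta(days=1)).astimezone(tz).tzname())  # 'CDT'
print((utc_now + timedelta(days=1)).astimezone(tz).tzname())  # 'CST'
# The two names differ, so the test widens its error margin by an extra hour.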
e24e1a082104cb0374061a2bc44335209d7f5b2d3f797dcd0472198db74d3d38
def test_filter(self): "\n Typing in the search box filters out options displayed in the 'from'\n box.\n " from selenium.webdriver.common.keys import Keys self.school.students.set([self.lisa, self.peter]) self.school.alumni.set([self.lisa, self.peter]) self.admin_login(username='super', password='secret', login_url='/') self.selenium.get((self.live_server_url + reverse('admin:admin_widgets_school_change', args=(self.school.id,)))) for field_name in ['students', 'alumni']: from_box = ('#id_%s_from' % field_name) to_box = ('#id_%s_to' % field_name) choose_link = ('id_%s_add_link' % field_name) remove_link = ('id_%s_remove_link' % field_name) input = self.selenium.find_element_by_id(('id_%s_input' % field_name)) self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id)]) input.send_keys('a') self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) input.send_keys('R') self.assertSelectOptions(from_box, [str(self.arthur.id)]) input.send_keys([Keys.BACK_SPACE]) self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE]) self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id)]) input.send_keys('a') self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) self.get_select_option(from_box, str(self.jason.id)).click() self.selenium.find_element_by_id(choose_link).click() self.assertSelectOptions(from_box, [str(self.arthur.id)]) self.assertSelectOptions(to_box, [str(self.lisa.id), str(self.peter.id), str(self.jason.id)]) self.get_select_option(to_box, str(self.lisa.id)).click() self.selenium.find_element_by_id(remove_link).click() self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.lisa.id)]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE]) self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jenny.id), str(self.john.id), str(self.lisa.id)]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) self.get_select_option(to_box, str(self.jason.id)).click() self.selenium.find_element_by_id(remove_link).click() input.send_keys('ja') self.assertSelectOptions(from_box, [str(self.jason.id)]) input.send_keys([Keys.ENTER]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE, Keys.BACK_SPACE]) self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.wait_page_loaded() self.school = School.objects.get(id=self.school.id) self.assertEqual(list(self.school.students.all()), [self.jason, self.peter]) self.assertEqual(list(self.school.alumni.all()), [self.jason, self.peter])
Typing in the search box filters out options displayed in the 'from' box.
tests/admin_widgets/tests.py
test_filter
August1996/Django
4
python
def test_filter(self): "\n Typing in the search box filters out options displayed in the 'from'\n box.\n " from selenium.webdriver.common.keys import Keys self.school.students.set([self.lisa, self.peter]) self.school.alumni.set([self.lisa, self.peter]) self.admin_login(username='super', password='secret', login_url='/') self.selenium.get((self.live_server_url + reverse('admin:admin_widgets_school_change', args=(self.school.id,)))) for field_name in ['students', 'alumni']: from_box = ('#id_%s_from' % field_name) to_box = ('#id_%s_to' % field_name) choose_link = ('id_%s_add_link' % field_name) remove_link = ('id_%s_remove_link' % field_name) input = self.selenium.find_element_by_id(('id_%s_input' % field_name)) self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id)]) input.send_keys('a') self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) input.send_keys('R') self.assertSelectOptions(from_box, [str(self.arthur.id)]) input.send_keys([Keys.BACK_SPACE]) self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE]) self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id)]) input.send_keys('a') self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) self.get_select_option(from_box, str(self.jason.id)).click() self.selenium.find_element_by_id(choose_link).click() self.assertSelectOptions(from_box, [str(self.arthur.id)]) self.assertSelectOptions(to_box, [str(self.lisa.id), str(self.peter.id), str(self.jason.id)]) self.get_select_option(to_box, str(self.lisa.id)).click() self.selenium.find_element_by_id(remove_link).click() self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.lisa.id)]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE]) self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jenny.id), str(self.john.id), str(self.lisa.id)]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) self.get_select_option(to_box, str(self.jason.id)).click() self.selenium.find_element_by_id(remove_link).click() input.send_keys('ja') self.assertSelectOptions(from_box, [str(self.jason.id)]) input.send_keys([Keys.ENTER]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE, Keys.BACK_SPACE]) self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.wait_page_loaded() self.school = School.objects.get(id=self.school.id) self.assertEqual(list(self.school.students.all()), [self.jason, self.peter]) self.assertEqual(list(self.school.alumni.all()), [self.jason, self.peter])
def test_filter(self): "\n Typing in the search box filters out options displayed in the 'from'\n box.\n " from selenium.webdriver.common.keys import Keys self.school.students.set([self.lisa, self.peter]) self.school.alumni.set([self.lisa, self.peter]) self.admin_login(username='super', password='secret', login_url='/') self.selenium.get((self.live_server_url + reverse('admin:admin_widgets_school_change', args=(self.school.id,)))) for field_name in ['students', 'alumni']: from_box = ('#id_%s_from' % field_name) to_box = ('#id_%s_to' % field_name) choose_link = ('id_%s_add_link' % field_name) remove_link = ('id_%s_remove_link' % field_name) input = self.selenium.find_element_by_id(('id_%s_input' % field_name)) self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id)]) input.send_keys('a') self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) input.send_keys('R') self.assertSelectOptions(from_box, [str(self.arthur.id)]) input.send_keys([Keys.BACK_SPACE]) self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE]) self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id)]) input.send_keys('a') self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.jason.id)]) self.get_select_option(from_box, str(self.jason.id)).click() self.selenium.find_element_by_id(choose_link).click() self.assertSelectOptions(from_box, [str(self.arthur.id)]) self.assertSelectOptions(to_box, [str(self.lisa.id), str(self.peter.id), str(self.jason.id)]) self.get_select_option(to_box, str(self.lisa.id)).click() self.selenium.find_element_by_id(remove_link).click() self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.lisa.id)]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE]) self.assertSelectOptions(from_box, [str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jenny.id), str(self.john.id), str(self.lisa.id)]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) self.get_select_option(to_box, str(self.jason.id)).click() self.selenium.find_element_by_id(remove_link).click() input.send_keys('ja') self.assertSelectOptions(from_box, [str(self.jason.id)]) input.send_keys([Keys.ENTER]) self.assertSelectOptions(to_box, [str(self.peter.id), str(self.jason.id)]) input.send_keys([Keys.BACK_SPACE, Keys.BACK_SPACE]) self.selenium.find_element_by_xpath('//input[@value="Save"]').click() self.wait_page_loaded() self.school = School.objects.get(id=self.school.id) self.assertEqual(list(self.school.students.all()), [self.jason, self.peter]) self.assertEqual(list(self.school.alumni.all()), [self.jason, self.peter])<|docstring|>Typing in the search box filters out options displayed in the 'from' box.<|endoftext|>
f5ec85553f5dcff01269c6043ebeafd7a621e16571c0b33937a68cd0af768a6f
def test_back_button_bug(self): "\n Some browsers had a bug where navigating away from the change page\n and then clicking the browser's back button would clear the\n filter_horizontal/filter_vertical widgets (#13614).\n " self.school.students.set([self.lisa, self.peter]) self.school.alumni.set([self.lisa, self.peter]) self.admin_login(username='super', password='secret', login_url='/') change_url = reverse('admin:admin_widgets_school_change', args=(self.school.id,)) self.selenium.get((self.live_server_url + change_url)) self.selenium.find_element_by_link_text('Home').click() self.selenium.back() expected_unselected_values = [str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id)] expected_selected_values = [str(self.lisa.id), str(self.peter.id)] self.assertSelectOptions('#id_students_from', expected_unselected_values) self.assertSelectOptions('#id_students_to', expected_selected_values) self.assertSelectOptions('#id_alumni_from', expected_unselected_values) self.assertSelectOptions('#id_alumni_to', expected_selected_values)
Some browsers had a bug where navigating away from the change page and then clicking the browser's back button would clear the filter_horizontal/filter_vertical widgets (#13614).
tests/admin_widgets/tests.py
test_back_button_bug
August1996/Django
4
python
def test_back_button_bug(self): "\n Some browsers had a bug where navigating away from the change page\n and then clicking the browser's back button would clear the\n filter_horizontal/filter_vertical widgets (#13614).\n " self.school.students.set([self.lisa, self.peter]) self.school.alumni.set([self.lisa, self.peter]) self.admin_login(username='super', password='secret', login_url='/') change_url = reverse('admin:admin_widgets_school_change', args=(self.school.id,)) self.selenium.get((self.live_server_url + change_url)) self.selenium.find_element_by_link_text('Home').click() self.selenium.back() expected_unselected_values = [str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id)] expected_selected_values = [str(self.lisa.id), str(self.peter.id)] self.assertSelectOptions('#id_students_from', expected_unselected_values) self.assertSelectOptions('#id_students_to', expected_selected_values) self.assertSelectOptions('#id_alumni_from', expected_unselected_values) self.assertSelectOptions('#id_alumni_to', expected_selected_values)
def test_back_button_bug(self): "\n Some browsers had a bug where navigating away from the change page\n and then clicking the browser's back button would clear the\n filter_horizontal/filter_vertical widgets (#13614).\n " self.school.students.set([self.lisa, self.peter]) self.school.alumni.set([self.lisa, self.peter]) self.admin_login(username='super', password='secret', login_url='/') change_url = reverse('admin:admin_widgets_school_change', args=(self.school.id,)) self.selenium.get((self.live_server_url + change_url)) self.selenium.find_element_by_link_text('Home').click() self.selenium.back() expected_unselected_values = [str(self.arthur.id), str(self.bob.id), str(self.cliff.id), str(self.jason.id), str(self.jenny.id), str(self.john.id)] expected_selected_values = [str(self.lisa.id), str(self.peter.id)] self.assertSelectOptions('#id_students_from', expected_unselected_values) self.assertSelectOptions('#id_students_to', expected_selected_values) self.assertSelectOptions('#id_alumni_from', expected_unselected_values) self.assertSelectOptions('#id_alumni_to', expected_selected_values)<|docstring|>Some browsers had a bug where navigating away from the change page and then clicking the browser's back button would clear the filter_horizontal/filter_vertical widgets (#13614).<|endoftext|>
98ad3b0a881285423c6b0c65acdbf4de13d54bf21b259c9f5e5f4c67a962025e
def test_refresh_page(self): '\n Horizontal and vertical filter widgets keep selected options on page\n reload (#22955).\n ' self.school.students.add(self.arthur, self.jason) self.school.alumni.add(self.arthur, self.jason) self.admin_login(username='super', password='secret', login_url='/') change_url = reverse('admin:admin_widgets_school_change', args=(self.school.id,)) self.selenium.get((self.live_server_url + change_url)) options_len = len(self.selenium.find_elements_by_css_selector('#id_students_to > option')) self.assertEqual(options_len, 2) self.selenium.execute_script('location.reload()') self.wait_page_loaded() options_len = len(self.selenium.find_elements_by_css_selector('#id_students_to > option')) self.assertEqual(options_len, 2)
Horizontal and vertical filter widgets keep selected options on page reload (#22955).
tests/admin_widgets/tests.py
test_refresh_page
August1996/Django
4
python
def test_refresh_page(self): '\n Horizontal and vertical filter widgets keep selected options on page\n reload (#22955).\n ' self.school.students.add(self.arthur, self.jason) self.school.alumni.add(self.arthur, self.jason) self.admin_login(username='super', password='secret', login_url='/') change_url = reverse('admin:admin_widgets_school_change', args=(self.school.id,)) self.selenium.get((self.live_server_url + change_url)) options_len = len(self.selenium.find_elements_by_css_selector('#id_students_to > option')) self.assertEqual(options_len, 2) self.selenium.execute_script('location.reload()') self.wait_page_loaded() options_len = len(self.selenium.find_elements_by_css_selector('#id_students_to > option')) self.assertEqual(options_len, 2)
def test_refresh_page(self): '\n Horizontal and vertical filter widgets keep selected options on page\n reload (#22955).\n ' self.school.students.add(self.arthur, self.jason) self.school.alumni.add(self.arthur, self.jason) self.admin_login(username='super', password='secret', login_url='/') change_url = reverse('admin:admin_widgets_school_change', args=(self.school.id,)) self.selenium.get((self.live_server_url + change_url)) options_len = len(self.selenium.find_elements_by_css_selector('#id_students_to > option')) self.assertEqual(options_len, 2) self.selenium.execute_script('location.reload()') self.wait_page_loaded() options_len = len(self.selenium.find_elements_by_css_selector('#id_students_to > option')) self.assertEqual(options_len, 2)<|docstring|>Horizontal and vertical filter widgets keep selected options on page reload (#22955).<|endoftext|>
16b238a9f236575d4012859ff8f5f270e010df99186078826b413a78edaf6d87
def process_worker(call_queue, result_queue): "\n A copy of Python's process_worker function in :class:`concurrent.futures.ProcessPoolExecutor`.\n \n This copy was changed to not die on KeyboardInterrupt, but to exit gracefully.\n Also, no traceback is printed upon :class:`NoKnownPathwaysError` or :class:`CancelledError`.\n \n Note\n ----\n Copyright © 2001-2018 Python Software Foundation; All Rights Reserved\n " while True: try: call_item = call_queue.get(block=True) if (call_item is None): result_queue.put(os.getpid()) return except KeyboardInterrupt as e: sys.exit() try: r = call_item.fn(*call_item.args, **call_item.kwargs) except BaseException as e: from FEV_KEGG.KEGG.Database import NoKnownPathwaysError if (isinstance(e, NoKnownPathwaysError) or isinstance(e, concurrent.futures._base.CancelledError)): pass elif isinstance(e, Exception): traceback.print_exc() result_queue.put(_ResultItem(call_item.work_id, exception=e)) else: result_queue.put(_ResultItem(call_item.work_id, result=r))
A copy of Python's process_worker function in :class:`concurrent.futures.ProcessPoolExecutor`. This copy was changed to not die on KeyboardInterrupt, but to exit gracefully. Also, no traceback is printed upon :class:`NoKnownPathwaysError` or :class:`CancelledError`. Note ---- Copyright © 2001-2018 Python Software Foundation; All Rights Reserved
FEV_KEGG/lib/Python/concurrent/futures.py
process_worker
ryhaberecht/FEV-KEGG
0
python
def process_worker(call_queue, result_queue): "\n A copy of Python's process_worker function in :class:`concurrent.futures.ProcessPoolExecutor`.\n \n This copy was changed to not die on KeyboardInterrupt, but to exit gracefully.\n Also, no traceback is printed upon :class:`NoKnownPathwaysError` or :class:`CancelledError`.\n \n Note\n ----\n Copyright © 2001-2018 Python Software Foundation; All Rights Reserved\n " while True: try: call_item = call_queue.get(block=True) if (call_item is None): result_queue.put(os.getpid()) return except KeyboardInterrupt as e: sys.exit() try: r = call_item.fn(*call_item.args, **call_item.kwargs) except BaseException as e: from FEV_KEGG.KEGG.Database import NoKnownPathwaysError if (isinstance(e, NoKnownPathwaysError) or isinstance(e, concurrent.futures._base.CancelledError)): pass elif isinstance(e, Exception): traceback.print_exc() result_queue.put(_ResultItem(call_item.work_id, exception=e)) else: result_queue.put(_ResultItem(call_item.work_id, result=r))
def process_worker(call_queue, result_queue): "\n A copy of Python's process_worker function in :class:`concurrent.futures.ProcessPoolExecutor`.\n \n This copy was changed to not die on KeyboardInterrupt, but to exit gracefully.\n Also, no traceback is printed upon :class:`NoKnownPathwaysError` or :class:`CancelledError`.\n \n Note\n ----\n Copyright © 2001-2018 Python Software Foundation; All Rights Reserved\n " while True: try: call_item = call_queue.get(block=True) if (call_item is None): result_queue.put(os.getpid()) return except KeyboardInterrupt as e: sys.exit() try: r = call_item.fn(*call_item.args, **call_item.kwargs) except BaseException as e: from FEV_KEGG.KEGG.Database import NoKnownPathwaysError if (isinstance(e, NoKnownPathwaysError) or isinstance(e, concurrent.futures._base.CancelledError)): pass elif isinstance(e, Exception): traceback.print_exc() result_queue.put(_ResultItem(call_item.work_id, exception=e)) else: result_queue.put(_ResultItem(call_item.work_id, result=r))<|docstring|>A copy of Python's process_worker function in :class:`concurrent.futures.ProcessPoolExecutor`. This copy was changed to not die on KeyboardInterrupt, but to exit gracefully. Also, no traceback is printed upon :class:`NoKnownPathwaysError` or :class:`CancelledError`. Note ---- Copyright © 2001-2018 Python Software Foundation; All Rights Reserved<|endoftext|>
72a6aeed71e4af7a96ddaf0412dda4c0316c3d57eccd66ad6bbdefadb6a9617e
def forward(self, logits, lens): '\n Arguments:\n logits: [batch_size, seq_len, n_labels] FloatTensor\n lens: [batch_size] LongTensor\n ' (batch_size, seq_len, n_labels) = logits.size() alpha = logits.data.new(batch_size, self.n_labels).fill_((- 10000)) alpha[:, self.start_idx] = 0 alpha = Variable(alpha) c_lens = lens.clone() logits_t = logits.transpose(1, 0) for logit in logits_t: logit_exp = logit.unsqueeze((- 1)).expand(batch_size, *self.transitions.size()) alpha_exp = alpha.unsqueeze(1).expand(batch_size, *self.transitions.size()) trans_exp = self.transitions.unsqueeze(0).expand_as(alpha_exp) mat = ((trans_exp + alpha_exp) + logit_exp) alpha_nxt = log_sum_exp(mat, 2).squeeze((- 1)) mask = (c_lens > 0).float().unsqueeze((- 1)).expand_as(alpha) alpha = ((mask * alpha_nxt) + ((1 - mask) * alpha)) c_lens = (c_lens - 1) alpha = (alpha + self.transitions[self.stop_idx].unsqueeze(0).expand_as(alpha)) norm = log_sum_exp(alpha, 1).squeeze((- 1)) return norm
Arguments: logits: [batch_size, seq_len, n_labels] FloatTensor lens: [batch_size] LongTensor
model.py
forward
kaniblu/pytorch-bilstmcrf
124
python
def forward(self, logits, lens): '\n Arguments:\n logits: [batch_size, seq_len, n_labels] FloatTensor\n lens: [batch_size] LongTensor\n ' (batch_size, seq_len, n_labels) = logits.size() alpha = logits.data.new(batch_size, self.n_labels).fill_((- 10000)) alpha[:, self.start_idx] = 0 alpha = Variable(alpha) c_lens = lens.clone() logits_t = logits.transpose(1, 0) for logit in logits_t: logit_exp = logit.unsqueeze((- 1)).expand(batch_size, *self.transitions.size()) alpha_exp = alpha.unsqueeze(1).expand(batch_size, *self.transitions.size()) trans_exp = self.transitions.unsqueeze(0).expand_as(alpha_exp) mat = ((trans_exp + alpha_exp) + logit_exp) alpha_nxt = log_sum_exp(mat, 2).squeeze((- 1)) mask = (c_lens > 0).float().unsqueeze((- 1)).expand_as(alpha) alpha = ((mask * alpha_nxt) + ((1 - mask) * alpha)) c_lens = (c_lens - 1) alpha = (alpha + self.transitions[self.stop_idx].unsqueeze(0).expand_as(alpha)) norm = log_sum_exp(alpha, 1).squeeze((- 1)) return norm
def forward(self, logits, lens): '\n Arguments:\n logits: [batch_size, seq_len, n_labels] FloatTensor\n lens: [batch_size] LongTensor\n ' (batch_size, seq_len, n_labels) = logits.size() alpha = logits.data.new(batch_size, self.n_labels).fill_((- 10000)) alpha[:, self.start_idx] = 0 alpha = Variable(alpha) c_lens = lens.clone() logits_t = logits.transpose(1, 0) for logit in logits_t: logit_exp = logit.unsqueeze((- 1)).expand(batch_size, *self.transitions.size()) alpha_exp = alpha.unsqueeze(1).expand(batch_size, *self.transitions.size()) trans_exp = self.transitions.unsqueeze(0).expand_as(alpha_exp) mat = ((trans_exp + alpha_exp) + logit_exp) alpha_nxt = log_sum_exp(mat, 2).squeeze((- 1)) mask = (c_lens > 0).float().unsqueeze((- 1)).expand_as(alpha) alpha = ((mask * alpha_nxt) + ((1 - mask) * alpha)) c_lens = (c_lens - 1) alpha = (alpha + self.transitions[self.stop_idx].unsqueeze(0).expand_as(alpha)) norm = log_sum_exp(alpha, 1).squeeze((- 1)) return norm<|docstring|>Arguments: logits: [batch_size, seq_len, n_labels] FloatTensor lens: [batch_size] LongTensor<|endoftext|>
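Note: forward above calls a log_sum_exp helper that is not included in these records. A minimal sketch consistent with how it is used here (reducing over a dimension while keeping it, so the caller can squeeze it away) might look like:

import torch

def log_sum_exp(vec, dim):
    # Numerically stable log(sum(exp(vec))) along `dim`, keeping the reduced
    # dimension so the result can be squeezed by the caller.
    max_val, _ = vec.max(dim, keepdim=True)
    return max_val + (vec - max_val).exp().sum(dim, keepdim=True).log()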
7311746be54d07ace998b58c8a39e5d203d54ea5aa3ccfc5e3e5ecd01ec07b28
def viterbi_decode(self, logits, lens): 'Borrowed from pytorch tutorial\n\n Arguments:\n logits: [batch_size, seq_len, n_labels] FloatTensor\n lens: [batch_size] LongTensor\n ' (batch_size, seq_len, n_labels) = logits.size() vit = logits.data.new(batch_size, self.n_labels).fill_((- 10000)) vit[:, self.start_idx] = 0 vit = Variable(vit) c_lens = lens.clone() logits_t = logits.transpose(1, 0) pointers = [] for logit in logits_t: vit_exp = vit.unsqueeze(1).expand(batch_size, n_labels, n_labels) trn_exp = self.transitions.unsqueeze(0).expand_as(vit_exp) vit_trn_sum = (vit_exp + trn_exp) (vt_max, vt_argmax) = vit_trn_sum.max(2) vt_max = vt_max.squeeze((- 1)) vit_nxt = (vt_max + logit) pointers.append(vt_argmax.squeeze((- 1)).unsqueeze(0)) mask = (c_lens > 0).float().unsqueeze((- 1)).expand_as(vit_nxt) vit = ((mask * vit_nxt) + ((1 - mask) * vit)) mask = (c_lens == 1).float().unsqueeze((- 1)).expand_as(vit_nxt) vit += (mask * self.transitions[self.stop_idx].unsqueeze(0).expand_as(vit_nxt)) c_lens = (c_lens - 1) pointers = torch.cat(pointers) (scores, idx) = vit.max(1) idx = idx.squeeze((- 1)) paths = [idx.unsqueeze(1)] for argmax in reversed(pointers): idx_exp = idx.unsqueeze((- 1)) idx = torch.gather(argmax, 1, idx_exp) idx = idx.squeeze((- 1)) paths.insert(0, idx.unsqueeze(1)) paths = torch.cat(paths[1:], 1) scores = scores.squeeze((- 1)) return (scores, paths)
Borrowed from pytorch tutorial Arguments: logits: [batch_size, seq_len, n_labels] FloatTensor lens: [batch_size] LongTensor
model.py
viterbi_decode
kaniblu/pytorch-bilstmcrf
124
python
def viterbi_decode(self, logits, lens): 'Borrowed from pytorch tutorial\n\n Arguments:\n logits: [batch_size, seq_len, n_labels] FloatTensor\n lens: [batch_size] LongTensor\n ' (batch_size, seq_len, n_labels) = logits.size() vit = logits.data.new(batch_size, self.n_labels).fill_((- 10000)) vit[:, self.start_idx] = 0 vit = Variable(vit) c_lens = lens.clone() logits_t = logits.transpose(1, 0) pointers = [] for logit in logits_t: vit_exp = vit.unsqueeze(1).expand(batch_size, n_labels, n_labels) trn_exp = self.transitions.unsqueeze(0).expand_as(vit_exp) vit_trn_sum = (vit_exp + trn_exp) (vt_max, vt_argmax) = vit_trn_sum.max(2) vt_max = vt_max.squeeze((- 1)) vit_nxt = (vt_max + logit) pointers.append(vt_argmax.squeeze((- 1)).unsqueeze(0)) mask = (c_lens > 0).float().unsqueeze((- 1)).expand_as(vit_nxt) vit = ((mask * vit_nxt) + ((1 - mask) * vit)) mask = (c_lens == 1).float().unsqueeze((- 1)).expand_as(vit_nxt) vit += (mask * self.transitions[self.stop_idx].unsqueeze(0).expand_as(vit_nxt)) c_lens = (c_lens - 1) pointers = torch.cat(pointers) (scores, idx) = vit.max(1) idx = idx.squeeze((- 1)) paths = [idx.unsqueeze(1)] for argmax in reversed(pointers): idx_exp = idx.unsqueeze((- 1)) idx = torch.gather(argmax, 1, idx_exp) idx = idx.squeeze((- 1)) paths.insert(0, idx.unsqueeze(1)) paths = torch.cat(paths[1:], 1) scores = scores.squeeze((- 1)) return (scores, paths)
def viterbi_decode(self, logits, lens): 'Borrowed from pytorch tutorial\n\n Arguments:\n logits: [batch_size, seq_len, n_labels] FloatTensor\n lens: [batch_size] LongTensor\n ' (batch_size, seq_len, n_labels) = logits.size() vit = logits.data.new(batch_size, self.n_labels).fill_((- 10000)) vit[:, self.start_idx] = 0 vit = Variable(vit) c_lens = lens.clone() logits_t = logits.transpose(1, 0) pointers = [] for logit in logits_t: vit_exp = vit.unsqueeze(1).expand(batch_size, n_labels, n_labels) trn_exp = self.transitions.unsqueeze(0).expand_as(vit_exp) vit_trn_sum = (vit_exp + trn_exp) (vt_max, vt_argmax) = vit_trn_sum.max(2) vt_max = vt_max.squeeze((- 1)) vit_nxt = (vt_max + logit) pointers.append(vt_argmax.squeeze((- 1)).unsqueeze(0)) mask = (c_lens > 0).float().unsqueeze((- 1)).expand_as(vit_nxt) vit = ((mask * vit_nxt) + ((1 - mask) * vit)) mask = (c_lens == 1).float().unsqueeze((- 1)).expand_as(vit_nxt) vit += (mask * self.transitions[self.stop_idx].unsqueeze(0).expand_as(vit_nxt)) c_lens = (c_lens - 1) pointers = torch.cat(pointers) (scores, idx) = vit.max(1) idx = idx.squeeze((- 1)) paths = [idx.unsqueeze(1)] for argmax in reversed(pointers): idx_exp = idx.unsqueeze((- 1)) idx = torch.gather(argmax, 1, idx_exp) idx = idx.squeeze((- 1)) paths.insert(0, idx.unsqueeze(1)) paths = torch.cat(paths[1:], 1) scores = scores.squeeze((- 1)) return (scores, paths)<|docstring|>Borrowed from pytorch tutorial Arguments: logits: [batch_size, seq_len, n_labels] FloatTensor lens: [batch_size] LongTensor<|endoftext|>
f0f1be5a1a13814ab90cab68e2c8dcf500fa2100b586accad737ee69d61e5370
def transition_score(self, labels, lens): '\n Arguments:\n labels: [batch_size, seq_len] LongTensor\n lens: [batch_size] LongTensor\n ' (batch_size, seq_len) = labels.size() labels_ext = Variable(labels.data.new(batch_size, (seq_len + 2))) labels_ext[:, 0] = self.start_idx labels_ext[:, 1:(- 1)] = labels mask = sequence_mask((lens + 1), max_len=(seq_len + 2)).long() pad_stop = Variable(labels.data.new(1).fill_(self.stop_idx)) pad_stop = pad_stop.unsqueeze((- 1)).expand(batch_size, (seq_len + 2)) labels_ext = (((1 - mask) * pad_stop) + (mask * labels_ext)) labels = labels_ext trn = self.transitions trn_exp = trn.unsqueeze(0).expand(batch_size, *trn.size()) lbl_r = labels[:, 1:] lbl_rexp = lbl_r.unsqueeze((- 1)).expand(*lbl_r.size(), trn.size(0)) trn_row = torch.gather(trn_exp, 1, lbl_rexp) lbl_lexp = labels[:, :(- 1)].unsqueeze((- 1)) trn_scr = torch.gather(trn_row, 2, lbl_lexp) trn_scr = trn_scr.squeeze((- 1)) mask = sequence_mask((lens + 1)).float() trn_scr = (trn_scr * mask) score = trn_scr.sum(1).squeeze((- 1)) return score
Arguments: labels: [batch_size, seq_len] LongTensor lens: [batch_size] LongTensor
model.py
transition_score
kaniblu/pytorch-bilstmcrf
124
python
def transition_score(self, labels, lens): '\n Arguments:\n labels: [batch_size, seq_len] LongTensor\n lens: [batch_size] LongTensor\n ' (batch_size, seq_len) = labels.size() labels_ext = Variable(labels.data.new(batch_size, (seq_len + 2))) labels_ext[:, 0] = self.start_idx labels_ext[:, 1:(- 1)] = labels mask = sequence_mask((lens + 1), max_len=(seq_len + 2)).long() pad_stop = Variable(labels.data.new(1).fill_(self.stop_idx)) pad_stop = pad_stop.unsqueeze((- 1)).expand(batch_size, (seq_len + 2)) labels_ext = (((1 - mask) * pad_stop) + (mask * labels_ext)) labels = labels_ext trn = self.transitions trn_exp = trn.unsqueeze(0).expand(batch_size, *trn.size()) lbl_r = labels[:, 1:] lbl_rexp = lbl_r.unsqueeze((- 1)).expand(*lbl_r.size(), trn.size(0)) trn_row = torch.gather(trn_exp, 1, lbl_rexp) lbl_lexp = labels[:, :(- 1)].unsqueeze((- 1)) trn_scr = torch.gather(trn_row, 2, lbl_lexp) trn_scr = trn_scr.squeeze((- 1)) mask = sequence_mask((lens + 1)).float() trn_scr = (trn_scr * mask) score = trn_scr.sum(1).squeeze((- 1)) return score
def transition_score(self, labels, lens): '\n Arguments:\n labels: [batch_size, seq_len] LongTensor\n lens: [batch_size] LongTensor\n ' (batch_size, seq_len) = labels.size() labels_ext = Variable(labels.data.new(batch_size, (seq_len + 2))) labels_ext[:, 0] = self.start_idx labels_ext[:, 1:(- 1)] = labels mask = sequence_mask((lens + 1), max_len=(seq_len + 2)).long() pad_stop = Variable(labels.data.new(1).fill_(self.stop_idx)) pad_stop = pad_stop.unsqueeze((- 1)).expand(batch_size, (seq_len + 2)) labels_ext = (((1 - mask) * pad_stop) + (mask * labels_ext)) labels = labels_ext trn = self.transitions trn_exp = trn.unsqueeze(0).expand(batch_size, *trn.size()) lbl_r = labels[:, 1:] lbl_rexp = lbl_r.unsqueeze((- 1)).expand(*lbl_r.size(), trn.size(0)) trn_row = torch.gather(trn_exp, 1, lbl_rexp) lbl_lexp = labels[:, :(- 1)].unsqueeze((- 1)) trn_scr = torch.gather(trn_row, 2, lbl_lexp) trn_scr = trn_scr.squeeze((- 1)) mask = sequence_mask((lens + 1)).float() trn_scr = (trn_scr * mask) score = trn_scr.sum(1).squeeze((- 1)) return score<|docstring|>Arguments: labels: [batch_size, seq_len] LongTensor lens: [batch_size] LongTensor<|endoftext|>
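Note: transition_score above relies on a sequence_mask helper that is also not part of these records. Judging from how it is called (a [batch_size, max_len] mask later cast with .long() or .float()), one plausible implementation is:

import torch

def sequence_mask(lens, max_len=None):
    # Mask of shape [batch_size, max_len]: True where the position index is
    # smaller than the corresponding sequence length in `lens`.
    if max_len is None:
        max_len = int(lens.max().item())
    positions = torch.arange(max_len, device=lens.device).unsqueeze(0)
    return positions < lens.unsqueeze(-1)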
75613a61e6afcab0b21c126b5137f04531838b417ab2fc37abaa1fe5b15a247b
def _embeddings(self, xs): 'Takes raw feature sequences and produces a single word embedding\n\n Arguments:\n xs: [n_feats, batch_size, seq_len] LongTensor\n\n Returns:\n [batch_size, seq_len, word_dim] FloatTensor \n ' (n_feats, batch_size, seq_len) = xs.size() assert (n_feats == self.n_feats) res = [emb(x) for (emb, x) in zip(self.embeddings, xs)] x = torch.cat(res, 2) return x
Takes raw feature sequences and produces a single word embedding Arguments: xs: [n_feats, batch_size, seq_len] LongTensor Returns: [batch_size, seq_len, word_dim] FloatTensor
model.py
_embeddings
kaniblu/pytorch-bilstmcrf
124
python
def _embeddings(self, xs): 'Takes raw feature sequences and produces a single word embedding\n\n Arguments:\n xs: [n_feats, batch_size, seq_len] LongTensor\n\n Returns:\n [batch_size, seq_len, word_dim] FloatTensor \n ' (n_feats, batch_size, seq_len) = xs.size() assert (n_feats == self.n_feats) res = [emb(x) for (emb, x) in zip(self.embeddings, xs)] x = torch.cat(res, 2) return x
def _embeddings(self, xs): 'Takes raw feature sequences and produces a single word embedding\n\n Arguments:\n xs: [n_feats, batch_size, seq_len] LongTensor\n\n Returns:\n [batch_size, seq_len, word_dim] FloatTensor \n ' (n_feats, batch_size, seq_len) = xs.size() assert (n_feats == self.n_feats) res = [emb(x) for (emb, x) in zip(self.embeddings, xs)] x = torch.cat(res, 2) return x<|docstring|>Takes raw feature sequences and produces a single word embedding Arguments: xs: [n_feats, batch_size, seq_len] LongTensor Returns: [batch_size, seq_len, word_dim] FloatTensor<|endoftext|>
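Note: a quick standalone shape check of the concatenation pattern in _embeddings (the vocabulary sizes and embedding dimensions below are made up for illustration):

import torch
import torch.nn as nn

embeddings = nn.ModuleList([nn.Embedding(100, 16), nn.Embedding(50, 8)])
xs = torch.randint(0, 50, (2, 4, 7))  # [n_feats=2, batch_size=4, seq_len=7]
x = torch.cat([emb(x_i) for emb, x_i in zip(embeddings, xs)], 2)
print(x.shape)  # torch.Size([4, 7, 24]), i.e. [batch_size, seq_len, word_dim]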
97e70dc53d77c29af69eb1c3bd321b2530060947c98125a047ea1adf7298ddbe
@commands.command() async def avatar(self, ctx, *, avamember: discord.Member=None): 'istenilen kullanıcının profil resmini atar.' embed = discord.Embed(color=65450, timestamp=ctx.message.created_at, title=f'Avatar : {avamember}') embed.set_image(url='{}'.format(avamember.avatar_url)) embed.set_footer(text=f'{ctx.author} tarafından istendi') (await ctx.send(embed=embed))
istenilen kullanıcının profil resmini atar.
BOT-ALTYAPI-PYTHON/Dbot/cogs/avatar.py
avatar
Ufuxzk/DBOT
0
python
@commands.command() async def avatar(self, ctx, *, avamember: discord.Member=None): embed = discord.Embed(color=65450, timestamp=ctx.message.created_at, title=f'Avatar : {avamember}') embed.set_image(url='{}'.format(avamember.avatar_url)) embed.set_footer(text=f'{ctx.author} tarafından istendi') (await ctx.send(embed=embed))
@commands.command() async def avatar(self, ctx, *, avamember: discord.Member=None): embed = discord.Embed(color=65450, timestamp=ctx.message.created_at, title=f'Avatar : {avamember}') embed.set_image(url='{}'.format(avamember.avatar_url)) embed.set_footer(text=f'{ctx.author} tarafından istendi') (await ctx.send(embed=embed))<|docstring|>istenilen kullanıcının profil resmini atar.<|endoftext|>
564d8705583bb9c9c5d3a9178ddecd5d4fdf1f25799c42cbb181505340ed73c5
@commands.command() async def infouser(self, ctx, *, member: discord.Member=None): 'Etiketlenen kullanıcının bilgisini atar(etiket atmazsanız kendi bilginizi atar).' if (not member): member = ctx.message.author roles = [role for role in member.roles] embed = discord.Embed(colour=discord.Colour.green(), timestamp=ctx.message.created_at, title=f'Kullanıcı bilgisi - {member}') embed.set_thumbnail(url=member.avatar_url) embed.set_footer(text=f'{ctx.author} tarafından oluşturuldu.') embed.add_field(name='ID:', value=member.id) embed.add_field(name='Görünen adı:', value=member.display_name) embed.add_field(name='Hesap oluşturma tarihi:', value=member.created_at.strftime('%a, %#d %B %Y, %I:%M %p UTC')) embed.add_field(name='Sunucuya katılma tarihi:', value=member.joined_at.strftime('%a, %#d %B %Y, %I:%M %p UTC')) embed.add_field(name='Rolleri:', value=''.join([role.mention for role in member.roles[1:]])) embed.add_field(name='En yüksek rolü:', value=member.top_role.mention) print(member.top_role.mention) (await ctx.send(embed=embed))
Etiketlenen kullanıcının bilgisini atar(etiket atmazsanız kendi bilginizi atar).
BOT-ALTYAPI-PYTHON/Dbot/cogs/avatar.py
infouser
Ufuxzk/DBOT
0
python
@commands.command() async def infouser(self, ctx, *, member: discord.Member=None): if (not member): member = ctx.message.author roles = [role for role in member.roles] embed = discord.Embed(colour=discord.Colour.green(), timestamp=ctx.message.created_at, title=f'Kullanıcı bilgisi - {member}') embed.set_thumbnail(url=member.avatar_url) embed.set_footer(text=f'{ctx.author} tarafından oluşturuldu.') embed.add_field(name='ID:', value=member.id) embed.add_field(name='Görünen adı:', value=member.display_name) embed.add_field(name='Hesap oluşturma tarihi:', value=member.created_at.strftime('%a, %#d %B %Y, %I:%M %p UTC')) embed.add_field(name='Sunucuya katılma tarihi:', value=member.joined_at.strftime('%a, %#d %B %Y, %I:%M %p UTC')) embed.add_field(name='Rolleri:', value=''.join([role.mention for role in member.roles[1:]])) embed.add_field(name='En yüksek rolü:', value=member.top_role.mention) print(member.top_role.mention) (await ctx.send(embed=embed))
@commands.command() async def infouser(self, ctx, *, member: discord.Member=None): if (not member): member = ctx.message.author roles = [role for role in member.roles] embed = discord.Embed(colour=discord.Colour.green(), timestamp=ctx.message.created_at, title=f'Kullanıcı bilgisi - {member}') embed.set_thumbnail(url=member.avatar_url) embed.set_footer(text=f'{ctx.author} tarafından oluşturuldu.') embed.add_field(name='ID:', value=member.id) embed.add_field(name='Görünen adı:', value=member.display_name) embed.add_field(name='Hesap oluşturma tarihi:', value=member.created_at.strftime('%a, %#d %B %Y, %I:%M %p UTC')) embed.add_field(name='Sunucuya katılma tarihi:', value=member.joined_at.strftime('%a, %#d %B %Y, %I:%M %p UTC')) embed.add_field(name='Rolleri:', value=''.join([role.mention for role in member.roles[1:]])) embed.add_field(name='En yüksek rolü:', value=member.top_role.mention) print(member.top_role.mention) (await ctx.send(embed=embed))<|docstring|>Etiketlenen kullanıcının bilgisini atar(etiket atmazsanız kendi bilginizi atar).<|endoftext|>
7b2b1447eec6eafca08bc26d45489e82a6cedc64c93b16c6b6b0f84014471b59
@commands.command() @commands.has_permissions(manage_messages=True) async def cls(self, ctx, amount: int): 'Girilen miktarda mesajı siler' (await ctx.channel.purge(limit=amount)) (await ctx.send('Mesajlar silindi', delete_after=5))
Girilen miktarda mesajı siler
BOT-ALTYAPI-PYTHON/Dbot/cogs/avatar.py
cls
Ufuxzk/DBOT
0
python
@commands.command() @commands.has_permissions(manage_messages=True) async def cls(self, ctx, amount: int): (await ctx.channel.purge(limit=amount)) (await ctx.send('Mesajlar silindi', delete_after=5))
@commands.command() @commands.has_permissions(manage_messages=True) async def cls(self, ctx, amount: int): (await ctx.channel.purge(limit=amount)) (await ctx.send('Mesajlar silindi', delete_after=5))<|docstring|>Girilen miktarda mesajı siler<|endoftext|>
a15d5413678aa12a446a42ef77845a8a5e683d25fbe2f858c2d1e4def8d4ccc4
@commands.command() @commands.has_permissions(manage_messages=True) async def purge(self, ctx): '1000 adet mesaj siler.' (await ctx.channel.purge(limit=1000)) (await ctx.send(f'Sohbet temizlendi!', delete_after=5))
1000 adet mesaj siler.
BOT-ALTYAPI-PYTHON/Dbot/cogs/avatar.py
purge
Ufuxzk/DBOT
0
python
@commands.command() @commands.has_permissions(manage_messages=True) async def purge(self, ctx): (await ctx.channel.purge(limit=1000)) (await ctx.send(f'Sohbet temizlendi!', delete_after=5))
@commands.command() @commands.has_permissions(manage_messages=True) async def purge(self, ctx): (await ctx.channel.purge(limit=1000)) (await ctx.send(f'Sohbet temizlendi!', delete_after=5))<|docstring|>1000 adet mesaj siler.<|endoftext|>
f0e36b13db9825647653f5387012706aa9d6757a7523b5cb3552650c5912b001
def lastUpdateDate(dataset, date): "\n Given a Resource Watch dataset's API ID and a datetime,\n this function will update the dataset's 'last update date' on the API with the given datetime\n INPUT dataset: Resource Watch API dataset ID (string)\n date: date to set as the 'last update date' for the input dataset (datetime)\n " apiUrl = f'http://api.resourcewatch.org/v1/dataset/{dataset}' headers = {'Content-Type': 'application/json', 'Authorization': os.getenv('apiToken')} body = {'dataLastUpdated': date.isoformat()} try: r = requests.patch(url=apiUrl, json=body, headers=headers) logging.info(((('[lastUpdated]: SUCCESS, ' + date.isoformat()) + ' status code ') + str(r.status_code))) return 0 except Exception as e: logging.error(('[lastUpdated]: ' + str(e)))
Given a Resource Watch dataset's API ID and a datetime, this function will update the dataset's 'last update date' on the API with the given datetime INPUT dataset: Resource Watch API dataset ID (string) date: date to set as the 'last update date' for the input dataset (datetime)
soc_016_conflict_protest_events/contents/src/__init__.py
lastUpdateDate
resource-watch/nrt-scripts
6
python
def lastUpdateDate(dataset, date): "\n Given a Resource Watch dataset's API ID and a datetime,\n this function will update the dataset's 'last update date' on the API with the given datetime\n INPUT dataset: Resource Watch API dataset ID (string)\n date: date to set as the 'last update date' for the input dataset (datetime)\n " apiUrl = f'http://api.resourcewatch.org/v1/dataset/{dataset}' headers = {'Content-Type': 'application/json', 'Authorization': os.getenv('apiToken')} body = {'dataLastUpdated': date.isoformat()} try: r = requests.patch(url=apiUrl, json=body, headers=headers) logging.info(((('[lastUpdated]: SUCCESS, ' + date.isoformat()) + ' status code ') + str(r.status_code))) return 0 except Exception as e: logging.error(('[lastUpdated]: ' + str(e)))
def lastUpdateDate(dataset, date): "\n Given a Resource Watch dataset's API ID and a datetime,\n this function will update the dataset's 'last update date' on the API with the given datetime\n INPUT dataset: Resource Watch API dataset ID (string)\n date: date to set as the 'last update date' for the input dataset (datetime)\n " apiUrl = f'http://api.resourcewatch.org/v1/dataset/{dataset}' headers = {'Content-Type': 'application/json', 'Authorization': os.getenv('apiToken')} body = {'dataLastUpdated': date.isoformat()} try: r = requests.patch(url=apiUrl, json=body, headers=headers) logging.info(((('[lastUpdated]: SUCCESS, ' + date.isoformat()) + ' status code ') + str(r.status_code))) return 0 except Exception as e: logging.error(('[lastUpdated]: ' + str(e)))<|docstring|>Given a Resource Watch dataset's API ID and a datetime, this function will update the dataset's 'last update date' on the API with the given datetime INPUT dataset: Resource Watch API dataset ID (string) date: date to set as the 'last update date' for the input dataset (datetime)<|endoftext|>
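A minimal sketch of the payload that lastUpdateDate assembles, with a placeholder dataset id and token (only the body and headers are built here; the actual requests.patch call is left as a comment):

import datetime
import os

dataset = 'abc123-placeholder'  # hypothetical Resource Watch dataset id, not a real one
api_url = 'http://api.resourcewatch.org/v1/dataset/{}'.format(dataset)
headers = {'Content-Type': 'application/json', 'Authorization': os.getenv('apiToken', '<token>')}
body = {'dataLastUpdated': datetime.datetime(2021, 6, 1).isoformat()}
print(api_url)
print(body)  # {'dataLastUpdated': '2021-06-01T00:00:00'}
# requests.patch(url=api_url, json=body, headers=headers) would send the update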
2788281155298e46332fa8d63878b784ee61df1320d3679b2235a587e81ceefb
def checkCreateTable(table, schema, id_field, time_field=''): "\n Create the table if it does not exist, and pull list of IDs already in the table if it does\n INPUT table: Carto table to check or create (string)\n schema: dictionary of column names and types, used if we are creating the table for the first time (dictionary)\n id_field: name of column that we want to use as a unique ID for this table; this will be used to compare the\n source data to the our table each time we run the script so that we only have to pull data we\n haven't previously uploaded (string)\n time_field: optional, name of column that will store datetime information (string)\n RETURN list of existing IDs in the table, pulled from the id_field column (list of strings)\n " if cartosql.tableExists(table, user=CARTO_USER, key=CARTO_KEY): logging.info('Fetching existing IDs') r = cartosql.getFields(id_field, table, f='csv', post=True, user=CARTO_USER, key=CARTO_KEY) return r.text.split('\r\n')[1:(- 1)] else: logging.info('Table {} does not exist, creating'.format(table)) cartosql.createTable(table, schema, user=CARTO_USER, key=CARTO_KEY) if id_field: cartosql.createIndex(table, id_field, unique=True, user=CARTO_USER, key=CARTO_KEY) if time_field: cartosql.createIndex(table, time_field, user=CARTO_USER, key=CARTO_KEY) return []
Create the table if it does not exist, and pull list of IDs already in the table if it does INPUT table: Carto table to check or create (string) schema: dictionary of column names and types, used if we are creating the table for the first time (dictionary) id_field: name of column that we want to use as a unique ID for this table; this will be used to compare the source data to the our table each time we run the script so that we only have to pull data we haven't previously uploaded (string) time_field: optional, name of column that will store datetime information (string) RETURN list of existing IDs in the table, pulled from the id_field column (list of strings)
soc_016_conflict_protest_events/contents/src/__init__.py
checkCreateTable
resource-watch/nrt-scripts
6
python
def checkCreateTable(table, schema, id_field, time_field=''): "\n Create the table if it does not exist, and pull list of IDs already in the table if it does\n INPUT table: Carto table to check or create (string)\n schema: dictionary of column names and types, used if we are creating the table for the first time (dictionary)\n id_field: name of column that we want to use as a unique ID for this table; this will be used to compare the\n source data to the our table each time we run the script so that we only have to pull data we\n haven't previously uploaded (string)\n time_field: optional, name of column that will store datetime information (string)\n RETURN list of existing IDs in the table, pulled from the id_field column (list of strings)\n " if cartosql.tableExists(table, user=CARTO_USER, key=CARTO_KEY): logging.info('Fetching existing IDs') r = cartosql.getFields(id_field, table, f='csv', post=True, user=CARTO_USER, key=CARTO_KEY) return r.text.split('\r\n')[1:(- 1)] else: logging.info('Table {} does not exist, creating'.format(table)) cartosql.createTable(table, schema, user=CARTO_USER, key=CARTO_KEY) if id_field: cartosql.createIndex(table, id_field, unique=True, user=CARTO_USER, key=CARTO_KEY) if time_field: cartosql.createIndex(table, time_field, user=CARTO_USER, key=CARTO_KEY) return []
def checkCreateTable(table, schema, id_field, time_field=''): "\n Create the table if it does not exist, and pull list of IDs already in the table if it does\n INPUT table: Carto table to check or create (string)\n schema: dictionary of column names and types, used if we are creating the table for the first time (dictionary)\n id_field: name of column that we want to use as a unique ID for this table; this will be used to compare the\n source data to the our table each time we run the script so that we only have to pull data we\n haven't previously uploaded (string)\n time_field: optional, name of column that will store datetime information (string)\n RETURN list of existing IDs in the table, pulled from the id_field column (list of strings)\n " if cartosql.tableExists(table, user=CARTO_USER, key=CARTO_KEY): logging.info('Fetching existing IDs') r = cartosql.getFields(id_field, table, f='csv', post=True, user=CARTO_USER, key=CARTO_KEY) return r.text.split('\r\n')[1:(- 1)] else: logging.info('Table {} does not exist, creating'.format(table)) cartosql.createTable(table, schema, user=CARTO_USER, key=CARTO_KEY) if id_field: cartosql.createIndex(table, id_field, unique=True, user=CARTO_USER, key=CARTO_KEY) if time_field: cartosql.createIndex(table, time_field, user=CARTO_USER, key=CARTO_KEY) return []<|docstring|>Create the table if it does not exist, and pull list of IDs already in the table if it does INPUT table: Carto table to check or create (string) schema: dictionary of column names and types, used if we are creating the table for the first time (dictionary) id_field: name of column that we want to use as a unique ID for this table; this will be used to compare the source data to the our table each time we run the script so that we only have to pull data we haven't previously uploaded (string) time_field: optional, name of column that will store datetime information (string) RETURN list of existing IDs in the table, pulled from the id_field column (list of strings)<|endoftext|>
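The id list returned by checkCreateTable comes from splitting Carto's CSV response on '\r\n' and dropping the header row plus the empty trailing element; a stdlib-only illustration with made-up response text:

sample_csv = 'uid\r\n101\r\n102\r\n103\r\n'  # hypothetical body of the cartosql.getFields response
existing_ids = sample_csv.split('\r\n')[1:-1]  # drop the 'uid' header and the trailing empty string
print(existing_ids)  # ['101', '102', '103']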
e201844d9a599da098b99a9d4a992b5892e60dad8182fe94d41cd06d265d460a
def fetch_data(src_url): '\n Fetch ACLED data via the API\n INPUT src_url: the url to fetch data from (string)\n RETURN data_gdf: ACLED data during the past 12 months (geopandas dataframe)\n ' src_url = SOURCE_URL date_start = get_date_range()[0].strftime('%Y-%m-%d') date_end = get_date_range()[1].strftime('%Y-%m-%d') page = 0 new_count = 1 new_ids = [] data_df = pd.DataFrame() while ((page <= MIN_PAGES) or (new_count and (page < MAX_PAGES))): try: page += 1 logging.info('Fetching page {}'.format(page)) new_rows = [] r = requests.get(src_url.format(key=ACLED_KEY, user=ACLED_USER, date_start=date_start, date_end=date_end, page=page)) ' cols = ["data_id", "event_date", "year", "time_precision", "event_type", "sub_event_type", "actor1", "assoc_actor_1", "inter1", \n "actor2", "assoc_actor_2", "inter2", "interaction", "country", "iso3", "region", "admin1", "admin2", "admin3", "location", \n "geo_precision", "time_precision", "source", "source_scale", "notes", "fatalities", "latitude", "longitude"] ' cols = ['data_id', 'event_type', 'latitude', 'longitude'] for obs in r.json()['data']: new_ids.append(obs['data_id']) row = [] for col in cols: try: row.append(obs[col]) except: logging.debug('{} not available for this row'.format(col)) row.append('') new_rows.append(row) new_count = len(new_rows) data_df = data_df.append(pd.DataFrame(new_rows, columns=cols)) except: logging.error('Could not fetch or process page {}'.format(page)) data_df = data_df.drop_duplicates(['data_id']).iloc[:, 1:] data_gdf = gpd.GeoDataFrame(data_df, geometry=gpd.points_from_xy(data_df.longitude, data_df.latitude)) data_gdf.set_crs(epsg=3857, inplace=True) return data_gdf
Fetch ACLED data via the API INPUT src_url: the url to fetch data from (string) RETURN data_gdf: ACLED data during the past 12 months (geopandas dataframe)
soc_016_conflict_protest_events/contents/src/__init__.py
fetch_data
resource-watch/nrt-scripts
6
python
def fetch_data(src_url): '\n Fetch ACLED data via the API\n INPUT src_url: the url to fetch data from (string)\n RETURN data_gdf: ACLED data during the past 12 months (geopandas dataframe)\n ' src_url = SOURCE_URL date_start = get_date_range()[0].strftime('%Y-%m-%d') date_end = get_date_range()[1].strftime('%Y-%m-%d') page = 0 new_count = 1 new_ids = [] data_df = pd.DataFrame() while ((page <= MIN_PAGES) or (new_count and (page < MAX_PAGES))): try: page += 1 logging.info('Fetching page {}'.format(page)) new_rows = [] r = requests.get(src_url.format(key=ACLED_KEY, user=ACLED_USER, date_start=date_start, date_end=date_end, page=page)) ' cols = ["data_id", "event_date", "year", "time_precision", "event_type", "sub_event_type", "actor1", "assoc_actor_1", "inter1", \n "actor2", "assoc_actor_2", "inter2", "interaction", "country", "iso3", "region", "admin1", "admin2", "admin3", "location", \n "geo_precision", "time_precision", "source", "source_scale", "notes", "fatalities", "latitude", "longitude"] ' cols = ['data_id', 'event_type', 'latitude', 'longitude'] for obs in r.json()['data']: new_ids.append(obs['data_id']) row = [] for col in cols: try: row.append(obs[col]) except: logging.debug('{} not available for this row'.format(col)) row.append('') new_rows.append(row) new_count = len(new_rows) data_df = data_df.append(pd.DataFrame(new_rows, columns=cols)) except: logging.error('Could not fetch or process page {}'.format(page)) data_df = data_df.drop_duplicates(['data_id']).iloc[:, 1:] data_gdf = gpd.GeoDataFrame(data_df, geometry=gpd.points_from_xy(data_df.longitude, data_df.latitude)) data_gdf.set_crs(epsg=3857, inplace=True) return data_gdf
def fetch_data(src_url): '\n Fetch ACLED data via the API\n INPUT src_url: the url to fetch data from (string)\n RETURN data_gdf: ACLED data during the past 12 months (geopandas dataframe)\n ' src_url = SOURCE_URL date_start = get_date_range()[0].strftime('%Y-%m-%d') date_end = get_date_range()[1].strftime('%Y-%m-%d') page = 0 new_count = 1 new_ids = [] data_df = pd.DataFrame() while ((page <= MIN_PAGES) or (new_count and (page < MAX_PAGES))): try: page += 1 logging.info('Fetching page {}'.format(page)) new_rows = [] r = requests.get(src_url.format(key=ACLED_KEY, user=ACLED_USER, date_start=date_start, date_end=date_end, page=page)) ' cols = ["data_id", "event_date", "year", "time_precision", "event_type", "sub_event_type", "actor1", "assoc_actor_1", "inter1", \n "actor2", "assoc_actor_2", "inter2", "interaction", "country", "iso3", "region", "admin1", "admin2", "admin3", "location", \n "geo_precision", "time_precision", "source", "source_scale", "notes", "fatalities", "latitude", "longitude"] ' cols = ['data_id', 'event_type', 'latitude', 'longitude'] for obs in r.json()['data']: new_ids.append(obs['data_id']) row = [] for col in cols: try: row.append(obs[col]) except: logging.debug('{} not available for this row'.format(col)) row.append('') new_rows.append(row) new_count = len(new_rows) data_df = data_df.append(pd.DataFrame(new_rows, columns=cols)) except: logging.error('Could not fetch or process page {}'.format(page)) data_df = data_df.drop_duplicates(['data_id']).iloc[:, 1:] data_gdf = gpd.GeoDataFrame(data_df, geometry=gpd.points_from_xy(data_df.longitude, data_df.latitude)) data_gdf.set_crs(epsg=3857, inplace=True) return data_gdf<|docstring|>Fetch ACLED data via the API INPUT src_url: the url to fetch data from (string) RETURN data_gdf: ACLED data during the past 12 months (geopandas dataframe)<|endoftext|>
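The paging condition in fetch_data keeps requesting until a page comes back empty, subject to a minimum and maximum page count; a stdlib-only sketch of that loop shape, using a stand-in fetcher instead of the ACLED API:

MIN_PAGES = 2
MAX_PAGES = 5
fake_pages = {1: ['a', 'b'], 2: ['c'], 3: []}  # stand-in for the paged API responses

def fetch_page(page):
    # placeholder for requests.get(...).json()['data']
    return fake_pages.get(page, [])

page = 0
new_count = 1
rows = []
while (page <= MIN_PAGES) or (new_count and (page < MAX_PAGES)):
    page += 1
    new_rows = fetch_page(page)
    new_count = len(new_rows)
    rows.extend(new_rows)
print(page, rows)  # 3 ['a', 'b', 'c'] -- stops once a page past the minimum returns nothing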
3dddc5e7152d6518d0f7ea6a96566a78976d2a520bb44b22574f0c3f265fd6e9
def processNewData(data_gdf): '\n Process and upload new data\n INPUT data_gdf: geopandas dataframe storing the point ACLED data (geopandas dataframe)\n RETURN new_ids: list of unique ids of new data sent to Carto table (list of strings)\n ' r = cartosql.getFields('objectid', CARTO_GEO, f='csv', post=True, user=CARTO_USER, key=CARTO_KEY) geo_id = r.text.split('\r\n')[1:(- 1)] uploaded_ids = [] acled_coverage = fetch_acled_iso() slice = 1000 for i in range(0, len(geo_id), slice): if ((len(geo_id) - i) < slice): slice = (len(geo_id) - i) geo_gdf = get_admin_area(CARTO_GEO, geo_id[i:(i + slice)]) joined = spatial_join(data_gdf, geo_gdf) joined['geometry'] = [convert_geometry(geom) for geom in joined.geometry] with ThreadPoolExecutor(max_workers=8) as executor: futures = [] for (index, row) in joined.iterrows(): row = row.where(row.notnull(), None) if ((row['objectid'] not in uploaded_ids) and (row['gid_0'] in acled_coverage)): futures.append(executor.submit(upload_to_carto, row)) for future in as_completed(futures): uploaded_ids.append(future.result()) logging.info('{} of rows uploaded to Carto.'.format(slice)) return uploaded_ids
Process and upload new data INPUT data_gdf: geopandas dataframe storing the point ACLED data (geopandas dataframe) RETURN new_ids: list of unique ids of new data sent to Carto table (list of strings)
soc_016_conflict_protest_events/contents/src/__init__.py
processNewData
resource-watch/nrt-scripts
6
python
def processNewData(data_gdf): '\n Process and upload new data\n INPUT data_gdf: geopandas dataframe storing the point ACLED data (geopandas dataframe)\n RETURN new_ids: list of unique ids of new data sent to Carto table (list of strings)\n ' r = cartosql.getFields('objectid', CARTO_GEO, f='csv', post=True, user=CARTO_USER, key=CARTO_KEY) geo_id = r.text.split('\r\n')[1:(- 1)] uploaded_ids = [] acled_coverage = fetch_acled_iso() slice = 1000 for i in range(0, len(geo_id), slice): if ((len(geo_id) - i) < slice): slice = (len(geo_id) - i) geo_gdf = get_admin_area(CARTO_GEO, geo_id[i:(i + slice)]) joined = spatial_join(data_gdf, geo_gdf) joined['geometry'] = [convert_geometry(geom) for geom in joined.geometry] with ThreadPoolExecutor(max_workers=8) as executor: futures = [] for (index, row) in joined.iterrows(): row = row.where(row.notnull(), None) if ((row['objectid'] not in uploaded_ids) and (row['gid_0'] in acled_coverage)): futures.append(executor.submit(upload_to_carto, row)) for future in as_completed(futures): uploaded_ids.append(future.result()) logging.info('{} of rows uploaded to Carto.'.format(slice)) return uploaded_ids
def processNewData(data_gdf): '\n Process and upload new data\n INPUT data_gdf: geopandas dataframe storing the point ACLED data (geopandas dataframe)\n RETURN new_ids: list of unique ids of new data sent to Carto table (list of strings)\n ' r = cartosql.getFields('objectid', CARTO_GEO, f='csv', post=True, user=CARTO_USER, key=CARTO_KEY) geo_id = r.text.split('\r\n')[1:(- 1)] uploaded_ids = [] acled_coverage = fetch_acled_iso() slice = 1000 for i in range(0, len(geo_id), slice): if ((len(geo_id) - i) < slice): slice = (len(geo_id) - i) geo_gdf = get_admin_area(CARTO_GEO, geo_id[i:(i + slice)]) joined = spatial_join(data_gdf, geo_gdf) joined['geometry'] = [convert_geometry(geom) for geom in joined.geometry] with ThreadPoolExecutor(max_workers=8) as executor: futures = [] for (index, row) in joined.iterrows(): row = row.where(row.notnull(), None) if ((row['objectid'] not in uploaded_ids) and (row['gid_0'] in acled_coverage)): futures.append(executor.submit(upload_to_carto, row)) for future in as_completed(futures): uploaded_ids.append(future.result()) logging.info('{} of rows uploaded to Carto.'.format(slice)) return uploaded_ids<|docstring|>Process and upload new data INPUT data_gdf: geopandas dataframe storing the point ACLED data (geopandas dataframe) RETURN new_ids: list of unique ids of new data sent to Carto table (list of strings)<|endoftext|>
ce4ff85f635b665d0b12a4945e8e4bee3189658de250d29c293930637d3519c6
def fetch_acled_iso(): '\n Fetch the countries covered by the ACLED dataset \n RETURN iso_list: list of isos for countries that are covered by ACLED (list of strings)\n ' iso_url = 'https://api.acleddata.com/country/read?key={key}&email={user}' r = requests.get(iso_url.format(key=ACLED_KEY, user=ACLED_USER)) iso_list = [country['iso3'] for country in r.json()['data']] return iso_list
Fetch the countries covered by the ACLED dataset RETURN iso_list: list of isos for countries that are covered by ACLED (list of strings)
soc_016_conflict_protest_events/contents/src/__init__.py
fetch_acled_iso
resource-watch/nrt-scripts
6
python
def fetch_acled_iso(): '\n Fetch the countries covered by the ACLED dataset \n RETURN iso_list: list of isos for countries that are covered by ACLED (list of strings)\n ' iso_url = 'https://api.acleddata.com/country/read?key={key}&email={user}' r = requests.get(iso_url.format(key=ACLED_KEY, user=ACLED_USER)) iso_list = [country['iso3'] for country in r.json()['data']] return iso_list
def fetch_acled_iso(): '\n Fetch the countries covered by the ACLED dataset \n RETURN iso_list: list of isos for countries that are covered by ACLED (list of strings)\n ' iso_url = 'https://api.acleddata.com/country/read?key={key}&email={user}' r = requests.get(iso_url.format(key=ACLED_KEY, user=ACLED_USER)) iso_list = [country['iso3'] for country in r.json()['data']] return iso_list<|docstring|>Fetch the countries covered by the ACLED dataset RETURN iso_list: list of isos for countries that are covered by ACLED (list of strings)<|endoftext|>
bccb5826052bc59b0dfbfb238ac9fe91d45c757ad6b7e0a59a21fc7270905f02
def convert_geometry(geom): '\n Function to convert shapely geometries to geojsons\n INPUT geom: shapely geometry \n RETURN output: geojson \n ' return geom.__geo_interface__
Function to convert shapely geometries to geojsons INPUT geom: shapely geometry RETURN output: geojson
soc_016_conflict_protest_events/contents/src/__init__.py
convert_geometry
resource-watch/nrt-scripts
6
python
def convert_geometry(geom): '\n Function to convert shapely geometries to geojsons\n INPUT geom: shapely geometry \n RETURN output: geojson \n ' return geom.__geo_interface__
def convert_geometry(geom): '\n Function to convert shapely geometries to geojsons\n INPUT geom: shapely geometry \n RETURN output: geojson \n ' return geom.__geo_interface__<|docstring|>Function to convert shapely geometries to geojsons INPUT geom: shapely geometry RETURN output: geojson<|endoftext|>
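convert_geometry relies on the __geo_interface__ protocol that shapely geometries expose; for example (requires shapely):

from shapely.geometry import Point

geom = Point(30.5, 50.4)
print(geom.__geo_interface__)  # {'type': 'Point', 'coordinates': (30.5, 50.4)}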
a7c9f596faa3ac292f4a953eb081bbe9c30c973d72d43c8b631502bf07a919d5
def upload_to_carto(row): '\n Function to upload data to the Carto table \n INPUT row: the geopandas dataframe of data we want to upload (geopandas dataframe)\n RETURN the objectid of the row just uploaded (string)\n ' n_tries = 4 retry_wait_time = 6 insert_exception = None fields = CARTO_SCHEMA.keys() values = cartosql._dumpRows([row.values.tolist()], tuple(CARTO_SCHEMA.values())) payload = {'api_key': CARTO_KEY, 'q': 'INSERT INTO "{}" ({}) VALUES {}'.format(CARTO_TABLE, ', '.join(fields), values)} for i in range(n_tries): try: r = session.post('https://{}.carto.com/api/v2/sql'.format(CARTO_USER), json=payload) r.raise_for_status() except Exception as e: insert_exception = e if (r.status_code != 429): try: logging.error(r.content) except: pass logging.warning('Attempt #{} to upload row #{} unsuccessful. Trying again after {} seconds'.format(i, row['objectid'], retry_wait_time)) logging.debug(('Exception encountered during upload attempt: ' + str(e))) time.sleep(retry_wait_time) else: return row['objectid'] else: logging.error('Upload of row #{} has failed after {} attempts'.format(row['objectid'], n_tries)) logging.error(('Problematic row: ' + str(row))) logging.error('Raising exception encountered during last upload attempt') logging.error(insert_exception) raise insert_exception return row['objectid']
Function to upload data to the Carto table INPUT row: the geopandas dataframe of data we want to upload (geopandas dataframe) RETURN the objectid of the row just uploaded (string)
soc_016_conflict_protest_events/contents/src/__init__.py
upload_to_carto
resource-watch/nrt-scripts
6
python
def upload_to_carto(row): '\n Function to upload data to the Carto table \n INPUT row: the geopandas dataframe of data we want to upload (geopandas dataframe)\n RETURN the objectid of the row just uploaded (string)\n ' n_tries = 4 retry_wait_time = 6 insert_exception = None fields = CARTO_SCHEMA.keys() values = cartosql._dumpRows([row.values.tolist()], tuple(CARTO_SCHEMA.values())) payload = {'api_key': CARTO_KEY, 'q': 'INSERT INTO "{}" ({}) VALUES {}'.format(CARTO_TABLE, ', '.join(fields), values)} for i in range(n_tries): try: r = session.post('https://{}.carto.com/api/v2/sql'.format(CARTO_USER), json=payload) r.raise_for_status() except Exception as e: insert_exception = e if (r.status_code != 429): try: logging.error(r.content) except: pass logging.warning('Attempt #{} to upload row #{} unsuccessful. Trying again after {} seconds'.format(i, row['objectid'], retry_wait_time)) logging.debug(('Exception encountered during upload attempt: ' + str(e))) time.sleep(retry_wait_time) else: return row['objectid'] else: logging.error('Upload of row #{} has failed after {} attempts'.format(row['objectid'], n_tries)) logging.error(('Problematic row: ' + str(row))) logging.error('Raising exception encountered during last upload attempt') logging.error(insert_exception) raise insert_exception return row['objectid']
def upload_to_carto(row): '\n Function to upload data to the Carto table \n INPUT row: the geopandas dataframe of data we want to upload (geopandas dataframe)\n RETURN the objectid of the row just uploaded (string)\n ' n_tries = 4 retry_wait_time = 6 insert_exception = None fields = CARTO_SCHEMA.keys() values = cartosql._dumpRows([row.values.tolist()], tuple(CARTO_SCHEMA.values())) payload = {'api_key': CARTO_KEY, 'q': 'INSERT INTO "{}" ({}) VALUES {}'.format(CARTO_TABLE, ', '.join(fields), values)} for i in range(n_tries): try: r = session.post('https://{}.carto.com/api/v2/sql'.format(CARTO_USER), json=payload) r.raise_for_status() except Exception as e: insert_exception = e if (r.status_code != 429): try: logging.error(r.content) except: pass logging.warning('Attempt #{} to upload row #{} unsuccessful. Trying again after {} seconds'.format(i, row['objectid'], retry_wait_time)) logging.debug(('Exception encountered during upload attempt: ' + str(e))) time.sleep(retry_wait_time) else: return row['objectid'] else: logging.error('Upload of row #{} has failed after {} attempts'.format(row['objectid'], n_tries)) logging.error(('Problematic row: ' + str(row))) logging.error('Raising exception encountered during last upload attempt') logging.error(insert_exception) raise insert_exception return row['objectid']<|docstring|>Function to upload data to the Carto table INPUT row: the geopandas dataframe of data we want to upload (geopandas dataframe) RETURN the objectid of the row just uploaded (string)<|endoftext|>
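The retry shape in upload_to_carto (try a fixed number of times, wait between attempts, re-raise the last exception if every attempt fails) can be sketched in isolation with a flaky stand-in for the SQL POST:

import time

attempts = {'n': 0}

def flaky_insert():
    # stand-in for the Carto SQL POST; fails twice, then succeeds
    attempts['n'] += 1
    if attempts['n'] < 3:
        raise RuntimeError('simulated transient error')
    return 'ok'

n_tries = 4
retry_wait_time = 0.1
last_exception = None
for i in range(n_tries):
    try:
        result = flaky_insert()
    except Exception as e:
        last_exception = e
        time.sleep(retry_wait_time)
    else:
        print('succeeded on attempt', i + 1)  # succeeded on attempt 3
        break
else:
    raise last_exception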
dfc3636da8481c27aad147586743a38c1935f08927f66ccc8fc7823ad48c67ef
def get_admin_area(admin_table, id_list): '\n Obtain entries in Carto table based on values in a specified column\n INPUT admin_table: the name of the carto table storing the administrative areas (string)\n id_list: list of ids for rows to fetch from the table (list of strings)\n RETURN admin_gdf: data fetched from the table (geopandas dataframe)\n ' where = None column = 'objectid' for id in id_list: if where: where += f" OR {column} = '{id}'" else: where = f"{column} = '{id}'" sql = 'SELECT * FROM "{}" WHERE {}'.format(admin_table, where) r = cartosql.sendSql(sql, user=CARTO_USER, key=CARTO_KEY, f='GeoJSON', post=True) data = r.json() admin_gdf = gpd.GeoDataFrame.from_features(data) return admin_gdf
Obtain entries in Carto table based on values in a specified column INPUT admin_table: the name of the carto table storing the administrative areas (string) id_list: list of ids for rows to fetch from the table (list of strings) RETURN admin_gdf: data fetched from the table (geopandas dataframe)
soc_016_conflict_protest_events/contents/src/__init__.py
get_admin_area
resource-watch/nrt-scripts
6
python
def get_admin_area(admin_table, id_list): '\n Obtain entries in Carto table based on values in a specified column\n INPUT admin_table: the name of the carto table storing the administrative areas (string)\n id_list: list of ids for rows to fetch from the table (list of strings)\n RETURN admin_gdf: data fetched from the table (geopandas dataframe)\n ' where = None column = 'objectid' for id in id_list: if where: where += f" OR {column} = '{id}'" else: where = f"{column} = '{id}'" sql = 'SELECT * FROM "{}" WHERE {}'.format(admin_table, where) r = cartosql.sendSql(sql, user=CARTO_USER, key=CARTO_KEY, f='GeoJSON', post=True) data = r.json() admin_gdf = gpd.GeoDataFrame.from_features(data) return admin_gdf
def get_admin_area(admin_table, id_list): '\n Obtain entries in Carto table based on values in a specified column\n INPUT admin_table: the name of the carto table storing the administrative areas (string)\n id_list: list of ids for rows to fetch from the table (list of strings)\n RETURN admin_gdf: data fetched from the table (geopandas dataframe)\n ' where = None column = 'objectid' for id in id_list: if where: where += f" OR {column} = '{id}'" else: where = f"{column} = '{id}'" sql = 'SELECT * FROM "{}" WHERE {}'.format(admin_table, where) r = cartosql.sendSql(sql, user=CARTO_USER, key=CARTO_KEY, f='GeoJSON', post=True) data = r.json() admin_gdf = gpd.GeoDataFrame.from_features(data) return admin_gdf<|docstring|>Obtain entries in Carto table based on values in a specified column INPUT admin_table: the name of the carto table storing the administrative areas (string) id_list: list of ids for rows to fetch from the table (list of strings) RETURN admin_gdf: data fetched from the table (geopandas dataframe)<|endoftext|>
9af20a3965615f007f65036910355a221fa304d7d54622ca419e8810487e1083
def spatial_join(gdf_pt, gdf_poly): '\n Spatial join two geopandas dataframes \n INPUT gdf_pt: the point data from ACLED (geopandas dataframe)\n gdf_poly: the polygons of administrative areas (geopandas dataframe)\n RETURN pt_poly: number of events per polygon (geopandas dataframe)\n ' dfsjoin = gpd.sjoin(gdf_poly, gdf_pt) pt_count = dfsjoin.groupby(['objectid', 'event_type']).size().reset_index(name='counts') pt_count = pd.pivot_table(pt_count, index='objectid', columns='event_type') pt_count.columns = [x.lower().replace(' ', '_').replace('/', '_') for x in pt_count.columns.droplevel(0)] pt_poly = gdf_poly.merge(pt_count, how='left', on='objectid') pt_poly[pt_count.columns] = pt_poly[pt_count.columns].fillna(value=0) for event_type in ['battles', 'protests', 'riots', 'strategic_developments', 'explosions_remote_violence', 'violence_against_civilians']: if (event_type not in pt_poly.columns): pt_poly[event_type] = 0 pt_poly['total'] = (((((pt_poly['battles'] + pt_poly['protests']) + pt_poly['riots']) + pt_poly['strategic_developments']) + pt_poly['explosions_remote_violence']) + pt_poly['violence_against_civilians']) pt_poly = pt_poly[[('geometry' if (x == 'the_geom') else x) for x in list(CARTO_SCHEMA.keys())]] return pt_poly
Spatial join two geopandas dataframes INPUT gdf_pt: the point data from ACLED (geopandas dataframe) gdf_poly: the polygons of administrative areas (geopandas dataframe) RETURN pt_poly: number of events per polygon (geopandas dataframe)
soc_016_conflict_protest_events/contents/src/__init__.py
spatial_join
resource-watch/nrt-scripts
6
python
def spatial_join(gdf_pt, gdf_poly): '\n Spatial join two geopandas dataframes \n INPUT gdf_pt: the point data from ACLED (geopandas dataframe)\n gdf_poly: the polygons of administrative areas (geopandas dataframe)\n RETURN pt_poly: number of events per polygon (geopandas dataframe)\n ' dfsjoin = gpd.sjoin(gdf_poly, gdf_pt) pt_count = dfsjoin.groupby(['objectid', 'event_type']).size().reset_index(name='counts') pt_count = pd.pivot_table(pt_count, index='objectid', columns='event_type') pt_count.columns = [x.lower().replace(' ', '_').replace('/', '_') for x in pt_count.columns.droplevel(0)] pt_poly = gdf_poly.merge(pt_count, how='left', on='objectid') pt_poly[pt_count.columns] = pt_poly[pt_count.columns].fillna(value=0) for event_type in ['battles', 'protests', 'riots', 'strategic_developments', 'explosions_remote_violence', 'violence_against_civilians']: if (event_type not in pt_poly.columns): pt_poly[event_type] = 0 pt_poly['total'] = (((((pt_poly['battles'] + pt_poly['protests']) + pt_poly['riots']) + pt_poly['strategic_developments']) + pt_poly['explosions_remote_violence']) + pt_poly['violence_against_civilians']) pt_poly = pt_poly[[('geometry' if (x == 'the_geom') else x) for x in list(CARTO_SCHEMA.keys())]] return pt_poly
def spatial_join(gdf_pt, gdf_poly): '\n Spatial join two geopandas dataframes \n INPUT gdf_pt: the point data from ACLED (geopandas dataframe)\n gdf_poly: the polygons of administrative areas (geopandas dataframe)\n RETURN pt_poly: number of events per polygon (geopandas dataframe)\n ' dfsjoin = gpd.sjoin(gdf_poly, gdf_pt) pt_count = dfsjoin.groupby(['objectid', 'event_type']).size().reset_index(name='counts') pt_count = pd.pivot_table(pt_count, index='objectid', columns='event_type') pt_count.columns = [x.lower().replace(' ', '_').replace('/', '_') for x in pt_count.columns.droplevel(0)] pt_poly = gdf_poly.merge(pt_count, how='left', on='objectid') pt_poly[pt_count.columns] = pt_poly[pt_count.columns].fillna(value=0) for event_type in ['battles', 'protests', 'riots', 'strategic_developments', 'explosions_remote_violence', 'violence_against_civilians']: if (event_type not in pt_poly.columns): pt_poly[event_type] = 0 pt_poly['total'] = (((((pt_poly['battles'] + pt_poly['protests']) + pt_poly['riots']) + pt_poly['strategic_developments']) + pt_poly['explosions_remote_violence']) + pt_poly['violence_against_civilians']) pt_poly = pt_poly[[('geometry' if (x == 'the_geom') else x) for x in list(CARTO_SCHEMA.keys())]] return pt_poly<|docstring|>Spatial join two geopandas dataframes INPUT gdf_pt: the point data from ACLED (geopandas dataframe) gdf_poly: the polygons of administrative areas (geopandas dataframe) RETURN pt_poly: number of events per polygon (geopandas dataframe)<|endoftext|>
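The counting step inside spatial_join (group the joined points by polygon id and event type, pivot to one column per type, fill gaps with zero) works the same on a plain pandas frame; a small illustration with made-up rows (requires pandas):

import pandas as pd

joined = pd.DataFrame({
    'objectid': [1, 1, 1, 2],
    'event_type': ['Protests', 'Protests', 'Riots', 'Battles'],
})
counts = joined.groupby(['objectid', 'event_type']).size().reset_index(name='counts')
wide = pd.pivot_table(counts, index='objectid', columns='event_type')
wide.columns = [c.lower().replace(' ', '_').replace('/', '_') for c in wide.columns.droplevel(0)]
wide = wide.fillna(0)
print(wide)  # one column per event type (battles, protests, riots), zeros where a polygon saw none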
4cda57645c519e8a6e7c826b5209e9dc50c49c922c3c700e907617433aa0d7d7
def get_date_range(): '\n Get the dates between which we want to fetch data for\n RETURN date_start: the first date for which we want to fetch data (datetime object)\n date_end: the last date for which we want to fetch data (datetime object)\n ' date_end = (datetime.date.today() + relativedelta(days=(- 1))) date_start = (date_end + relativedelta(months=(- 12))) return (date_start, date_end)
Get the dates between which we want to fetch data for RETURN date_start: the first date for which we want to fetch data (datetime object) date_end: the last date for which we want to fetch data (datetime object)
soc_016_conflict_protest_events/contents/src/__init__.py
get_date_range
resource-watch/nrt-scripts
6
python
def get_date_range(): '\n Get the dates between which we want to fetch data for\n RETURN date_start: the first date for which we want to fetch data (datetime object)\n date_end: the last date for which we want to fetch data (datetime object)\n ' date_end = (datetime.date.today() + relativedelta(days=(- 1))) date_start = (date_end + relativedelta(months=(- 12))) return (date_start, date_end)
def get_date_range(): '\n Get the dates between which we want to fetch data for\n RETURN date_start: the first date for which we want to fetch data (datetime object)\n date_end: the last date for which we want to fetch data (datetime object)\n ' date_end = (datetime.date.today() + relativedelta(days=(- 1))) date_start = (date_end + relativedelta(months=(- 12))) return (date_start, date_end)<|docstring|>Get the dates between which we want to fetch data for RETURN date_start: the first date for which we want to fetch data (datetime object) date_end: the last date for which we want to fetch data (datetime object)<|endoftext|>
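get_date_range is simply "yesterday back to twelve months before that"; the same arithmetic with python-dateutil, using a fixed date instead of today so the output is reproducible:

import datetime
from dateutil.relativedelta import relativedelta

today = datetime.date(2021, 3, 31)          # fixed stand-in for datetime.date.today()
date_end = today + relativedelta(days=-1)   # 2021-03-30
date_start = date_end + relativedelta(months=-12)
print(date_start, date_end)                 # 2020-03-30 2021-03-30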
dcb6f31c3729f5e56863e22c11742943f680618a6adb8d9892bf976a5acee32e
def create_headers(): '\n Create headers to perform authorized actions on API\n\n ' return {'Content-Type': 'application/json', 'Authorization': '{}'.format(os.getenv('apiToken'))}
Create headers to perform authorized actions on API
soc_016_conflict_protest_events/contents/src/__init__.py
create_headers
resource-watch/nrt-scripts
6
python
def create_headers(): '\n \n\n ' return {'Content-Type': 'application/json', 'Authorization': '{}'.format(os.getenv('apiToken'))}
def create_headers(): '\n \n\n ' return {'Content-Type': 'application/json', 'Authorization': '{}'.format(os.getenv('apiToken'))}<|docstring|>Create headers to perform authorized actions on API<|endoftext|>
2d731aeaa93987d6cea4a9e4b89b98622277db35a67701ec461af3242abe7c67
def pull_layers_from_API(dataset_id): '\n Pull dictionary of current layers from API\n INPUT dataset_id: Resource Watch API dataset ID (string)\n RETURN layer_dict: dictionary of layers (dictionary of strings)\n ' rw_api_url = 'https://api.resourcewatch.org/v1/dataset/{}/layer?page[size]=100'.format(dataset_id) r = requests.get(rw_api_url) layer_dict = json.loads(r.content.decode('utf-8'))['data'] return layer_dict
Pull dictionary of current layers from API INPUT dataset_id: Resource Watch API dataset ID (string) RETURN layer_dict: dictionary of layers (dictionary of strings)
soc_016_conflict_protest_events/contents/src/__init__.py
pull_layers_from_API
resource-watch/nrt-scripts
6
python
def pull_layers_from_API(dataset_id): '\n Pull dictionary of current layers from API\n INPUT dataset_id: Resource Watch API dataset ID (string)\n RETURN layer_dict: dictionary of layers (dictionary of strings)\n ' rw_api_url = 'https://api.resourcewatch.org/v1/dataset/{}/layer?page[size]=100'.format(dataset_id) r = requests.get(rw_api_url) layer_dict = json.loads(r.content.decode('utf-8'))['data'] return layer_dict
def pull_layers_from_API(dataset_id): '\n Pull dictionary of current layers from API\n INPUT dataset_id: Resource Watch API dataset ID (string)\n RETURN layer_dict: dictionary of layers (dictionary of strings)\n ' rw_api_url = 'https://api.resourcewatch.org/v1/dataset/{}/layer?page[size]=100'.format(dataset_id) r = requests.get(rw_api_url) layer_dict = json.loads(r.content.decode('utf-8'))['data'] return layer_dict<|docstring|>Pull dictionary of current layers from API INPUT dataset_id: Resource Watch API dataset ID (string) RETURN layer_dict: dictionary of layers (dictionary of strings)<|endoftext|>
a435906854cedc6b525dd2c584f7b4c57ed2c22f943bf5580b7984490b60f776
def update_layer(layer, title): '\n Update layers in Resource Watch back office.\n INPUT layer: layer that will be updated (string)\n title: current title of the layer (string)\n ' old_date_text = title.split(' ACLED')[0] current_date = get_date_range()[1] new_date_end = current_date.strftime('%B %d, %Y') new_date_start = get_date_range()[0] new_date_start = new_date_start.strftime('%B %d, %Y') new_date_text = ((new_date_start + ' - ') + new_date_end) layer['attributes']['name'] = layer['attributes']['name'].replace(old_date_text, new_date_text) rw_api_url_layer = 'https://api.resourcewatch.org/v1/dataset/{dataset_id}/layer/{layer_id}'.format(dataset_id=layer['attributes']['dataset'], layer_id=layer['id']) payload = {'application': ['rw'], 'name': layer['attributes']['name']} r = requests.request('PATCH', rw_api_url_layer, data=json.dumps(payload), headers=create_headers()) if (r.ok or (r.status_code == 504)): logging.info('Layer replaced: {}'.format(layer['id'])) else: logging.error('Error replacing layer: {} ({})'.format(layer['id'], r.status_code))
Update layers in Resource Watch back office. INPUT layer: layer that will be updated (string) title: current title of the layer (string)
soc_016_conflict_protest_events/contents/src/__init__.py
update_layer
resource-watch/nrt-scripts
6
python
def update_layer(layer, title): '\n Update layers in Resource Watch back office.\n INPUT layer: layer that will be updated (string)\n title: current title of the layer (string)\n ' old_date_text = title.split(' ACLED')[0] current_date = get_date_range()[1] new_date_end = current_date.strftime('%B %d, %Y') new_date_start = get_date_range()[0] new_date_start = new_date_start.strftime('%B %d, %Y') new_date_text = ((new_date_start + ' - ') + new_date_end) layer['attributes']['name'] = layer['attributes']['name'].replace(old_date_text, new_date_text) rw_api_url_layer = 'https://api.resourcewatch.org/v1/dataset/{dataset_id}/layer/{layer_id}'.format(dataset_id=layer['attributes']['dataset'], layer_id=layer['id']) payload = {'application': ['rw'], 'name': layer['attributes']['name']} r = requests.request('PATCH', rw_api_url_layer, data=json.dumps(payload), headers=create_headers()) if (r.ok or (r.status_code == 504)): logging.info('Layer replaced: {}'.format(layer['id'])) else: logging.error('Error replacing layer: {} ({})'.format(layer['id'], r.status_code))
def update_layer(layer, title): '\n Update layers in Resource Watch back office.\n INPUT layer: layer that will be updated (string)\n title: current title of the layer (string)\n ' old_date_text = title.split(' ACLED')[0] current_date = get_date_range()[1] new_date_end = current_date.strftime('%B %d, %Y') new_date_start = get_date_range()[0] new_date_start = new_date_start.strftime('%B %d, %Y') new_date_text = ((new_date_start + ' - ') + new_date_end) layer['attributes']['name'] = layer['attributes']['name'].replace(old_date_text, new_date_text) rw_api_url_layer = 'https://api.resourcewatch.org/v1/dataset/{dataset_id}/layer/{layer_id}'.format(dataset_id=layer['attributes']['dataset'], layer_id=layer['id']) payload = {'application': ['rw'], 'name': layer['attributes']['name']} r = requests.request('PATCH', rw_api_url_layer, data=json.dumps(payload), headers=create_headers()) if (r.ok or (r.status_code == 504)): logging.info('Layer replaced: {}'.format(layer['id'])) else: logging.error('Error replacing layer: {} ({})'.format(layer['id'], r.status_code))<|docstring|>Update layers in Resource Watch back office. INPUT layer: layer that will be updated (string) title: current title of the layer (string)<|endoftext|>
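update_layer assumes every layer name begins with a date range followed by ' ACLED ...'; a stdlib-only sketch of the title rewrite, using a hypothetical layer name and hypothetical new dates:

import datetime

title = 'March 31, 2020 - March 30, 2021 ACLED Conflict and Protest Events'  # hypothetical layer name
old_date_text = title.split(' ACLED')[0]
new_start = datetime.date(2020, 6, 1).strftime('%B %d, %Y')
new_end = datetime.date(2021, 5, 31).strftime('%B %d, %Y')
print(title.replace(old_date_text, new_start + ' - ' + new_end))
# June 01, 2020 - May 31, 2021 ACLED Conflict and Protest Events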
605182ecea80fe36f3e0985e934476e4017670b79d66aeae62b53709a1a9260f
def updateResourceWatch(num_new): "\n This function should update Resource Watch to reflect the new data.\n This may include updating the 'last update date' and updating any dates on layers\n INPUT num_new: number of new rows in Carto table (integer)\n " if (num_new > 0): most_recent_date = get_date_range()[1] lastUpdateDate(DATASET_ID, most_recent_date) logging.info('Updating {}'.format(CARTO_TABLE)) layer_dict = pull_layers_from_API(DATASET_ID) for layer in layer_dict: cur_title = layer['attributes']['name'] update_layer(layer, cur_title)
This function should update Resource Watch to reflect the new data. This may include updating the 'last update date' and updating any dates on layers INPUT num_new: number of new rows in Carto table (integer)
soc_016_conflict_protest_events/contents/src/__init__.py
updateResourceWatch
resource-watch/nrt-scripts
6
python
def updateResourceWatch(num_new): "\n This function should update Resource Watch to reflect the new data.\n This may include updating the 'last update date' and updating any dates on layers\n INPUT num_new: number of new rows in Carto table (integer)\n " if (num_new > 0): most_recent_date = get_date_range()[1] lastUpdateDate(DATASET_ID, most_recent_date) logging.info('Updating {}'.format(CARTO_TABLE)) layer_dict = pull_layers_from_API(DATASET_ID) for layer in layer_dict: cur_title = layer['attributes']['name'] update_layer(layer, cur_title)
def updateResourceWatch(num_new): "\n This function should update Resource Watch to reflect the new data.\n This may include updating the 'last update date' and updating any dates on layers\n INPUT num_new: number of new rows in Carto table (integer)\n " if (num_new > 0): most_recent_date = get_date_range()[1] lastUpdateDate(DATASET_ID, most_recent_date) logging.info('Updating {}'.format(CARTO_TABLE)) layer_dict = pull_layers_from_API(DATASET_ID) for layer in layer_dict: cur_title = layer['attributes']['name'] update_layer(layer, cur_title)<|docstring|>This function should update Resource Watch to reflect the new data. This may include updating the 'last update date' and updating any dates on layers INPUT num_new: number of new rows in Carto table (integer)<|endoftext|>
b5338e536ffd22ea2d79065d44a3dc677d8d5e9f2fd22597ccb584662580b1d6
def delete_carto_entries(id_list): '\n Delete entries in Carto table based on values in a specified column\n INPUT id_list: list of column values for which you want to delete entries in table (list of strings)\n ' where = None column = UID_FIELD for delete_id in id_list: if where: where += f" OR {column} = '{delete_id}'" else: where = f"{column} = '{delete_id}'" if ((len(where) > 15000) or (delete_id == id_list[(- 1)])): cartosql.deleteRows(CARTO_TABLE, where=where, user=CARTO_USER, key=CARTO_KEY) where = None
Delete entries in Carto table based on values in a specified column INPUT id_list: list of column values for which you want to delete entries in table (list of strings)
soc_016_conflict_protest_events/contents/src/__init__.py
delete_carto_entries
resource-watch/nrt-scripts
6
python
def delete_carto_entries(id_list): '\n Delete entries in Carto table based on values in a specified column\n INPUT id_list: list of column values for which you want to delete entries in table (list of strings)\n ' where = None column = UID_FIELD for delete_id in id_list: if where: where += f" OR {column} = '{delete_id}'" else: where = f"{column} = '{delete_id}'" if ((len(where) > 15000) or (delete_id == id_list[(- 1)])): cartosql.deleteRows(CARTO_TABLE, where=where, user=CARTO_USER, key=CARTO_KEY) where = None
def delete_carto_entries(id_list): '\n Delete entries in Carto table based on values in a specified column\n INPUT id_list: list of column values for which you want to delete entries in table (list of strings)\n ' where = None column = UID_FIELD for delete_id in id_list: if where: where += f" OR {column} = '{delete_id}'" else: where = f"{column} = '{delete_id}'" if ((len(where) > 15000) or (delete_id == id_list[(- 1)])): cartosql.deleteRows(CARTO_TABLE, where=where, user=CARTO_USER, key=CARTO_KEY) where = None<|docstring|>Delete entries in Carto table based on values in a specified column INPUT id_list: list of column values for which you want to delete entries in table (list of strings)<|endoftext|>
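delete_carto_entries builds an OR-chained WHERE clause and flushes it either when it grows too long or at the last id; the clause-building logic in isolation, with a tiny length threshold instead of 15000 so the example produces two batches:

id_list = ['a1', 'b2', 'c3']  # hypothetical uids
column = 'data_id'
batches = []
where = None
for delete_id in id_list:
    if where:
        where += " OR {} = '{}'".format(column, delete_id)
    else:
        where = "{} = '{}'".format(column, delete_id)
    if (len(where) > 30) or (delete_id == id_list[-1]):
        batches.append(where)  # the real script calls cartosql.deleteRows(..., where=where) here
        where = None
print(batches)  # ["data_id = 'a1' OR data_id = 'b2'", "data_id = 'c3'"]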
6041928728946cf4d8084184f2ec429bf8468df8ca50ccd54088bb586daeec52
@click.command() @click.option('--output-file', default='notebook.ipynb', help='The output notebook file') @click.option('--title', help='A title to give the Notebook') @click.argument('examples', nargs=(- 1)) def cli(examples, output_file=None, title=None): "Generate a notebook from a list of example functions.\n\n EXAMPLES are expected to be a list of files and the functions located\n within those files, for example, to inline the 'reserve_node' function\n located in 'tests/test_lease.py':\n\n generate_notebook.py tests/test_lease.py:reserve_node\n\n The outputted notebook will have a Markdown cell with the function's\n docstring, while the source of the function will be included in to a\n following code cell.\n\n Multiple examples can be provided, in which case the outputted Notebook\n file will have multiple code and markdown cells.\n " generate(examples, output_file=output_file, title=title)
Generate a notebook from a list of example functions. EXAMPLES are expected to be a list of files and the functions located within those files, for example, to inline the 'reserve_node' function located in 'tests/test_lease.py': generate_notebook.py tests/test_lease.py:reserve_node The outputted notebook will have a Markdown cell with the function's docstring, while the source of the function will be included in to a following code cell. Multiple examples can be provided, in which case the outputted Notebook file will have multiple code and markdown cells.
docs/generate_notebook.py
cli
ChameleonCloud/python-chi
0
python
@click.command() @click.option('--output-file', default='notebook.ipynb', help='The output notebook file') @click.option('--title', help='A title to give the Notebook') @click.argument('examples', nargs=(- 1)) def cli(examples, output_file=None, title=None): "Generate a notebook from a list of example functions.\n\n EXAMPLES are expected to be a list of files and the functions located\n within those files, for example, to inline the 'reserve_node' function\n located in 'tests/test_lease.py':\n\n generate_notebook.py tests/test_lease.py:reserve_node\n\n The outputted notebook will have a Markdown cell with the function's\n docstring, while the source of the function will be included in to a\n following code cell.\n\n Multiple examples can be provided, in which case the outputted Notebook\n file will have multiple code and markdown cells.\n " generate(examples, output_file=output_file, title=title)
@click.command() @click.option('--output-file', default='notebook.ipynb', help='The output notebook file') @click.option('--title', help='A title to give the Notebook') @click.argument('examples', nargs=(- 1)) def cli(examples, output_file=None, title=None): "Generate a notebook from a list of example functions.\n\n EXAMPLES are expected to be a list of files and the functions located\n within those files, for example, to inline the 'reserve_node' function\n located in 'tests/test_lease.py':\n\n generate_notebook.py tests/test_lease.py:reserve_node\n\n The outputted notebook will have a Markdown cell with the function's\n docstring, while the source of the function will be included in to a\n following code cell.\n\n Multiple examples can be provided, in which case the outputted Notebook\n file will have multiple code and markdown cells.\n " generate(examples, output_file=output_file, title=title)<|docstring|>Generate a notebook from a list of example functions. EXAMPLES are expected to be a list of files and the functions located within those files, for example, to inline the 'reserve_node' function located in 'tests/test_lease.py': generate_notebook.py tests/test_lease.py:reserve_node The outputted notebook will have a Markdown cell with the function's docstring, while the source of the function will be included in to a following code cell. Multiple examples can be provided, in which case the outputted Notebook file will have multiple code and markdown cells.<|endoftext|>
90b7f101978d112f3ef03799f970d8d17bd0c3f85cac23d4df56eab90d1806a3
def read(fname): '\n Returns path to README\n ' return open(os.path.join(os.path.dirname(__file__), fname)).read()
Returns path to README
setup.py
read
Kai-Bailey/websocket
23
python
def read(fname): '\n \n ' return open(os.path.join(os.path.dirname(__file__), fname)).read()
def read(fname): '\n \n ' return open(os.path.join(os.path.dirname(__file__), fname)).read()<|docstring|>Returns path to README<|endoftext|>
bfbf2cb383bf448941ae0abfa0c4b3139995689294bbc6a2d969d06568c0b66c
def get_file_content(self, file): "\n This method gets content of a file as a string. This method throws an error if the file doesn't exits.\n " try: if os.path.isfile(file): file_txt = Path(file).read_text() return file_txt else: raise Exception((("File doesn't exist " + file) + '. Check markupFileConfig file.')) except Exception as e: logging.error(e) sys.exit(1)
This method gets content of a file as a string. This method throws an error if the file doesn't exits.
script/MarkupFilesUtils.py
get_file_content
rajaram5/CDE-repo-markup-file-generator
0
python
def get_file_content(self, file): "\n \n " try: if os.path.isfile(file): file_txt = Path(file).read_text() return file_txt else: raise Exception((("File doesn't exist " + file) + '. Check markupFileConfig file.')) except Exception as e: logging.error(e) sys.exit(1)
def get_file_content(self, file): "\n \n " try: if os.path.isfile(file): file_txt = Path(file).read_text() return file_txt else: raise Exception((("File doesn't exist " + file) + '. Check markupFileConfig file.')) except Exception as e: logging.error(e) sys.exit(1)<|docstring|>This method gets content of a file as a string. This method throws an error if the file doesn't exits.<|endoftext|>
444ed7c7adb88359a1cb243d90e9ff6b29bb3701257725364f18259a901f636c
def check_md_support_files(self): "\n This method checks if the supporting files of markup exits. Like example RDF and ShEx files.\n The method throws an error and exit if file doesn't exits.\n " for m in self.MODULES: rdf_file = (self.BASE_PATH + m.EXAMPLE_RDF['file-path']) self.get_file_content(rdf_file) shex_file = (self.BASE_PATH + m.SHEX['file-path']) self.get_file_content(shex_file) logging.info("All supporting files to generate markup's exits")
This method checks if the supporting files of markup exits. Like example RDF and ShEx files. The method throws an error and exit if file doesn't exits.
script/MarkupFilesUtils.py
check_md_support_files
rajaram5/CDE-repo-markup-file-generator
0
python
def check_md_support_files(self): "\n This method checks if the supporting files of markup exits. Like example RDF and ShEx files.\n The method throws an error and exit if file doesn't exits.\n " for m in self.MODULES: rdf_file = (self.BASE_PATH + m.EXAMPLE_RDF['file-path']) self.get_file_content(rdf_file) shex_file = (self.BASE_PATH + m.SHEX['file-path']) self.get_file_content(shex_file) logging.info("All supporting files to generate markup's exits")
def check_md_support_files(self): "\n This method checks if the supporting files of markup exits. Like example RDF and ShEx files.\n The method throws an error and exit if file doesn't exits.\n " for m in self.MODULES: rdf_file = (self.BASE_PATH + m.EXAMPLE_RDF['file-path']) self.get_file_content(rdf_file) shex_file = (self.BASE_PATH + m.SHEX['file-path']) self.get_file_content(shex_file) logging.info("All supporting files to generate markup's exits")<|docstring|>This method checks if the supporting files of markup exits. Like example RDF and ShEx files. The method throws an error and exit if file doesn't exits.<|endoftext|>
dba1818f58c08394a070393f740effa93e8b19683a7e2b77d6bf86584c330c95
def generate_md_files(self): "\n This method generates CDE model's repositories markup files. This methods throws an error if the file doesn't\n exits.\n " for m in self.MODULES: rdf_figure_path = (self.LINK_BASE_PATH + m.EXAMPLE_RDF['figure-file-path']) rdf_file = (self.BASE_PATH + m.EXAMPLE_RDF['file-path']) rdf_txt = self.get_file_content(rdf_file) shex_figure_path = (self.LINK_BASE_PATH + m.SHEX['figure-file-path']) shex_file = (self.BASE_PATH + m.SHEX['file-path']) shex_txt = self.get_file_content(shex_file) markup_files = {} with open(m.TEMPLATE_FILE, 'r') as f: md_text = chevron.render(f, {'text': m.EXAMPLE_RDF['text'], 'semantic-model-figure-path': rdf_figure_path, 'example-rdf': rdf_txt, 'shex-figure-path': shex_figure_path, 'shex': shex_txt, 'title': m.NAME}) print(md_text) output_file = (self.OUTPUT_DIR + m.MD_FILE_NAME) markup_files[output_file] = md_text for (file_path, content) in markup_files.items(): file = open(file_path, 'w') file.write(content) file.close()
This method generates CDE model's repositories markup files. This methods throws an error if the file doesn't exits.
script/MarkupFilesUtils.py
generate_md_files
rajaram5/CDE-repo-markup-file-generator
0
python
def generate_md_files(self): "\n This method generates CDE model's repositories markup files. This methods throws an error if the file doesn't\n exits.\n " for m in self.MODULES: rdf_figure_path = (self.LINK_BASE_PATH + m.EXAMPLE_RDF['figure-file-path']) rdf_file = (self.BASE_PATH + m.EXAMPLE_RDF['file-path']) rdf_txt = self.get_file_content(rdf_file) shex_figure_path = (self.LINK_BASE_PATH + m.SHEX['figure-file-path']) shex_file = (self.BASE_PATH + m.SHEX['file-path']) shex_txt = self.get_file_content(shex_file) markup_files = {} with open(m.TEMPLATE_FILE, 'r') as f: md_text = chevron.render(f, {'text': m.EXAMPLE_RDF['text'], 'semantic-model-figure-path': rdf_figure_path, 'example-rdf': rdf_txt, 'shex-figure-path': shex_figure_path, 'shex': shex_txt, 'title': m.NAME}) print(md_text) output_file = (self.OUTPUT_DIR + m.MD_FILE_NAME) markup_files[output_file] = md_text for (file_path, content) in markup_files.items(): file = open(file_path, 'w') file.write(content) file.close()
def generate_md_files(self): "\n This method generates CDE model's repositories markup files. This methods throws an error if the file doesn't\n exits.\n " for m in self.MODULES: rdf_figure_path = (self.LINK_BASE_PATH + m.EXAMPLE_RDF['figure-file-path']) rdf_file = (self.BASE_PATH + m.EXAMPLE_RDF['file-path']) rdf_txt = self.get_file_content(rdf_file) shex_figure_path = (self.LINK_BASE_PATH + m.SHEX['figure-file-path']) shex_file = (self.BASE_PATH + m.SHEX['file-path']) shex_txt = self.get_file_content(shex_file) markup_files = {} with open(m.TEMPLATE_FILE, 'r') as f: md_text = chevron.render(f, {'text': m.EXAMPLE_RDF['text'], 'semantic-model-figure-path': rdf_figure_path, 'example-rdf': rdf_txt, 'shex-figure-path': shex_figure_path, 'shex': shex_txt, 'title': m.NAME}) print(md_text) output_file = (self.OUTPUT_DIR + m.MD_FILE_NAME) markup_files[output_file] = md_text for (file_path, content) in markup_files.items(): file = open(file_path, 'w') file.write(content) file.close()<|docstring|>This method generates CDE model's repositories markup files. This methods throws an error if the file doesn't exits.<|endoftext|>
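generate_md_files renders mustache templates through chevron; the call shape with a plain string template (as in the code above, chevron can also be given an open template file):

import chevron

template = '# {{title}}\n\n{{text}}\n'
print(chevron.render(template, {'title': 'Example module', 'text': 'Body rendered from a template.'}))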
ba8627d0d3d1cb6aeb5a434d32ff2b256c6eaa99e11ae7efda764d2f20ace147
def conv3x3(in_planes, out_planes, stride=1): '3x3 convolution with padding' return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False)
3x3 convolution with padding
src/backbone/senet.py
conv3x3
Darshan-Ramesh/EmpRecognition
0
python
def conv3x3(in_planes, out_planes, stride=1): return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False)
def conv3x3(in_planes, out_planes, stride=1): return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False)<|docstring|>3x3 convolution with padding<|endoftext|>
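conv3x3 is the standard padded 3x3 convolution; with PyTorch installed, its shape-preserving behaviour at stride 1 can be checked directly:

import torch
import torch.nn as nn

def conv3x3(in_planes, out_planes, stride=1):
    '3x3 convolution with padding'
    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False)

layer = conv3x3(16, 32)
x = torch.randn(1, 16, 56, 56)
print(layer(x).shape)  # torch.Size([1, 32, 56, 56]) -- padding=1 keeps the spatial size at stride 1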
839bb8ac4e9d046174749c6b9a52059d874dc5476d9341f16fd877103b3bfac1
def senet50(**kwargs): 'Constructs a SENet-50 model.\n ' senet = SENet(Bottleneck, [3, 4, 6, 3], **kwargs) model_dict = senet.state_dict() pretrined_weights_path = '..\\models\\pretrained_models\\senet50_scratch_weight.pkl' model_dict = load_pretrained_weights(pretrined_weights_path, model_dict) print('[INFO] Loading the pre-trained weight to the model..') senet.load_state_dict(model_dict) print('Done loading the weights!') print(senet) return senet
Constructs a SENet-50 model.
src/backbone/senet.py
senet50
Darshan-Ramesh/EmpRecognition
0
python
def senet50(**kwargs): '\n ' senet = SENet(Bottleneck, [3, 4, 6, 3], **kwargs) model_dict = senet.state_dict() pretrined_weights_path = '..\\models\\pretrained_models\\senet50_scratch_weight.pkl' model_dict = load_pretrained_weights(pretrined_weights_path, model_dict) print('[INFO] Loading the pre-trained weight to the model..') senet.load_state_dict(model_dict) print('Done loading the weights!') print(senet) return senet
def senet50(**kwargs): '\n ' senet = SENet(Bottleneck, [3, 4, 6, 3], **kwargs) model_dict = senet.state_dict() pretrined_weights_path = '..\\models\\pretrained_models\\senet50_scratch_weight.pkl' model_dict = load_pretrained_weights(pretrined_weights_path, model_dict) print('[INFO] Loading the pre-trained weight to the model..') senet.load_state_dict(model_dict) print('Done loading the weights!') print(senet) return senet<|docstring|>Constructs a SENet-50 model.<|endoftext|>
1bdb41902734d60dddde82a9022cfc159fd296d17b5c5e5f9c432dff11519fa8
def resolve(label): '\n Given a result from htseq-count, resolve label\n ' if (len(label) == 0): return 'ID' elif ('__' not in label): pieces = label.split(':') return '{}:{}'.format(pieces[2], pieces[(- 1)][:(- 1)]) elif ('no_feature' in label): return 'no_feature' elif ('not_aligned' in label): return 'not_aligned' elif ('ambiguous' in label): if ('exon' in label): if ('intron' in label): ids = label.split('[')[1][:(- 2)].split('+') gene_ids = list(set([x.split(':')[(- 1)] for x in ids])) if (len(ids) == 2): transcript_ids = [x.split(':')[1] for x in ids] if (transcript_ids[0] == transcript_ids[1]): return 'intronexonjunction:{}'.format(gene_ids[0]) if (len(gene_ids) == 1): return 'ambiguous:{}'.format(gene_ids[0]) return 'ambiguous_intron_exon' ids = label.split('[')[1][:(- 2)].split('+') ids = list(set([x.split(':')[(- 1)] for x in ids])) if (len(ids) != 1): return 'ambiguous_mult_genes' else: return 'exon:{}'.format(ids[0]) elif ('intron' in label): ids = label.split('[')[1][:(- 2)].split('+') ids = list(set([x.split(':')[(- 1)] for x in ids])) if (len(ids) != 1): return 'ambiguous_mult_genes' else: return 'intron:{}'.format(ids[0]) else: return 'other'
Given a result from htseq-count, resolve label
map_reads/count_reads.py
resolve
kslin/rna-seq-scripts
0
python
def resolve(label): '\n \n ' if (len(label) == 0): return 'ID' elif ('__' not in label): pieces = label.split(':') return '{}:{}'.format(pieces[2], pieces[(- 1)][:(- 1)]) elif ('no_feature' in label): return 'no_feature' elif ('not_aligned' in label): return 'not_aligned' elif ('ambiguous' in label): if ('exon' in label): if ('intron' in label): ids = label.split('[')[1][:(- 2)].split('+') gene_ids = list(set([x.split(':')[(- 1)] for x in ids])) if (len(ids) == 2): transcript_ids = [x.split(':')[1] for x in ids] if (transcript_ids[0] == transcript_ids[1]): return 'intronexonjunction:{}'.format(gene_ids[0]) if (len(gene_ids) == 1): return 'ambiguous:{}'.format(gene_ids[0]) return 'ambiguous_intron_exon' ids = label.split('[')[1][:(- 2)].split('+') ids = list(set([x.split(':')[(- 1)] for x in ids])) if (len(ids) != 1): return 'ambiguous_mult_genes' else: return 'exon:{}'.format(ids[0]) elif ('intron' in label): ids = label.split('[')[1][:(- 2)].split('+') ids = list(set([x.split(':')[(- 1)] for x in ids])) if (len(ids) != 1): return 'ambiguous_mult_genes' else: return 'intron:{}'.format(ids[0]) else: return 'other'
def resolve(label): '\n \n ' if (len(label) == 0): return 'ID' elif ('__' not in label): pieces = label.split(':') return '{}:{}'.format(pieces[2], pieces[(- 1)][:(- 1)]) elif ('no_feature' in label): return 'no_feature' elif ('not_aligned' in label): return 'not_aligned' elif ('ambiguous' in label): if ('exon' in label): if ('intron' in label): ids = label.split('[')[1][:(- 2)].split('+') gene_ids = list(set([x.split(':')[(- 1)] for x in ids])) if (len(ids) == 2): transcript_ids = [x.split(':')[1] for x in ids] if (transcript_ids[0] == transcript_ids[1]): return 'intronexonjunction:{}'.format(gene_ids[0]) if (len(gene_ids) == 1): return 'ambiguous:{}'.format(gene_ids[0]) return 'ambiguous_intron_exon' ids = label.split('[')[1][:(- 2)].split('+') ids = list(set([x.split(':')[(- 1)] for x in ids])) if (len(ids) != 1): return 'ambiguous_mult_genes' else: return 'exon:{}'.format(ids[0]) elif ('intron' in label): ids = label.split('[')[1][:(- 2)].split('+') ids = list(set([x.split(':')[(- 1)] for x in ids])) if (len(ids) != 1): return 'ambiguous_mult_genes' else: return 'intron:{}'.format(ids[0]) else: return 'other'<|docstring|>Given a result from htseq-count, resolve label<|endoftext|>
7719ef2d212ad8421dfa10845c1939d6fcdfcc5b6f4aa3049c62b40cd4db1185
def running_from_ipython(): '\n Check whether we are currently running under ipython.\n \n Returns\n -------\n ipython : bool\n Whether running under ipython or not.\n ' return getattr(builtins, '__IPYTHON__', False)
Check whether we are currently running under ipython. Returns ------- ipython : bool Whether running under ipython or not.
brian2/utils/environment.py
running_from_ipython
chbehrens/brian2
1
python
def running_from_ipython(): '\n Check whether we are currently running under ipython.\n \n Returns\n -------\n ipython : bool\n Whether running under ipython or not.\n ' return getattr(builtins, '__IPYTHON__', False)
def running_from_ipython(): '\n Check whether we are currently running under ipython.\n \n Returns\n -------\n ipython : bool\n Whether running under ipython or not.\n ' return getattr(builtins, '__IPYTHON__', False)<|docstring|>Check whether we are currently running under ipython. Returns ------- ipython : bool Whether running under ipython or not.<|endoftext|>
c3580e05e0b12a9fb3f74bb49f78f8ef4e29f8b75c7f5272e5e1a8debb2a1444
def plot(self, energy_range=None, ax=None, energy_power=0, energy_unit='TeV', flux_unit='cm-2 s-1 TeV-1', **kwargs): 'Plot.\n\n ``kwargs`` are passed to ``matplotlib.pyplot.errorbar``.\n ' import matplotlib.pyplot as plt ax = (plt.gca() if (ax is None) else ax) kwargs.setdefault('color', 'black') kwargs.setdefault('alpha', 0.2) kwargs.setdefault('linewidth', 0) energy = self['energy'].to(energy_unit) flux_lo = self['flux_lo'].to(flux_unit) flux_hi = self['flux_hi'].to(flux_unit) y_lo = (flux_lo * np.power(energy, energy_power)) y_hi = (flux_hi * np.power(energy, energy_power)) eunit = [_ for _ in flux_lo.unit.bases if (_.physical_type == 'energy')][0] y_lo = y_lo.to(((eunit ** energy_power) * flux_lo.unit)) y_hi = y_hi.to(((eunit ** energy_power) * flux_hi.unit)) if (energy_range is None): energy_range = (np.min(energy), np.max(energy)) where = (((y_hi > 0) & (energy >= energy_range[0])) & (energy <= energy_range[1])) ax.fill_between(energy.value, y_lo.value, y_hi.value, where=where, **kwargs) ax.set_xlabel('Energy [{}]'.format(self['energy'].unit)) if (energy_power > 0): ax.set_ylabel('E{0} * Flux [{1}]'.format(energy_power, y_lo.unit)) else: ax.set_ylabel('Flux [{}]'.format(y_lo.unit)) ax.set_xscale('log', nonposx='clip') ax.set_yscale('log', nonposy='clip') return ax
Plot. ``kwargs`` are passed to ``matplotlib.pyplot.errorbar``.
gammapy/spectrum/butterfly.py
plot
grburgess/gammapy
3
python
def plot(self, energy_range=None, ax=None, energy_power=0, energy_unit='TeV', flux_unit='cm-2 s-1 TeV-1', **kwargs): 'Plot.\n\n ``kwargs`` are passed to ``matplotlib.pyplot.errorbar``.\n ' import matplotlib.pyplot as plt ax = (plt.gca() if (ax is None) else ax) kwargs.setdefault('color', 'black') kwargs.setdefault('alpha', 0.2) kwargs.setdefault('linewidth', 0) energy = self['energy'].to(energy_unit) flux_lo = self['flux_lo'].to(flux_unit) flux_hi = self['flux_hi'].to(flux_unit) y_lo = (flux_lo * np.power(energy, energy_power)) y_hi = (flux_hi * np.power(energy, energy_power)) eunit = [_ for _ in flux_lo.unit.bases if (_.physical_type == 'energy')][0] y_lo = y_lo.to(((eunit ** energy_power) * flux_lo.unit)) y_hi = y_hi.to(((eunit ** energy_power) * flux_hi.unit)) if (energy_range is None): energy_range = (np.min(energy), np.max(energy)) where = (((y_hi > 0) & (energy >= energy_range[0])) & (energy <= energy_range[1])) ax.fill_between(energy.value, y_lo.value, y_hi.value, where=where, **kwargs) ax.set_xlabel('Energy [{}]'.format(self['energy'].unit)) if (energy_power > 0): ax.set_ylabel('E{0} * Flux [{1}]'.format(energy_power, y_lo.unit)) else: ax.set_ylabel('Flux [{}]'.format(y_lo.unit)) ax.set_xscale('log', nonposx='clip') ax.set_yscale('log', nonposy='clip') return ax
def plot(self, energy_range=None, ax=None, energy_power=0, energy_unit='TeV', flux_unit='cm-2 s-1 TeV-1', **kwargs): 'Plot.\n\n ``kwargs`` are passed to ``matplotlib.pyplot.errorbar``.\n ' import matplotlib.pyplot as plt ax = (plt.gca() if (ax is None) else ax) kwargs.setdefault('color', 'black') kwargs.setdefault('alpha', 0.2) kwargs.setdefault('linewidth', 0) energy = self['energy'].to(energy_unit) flux_lo = self['flux_lo'].to(flux_unit) flux_hi = self['flux_hi'].to(flux_unit) y_lo = (flux_lo * np.power(energy, energy_power)) y_hi = (flux_hi * np.power(energy, energy_power)) eunit = [_ for _ in flux_lo.unit.bases if (_.physical_type == 'energy')][0] y_lo = y_lo.to(((eunit ** energy_power) * flux_lo.unit)) y_hi = y_hi.to(((eunit ** energy_power) * flux_hi.unit)) if (energy_range is None): energy_range = (np.min(energy), np.max(energy)) where = (((y_hi > 0) & (energy >= energy_range[0])) & (energy <= energy_range[1])) ax.fill_between(energy.value, y_lo.value, y_hi.value, where=where, **kwargs) ax.set_xlabel('Energy [{}]'.format(self['energy'].unit)) if (energy_power > 0): ax.set_ylabel('E{0} * Flux [{1}]'.format(energy_power, y_lo.unit)) else: ax.set_ylabel('Flux [{}]'.format(y_lo.unit)) ax.set_xscale('log', nonposx='clip') ax.set_yscale('log', nonposy='clip') return ax<|docstring|>Plot. ``kwargs`` are passed to ``matplotlib.pyplot.errorbar``.<|endoftext|>
f16becd7287bef0d05c271315b669c33ad7d19964900409ac43565b5e5d55eca
def __getattr__(cls, name): "Return the enum member matching `name`\n We use __getattr__ instead of descriptors or inserting into the enum\n class' __dict__ in order to support `name` and `value` being both\n properties for enum members (which live in the class' __dict__) and\n enum members themselves.\n " try: return cls._member_map_[name.upper()] except KeyError: raise AttributeError(name)
Return the enum member matching `name` We use __getattr__ instead of descriptors or inserting into the enum class' __dict__ in order to support `name` and `value` being both properties for enum members (which live in the class' __dict__) and enum members themselves.
msgraph-cli-extensions/beta/financials_beta/azext_financials_beta/vendored_sdks/financials/models/_financials_enums.py
__getattr__
BrianTJackett/msgraph-cli
0
python
def __getattr__(cls, name): "Return the enum member matching `name`\n We use __getattr__ instead of descriptors or inserting into the enum\n class' __dict__ in order to support `name` and `value` being both\n properties for enum members (which live in the class' __dict__) and\n enum members themselves.\n " try: return cls._member_map_[name.upper()] except KeyError: raise AttributeError(name)
def __getattr__(cls, name): "Return the enum member matching `name`\n We use __getattr__ instead of descriptors or inserting into the enum\n class' __dict__ in order to support `name` and `value` being both\n properties for enum members (which live in the class' __dict__) and\n enum members themselves.\n " try: return cls._member_map_[name.upper()] except KeyError: raise AttributeError(name)<|docstring|>Return the enum member matching `name` We use __getattr__ instead of descriptors or inserting into the enum class' __dict__ in order to support `name` and `value` being both properties for enum members (which live in the class' __dict__) and enum members themselves.<|endoftext|>
2e4e07267be3e06182def3721d02d2ddc8bfd16f46e78860d3bb20f5620520af
def main(args=None): '\n main function for carla simulator ROS bridge\n maintaining the communication client and the CarlaBridge object\n ' roscomp.init('bridge', args=args) carla_bridge = None carla_world = None carla_client = None executor = None parameters = {} executor = roscomp.executors.MultiThreadedExecutor() carla_bridge = CarlaRosBridge() executor.add_node(carla_bridge) roscomp.on_shutdown(carla_bridge.destroy) parameters['host'] = carla_bridge.get_param('host', 'localhost') parameters['port'] = carla_bridge.get_param('port', 2000) parameters['timeout'] = carla_bridge.get_param('timeout', 2) parameters['passive'] = carla_bridge.get_param('passive', False) parameters['synchronous_mode'] = carla_bridge.get_param('synchronous_mode', True) parameters['synchronous_mode_wait_for_vehicle_control_command'] = carla_bridge.get_param('synchronous_mode_wait_for_vehicle_control_command', False) parameters['fixed_delta_seconds'] = carla_bridge.get_param('fixed_delta_seconds', 0.05) parameters['register_all_sensors'] = carla_bridge.get_param('register_all_sensors', True) parameters['town'] = carla_bridge.get_param('town', 'Town01') role_name = carla_bridge.get_param('ego_vehicle_role_name', ['hero', 'ego_vehicle', 'hero1', 'hero2', 'hero3']) parameters['ego_vehicle'] = {'role_name': role_name} carla_bridge.loginfo('Trying to connect to {host}:{port}'.format(host=parameters['host'], port=parameters['port'])) try: carla_client = carla.Client(host=parameters['host'], port=parameters['port']) carla_client.set_timeout(parameters['timeout']) dist = pkg_resources.get_distribution('carla') if (LooseVersion(dist.version) != LooseVersion(CarlaRosBridge.CARLA_VERSION)): carla_bridge.logfatal('CARLA python module version {} required. Found: {}'.format(CarlaRosBridge.CARLA_VERSION, dist.version)) sys.exit(1) if (LooseVersion(carla_client.get_server_version()) != LooseVersion(carla_client.get_client_version())): carla_bridge.logwarn('Version mismatch detected: You are trying to connect to a simulator that might be incompatible with this API. Client API version: {}. Simulator API version: {}'.format(carla_client.get_client_version(), carla_client.get_server_version())) carla_world = carla_client.get_world() if (('town' in parameters) and (not parameters['passive'])): if parameters['town'].endswith('.xodr'): carla_bridge.loginfo("Loading opendrive world from file '{}'".format(parameters['town'])) with open(parameters['town']) as od_file: data = od_file.read() carla_world = carla_client.generate_opendrive_world(str(data)) elif (carla_world.get_map().name != parameters['town']): carla_bridge.loginfo("Loading town '{}' (previous: '{}').".format(parameters['town'], carla_world.get_map().name)) carla_world = carla_client.load_world(parameters['town']) carla_world.tick() speed_limit_prc = carla_bridge.get_param('speed_limit_percent', (- 20)) carla_bridge.loginfo('Setting speed limit percent to {}'.format(speed_limit_prc)) tm = carla_client.get_trafficmanager() tm.global_percentage_speed_difference(speed_limit_prc) carla_bridge.initialize_bridge(carla_client.get_world(), parameters) carla_bridge.spin() except (IOError, RuntimeError) as e: carla_bridge.logerr('Error: {}'.format(e)) except KeyboardInterrupt: pass finally: roscomp.shutdown() del carla_world del carla_client
main function for carla simulator ROS bridge maintaining the communication client and the CarlaBridge object
carla_ros_bridge/src/carla_ros_bridge/bridge.py
main
ibrahim-kudan/ros-bridge
0
python
def main(args=None): '\n main function for carla simulator ROS bridge\n maintaining the communication client and the CarlaBridge object\n ' roscomp.init('bridge', args=args) carla_bridge = None carla_world = None carla_client = None executor = None parameters = {} executor = roscomp.executors.MultiThreadedExecutor() carla_bridge = CarlaRosBridge() executor.add_node(carla_bridge) roscomp.on_shutdown(carla_bridge.destroy) parameters['host'] = carla_bridge.get_param('host', 'localhost') parameters['port'] = carla_bridge.get_param('port', 2000) parameters['timeout'] = carla_bridge.get_param('timeout', 2) parameters['passive'] = carla_bridge.get_param('passive', False) parameters['synchronous_mode'] = carla_bridge.get_param('synchronous_mode', True) parameters['synchronous_mode_wait_for_vehicle_control_command'] = carla_bridge.get_param('synchronous_mode_wait_for_vehicle_control_command', False) parameters['fixed_delta_seconds'] = carla_bridge.get_param('fixed_delta_seconds', 0.05) parameters['register_all_sensors'] = carla_bridge.get_param('register_all_sensors', True) parameters['town'] = carla_bridge.get_param('town', 'Town01') role_name = carla_bridge.get_param('ego_vehicle_role_name', ['hero', 'ego_vehicle', 'hero1', 'hero2', 'hero3']) parameters['ego_vehicle'] = {'role_name': role_name} carla_bridge.loginfo('Trying to connect to {host}:{port}'.format(host=parameters['host'], port=parameters['port'])) try: carla_client = carla.Client(host=parameters['host'], port=parameters['port']) carla_client.set_timeout(parameters['timeout']) dist = pkg_resources.get_distribution('carla') if (LooseVersion(dist.version) != LooseVersion(CarlaRosBridge.CARLA_VERSION)): carla_bridge.logfatal('CARLA python module version {} required. Found: {}'.format(CarlaRosBridge.CARLA_VERSION, dist.version)) sys.exit(1) if (LooseVersion(carla_client.get_server_version()) != LooseVersion(carla_client.get_client_version())): carla_bridge.logwarn('Version mismatch detected: You are trying to connect to a simulator that might be incompatible with this API. Client API version: {}. Simulator API version: {}'.format(carla_client.get_client_version(), carla_client.get_server_version())) carla_world = carla_client.get_world() if (('town' in parameters) and (not parameters['passive'])): if parameters['town'].endswith('.xodr'): carla_bridge.loginfo("Loading opendrive world from file '{}'".format(parameters['town'])) with open(parameters['town']) as od_file: data = od_file.read() carla_world = carla_client.generate_opendrive_world(str(data)) elif (carla_world.get_map().name != parameters['town']): carla_bridge.loginfo("Loading town '{}' (previous: '{}').".format(parameters['town'], carla_world.get_map().name)) carla_world = carla_client.load_world(parameters['town']) carla_world.tick() speed_limit_prc = carla_bridge.get_param('speed_limit_percent', (- 20)) carla_bridge.loginfo('Setting speed limit percent to {}'.format(speed_limit_prc)) tm = carla_client.get_trafficmanager() tm.global_percentage_speed_difference(speed_limit_prc) carla_bridge.initialize_bridge(carla_client.get_world(), parameters) carla_bridge.spin() except (IOError, RuntimeError) as e: carla_bridge.logerr('Error: {}'.format(e)) except KeyboardInterrupt: pass finally: roscomp.shutdown() del carla_world del carla_client
def main(args=None): '\n main function for carla simulator ROS bridge\n maintaining the communication client and the CarlaBridge object\n ' roscomp.init('bridge', args=args) carla_bridge = None carla_world = None carla_client = None executor = None parameters = {} executor = roscomp.executors.MultiThreadedExecutor() carla_bridge = CarlaRosBridge() executor.add_node(carla_bridge) roscomp.on_shutdown(carla_bridge.destroy) parameters['host'] = carla_bridge.get_param('host', 'localhost') parameters['port'] = carla_bridge.get_param('port', 2000) parameters['timeout'] = carla_bridge.get_param('timeout', 2) parameters['passive'] = carla_bridge.get_param('passive', False) parameters['synchronous_mode'] = carla_bridge.get_param('synchronous_mode', True) parameters['synchronous_mode_wait_for_vehicle_control_command'] = carla_bridge.get_param('synchronous_mode_wait_for_vehicle_control_command', False) parameters['fixed_delta_seconds'] = carla_bridge.get_param('fixed_delta_seconds', 0.05) parameters['register_all_sensors'] = carla_bridge.get_param('register_all_sensors', True) parameters['town'] = carla_bridge.get_param('town', 'Town01') role_name = carla_bridge.get_param('ego_vehicle_role_name', ['hero', 'ego_vehicle', 'hero1', 'hero2', 'hero3']) parameters['ego_vehicle'] = {'role_name': role_name} carla_bridge.loginfo('Trying to connect to {host}:{port}'.format(host=parameters['host'], port=parameters['port'])) try: carla_client = carla.Client(host=parameters['host'], port=parameters['port']) carla_client.set_timeout(parameters['timeout']) dist = pkg_resources.get_distribution('carla') if (LooseVersion(dist.version) != LooseVersion(CarlaRosBridge.CARLA_VERSION)): carla_bridge.logfatal('CARLA python module version {} required. Found: {}'.format(CarlaRosBridge.CARLA_VERSION, dist.version)) sys.exit(1) if (LooseVersion(carla_client.get_server_version()) != LooseVersion(carla_client.get_client_version())): carla_bridge.logwarn('Version mismatch detected: You are trying to connect to a simulator that might be incompatible with this API. Client API version: {}. Simulator API version: {}'.format(carla_client.get_client_version(), carla_client.get_server_version())) carla_world = carla_client.get_world() if (('town' in parameters) and (not parameters['passive'])): if parameters['town'].endswith('.xodr'): carla_bridge.loginfo("Loading opendrive world from file '{}'".format(parameters['town'])) with open(parameters['town']) as od_file: data = od_file.read() carla_world = carla_client.generate_opendrive_world(str(data)) elif (carla_world.get_map().name != parameters['town']): carla_bridge.loginfo("Loading town '{}' (previous: '{}').".format(parameters['town'], carla_world.get_map().name)) carla_world = carla_client.load_world(parameters['town']) carla_world.tick() speed_limit_prc = carla_bridge.get_param('speed_limit_percent', (- 20)) carla_bridge.loginfo('Setting speed limit percent to {}'.format(speed_limit_prc)) tm = carla_client.get_trafficmanager() tm.global_percentage_speed_difference(speed_limit_prc) carla_bridge.initialize_bridge(carla_client.get_world(), parameters) carla_bridge.spin() except (IOError, RuntimeError) as e: carla_bridge.logerr('Error: {}'.format(e)) except KeyboardInterrupt: pass finally: roscomp.shutdown() del carla_world del carla_client<|docstring|>main function for carla simulator ROS bridge maintaining the communication client and the CarlaBridge object<|endoftext|>
19c9dd264c49c82c0d63aedaa0e55be7bb899f5fb8859ec72efe25e9a092e3b4
def __init__(self): '\n Constructor\n\n :param carla_world: carla world object\n :type carla_world: carla.World\n :param params: dict of parameters, see settings.yaml\n :type params: dict\n ' super(CarlaRosBridge, self).__init__('ros_bridge_node')
Constructor :param carla_world: carla world object :type carla_world: carla.World :param params: dict of parameters, see settings.yaml :type params: dict
carla_ros_bridge/src/carla_ros_bridge/bridge.py
__init__
ibrahim-kudan/ros-bridge
0
python
def __init__(self): '\n Constructor\n\n :param carla_world: carla world object\n :type carla_world: carla.World\n :param params: dict of parameters, see settings.yaml\n :type params: dict\n ' super(CarlaRosBridge, self).__init__('ros_bridge_node')
def __init__(self): '\n Constructor\n\n :param carla_world: carla world object\n :type carla_world: carla.World\n :param params: dict of parameters, see settings.yaml\n :type params: dict\n ' super(CarlaRosBridge, self).__init__('ros_bridge_node')<|docstring|>Constructor :param carla_world: carla world object :type carla_world: carla.World :param params: dict of parameters, see settings.yaml :type params: dict<|endoftext|>
e26d71464426fa81f52252377eb9882a154bedc94176b14707cc9cdbcb0f2224
def initialize_bridge(self, carla_world, params): '\n Initialize the bridge\n ' self.parameters = params self.carla_world = carla_world self.ros_timestamp = roscomp.ros_timestamp() self.callback_group = roscomp.callback_groups.ReentrantCallbackGroup() self.synchronous_mode_update_thread = None self.shutdown = Event() self.carla_settings = carla_world.get_settings() if (not self.parameters['passive']): if self.carla_settings.synchronous_mode: self.carla_settings.synchronous_mode = False carla_world.apply_settings(self.carla_settings) self.loginfo('synchronous_mode: {}'.format(self.parameters['synchronous_mode'])) self.carla_settings.synchronous_mode = self.parameters['synchronous_mode'] self.loginfo('fixed_delta_seconds: {}'.format(self.parameters['fixed_delta_seconds'])) self.carla_settings.fixed_delta_seconds = self.parameters['fixed_delta_seconds'] carla_world.apply_settings(self.carla_settings) self.loginfo('Parameters:') for key in self.parameters: self.loginfo(' {}: {}'.format(key, self.parameters[key])) self.sync_mode = (self.carla_settings.synchronous_mode and (not self.parameters['passive'])) if (self.carla_settings.synchronous_mode and self.parameters['passive']): self.loginfo('Passive mode is enabled and CARLA world is configured in synchronous mode. This configuration requires another client ticking the CARLA world.') self.carla_control_queue = queue.Queue() self.actor_factory = ActorFactory(self, carla_world, self.sync_mode) self.world_info = WorldInfo(carla_world=self.carla_world, node=self) self.debug_helper = DebugHelper(carla_world.debug, self) self.clock_publisher = self.new_publisher(Clock, 'clock', 10) self.status_publisher = CarlaStatusPublisher(self.carla_settings.synchronous_mode, self.carla_settings.fixed_delta_seconds, self) self._all_vehicle_control_commands_received = Event() self._expected_ego_vehicle_control_command_ids = [] self._expected_ego_vehicle_control_command_ids_lock = Lock() if self.sync_mode: self.carla_run_state = CarlaControl.PLAY self.carla_control_subscriber = self.new_subscription(CarlaControl, '/carla/control', (lambda control: self.carla_control_queue.put(control.command)), qos_profile=10, callback_group=self.callback_group) self.synchronous_mode_update_thread = Thread(target=self._synchronous_mode_update) self.synchronous_mode_update_thread.start() else: self.timestamp_last_run = 0.0 self.actor_factory.start() self.on_tick_id = self.carla_world.on_tick(self._carla_time_tick) self._registered_actors = [] self.spawn_object_service = self.new_service(SpawnObject, '/carla/spawn_object', self.spawn_object) self.destroy_object_service = self.new_service(DestroyObject, '/carla/destroy_object', self.destroy_object) self.get_blueprints_service = self.new_service(GetBlueprints, '/carla/get_blueprints', self.get_blueprints, callback_group=self.callback_group) self.carla_weather_subscriber = self.new_subscription(CarlaWeatherParameters, '/carla/weather_control', self.on_weather_changed, qos_profile=10, callback_group=self.callback_group)
Initialize the bridge
carla_ros_bridge/src/carla_ros_bridge/bridge.py
initialize_bridge
ibrahim-kudan/ros-bridge
0
python
def initialize_bridge(self, carla_world, params): '\n \n ' self.parameters = params self.carla_world = carla_world self.ros_timestamp = roscomp.ros_timestamp() self.callback_group = roscomp.callback_groups.ReentrantCallbackGroup() self.synchronous_mode_update_thread = None self.shutdown = Event() self.carla_settings = carla_world.get_settings() if (not self.parameters['passive']): if self.carla_settings.synchronous_mode: self.carla_settings.synchronous_mode = False carla_world.apply_settings(self.carla_settings) self.loginfo('synchronous_mode: {}'.format(self.parameters['synchronous_mode'])) self.carla_settings.synchronous_mode = self.parameters['synchronous_mode'] self.loginfo('fixed_delta_seconds: {}'.format(self.parameters['fixed_delta_seconds'])) self.carla_settings.fixed_delta_seconds = self.parameters['fixed_delta_seconds'] carla_world.apply_settings(self.carla_settings) self.loginfo('Parameters:') for key in self.parameters: self.loginfo(' {}: {}'.format(key, self.parameters[key])) self.sync_mode = (self.carla_settings.synchronous_mode and (not self.parameters['passive'])) if (self.carla_settings.synchronous_mode and self.parameters['passive']): self.loginfo('Passive mode is enabled and CARLA world is configured in synchronous mode. This configuration requires another client ticking the CARLA world.') self.carla_control_queue = queue.Queue() self.actor_factory = ActorFactory(self, carla_world, self.sync_mode) self.world_info = WorldInfo(carla_world=self.carla_world, node=self) self.debug_helper = DebugHelper(carla_world.debug, self) self.clock_publisher = self.new_publisher(Clock, 'clock', 10) self.status_publisher = CarlaStatusPublisher(self.carla_settings.synchronous_mode, self.carla_settings.fixed_delta_seconds, self) self._all_vehicle_control_commands_received = Event() self._expected_ego_vehicle_control_command_ids = [] self._expected_ego_vehicle_control_command_ids_lock = Lock() if self.sync_mode: self.carla_run_state = CarlaControl.PLAY self.carla_control_subscriber = self.new_subscription(CarlaControl, '/carla/control', (lambda control: self.carla_control_queue.put(control.command)), qos_profile=10, callback_group=self.callback_group) self.synchronous_mode_update_thread = Thread(target=self._synchronous_mode_update) self.synchronous_mode_update_thread.start() else: self.timestamp_last_run = 0.0 self.actor_factory.start() self.on_tick_id = self.carla_world.on_tick(self._carla_time_tick) self._registered_actors = [] self.spawn_object_service = self.new_service(SpawnObject, '/carla/spawn_object', self.spawn_object) self.destroy_object_service = self.new_service(DestroyObject, '/carla/destroy_object', self.destroy_object) self.get_blueprints_service = self.new_service(GetBlueprints, '/carla/get_blueprints', self.get_blueprints, callback_group=self.callback_group) self.carla_weather_subscriber = self.new_subscription(CarlaWeatherParameters, '/carla/weather_control', self.on_weather_changed, qos_profile=10, callback_group=self.callback_group)
def initialize_bridge(self, carla_world, params): '\n \n ' self.parameters = params self.carla_world = carla_world self.ros_timestamp = roscomp.ros_timestamp() self.callback_group = roscomp.callback_groups.ReentrantCallbackGroup() self.synchronous_mode_update_thread = None self.shutdown = Event() self.carla_settings = carla_world.get_settings() if (not self.parameters['passive']): if self.carla_settings.synchronous_mode: self.carla_settings.synchronous_mode = False carla_world.apply_settings(self.carla_settings) self.loginfo('synchronous_mode: {}'.format(self.parameters['synchronous_mode'])) self.carla_settings.synchronous_mode = self.parameters['synchronous_mode'] self.loginfo('fixed_delta_seconds: {}'.format(self.parameters['fixed_delta_seconds'])) self.carla_settings.fixed_delta_seconds = self.parameters['fixed_delta_seconds'] carla_world.apply_settings(self.carla_settings) self.loginfo('Parameters:') for key in self.parameters: self.loginfo(' {}: {}'.format(key, self.parameters[key])) self.sync_mode = (self.carla_settings.synchronous_mode and (not self.parameters['passive'])) if (self.carla_settings.synchronous_mode and self.parameters['passive']): self.loginfo('Passive mode is enabled and CARLA world is configured in synchronous mode. This configuration requires another client ticking the CARLA world.') self.carla_control_queue = queue.Queue() self.actor_factory = ActorFactory(self, carla_world, self.sync_mode) self.world_info = WorldInfo(carla_world=self.carla_world, node=self) self.debug_helper = DebugHelper(carla_world.debug, self) self.clock_publisher = self.new_publisher(Clock, 'clock', 10) self.status_publisher = CarlaStatusPublisher(self.carla_settings.synchronous_mode, self.carla_settings.fixed_delta_seconds, self) self._all_vehicle_control_commands_received = Event() self._expected_ego_vehicle_control_command_ids = [] self._expected_ego_vehicle_control_command_ids_lock = Lock() if self.sync_mode: self.carla_run_state = CarlaControl.PLAY self.carla_control_subscriber = self.new_subscription(CarlaControl, '/carla/control', (lambda control: self.carla_control_queue.put(control.command)), qos_profile=10, callback_group=self.callback_group) self.synchronous_mode_update_thread = Thread(target=self._synchronous_mode_update) self.synchronous_mode_update_thread.start() else: self.timestamp_last_run = 0.0 self.actor_factory.start() self.on_tick_id = self.carla_world.on_tick(self._carla_time_tick) self._registered_actors = [] self.spawn_object_service = self.new_service(SpawnObject, '/carla/spawn_object', self.spawn_object) self.destroy_object_service = self.new_service(DestroyObject, '/carla/destroy_object', self.destroy_object) self.get_blueprints_service = self.new_service(GetBlueprints, '/carla/get_blueprints', self.get_blueprints, callback_group=self.callback_group) self.carla_weather_subscriber = self.new_subscription(CarlaWeatherParameters, '/carla/weather_control', self.on_weather_changed, qos_profile=10, callback_group=self.callback_group)<|docstring|>Initialize the bridge<|endoftext|>
190639a62bc5b1e757592eccdb60dad13cfdbd68173a01e7e5c274fb8254eda4
def on_weather_changed(self, weather_parameters): '\n Callback on new weather parameters\n :return:\n ' if (not self.carla_world): return self.loginfo('Applying weather parameters...') weather = carla.WeatherParameters() weather.cloudiness = weather_parameters.cloudiness weather.precipitation = weather_parameters.precipitation weather.precipitation_deposits = weather_parameters.precipitation_deposits weather.wind_intensity = weather_parameters.wind_intensity weather.fog_density = weather_parameters.fog_density weather.fog_distance = weather_parameters.fog_distance weather.wetness = weather_parameters.wetness weather.sun_azimuth_angle = weather_parameters.sun_azimuth_angle weather.sun_altitude_angle = weather_parameters.sun_altitude_angle self.carla_world.set_weather(weather)
Callback on new weather parameters :return:
carla_ros_bridge/src/carla_ros_bridge/bridge.py
on_weather_changed
ibrahim-kudan/ros-bridge
0
python
def on_weather_changed(self, weather_parameters): '\n Callback on new weather parameters\n :return:\n ' if (not self.carla_world): return self.loginfo('Applying weather parameters...') weather = carla.WeatherParameters() weather.cloudiness = weather_parameters.cloudiness weather.precipitation = weather_parameters.precipitation weather.precipitation_deposits = weather_parameters.precipitation_deposits weather.wind_intensity = weather_parameters.wind_intensity weather.fog_density = weather_parameters.fog_density weather.fog_distance = weather_parameters.fog_distance weather.wetness = weather_parameters.wetness weather.sun_azimuth_angle = weather_parameters.sun_azimuth_angle weather.sun_altitude_angle = weather_parameters.sun_altitude_angle self.carla_world.set_weather(weather)
def on_weather_changed(self, weather_parameters): '\n Callback on new weather parameters\n :return:\n ' if (not self.carla_world): return self.loginfo('Applying weather parameters...') weather = carla.WeatherParameters() weather.cloudiness = weather_parameters.cloudiness weather.precipitation = weather_parameters.precipitation weather.precipitation_deposits = weather_parameters.precipitation_deposits weather.wind_intensity = weather_parameters.wind_intensity weather.fog_density = weather_parameters.fog_density weather.fog_distance = weather_parameters.fog_distance weather.wetness = weather_parameters.wetness weather.sun_azimuth_angle = weather_parameters.sun_azimuth_angle weather.sun_altitude_angle = weather_parameters.sun_altitude_angle self.carla_world.set_weather(weather)<|docstring|>Callback on new weather parameters :return:<|endoftext|>
184da00ffd1f926bb3beb9459974c1d1f4f896ac1a588f88c80611bb80a7fb95
def process_run_state(self): '\n process state changes\n ' command = None while (not self.carla_control_queue.empty()): command = self.carla_control_queue.get() while ((command is not None) and roscomp.ok()): self.carla_run_state = command if (self.carla_run_state == CarlaControl.PAUSE): self.loginfo('State set to PAUSED') self.status_publisher.set_synchronous_mode_running(False) command = self.carla_control_queue.get() elif (self.carla_run_state == CarlaControl.PLAY): self.loginfo('State set to PLAY') self.status_publisher.set_synchronous_mode_running(True) return elif (self.carla_run_state == CarlaControl.STEP_ONCE): self.loginfo('Execute single step.') self.status_publisher.set_synchronous_mode_running(True) self.carla_control_queue.put(CarlaControl.PAUSE) return
process state changes
carla_ros_bridge/src/carla_ros_bridge/bridge.py
process_run_state
ibrahim-kudan/ros-bridge
0
python
def process_run_state(self): '\n \n ' command = None while (not self.carla_control_queue.empty()): command = self.carla_control_queue.get() while ((command is not None) and roscomp.ok()): self.carla_run_state = command if (self.carla_run_state == CarlaControl.PAUSE): self.loginfo('State set to PAUSED') self.status_publisher.set_synchronous_mode_running(False) command = self.carla_control_queue.get() elif (self.carla_run_state == CarlaControl.PLAY): self.loginfo('State set to PLAY') self.status_publisher.set_synchronous_mode_running(True) return elif (self.carla_run_state == CarlaControl.STEP_ONCE): self.loginfo('Execute single step.') self.status_publisher.set_synchronous_mode_running(True) self.carla_control_queue.put(CarlaControl.PAUSE) return
def process_run_state(self): '\n \n ' command = None while (not self.carla_control_queue.empty()): command = self.carla_control_queue.get() while ((command is not None) and roscomp.ok()): self.carla_run_state = command if (self.carla_run_state == CarlaControl.PAUSE): self.loginfo('State set to PAUSED') self.status_publisher.set_synchronous_mode_running(False) command = self.carla_control_queue.get() elif (self.carla_run_state == CarlaControl.PLAY): self.loginfo('State set to PLAY') self.status_publisher.set_synchronous_mode_running(True) return elif (self.carla_run_state == CarlaControl.STEP_ONCE): self.loginfo('Execute single step.') self.status_publisher.set_synchronous_mode_running(True) self.carla_control_queue.put(CarlaControl.PAUSE) return<|docstring|>process state changes<|endoftext|>
cbc507c4293194610c53e84168a48a76e25ef626f84eaeb19f5b7a54a2a20adc
def _synchronous_mode_update(self): '\n execution loop for synchronous mode\n ' while ((not self.shutdown.is_set()) and roscomp.ok()): self.process_run_state() if self.parameters['synchronous_mode_wait_for_vehicle_control_command']: self._expected_ego_vehicle_control_command_ids = [] with self._expected_ego_vehicle_control_command_ids_lock: for (actor_id, actor) in self.actor_factory.actors.items(): if isinstance(actor, EgoVehicle): self._expected_ego_vehicle_control_command_ids.append(actor_id) self.actor_factory.update_available_objects() frame = self.carla_world.tick() world_snapshot = self.carla_world.get_snapshot() self.status_publisher.set_frame(frame) self.update_clock(world_snapshot.timestamp) self.logdebug('Tick for frame {} returned. Waiting for sensor data...'.format(frame)) self._update(frame, world_snapshot.timestamp.elapsed_seconds) self.logdebug('Waiting for sensor data finished.') if self.parameters['synchronous_mode_wait_for_vehicle_control_command']: if self._expected_ego_vehicle_control_command_ids: if (not self._all_vehicle_control_commands_received.wait(CarlaRosBridge.VEHICLE_CONTROL_TIMEOUT)): self.logwarn('Timeout ({}s) while waiting for vehicle control commands. Missing command from actor ids {}'.format(CarlaRosBridge.VEHICLE_CONTROL_TIMEOUT, self._expected_ego_vehicle_control_command_ids)) self._all_vehicle_control_commands_received.clear()
execution loop for synchronous mode
carla_ros_bridge/src/carla_ros_bridge/bridge.py
_synchronous_mode_update
ibrahim-kudan/ros-bridge
0
python
def _synchronous_mode_update(self): '\n \n ' while ((not self.shutdown.is_set()) and roscomp.ok()): self.process_run_state() if self.parameters['synchronous_mode_wait_for_vehicle_control_command']: self._expected_ego_vehicle_control_command_ids = [] with self._expected_ego_vehicle_control_command_ids_lock: for (actor_id, actor) in self.actor_factory.actors.items(): if isinstance(actor, EgoVehicle): self._expected_ego_vehicle_control_command_ids.append(actor_id) self.actor_factory.update_available_objects() frame = self.carla_world.tick() world_snapshot = self.carla_world.get_snapshot() self.status_publisher.set_frame(frame) self.update_clock(world_snapshot.timestamp) self.logdebug('Tick for frame {} returned. Waiting for sensor data...'.format(frame)) self._update(frame, world_snapshot.timestamp.elapsed_seconds) self.logdebug('Waiting for sensor data finished.') if self.parameters['synchronous_mode_wait_for_vehicle_control_command']: if self._expected_ego_vehicle_control_command_ids: if (not self._all_vehicle_control_commands_received.wait(CarlaRosBridge.VEHICLE_CONTROL_TIMEOUT)): self.logwarn('Timeout ({}s) while waiting for vehicle control commands. Missing command from actor ids {}'.format(CarlaRosBridge.VEHICLE_CONTROL_TIMEOUT, self._expected_ego_vehicle_control_command_ids)) self._all_vehicle_control_commands_received.clear()
def _synchronous_mode_update(self): '\n \n ' while ((not self.shutdown.is_set()) and roscomp.ok()): self.process_run_state() if self.parameters['synchronous_mode_wait_for_vehicle_control_command']: self._expected_ego_vehicle_control_command_ids = [] with self._expected_ego_vehicle_control_command_ids_lock: for (actor_id, actor) in self.actor_factory.actors.items(): if isinstance(actor, EgoVehicle): self._expected_ego_vehicle_control_command_ids.append(actor_id) self.actor_factory.update_available_objects() frame = self.carla_world.tick() world_snapshot = self.carla_world.get_snapshot() self.status_publisher.set_frame(frame) self.update_clock(world_snapshot.timestamp) self.logdebug('Tick for frame {} returned. Waiting for sensor data...'.format(frame)) self._update(frame, world_snapshot.timestamp.elapsed_seconds) self.logdebug('Waiting for sensor data finished.') if self.parameters['synchronous_mode_wait_for_vehicle_control_command']: if self._expected_ego_vehicle_control_command_ids: if (not self._all_vehicle_control_commands_received.wait(CarlaRosBridge.VEHICLE_CONTROL_TIMEOUT)): self.logwarn('Timeout ({}s) while waiting for vehicle control commands. Missing command from actor ids {}'.format(CarlaRosBridge.VEHICLE_CONTROL_TIMEOUT, self._expected_ego_vehicle_control_command_ids)) self._all_vehicle_control_commands_received.clear()<|docstring|>execution loop for synchronous mode<|endoftext|>
2309db9c8784416ce4cd779e15c52fe067f224fef36ea1b9915145dc558c3ed8
def _carla_time_tick(self, carla_snapshot): '\n Private callback registered at carla.World.on_tick()\n to trigger cyclic updates.\n\n After successful locking the update mutex\n (only perform trylock to respect bridge processing time)\n the clock and the children are updated.\n Finally the ROS messages collected to be published are sent out.\n\n :param carla_timestamp: the current carla time\n :type carla_timestamp: carla.Timestamp\n :return:\n ' if (not self.shutdown.is_set()): if (self.timestamp_last_run < carla_snapshot.timestamp.elapsed_seconds): self.timestamp_last_run = carla_snapshot.timestamp.elapsed_seconds self.update_clock(carla_snapshot.timestamp) self.status_publisher.set_frame(carla_snapshot.frame) self._update(carla_snapshot.frame, carla_snapshot.timestamp.elapsed_seconds)
Private callback registered at carla.World.on_tick() to trigger cyclic updates. After successful locking the update mutex (only perform trylock to respect bridge processing time) the clock and the children are updated. Finally the ROS messages collected to be published are sent out. :param carla_timestamp: the current carla time :type carla_timestamp: carla.Timestamp :return:
carla_ros_bridge/src/carla_ros_bridge/bridge.py
_carla_time_tick
ibrahim-kudan/ros-bridge
0
python
def _carla_time_tick(self, carla_snapshot): '\n Private callback registered at carla.World.on_tick()\n to trigger cyclic updates.\n\n After successful locking the update mutex\n (only perform trylock to respect bridge processing time)\n the clock and the children are updated.\n Finally the ROS messages collected to be published are sent out.\n\n :param carla_timestamp: the current carla time\n :type carla_timestamp: carla.Timestamp\n :return:\n ' if (not self.shutdown.is_set()): if (self.timestamp_last_run < carla_snapshot.timestamp.elapsed_seconds): self.timestamp_last_run = carla_snapshot.timestamp.elapsed_seconds self.update_clock(carla_snapshot.timestamp) self.status_publisher.set_frame(carla_snapshot.frame) self._update(carla_snapshot.frame, carla_snapshot.timestamp.elapsed_seconds)
def _carla_time_tick(self, carla_snapshot): '\n Private callback registered at carla.World.on_tick()\n to trigger cyclic updates.\n\n After successful locking the update mutex\n (only perform trylock to respect bridge processing time)\n the clock and the children are updated.\n Finally the ROS messages collected to be published are sent out.\n\n :param carla_timestamp: the current carla time\n :type carla_timestamp: carla.Timestamp\n :return:\n ' if (not self.shutdown.is_set()): if (self.timestamp_last_run < carla_snapshot.timestamp.elapsed_seconds): self.timestamp_last_run = carla_snapshot.timestamp.elapsed_seconds self.update_clock(carla_snapshot.timestamp) self.status_publisher.set_frame(carla_snapshot.frame) self._update(carla_snapshot.frame, carla_snapshot.timestamp.elapsed_seconds)<|docstring|>Private callback registered at carla.World.on_tick() to trigger cyclic updates. After successful locking the update mutex (only perform trylock to respect bridge processing time) the clock and the children are updated. Finally the ROS messages collected to be published are sent out. :param carla_timestamp: the current carla time :type carla_timestamp: carla.Timestamp :return:<|endoftext|>
eddea590a2ad6fd41717e4b65d8338823eff623e6e80c13ca907db1af1d76e70
def _update(self, frame_id, timestamp): '\n update all actors\n :return:\n ' self.world_info.update(frame_id, timestamp) self.actor_factory.update_actor_states(frame_id, timestamp)
update all actors :return:
carla_ros_bridge/src/carla_ros_bridge/bridge.py
_update
ibrahim-kudan/ros-bridge
0
python
def _update(self, frame_id, timestamp): '\n update all actors\n :return:\n ' self.world_info.update(frame_id, timestamp) self.actor_factory.update_actor_states(frame_id, timestamp)
def _update(self, frame_id, timestamp): '\n update all actors\n :return:\n ' self.world_info.update(frame_id, timestamp) self.actor_factory.update_actor_states(frame_id, timestamp)<|docstring|>update all actors :return:<|endoftext|>
276cc1bc27960aa6ba3d463dd56f37095664bd43874776ff77551cbf97fa541a
def update_clock(self, carla_timestamp): '\n perform the update of the clock\n\n :param carla_timestamp: the current carla time\n :type carla_timestamp: carla.Timestamp\n :return:\n ' if roscomp.ok(): self.ros_timestamp = roscomp.ros_timestamp(carla_timestamp.elapsed_seconds, from_sec=True) self.clock_publisher.publish(Clock(clock=self.ros_timestamp))
perform the update of the clock :param carla_timestamp: the current carla time :type carla_timestamp: carla.Timestamp :return:
carla_ros_bridge/src/carla_ros_bridge/bridge.py
update_clock
ibrahim-kudan/ros-bridge
0
python
def update_clock(self, carla_timestamp): '\n perform the update of the clock\n\n :param carla_timestamp: the current carla time\n :type carla_timestamp: carla.Timestamp\n :return:\n ' if roscomp.ok(): self.ros_timestamp = roscomp.ros_timestamp(carla_timestamp.elapsed_seconds, from_sec=True) self.clock_publisher.publish(Clock(clock=self.ros_timestamp))
def update_clock(self, carla_timestamp): '\n perform the update of the clock\n\n :param carla_timestamp: the current carla time\n :type carla_timestamp: carla.Timestamp\n :return:\n ' if roscomp.ok(): self.ros_timestamp = roscomp.ros_timestamp(carla_timestamp.elapsed_seconds, from_sec=True) self.clock_publisher.publish(Clock(clock=self.ros_timestamp))<|docstring|>perform the update of the clock :param carla_timestamp: the current carla time :type carla_timestamp: carla.Timestamp :return:<|endoftext|>
0aba805613aa67f76e0b410833976d08b614765cc18afda1b1cfa8e3dc6e9d0a
def destroy(self): '\n Function to destroy this object.\n\n :return:\n ' self.loginfo('Shutting down...') self.shutdown.set() if (not self.sync_mode): if self.on_tick_id: self.carla_world.remove_on_tick(self.on_tick_id) self.actor_factory.thread.join() else: self.synchronous_mode_update_thread.join() self.loginfo('Object update finished.') self.debug_helper.destroy() self.status_publisher.destroy() self.destroy_service(self.spawn_object_service) self.destroy_service(self.destroy_object_service) self.destroy_subscription(self.carla_weather_subscriber) self.carla_control_queue.put(CarlaControl.STEP_ONCE) for uid in self._registered_actors: self.actor_factory.destroy_actor(uid) self.actor_factory.update_available_objects() self.actor_factory.clear() super(CarlaRosBridge, self).destroy()
Function to destroy this object. :return:
carla_ros_bridge/src/carla_ros_bridge/bridge.py
destroy
ibrahim-kudan/ros-bridge
0
python
def destroy(self): '\n Function to destroy this object.\n\n :return:\n ' self.loginfo('Shutting down...') self.shutdown.set() if (not self.sync_mode): if self.on_tick_id: self.carla_world.remove_on_tick(self.on_tick_id) self.actor_factory.thread.join() else: self.synchronous_mode_update_thread.join() self.loginfo('Object update finished.') self.debug_helper.destroy() self.status_publisher.destroy() self.destroy_service(self.spawn_object_service) self.destroy_service(self.destroy_object_service) self.destroy_subscription(self.carla_weather_subscriber) self.carla_control_queue.put(CarlaControl.STEP_ONCE) for uid in self._registered_actors: self.actor_factory.destroy_actor(uid) self.actor_factory.update_available_objects() self.actor_factory.clear() super(CarlaRosBridge, self).destroy()
def destroy(self): '\n Function to destroy this object.\n\n :return:\n ' self.loginfo('Shutting down...') self.shutdown.set() if (not self.sync_mode): if self.on_tick_id: self.carla_world.remove_on_tick(self.on_tick_id) self.actor_factory.thread.join() else: self.synchronous_mode_update_thread.join() self.loginfo('Object update finished.') self.debug_helper.destroy() self.status_publisher.destroy() self.destroy_service(self.spawn_object_service) self.destroy_service(self.destroy_object_service) self.destroy_subscription(self.carla_weather_subscriber) self.carla_control_queue.put(CarlaControl.STEP_ONCE) for uid in self._registered_actors: self.actor_factory.destroy_actor(uid) self.actor_factory.update_available_objects() self.actor_factory.clear() super(CarlaRosBridge, self).destroy()<|docstring|>Function to destroy this object. :return:<|endoftext|>
0a7fd057556edb541bcbfe0db39254d2e7454302f6165caf13a7e981ea0ce3a7
def __init__(self, huber_loss, n_heads, **kwargs): '\n Args:\n obs_shape: list. Shape of the observation tensor\n n_actions: int. Number of possible actions\n opt_conf: rltf.optimizers.OptimizerConf. Configuration for the optimizer\n gamma: float. Discount factor\n huber_loss: bool. Whether to use huber loss or not\n n_heads: Number of bootstrap heads\n ' super().__init__(**kwargs) self.huber_loss = huber_loss self.n_heads = n_heads self._conv_out = None
Args: obs_shape: list. Shape of the observation tensor n_actions: int. Number of possible actions opt_conf: rltf.optimizers.OptimizerConf. Configuration for the optimizer gamma: float. Discount factor huber_loss: bool. Whether to use huber loss or not n_heads: Number of bootstrap heads
rltf/models/bstrap_dqn.py
__init__
nikonikolov/rltf
90
python
def __init__(self, huber_loss, n_heads, **kwargs): '\n Args:\n obs_shape: list. Shape of the observation tensor\n n_actions: int. Number of possible actions\n opt_conf: rltf.optimizers.OptimizerConf. Configuration for the optimizer\n gamma: float. Discount factor\n huber_loss: bool. Whether to use huber loss or not\n n_heads: Number of bootstrap heads\n ' super().__init__(**kwargs) self.huber_loss = huber_loss self.n_heads = n_heads self._conv_out = None
def __init__(self, huber_loss, n_heads, **kwargs): '\n Args:\n obs_shape: list. Shape of the observation tensor\n n_actions: int. Number of possible actions\n opt_conf: rltf.optimizers.OptimizerConf. Configuration for the optimizer\n gamma: float. Discount factor\n huber_loss: bool. Whether to use huber loss or not\n n_heads: Number of bootstrap heads\n ' super().__init__(**kwargs) self.huber_loss = huber_loss self.n_heads = n_heads self._conv_out = None<|docstring|>Args: obs_shape: list. Shape of the observation tensor n_actions: int. Number of possible actions opt_conf: rltf.optimizers.OptimizerConf. Configuration for the optimizer gamma: float. Discount factor huber_loss: bool. Whether to use huber loss or not n_heads: Number of bootstrap heads<|endoftext|>
4cb11874912864fb2fcb6a98b8436ad276bac9c1f5d411db57b9824dddaeba9e
def _conv_nn(self, x): ' Build the Bootstrapped DQN architecture - as described in the original paper\n Args:\n x: tf.Tensor. Tensor for the input\n Returns:\n `tf.Tensor` of shape `[batch_size, n_heads, n_actions]`. Contains the Q-function for each action\n ' n_actions = self.n_actions def build_head(x): ' Build the head of the DQN network\n Args:\n x: tf.Tensor. Tensor for the input\n Returns:\n `tf.Tensor` of shape `[batch_size, 1, n_actions]`. Contains the Q-function for each action\n ' x = tf.layers.dense(x, units=512, activation=tf.nn.relu) x = tf.layers.dense(x, units=n_actions, activation=None) x = tf.expand_dims(x, axis=(- 2)) return x with tf.variable_scope('conv_net'): x = tf.layers.conv2d(x, filters=32, kernel_size=8, strides=4, padding='SAME', activation=tf.nn.relu) x = tf.layers.conv2d(x, filters=64, kernel_size=4, strides=2, padding='SAME', activation=tf.nn.relu) x = tf.layers.conv2d(x, filters=64, kernel_size=3, strides=1, padding='SAME', activation=tf.nn.relu) x = tf.layers.flatten(x) if (('agent_net' in tf.get_variable_scope().name) and (self._conv_out is None)): self._conv_out = x with tf.variable_scope('action_value'): heads = [build_head(x) for _ in range(self.n_heads)] x = tf.concat(heads, axis=(- 2)) return x
Build the Bootstrapped DQN architecture - as described in the original paper Args: x: tf.Tensor. Tensor for the input Returns: `tf.Tensor` of shape `[batch_size, n_heads, n_actions]`. Contains the Q-function for each action
rltf/models/bstrap_dqn.py
_conv_nn
nikonikolov/rltf
90
python
def _conv_nn(self, x): ' Build the Bootstrapped DQN architecture - as described in the original paper\n Args:\n x: tf.Tensor. Tensor for the input\n Returns:\n `tf.Tensor` of shape `[batch_size, n_heads, n_actions]`. Contains the Q-function for each action\n ' n_actions = self.n_actions def build_head(x): ' Build the head of the DQN network\n Args:\n x: tf.Tensor. Tensor for the input\n Returns:\n `tf.Tensor` of shape `[batch_size, 1, n_actions]`. Contains the Q-function for each action\n ' x = tf.layers.dense(x, units=512, activation=tf.nn.relu) x = tf.layers.dense(x, units=n_actions, activation=None) x = tf.expand_dims(x, axis=(- 2)) return x with tf.variable_scope('conv_net'): x = tf.layers.conv2d(x, filters=32, kernel_size=8, strides=4, padding='SAME', activation=tf.nn.relu) x = tf.layers.conv2d(x, filters=64, kernel_size=4, strides=2, padding='SAME', activation=tf.nn.relu) x = tf.layers.conv2d(x, filters=64, kernel_size=3, strides=1, padding='SAME', activation=tf.nn.relu) x = tf.layers.flatten(x) if (('agent_net' in tf.get_variable_scope().name) and (self._conv_out is None)): self._conv_out = x with tf.variable_scope('action_value'): heads = [build_head(x) for _ in range(self.n_heads)] x = tf.concat(heads, axis=(- 2)) return x
def _conv_nn(self, x): ' Build the Bootstrapped DQN architecture - as described in the original paper\n Args:\n x: tf.Tensor. Tensor for the input\n Returns:\n `tf.Tensor` of shape `[batch_size, n_heads, n_actions]`. Contains the Q-function for each action\n ' n_actions = self.n_actions def build_head(x): ' Build the head of the DQN network\n Args:\n x: tf.Tensor. Tensor for the input\n Returns:\n `tf.Tensor` of shape `[batch_size, 1, n_actions]`. Contains the Q-function for each action\n ' x = tf.layers.dense(x, units=512, activation=tf.nn.relu) x = tf.layers.dense(x, units=n_actions, activation=None) x = tf.expand_dims(x, axis=(- 2)) return x with tf.variable_scope('conv_net'): x = tf.layers.conv2d(x, filters=32, kernel_size=8, strides=4, padding='SAME', activation=tf.nn.relu) x = tf.layers.conv2d(x, filters=64, kernel_size=4, strides=2, padding='SAME', activation=tf.nn.relu) x = tf.layers.conv2d(x, filters=64, kernel_size=3, strides=1, padding='SAME', activation=tf.nn.relu) x = tf.layers.flatten(x) if (('agent_net' in tf.get_variable_scope().name) and (self._conv_out is None)): self._conv_out = x with tf.variable_scope('action_value'): heads = [build_head(x) for _ in range(self.n_heads)] x = tf.concat(heads, axis=(- 2)) return x<|docstring|>Build the Bootstrapped DQN architecture - as described in the original paper Args: x: tf.Tensor. Tensor for the input Returns: `tf.Tensor` of shape `[batch_size, n_heads, n_actions]`. Contains the Q-function for each action<|endoftext|>
beac557bcd8de37f8a69ade804385b3c7c3368ca7305122deee619a12fc2917c
def _compute_estimate(self, agent_net): 'Get the Q value for the selected action\n Returns:\n `tf.Tensor` of shape `[None, n_heads]`\n ' a_mask = tf.one_hot(self.act_t_ph, self.n_actions, dtype=tf.float32) a_mask = tf.tile(tf.expand_dims(a_mask, axis=(- 2)), [1, self.n_heads, 1]) qf = tf.reduce_sum((agent_net * a_mask), axis=(- 1)) return qf
Get the Q value for the selected action Returns: `tf.Tensor` of shape `[None, n_heads]`
rltf/models/bstrap_dqn.py
_compute_estimate
nikonikolov/rltf
90
python
def _compute_estimate(self, agent_net): 'Get the Q value for the selected action\n Returns:\n `tf.Tensor` of shape `[None, n_heads]`\n ' a_mask = tf.one_hot(self.act_t_ph, self.n_actions, dtype=tf.float32) a_mask = tf.tile(tf.expand_dims(a_mask, axis=(- 2)), [1, self.n_heads, 1]) qf = tf.reduce_sum((agent_net * a_mask), axis=(- 1)) return qf
def _compute_estimate(self, agent_net): 'Get the Q value for the selected action\n Returns:\n `tf.Tensor` of shape `[None, n_heads]`\n ' a_mask = tf.one_hot(self.act_t_ph, self.n_actions, dtype=tf.float32) a_mask = tf.tile(tf.expand_dims(a_mask, axis=(- 2)), [1, self.n_heads, 1]) qf = tf.reduce_sum((agent_net * a_mask), axis=(- 1)) return qf<|docstring|>Get the Q value for the selected action Returns: `tf.Tensor` of shape `[None, n_heads]`<|endoftext|>
ea575dafadd7a051f481f805c3d4b2f13de295abdaeb71a3c81a07943cb88749
def _select_target(self, target_net): 'Select the Double DQN target\n Args:\n target_net: `tf.Tensor`. shape `[None, n_heads, n_actions]. The output from `self._nn_model()`\n for the target\n Returns:\n `tf.Tensor` of shape `[None, n_heads]`\n ' n_actions = self.n_actions agent_net = self._nn_model(self.obs_tp1, scope='agent_net') target_act = tf.argmax(agent_net, axis=(- 1), output_type=tf.int32) target_mask = tf.one_hot(target_act, n_actions, dtype=tf.float32) target_q = tf.reduce_sum((target_net * target_mask), axis=(- 1)) return target_q
Select the Double DQN target Args: target_net: `tf.Tensor`. shape `[None, n_heads, n_actions]. The output from `self._nn_model()` for the target Returns: `tf.Tensor` of shape `[None, n_heads]`
rltf/models/bstrap_dqn.py
_select_target
nikonikolov/rltf
90
python
def _select_target(self, target_net): 'Select the Double DQN target\n Args:\n target_net: `tf.Tensor`. shape `[None, n_heads, n_actions]. The output from `self._nn_model()`\n for the target\n Returns:\n `tf.Tensor` of shape `[None, n_heads]`\n ' n_actions = self.n_actions agent_net = self._nn_model(self.obs_tp1, scope='agent_net') target_act = tf.argmax(agent_net, axis=(- 1), output_type=tf.int32) target_mask = tf.one_hot(target_act, n_actions, dtype=tf.float32) target_q = tf.reduce_sum((target_net * target_mask), axis=(- 1)) return target_q
def _select_target(self, target_net): 'Select the Double DQN target\n Args:\n target_net: `tf.Tensor`. shape `[None, n_heads, n_actions]. The output from `self._nn_model()`\n for the target\n Returns:\n `tf.Tensor` of shape `[None, n_heads]`\n ' n_actions = self.n_actions agent_net = self._nn_model(self.obs_tp1, scope='agent_net') target_act = tf.argmax(agent_net, axis=(- 1), output_type=tf.int32) target_mask = tf.one_hot(target_act, n_actions, dtype=tf.float32) target_q = tf.reduce_sum((target_net * target_mask), axis=(- 1)) return target_q<|docstring|>Select the Double DQN target Args: target_net: `tf.Tensor`. shape `[None, n_heads, n_actions]. The output from `self._nn_model()` for the target Returns: `tf.Tensor` of shape `[None, n_heads]`<|endoftext|>
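`_select_target` implements the Double DQN rule: the online ("agent") network picks the greedy next action, while the target network supplies its value. A hedged NumPy illustration of that decoupling, with random values and assumed shapes:

# Illustrative sketch (not repository code): Double DQN target selection per head.
import numpy as np

rng = np.random.default_rng(0)
agent_q = rng.normal(size=(2, 3, 4))     # online net:  [batch, n_heads, n_actions]
target_q = rng.normal(size=(2, 3, 4))    # target net:  [batch, n_heads, n_actions]

best_a = np.argmax(agent_q, axis=-1)     # actions chosen by the online net
mask = np.eye(agent_q.shape[-1])[best_a] # [batch, n_heads, n_actions]
double_q = np.sum(target_q * mask, axis=-1)   # values taken from the target net

# Vanilla DQN would instead use np.max(target_q, axis=-1); decoupling action
# selection from evaluation is what reduces the over-estimation bias.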
97d0bab809ba38350bb896f301b4243f05982a51207e3187190ae96f4727f305
def _compute_backup(self, target): 'Compute the backup Q-value for each head\n Args:\n target: `tf.Tensor`, shape `[None, n_heads]. The output from `self._select_target()`\n Returns:\n `tf.Tensor` of shape `[None, n_heads]`\n ' done_mask = tf.cast(tf.logical_not(self.done_ph), tf.float32) done_mask = tf.expand_dims(done_mask, axis=(- 1)) rew_t = tf.expand_dims(self.rew_t_ph, axis=(- 1)) target_q = (rew_t + ((self.gamma * done_mask) * target)) return target_q
Compute the backup Q-value for each head Args: target: `tf.Tensor`, shape `[None, n_heads]. The output from `self._select_target()` Returns: `tf.Tensor` of shape `[None, n_heads]`
rltf/models/bstrap_dqn.py
_compute_backup
nikonikolov/rltf
90
python
def _compute_backup(self, target): 'Compute the backup Q-value for each head\n Args:\n target: `tf.Tensor`, shape `[None, n_heads]. The output from `self._select_target()`\n Returns:\n `tf.Tensor` of shape `[None, n_heads]`\n ' done_mask = tf.cast(tf.logical_not(self.done_ph), tf.float32) done_mask = tf.expand_dims(done_mask, axis=(- 1)) rew_t = tf.expand_dims(self.rew_t_ph, axis=(- 1)) target_q = (rew_t + ((self.gamma * done_mask) * target)) return target_q
def _compute_backup(self, target): 'Compute the backup Q-value for each head\n Args:\n target: `tf.Tensor`, shape `[None, n_heads]. The output from `self._select_target()`\n Returns:\n `tf.Tensor` of shape `[None, n_heads]`\n ' done_mask = tf.cast(tf.logical_not(self.done_ph), tf.float32) done_mask = tf.expand_dims(done_mask, axis=(- 1)) rew_t = tf.expand_dims(self.rew_t_ph, axis=(- 1)) target_q = (rew_t + ((self.gamma * done_mask) * target)) return target_q<|docstring|>Compute the backup Q-value for each head Args: target: `tf.Tensor`, shape `[None, n_heads]. The output from `self._select_target()` Returns: `tf.Tensor` of shape `[None, n_heads]`<|endoftext|>
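The backup above is the one-step Bellman target r + gamma * (1 - done) * Q_target, broadcast across heads. A tiny numeric check with made-up numbers:

# Minimal numeric check of the backup formula (assumed values).
import numpy as np

gamma = 0.99
rew = np.array([1.0, 0.5])             # [batch]
done = np.array([False, True])
target = np.array([[2.0, 3.0],         # [batch, n_heads]
                   [4.0, 5.0]])

backup = rew[:, None] + gamma * (~done)[:, None] * target
# First transition bootstraps: 1 + 0.99 * [2, 3] = [2.98, 3.97]
# Second is terminal, so the backup is just the reward: [0.5, 0.5]
print(backup)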
354894fa8cc6da2df312f6b5e908916d7201ead7215c6bd8d5689d6dea24cd46
def _compute_loss(self, estimate, target, name): '\n Args: shape `[None, n_heads]`\n Returns:\n List of size `n_heads` with a scalar tensor loss for each head\n ' if self.huber_loss: loss = tf.losses.huber_loss(target, estimate, reduction=tf.losses.Reduction.NONE) else: loss = tf.losses.mean_squared_error(target, estimate, reduction=tf.losses.Reduction.NONE) losses = tf.split(loss, self.n_heads, axis=(- 1)) losses = [tf.reduce_mean(loss) for loss in losses] tf.summary.scalar(name, (tf.add_n(losses) / self.n_heads)) return losses
Args: estimate, target: `tf.Tensor`s of shape `[None, n_heads]` Returns: List of size `n_heads` with a scalar tensor loss for each head
rltf/models/bstrap_dqn.py
_compute_loss
nikonikolov/rltf
90
python
def _compute_loss(self, estimate, target, name): '\n Args: shape `[None, n_heads]`\n Returns:\n List of size `n_heads` with a scalar tensor loss for each head\n ' if self.huber_loss: loss = tf.losses.huber_loss(target, estimate, reduction=tf.losses.Reduction.NONE) else: loss = tf.losses.mean_squared_error(target, estimate, reduction=tf.losses.Reduction.NONE) losses = tf.split(loss, self.n_heads, axis=(- 1)) losses = [tf.reduce_mean(loss) for loss in losses] tf.summary.scalar(name, (tf.add_n(losses) / self.n_heads)) return losses
def _compute_loss(self, estimate, target, name): '\n Args: shape `[None, n_heads]`\n Returns:\n List of size `n_heads` with a scalar tensor loss for each head\n ' if self.huber_loss: loss = tf.losses.huber_loss(target, estimate, reduction=tf.losses.Reduction.NONE) else: loss = tf.losses.mean_squared_error(target, estimate, reduction=tf.losses.Reduction.NONE) losses = tf.split(loss, self.n_heads, axis=(- 1)) losses = [tf.reduce_mean(loss) for loss in losses] tf.summary.scalar(name, (tf.add_n(losses) / self.n_heads)) return losses<|docstring|>Args: shape `[None, n_heads]` Returns: List of size `n_heads` with a scalar tensor loss for each head<|endoftext|>
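Returning one scalar loss per head leaves room for the bootstrap masks of the original paper, where each head only trains on a Bernoulli-sampled subset of transitions. Whether this repository applies such masks is not shown in this file, so the combination below is only an assumption about how the per-head losses could be weighted:

# Sketch under assumptions: Bernoulli bootstrap masks over per-head losses.
import numpy as np

n_heads = 3
per_head_loss = np.array([0.7, 0.2, 0.5])
mask = np.random.binomial(1, 0.5, size=n_heads)   # which heads see this batch
total_loss = float(np.sum(per_head_loss * mask))
print(total_loss)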
ab62c998ea418f75689407b5f2d174c042fd5fa9097eea26069099307f71cef5
def _act_eval_vote(self, agent_net, name): 'Evaluation action based on voting policy from the heads' def count_value(votes, i): count = tf.equal(votes, i) count = tf.cast(count, tf.int32) count = tf.reduce_sum(count, axis=(- 1), keepdims=True) return count votes = tf.argmax(agent_net, axis=(- 1), output_type=tf.int32) votes = [count_value(votes, i) for i in range(self.n_actions)] votes = tf.concat(votes, axis=(- 1)) action = tf.argmax(votes, axis=(- 1), output_type=tf.int32, name=name) p_a = tf.identity(action[0], name='plot/eval/a') p_vote = tf.identity(votes[0], name='plot/eval/vote') self.plot_conf.set_eval_spec(dict(eval_actions=dict(a_vote=dict(height=p_vote, a=p_a)))) return action
Evaluation action based on voting policy from the heads
rltf/models/bstrap_dqn.py
_act_eval_vote
nikonikolov/rltf
90
python
def _act_eval_vote(self, agent_net, name): def count_value(votes, i): count = tf.equal(votes, i) count = tf.cast(count, tf.int32) count = tf.reduce_sum(count, axis=(- 1), keepdims=True) return count votes = tf.argmax(agent_net, axis=(- 1), output_type=tf.int32) votes = [count_value(votes, i) for i in range(self.n_actions)] votes = tf.concat(votes, axis=(- 1)) action = tf.argmax(votes, axis=(- 1), output_type=tf.int32, name=name) p_a = tf.identity(action[0], name='plot/eval/a') p_vote = tf.identity(votes[0], name='plot/eval/vote') self.plot_conf.set_eval_spec(dict(eval_actions=dict(a_vote=dict(height=p_vote, a=p_a)))) return action
def _act_eval_vote(self, agent_net, name): def count_value(votes, i): count = tf.equal(votes, i) count = tf.cast(count, tf.int32) count = tf.reduce_sum(count, axis=(- 1), keepdims=True) return count votes = tf.argmax(agent_net, axis=(- 1), output_type=tf.int32) votes = [count_value(votes, i) for i in range(self.n_actions)] votes = tf.concat(votes, axis=(- 1)) action = tf.argmax(votes, axis=(- 1), output_type=tf.int32, name=name) p_a = tf.identity(action[0], name='plot/eval/a') p_vote = tf.identity(votes[0], name='plot/eval/vote') self.plot_conf.set_eval_spec(dict(eval_actions=dict(a_vote=dict(height=p_vote, a=p_a)))) return action<|docstring|>Evaluation action based on voting policy from the heads<|endoftext|>
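The voting evaluation policy simply counts, per action, how many heads consider it greedy and then picks the most-voted action. The same logic in NumPy with toy Q-values:

# Illustrative sketch (assumed shapes): majority vote over the heads' greedy actions.
import numpy as np

q = np.array([[[0.1, 0.9, 0.0],      # head 0 votes for action 1
               [0.2, 0.8, 0.3],      # head 1 votes for action 1
               [0.7, 0.1, 0.2]]])    # head 2 votes for action 0
votes = np.argmax(q, axis=-1)                                             # [batch, n_heads]
counts = np.stack([(votes == a).sum(axis=-1) for a in range(q.shape[-1])], axis=-1)
action = np.argmax(counts, axis=-1)                                       # -> array([1])
print(action)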
4b5a6e7c65d76c86bdf0f12104a7c148b45432cfd9ab6b20f86e4e7ba195651d
def _act_eval_greedy(self, agent_net, name): 'Evaluation action based on the greedy action w.r.t. the mean of all heads' mean = tf.reduce_mean(agent_net, axis=1) action = tf.argmax(mean, axis=(- 1), output_type=tf.int32, name=name) p_a = tf.identity(action[0], name='plot/eval/a') p_mean = tf.identity(mean[0], name='plot/eval/mean') self.plot_conf.set_eval_spec(dict(eval_actions=dict(a_mean=dict(height=p_mean, a=p_a)))) return action
Evaluation action based on the greedy action w.r.t. the mean of all heads
rltf/models/bstrap_dqn.py
_act_eval_greedy
nikonikolov/rltf
90
python
def _act_eval_greedy(self, agent_net, name): mean = tf.reduce_mean(agent_net, axis=1) action = tf.argmax(mean, axis=(- 1), output_type=tf.int32, name=name) p_a = tf.identity(action[0], name='plot/eval/a') p_mean = tf.identity(mean[0], name='plot/eval/mean') self.plot_conf.set_eval_spec(dict(eval_actions=dict(a_mean=dict(height=p_mean, a=p_a)))) return action
def _act_eval_greedy(self, agent_net, name): mean = tf.reduce_mean(agent_net, axis=1) action = tf.argmax(mean, axis=(- 1), output_type=tf.int32, name=name) p_a = tf.identity(action[0], name='plot/eval/a') p_mean = tf.identity(mean[0], name='plot/eval/mean') self.plot_conf.set_eval_spec(dict(eval_actions=dict(a_mean=dict(height=p_mean, a=p_a)))) return action<|docstring|>Evaluation action based on the greedy action w.r.t. the mean of all heads<|endoftext|>
f22a6b144542d3886c922675519fa5274fb7a4c7bc2d8da7ec0b2e77c3837681
def _act_train(self, agent_net, name): 'Select the greedy action from the selected head\n Args:\n agent_net: `tf.Tensor`, shape `[None, n_heads, n_actions]. The tensor output from\n `self._nn_model()` for the agent\n Returns:\n `tf.Tensor` of shape `[None]`\n ' head_mask = tf.one_hot(self._active_head, self.n_heads, dtype=tf.float32) head_mask = tf.tile(head_mask, [tf.shape(agent_net)[0], 1]) head_mask = tf.expand_dims(head_mask, axis=(- 1)) q_head = tf.reduce_sum((agent_net * head_mask), axis=1) action = tf.argmax(q_head, axis=(- 1), output_type=tf.int32, name=name) return dict(action=action)
Select the greedy action from the selected head Args: agent_net: `tf.Tensor`, shape `[None, n_heads, n_actions]. The tensor output from `self._nn_model()` for the agent Returns: `tf.Tensor` of shape `[None]`
rltf/models/bstrap_dqn.py
_act_train
nikonikolov/rltf
90
python
def _act_train(self, agent_net, name): 'Select the greedy action from the selected head\n Args:\n agent_net: `tf.Tensor`, shape `[None, n_heads, n_actions]. The tensor output from\n `self._nn_model()` for the agent\n Returns:\n `tf.Tensor` of shape `[None]`\n ' head_mask = tf.one_hot(self._active_head, self.n_heads, dtype=tf.float32) head_mask = tf.tile(head_mask, [tf.shape(agent_net)[0], 1]) head_mask = tf.expand_dims(head_mask, axis=(- 1)) q_head = tf.reduce_sum((agent_net * head_mask), axis=1) action = tf.argmax(q_head, axis=(- 1), output_type=tf.int32, name=name) return dict(action=action)
def _act_train(self, agent_net, name): 'Select the greedy action from the selected head\n Args:\n agent_net: `tf.Tensor`, shape `[None, n_heads, n_actions]. The tensor output from\n `self._nn_model()` for the agent\n Returns:\n `tf.Tensor` of shape `[None]`\n ' head_mask = tf.one_hot(self._active_head, self.n_heads, dtype=tf.float32) head_mask = tf.tile(head_mask, [tf.shape(agent_net)[0], 1]) head_mask = tf.expand_dims(head_mask, axis=(- 1)) q_head = tf.reduce_sum((agent_net * head_mask), axis=1) action = tf.argmax(q_head, axis=(- 1), output_type=tf.int32, name=name) return dict(action=action)<|docstring|>Select the greedy action from the selected head Args: agent_net: `tf.Tensor`, shape `[None, n_heads, n_actions]. The tensor output from `self._nn_model()` for the agent Returns: `tf.Tensor` of shape `[None]`<|endoftext|>
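During training only the currently active head drives behaviour. In the original Bootstrapped DQN that head is resampled once per episode, which is what yields temporally extended exploration; how `self._active_head` is actually assigned is not visible in this file, so the snippet below is only a conceptual illustration:

# Conceptual sketch (assumption, not repository code): resample the active head
# at the start of each episode and keep it fixed until the episode ends.
import random

n_heads = 10
active_head = random.randrange(n_heads)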
b7e8c7a695a4a27b3af972e75377f1e03efd0f2fd1d45caa170055756396ef3c
def build_head(x): ' Build the head of the DQN network\n Args:\n x: tf.Tensor. Tensor for the input\n Returns:\n `tf.Tensor` of shape `[batch_size, 1, n_actions]`. Contains the Q-function for each action\n ' x = tf.layers.dense(x, units=512, activation=tf.nn.relu) x = tf.layers.dense(x, units=n_actions, activation=None) x = tf.expand_dims(x, axis=(- 2)) return x
Build the head of the DQN network Args: x: tf.Tensor. Tensor for the input Returns: `tf.Tensor` of shape `[batch_size, 1, n_actions]`. Contains the Q-function for each action
rltf/models/bstrap_dqn.py
build_head
nikonikolov/rltf
90
python
def build_head(x): ' Build the head of the DQN network\n Args:\n x: tf.Tensor. Tensor for the input\n Returns:\n `tf.Tensor` of shape `[batch_size, 1, n_actions]`. Contains the Q-function for each action\n ' x = tf.layers.dense(x, units=512, activation=tf.nn.relu) x = tf.layers.dense(x, units=n_actions, activation=None) x = tf.expand_dims(x, axis=(- 2)) return x
def build_head(x): ' Build the head of the DQN network\n Args:\n x: tf.Tensor. Tensor for the input\n Returns:\n `tf.Tensor` of shape `[batch_size, 1, n_actions]`. Contains the Q-function for each action\n ' x = tf.layers.dense(x, units=512, activation=tf.nn.relu) x = tf.layers.dense(x, units=n_actions, activation=None) x = tf.expand_dims(x, axis=(- 2)) return x<|docstring|>Build the head of the DQN network Args: x: tf.Tensor. Tensor for the input Returns: `tf.Tensor` of shape `[batch_size, 1, n_actions]`. Contains the Q-function for each action<|endoftext|>
c7da68dd16807d0c355a4448a851ea807cae2bf745d3f000ff4e1a2e0fe9ad37
@news_blu.route('/comment_like', methods=['POST']) @user_login_data def comment_like(): '\n 评论点赞\n :return:\n ' user = g.user if (not user): return jsonify(errno=RET.SESSIONERR, errmsg='用户未登录') comment_id = request.json.get('comment_id') action = request.json.get('action') if (not all([comment_id, action])): return jsonify(errno=RET.PARAMERR, errmsg='参数错误') if (action not in ['add', 'remove']): return jsonify(errno=RET.PARAMERR, errmsg='参数错误') try: comment_id = int(comment_id) except Exception as e: current_app.logger.error(e) return jsonify(errno=RET.PARAMERR, errmsg='参数错误') try: comment = Comment.query.get(comment_id) except Exception as e: current_app.logger.error(e) return jsonify(errno=RET.DBERR, errmsg='数据查询错误') if (not comment): return jsonify(errno=RET.NODATA, errmsg='评论不存在') if (action == 'add'): comment_like_model = CommentLike.query.filter((CommentLike.user_id == user.id), (CommentLike.comment_id == comment.id)).first() if (not comment_like_model): comment_like_model = CommentLike() comment_like_model.user_id = user.id comment_like_model.comment_id = comment.id db.session.add(comment_like_model) comment.like_count += 1 else: comment_like_model = CommentLike.query.filter((CommentLike.user_id == user.id), (CommentLike.comment_id == comment.id)).first() if comment_like_model: db.session.delete(comment_like_model) comment.like_count -= 1 try: db.session.commit() except Exception as e: db.session.rollback() current_app.logger.error(e) return jsonify(errno=RET.DBERR, errmsg='数据库操作失败') return jsonify(errno=RET.OK, errmsg='OK')
Like a comment :return:
info/modules/news/views.py
comment_like
summerliu1024/information12
1
python
@news_blu.route('/comment_like', methods=['POST']) @user_login_data def comment_like(): '\n 评论点赞\n :return:\n ' user = g.user if (not user): return jsonify(errno=RET.SESSIONERR, errmsg='用户未登录') comment_id = request.json.get('comment_id') action = request.json.get('action') if (not all([comment_id, action])): return jsonify(errno=RET.PARAMERR, errmsg='参数错误') if (action not in ['add', 'remove']): return jsonify(errno=RET.PARAMERR, errmsg='参数错误') try: comment_id = int(comment_id) except Exception as e: current_app.logger.error(e) return jsonify(errno=RET.PARAMERR, errmsg='参数错误') try: comment = Comment.query.get(comment_id) except Exception as e: current_app.logger.error(e) return jsonify(errno=RET.DBERR, errmsg='数据查询错误') if (not comment): return jsonify(errno=RET.NODATA, errmsg='评论不存在') if (action == 'add'): comment_like_model = CommentLike.query.filter((CommentLike.user_id == user.id), (CommentLike.comment_id == comment.id)).first() if (not comment_like_model): comment_like_model = CommentLike() comment_like_model.user_id = user.id comment_like_model.comment_id = comment.id db.session.add(comment_like_model) comment.like_count += 1 else: comment_like_model = CommentLike.query.filter((CommentLike.user_id == user.id), (CommentLike.comment_id == comment.id)).first() if comment_like_model: db.session.delete(comment_like_model) comment.like_count -= 1 try: db.session.commit() except Exception as e: db.session.rollback() current_app.logger.error(e) return jsonify(errno=RET.DBERR, errmsg='数据库操作失败') return jsonify(errno=RET.OK, errmsg='OK')
@news_blu.route('/comment_like', methods=['POST']) @user_login_data def comment_like(): '\n 评论点赞\n :return:\n ' user = g.user if (not user): return jsonify(errno=RET.SESSIONERR, errmsg='用户未登录') comment_id = request.json.get('comment_id') action = request.json.get('action') if (not all([comment_id, action])): return jsonify(errno=RET.PARAMERR, errmsg='参数错误') if (action not in ['add', 'remove']): return jsonify(errno=RET.PARAMERR, errmsg='参数错误') try: comment_id = int(comment_id) except Exception as e: current_app.logger.error(e) return jsonify(errno=RET.PARAMERR, errmsg='参数错误') try: comment = Comment.query.get(comment_id) except Exception as e: current_app.logger.error(e) return jsonify(errno=RET.DBERR, errmsg='数据查询错误') if (not comment): return jsonify(errno=RET.NODATA, errmsg='评论不存在') if (action == 'add'): comment_like_model = CommentLike.query.filter((CommentLike.user_id == user.id), (CommentLike.comment_id == comment.id)).first() if (not comment_like_model): comment_like_model = CommentLike() comment_like_model.user_id = user.id comment_like_model.comment_id = comment.id db.session.add(comment_like_model) comment.like_count += 1 else: comment_like_model = CommentLike.query.filter((CommentLike.user_id == user.id), (CommentLike.comment_id == comment.id)).first() if comment_like_model: db.session.delete(comment_like_model) comment.like_count -= 1 try: db.session.commit() except Exception as e: db.session.rollback() current_app.logger.error(e) return jsonify(errno=RET.DBERR, errmsg='数据库操作失败') return jsonify(errno=RET.OK, errmsg='OK')<|docstring|>评论点赞 :return:<|endoftext|>
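A hypothetical client call against the `/comment_like` endpoint above. The blueprint URL prefix, the port, and the exact `errno` value are assumptions, and the request must carry a logged-in session cookie so that `user_login_data` can resolve `g.user`:

# Hedged usage sketch with the requests library (endpoint URL prefix assumed).
import requests

session = requests.Session()
# ... authenticate first so the session cookie identifies the user ...
resp = session.post(
    "http://127.0.0.1:5000/news/comment_like",   # blueprint prefix assumed
    json={"comment_id": 1, "action": "add"},     # use "remove" to withdraw the like
)
print(resp.status_code, resp.json())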
486757dd18a888f67a1175062d5c218963da0023071b1081bd5ca05553c5d937
@news_blu.route('/news_comment', methods=['POST']) @user_login_data def comment_news(): '\n 评论新闻或者回复某条新闻下指定的评论\n :return:\n ' user = g.user if (not user): return jsonify(errno=RET.SESSIONERR, errmsg='用户未登录') news_id = request.json.get('news_id') comment_content = request.json.get('comment') parent_id = request.json.get('parent_id') if (not all([news_id, comment_content])): return jsonify(errno=RET.PARAMERR, errmsg='参数错误') try: news_id = int(news_id) if parent_id: parent_id = int(parent_id) except Exception as e: current_app.logger.error(e) return jsonify(errno=RET.PARAMERR, errmsg='参数错误') try: news = News.query.get(news_id) except Exception as e: current_app.logger.error(e) return jsonify(errno=RET.DBERR, errmsg='数据查询错误') if (not news): return jsonify(errno=RET.NODATA, errmsg='未查询到新闻数据') comment = Comment() comment.user_id = user.id comment.news_id = news_id comment.content = comment_content if parent_id: comment.parent_id = parent_id try: db.session.add(comment) db.session.commit() except Exception as e: current_app.logger.error(e) db.session.rollback() return jsonify(errno=RET.OK, errmsg='OK', data=comment.to_dict())
Comment on a news item, or reply to a specified comment under that news item :return:
info/modules/news/views.py
comment_news
summerliu1024/information12
1
python
@news_blu.route('/news_comment', methods=['POST']) @user_login_data def comment_news(): '\n 评论新闻或者回复某条新闻下指定的评论\n :return:\n ' user = g.user if (not user): return jsonify(errno=RET.SESSIONERR, errmsg='用户未登录') news_id = request.json.get('news_id') comment_content = request.json.get('comment') parent_id = request.json.get('parent_id') if (not all([news_id, comment_content])): return jsonify(errno=RET.PARAMERR, errmsg='参数错误') try: news_id = int(news_id) if parent_id: parent_id = int(parent_id) except Exception as e: current_app.logger.error(e) return jsonify(errno=RET.PARAMERR, errmsg='参数错误') try: news = News.query.get(news_id) except Exception as e: current_app.logger.error(e) return jsonify(errno=RET.DBERR, errmsg='数据查询错误') if (not news): return jsonify(errno=RET.NODATA, errmsg='未查询到新闻数据') comment = Comment() comment.user_id = user.id comment.news_id = news_id comment.content = comment_content if parent_id: comment.parent_id = parent_id try: db.session.add(comment) db.session.commit() except Exception as e: current_app.logger.error(e) db.session.rollback() return jsonify(errno=RET.OK, errmsg='OK', data=comment.to_dict())
@news_blu.route('/news_comment', methods=['POST']) @user_login_data def comment_news(): '\n 评论新闻或者回复某条新闻下指定的评论\n :return:\n ' user = g.user if (not user): return jsonify(errno=RET.SESSIONERR, errmsg='用户未登录') news_id = request.json.get('news_id') comment_content = request.json.get('comment') parent_id = request.json.get('parent_id') if (not all([news_id, comment_content])): return jsonify(errno=RET.PARAMERR, errmsg='参数错误') try: news_id = int(news_id) if parent_id: parent_id = int(parent_id) except Exception as e: current_app.logger.error(e) return jsonify(errno=RET.PARAMERR, errmsg='参数错误') try: news = News.query.get(news_id) except Exception as e: current_app.logger.error(e) return jsonify(errno=RET.DBERR, errmsg='数据查询错误') if (not news): return jsonify(errno=RET.NODATA, errmsg='未查询到新闻数据') comment = Comment() comment.user_id = user.id comment.news_id = news_id comment.content = comment_content if parent_id: comment.parent_id = parent_id try: db.session.add(comment) db.session.commit() except Exception as e: current_app.logger.error(e) db.session.rollback() return jsonify(errno=RET.OK, errmsg='OK', data=comment.to_dict())<|docstring|>评论新闻或者回复某条新闻下指定的评论 :return:<|endoftext|>
95742dc23ad900942e80a8d2945e5213ef3a22a0b39bb240cbd7d1d4c39b3e99
@news_blu.route('/<int:news_id>') @user_login_data def news_detail(news_id): '\n 新闻详情\n :param news_id:\n :return:\n ' user = g.user news_list = [] try: news_list = News.query.order_by(News.clicks.desc()).limit(constants.CLICK_RANK_MAX_NEWS) except Exception as e: current_app.logger.error(e) news_dict_li = [] for news in news_list: news_dict_li.append(news.to_basic_dict()) news = None try: news = News.query.get(news_id) except Exception as e: current_app.logger.error(e) if (not news): abort(404) is_collected = False if user: if (news in user.collection_news): is_collected = True comments = [] try: comments = Comment.query.filter((Comment.news_id == news_id)).order_by(Comment.create_time.desc()).all() except Exception as e: current_app.logger.error(e) comment_like_ids = [] if g.user: try: comment_ids = [comment.id for comment in comments] comment_likes = CommentLike.query.filter(CommentLike.comment_id.in_(comment_ids), (CommentLike.user_id == g.user.id)).all() comment_like_ids = [comment_like.comment_id for comment_like in comment_likes] except Exception as e: current_app.logger.error(e) comment_dict_li = [] for comment in comments: comment_dict = comment.to_dict() comment_dict['is_like'] = False if (comment.id in comment_like_ids): comment_dict['is_like'] = True comment_dict_li.append(comment_dict) data = {'user': (user.to_dict() if user else None), 'news_dict_li': news_dict_li, 'news': news.to_dict(), 'is_collected': is_collected, 'comments': comment_dict_li} return render_template('news/detail.html', data=data)
News detail page :param news_id: :return:
info/modules/news/views.py
news_detail
summerliu1024/information12
1
python
@news_blu.route('/<int:news_id>') @user_login_data def news_detail(news_id): '\n 新闻详情\n :param news_id:\n :return:\n ' user = g.user news_list = [] try: news_list = News.query.order_by(News.clicks.desc()).limit(constants.CLICK_RANK_MAX_NEWS) except Exception as e: current_app.logger.error(e) news_dict_li = [] for news in news_list: news_dict_li.append(news.to_basic_dict()) news = None try: news = News.query.get(news_id) except Exception as e: current_app.logger.error(e) if (not news): abort(404) is_collected = False if user: if (news in user.collection_news): is_collected = True comments = [] try: comments = Comment.query.filter((Comment.news_id == news_id)).order_by(Comment.create_time.desc()).all() except Exception as e: current_app.logger.error(e) comment_like_ids = [] if g.user: try: comment_ids = [comment.id for comment in comments] comment_likes = CommentLike.query.filter(CommentLike.comment_id.in_(comment_ids), (CommentLike.user_id == g.user.id)).all() comment_like_ids = [comment_like.comment_id for comment_like in comment_likes] except Exception as e: current_app.logger.error(e) comment_dict_li = [] for comment in comments: comment_dict = comment.to_dict() comment_dict['is_like'] = False if (comment.id in comment_like_ids): comment_dict['is_like'] = True comment_dict_li.append(comment_dict) data = {'user': (user.to_dict() if user else None), 'news_dict_li': news_dict_li, 'news': news.to_dict(), 'is_collected': is_collected, 'comments': comment_dict_li} return render_template('news/detail.html', data=data)
@news_blu.route('/<int:news_id>') @user_login_data def news_detail(news_id): '\n 新闻详情\n :param news_id:\n :return:\n ' user = g.user news_list = [] try: news_list = News.query.order_by(News.clicks.desc()).limit(constants.CLICK_RANK_MAX_NEWS) except Exception as e: current_app.logger.error(e) news_dict_li = [] for news in news_list: news_dict_li.append(news.to_basic_dict()) news = None try: news = News.query.get(news_id) except Exception as e: current_app.logger.error(e) if (not news): abort(404) is_collected = False if user: if (news in user.collection_news): is_collected = True comments = [] try: comments = Comment.query.filter((Comment.news_id == news_id)).order_by(Comment.create_time.desc()).all() except Exception as e: current_app.logger.error(e) comment_like_ids = [] if g.user: try: comment_ids = [comment.id for comment in comments] comment_likes = CommentLike.query.filter(CommentLike.comment_id.in_(comment_ids), (CommentLike.user_id == g.user.id)).all() comment_like_ids = [comment_like.comment_id for comment_like in comment_likes] except Exception as e: current_app.logger.error(e) comment_dict_li = [] for comment in comments: comment_dict = comment.to_dict() comment_dict['is_like'] = False if (comment.id in comment_like_ids): comment_dict['is_like'] = True comment_dict_li.append(comment_dict) data = {'user': (user.to_dict() if user else None), 'news_dict_li': news_dict_li, 'news': news.to_dict(), 'is_collected': is_collected, 'comments': comment_dict_li} return render_template('news/detail.html', data=data)<|docstring|>新闻详情 :param news_id: :return:<|endoftext|>
751c6077b1fdbdacb8b730e9fb636de45607321be0e4a9751dcfc8f32b8d9bb5
@news_blu.route('/news_collect', methods=['POST']) @user_login_data def collect_news(): '\n 收藏新闻\n 1. 接受参数\n 2. 判断参数\n 3. 查询新闻,并判断新闻是否存在\n :return:\n ' user = g.user if (not user): return jsonify(errno=RET.SESSIONERR, errmsg='用户未登录') news_id = request.json.get('news_id') action = request.json.get('action') if (not all([news_id, action])): return jsonify(errno=RET.PARAMERR, errmsg='参数错误') if (action not in ['collect', 'cancel_collect']): return jsonify(errno=RET.PARAMERR, errmsg='参数错误') try: news_id = int(news_id) except Exception as e: current_app.logger.error(e) return jsonify(errno=RET.PARAMERR, errmsg='参数错误') try: news = News.query.get(news_id) except Exception as e: current_app.logger.error(e) return jsonify(errno=RET.DBERR, errmsg='数据查询错误') if (not news): return jsonify(errno=RET.NODATA, errmsg='未查询到新闻数据') if (action == 'cancel_collect'): if (news in user.collection_news): user.collection_news.remove(news) elif (news not in user.collection_news): user.collection_news.append(news) return jsonify(errno=RET.OK, errmsg='操作成功')
Collect (favorite) a news item 1. Accept the parameters 2. Validate the parameters 3. Query the news item and check that it exists :return:
info/modules/news/views.py
collect_news
summerliu1024/information12
1
python
@news_blu.route('/news_collect', methods=['POST']) @user_login_data def collect_news(): '\n 收藏新闻\n 1. 接受参数\n 2. 判断参数\n 3. 查询新闻,并判断新闻是否存在\n :return:\n ' user = g.user if (not user): return jsonify(errno=RET.SESSIONERR, errmsg='用户未登录') news_id = request.json.get('news_id') action = request.json.get('action') if (not all([news_id, action])): return jsonify(errno=RET.PARAMERR, errmsg='参数错误') if (action not in ['collect', 'cancel_collect']): return jsonify(errno=RET.PARAMERR, errmsg='参数错误') try: news_id = int(news_id) except Exception as e: current_app.logger.error(e) return jsonify(errno=RET.PARAMERR, errmsg='参数错误') try: news = News.query.get(news_id) except Exception as e: current_app.logger.error(e) return jsonify(errno=RET.DBERR, errmsg='数据查询错误') if (not news): return jsonify(errno=RET.NODATA, errmsg='未查询到新闻数据') if (action == 'cancel_collect'): if (news in user.collection_news): user.collection_news.remove(news) elif (news not in user.collection_news): user.collection_news.append(news) return jsonify(errno=RET.OK, errmsg='操作成功')
@news_blu.route('/news_collect', methods=['POST']) @user_login_data def collect_news(): '\n 收藏新闻\n 1. 接受参数\n 2. 判断参数\n 3. 查询新闻,并判断新闻是否存在\n :return:\n ' user = g.user if (not user): return jsonify(errno=RET.SESSIONERR, errmsg='用户未登录') news_id = request.json.get('news_id') action = request.json.get('action') if (not all([news_id, action])): return jsonify(errno=RET.PARAMERR, errmsg='参数错误') if (action not in ['collect', 'cancel_collect']): return jsonify(errno=RET.PARAMERR, errmsg='参数错误') try: news_id = int(news_id) except Exception as e: current_app.logger.error(e) return jsonify(errno=RET.PARAMERR, errmsg='参数错误') try: news = News.query.get(news_id) except Exception as e: current_app.logger.error(e) return jsonify(errno=RET.DBERR, errmsg='数据查询错误') if (not news): return jsonify(errno=RET.NODATA, errmsg='未查询到新闻数据') if (action == 'cancel_collect'): if (news in user.collection_news): user.collection_news.remove(news) elif (news not in user.collection_news): user.collection_news.append(news) return jsonify(errno=RET.OK, errmsg='操作成功')<|docstring|>收藏新闻 1. 接受参数 2. 判断参数 3. 查询新闻,并判断新闻是否存在 :return:<|endoftext|>
efe7df1db9452bfd21acf6c071e156cebddab5054797474e8c4569290e48e75a
async def create_offer(self) -> 'webrtc.RTCSessionDescription': 'Initiates the creation of an SDP offer for the purpose of starting a new WebRTC connection to a remote peer.\n The SDP offer includes information about any MediaStreamTrack objects already attached to the WebRTC session,\n codec, and options supported by the machine, as well as any candidates already gathered by the ICE agent, for\n the purpose of being sent over the signaling channel to a potential peer to request a connection or to update\n the configuration of an existing connection.\n ' from webrtc import RTCSessionDescription return RTCSessionDescription._wrap((await to_async(self._native_obj.createOffer)))
Initiates the creation of an SDP offer for the purpose of starting a new WebRTC connection to a remote peer. The SDP offer includes information about any MediaStreamTrack objects already attached to the WebRTC session, codec, and options supported by the machine, as well as any candidates already gathered by the ICE agent, for the purpose of being sent over the signaling channel to a potential peer to request a connection or to update the configuration of an existing connection.
python-webrtc/python/webrtc/interfaces/rtc_peer_connection.py
create_offer
MarshalX/python-webrtc
81
python
async def create_offer(self) -> 'webrtc.RTCSessionDescription': 'Initiates the creation of an SDP offer for the purpose of starting a new WebRTC connection to a remote peer.\n The SDP offer includes information about any MediaStreamTrack objects already attached to the WebRTC session,\n codec, and options supported by the machine, as well as any candidates already gathered by the ICE agent, for\n the purpose of being sent over the signaling channel to a potential peer to request a connection or to update\n the configuration of an existing connection.\n ' from webrtc import RTCSessionDescription return RTCSessionDescription._wrap((await to_async(self._native_obj.createOffer)))
async def create_offer(self) -> 'webrtc.RTCSessionDescription': 'Initiates the creation of an SDP offer for the purpose of starting a new WebRTC connection to a remote peer.\n The SDP offer includes information about any MediaStreamTrack objects already attached to the WebRTC session,\n codec, and options supported by the machine, as well as any candidates already gathered by the ICE agent, for\n the purpose of being sent over the signaling channel to a potential peer to request a connection or to update\n the configuration of an existing connection.\n ' from webrtc import RTCSessionDescription return RTCSessionDescription._wrap((await to_async(self._native_obj.createOffer)))<|docstring|>Initiates the creation of an SDP offer for the purpose of starting a new WebRTC connection to a remote peer. The SDP offer includes information about any MediaStreamTrack objects already attached to the WebRTC session, codec, and options supported by the machine, as well as any candidates already gathered by the ICE agent, for the purpose of being sent over the signaling channel to a potential peer to request a connection or to update the configuration of an existing connection.<|endoftext|>
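A minimal offerer-side sketch for `create_offer`, assuming the package exposes an `RTCPeerConnection` that can be constructed directly with no arguments (an assumption; check the library's own examples for the exact factory):

# Hedged usage sketch: create an offer and install it as the local description.
import asyncio
import webrtc

async def offerer():
    pc = webrtc.RTCPeerConnection()      # constructor usage assumed
    offer = await pc.create_offer()
    await pc.set_local_description(offer)
    # The offer's SDP text would now be sent to the remote peer over an
    # application-defined signaling channel.
    return offer

asyncio.run(offerer())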
0abe803569843d718b752546734f5da2d1c325c42ef23df9574e87d9e39f2af1
async def create_answer(self) -> 'webrtc.RTCSessionDescription': 'Initiates the creation an SDP answer to an offer received from a remote peer during the offer/answer\n negotiation of a WebRTC connection. The answer contains information about any media already attached to the\n session, codecs and options supported by the machine, and any ICE candidates already gathered.\n ' from webrtc import RTCSessionDescription return RTCSessionDescription._wrap((await to_async(self._native_obj.createAnswer)))
Initiates the creation of an SDP answer to an offer received from a remote peer during the offer/answer negotiation of a WebRTC connection. The answer contains information about any media already attached to the session, codecs and options supported by the machine, and any ICE candidates already gathered.
python-webrtc/python/webrtc/interfaces/rtc_peer_connection.py
create_answer
MarshalX/python-webrtc
81
python
async def create_answer(self) -> 'webrtc.RTCSessionDescription': 'Initiates the creation an SDP answer to an offer received from a remote peer during the offer/answer\n negotiation of a WebRTC connection. The answer contains information about any media already attached to the\n session, codecs and options supported by the machine, and any ICE candidates already gathered.\n ' from webrtc import RTCSessionDescription return RTCSessionDescription._wrap((await to_async(self._native_obj.createAnswer)))
async def create_answer(self) -> 'webrtc.RTCSessionDescription': 'Initiates the creation an SDP answer to an offer received from a remote peer during the offer/answer\n negotiation of a WebRTC connection. The answer contains information about any media already attached to the\n session, codecs and options supported by the machine, and any ICE candidates already gathered.\n ' from webrtc import RTCSessionDescription return RTCSessionDescription._wrap((await to_async(self._native_obj.createAnswer)))<|docstring|>Initiates the creation an SDP answer to an offer received from a remote peer during the offer/answer negotiation of a WebRTC connection. The answer contains information about any media already attached to the session, codecs and options supported by the machine, and any ICE candidates already gathered.<|endoftext|>
da3f35b8fd8e8535b814d444dd21a5027a8c2305c5eaa9b8f4653a5c07a9d232
async def set_local_description(self, sdp: 'webrtc.RTCSessionDescription'): 'Changes the local description associated with the connection. This description specifies the properties\n of the local end of the connection, including the media format. It returns a :obj:`Coroutine`. A result will be\n returned once the description has been changed, asynchronously.\n\n Args:\n sdp (:obj:`webrtc.RTCSessionDescription`): A :obj:`webrtc.RTCSessionDescription` object that describes\n one end of a connection or potential connection.\n\n Returns:\n :obj:`None`:\n ' return (await to_async(self._native_obj.setLocalDescription)(sdp._native_obj))
Changes the local description associated with the connection. This description specifies the properties of the local end of the connection, including the media format. It returns a :obj:`Coroutine`. A result will be returned once the description has been changed, asynchronously. Args: sdp (:obj:`webrtc.RTCSessionDescription`): A :obj:`webrtc.RTCSessionDescription` object that describes one end of a connection or potential connection. Returns: :obj:`None`:
python-webrtc/python/webrtc/interfaces/rtc_peer_connection.py
set_local_description
MarshalX/python-webrtc
81
python
async def set_local_description(self, sdp: 'webrtc.RTCSessionDescription'): 'Changes the local description associated with the connection. This description specifies the properties\n of the local end of the connection, including the media format. It returns a :obj:`Coroutine`. A result will be\n returned once the description has been changed, asynchronously.\n\n Args:\n sdp (:obj:`webrtc.RTCSessionDescription`): A :obj:`webrtc.RTCSessionDescription` object that describes\n one end of a connection or potential connection.\n\n Returns:\n :obj:`None`:\n ' return (await to_async(self._native_obj.setLocalDescription)(sdp._native_obj))
async def set_local_description(self, sdp: 'webrtc.RTCSessionDescription'): 'Changes the local description associated with the connection. This description specifies the properties\n of the local end of the connection, including the media format. It returns a :obj:`Coroutine`. A result will be\n returned once the description has been changed, asynchronously.\n\n Args:\n sdp (:obj:`webrtc.RTCSessionDescription`): A :obj:`webrtc.RTCSessionDescription` object that describes\n one end of a connection or potential connection.\n\n Returns:\n :obj:`None`:\n ' return (await to_async(self._native_obj.setLocalDescription)(sdp._native_obj))<|docstring|>Changes the local description associated with the connection. This description specifies the properties of the local end of the connection, including the media format. It returns a :obj:`Coroutine`. A result will be returned once the description has been changed, asynchronously. Args: sdp (:obj:`webrtc.RTCSessionDescription`): A :obj:`webrtc.RTCSessionDescription` object that describes one end of a connection or potential connection. Returns: :obj:`None`:<|endoftext|>
82f19031de1c8cad913d9e1ecb344a791124c1f54f18f0c054dcfc29a60450cf
async def set_remote_description(self, sdp: 'webrtc.RTCSessionDescription'): "Sets the specified session description as the remote peer's current offer or answer. The description\n specifies the properties of the remote end of the connection, including the media format.\n It returns a :obj:`Coroutine`. A result will be returned once the description has been changed, asynchronously.\n\n Args:\n sdp (:obj:`webrtc.RTCSessionDescription`): A :obj:`webrtc.RTCSessionDescription` object that describes\n one end of a connection or potential connection.\n\n Returns:\n :obj:`None`:\n " return (await to_async(self._native_obj.setRemoteDescription)(sdp._native_obj))
Sets the specified session description as the remote peer's current offer or answer. The description specifies the properties of the remote end of the connection, including the media format. It returns a :obj:`Coroutine`. A result will be returned once the description has been changed, asynchronously. Args: sdp (:obj:`webrtc.RTCSessionDescription`): A :obj:`webrtc.RTCSessionDescription` object that describes one end of a connection or potential connection. Returns: :obj:`None`:
python-webrtc/python/webrtc/interfaces/rtc_peer_connection.py
set_remote_description
MarshalX/python-webrtc
81
python
async def set_remote_description(self, sdp: 'webrtc.RTCSessionDescription'): "Sets the specified session description as the remote peer's current offer or answer. The description\n specifies the properties of the remote end of the connection, including the media format.\n It returns a :obj:`Coroutine`. A result will be returned once the description has been changed, asynchronously.\n\n Args:\n sdp (:obj:`webrtc.RTCSessionDescription`): A :obj:`webrtc.RTCSessionDescription` object that describes\n one end of a connection or potential connection.\n\n Returns:\n :obj:`None`:\n " return (await to_async(self._native_obj.setRemoteDescription)(sdp._native_obj))
async def set_remote_description(self, sdp: 'webrtc.RTCSessionDescription'): "Sets the specified session description as the remote peer's current offer or answer. The description\n specifies the properties of the remote end of the connection, including the media format.\n It returns a :obj:`Coroutine`. A result will be returned once the description has been changed, asynchronously.\n\n Args:\n sdp (:obj:`webrtc.RTCSessionDescription`): A :obj:`webrtc.RTCSessionDescription` object that describes\n one end of a connection or potential connection.\n\n Returns:\n :obj:`None`:\n " return (await to_async(self._native_obj.setRemoteDescription)(sdp._native_obj))<|docstring|>Sets the specified session description as the remote peer's current offer or answer. The description specifies the properties of the remote end of the connection, including the media format. It returns a :obj:`Coroutine`. A result will be returned once the description has been changed, asynchronously. Args: sdp (:obj:`webrtc.RTCSessionDescription`): A :obj:`webrtc.RTCSessionDescription` object that describes one end of a connection or potential connection. Returns: :obj:`None`:<|endoftext|>
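Sketch of the answering side of the offer/answer exchange, under the same assumptions as above about constructing the connection; `remote_offer` stands for the `RTCSessionDescription` received over the signaling channel:

# Hedged usage sketch: apply the remote offer, then produce and install an answer.
import webrtc

async def answerer(remote_offer):            # remote_offer: webrtc.RTCSessionDescription
    pc = webrtc.RTCPeerConnection()          # constructor usage assumed
    await pc.set_remote_description(remote_offer)
    answer = await pc.create_answer()
    await pc.set_local_description(answer)
    return answer                            # send back to the offerer over signaling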
fa6cf60f1dc45508e0b2aa61c9af53c2b4f1bad316ab2e0ac0c251d96874880a
def add_track(self, track: 'webrtc.MediaStreamTrack', stream: Optional[Union[('webrtc.MediaStream', List['webrtc.MediaStream'])]]=None) -> 'webrtc.RTCRtpSender': 'Adds a new :obj:`webrtc.MediaStreamTrack` to the set of tracks which will be transmitted to the other peer.\n\n Args:\n track (:obj:`webrtc.MediaStreamTrack`): A :obj:`webrtc.MediaStreamTrack` object representing the media track\n to add to the peer connection.\n stream (:obj:`webrtc.MediaStream` or :obj:`list` of :obj:`webrtc.MediaStream`, optional): One or more\n local :obj:`webrtc.MediaStream` objects to which the track should be added.\n\n Returns:\n :obj:`webrtc.RTCRtpSender`: The :obj:`webrtc.RTCRtpSender` object which will be used to\n transmit the media data.\n ' if (not stream): sender = self._native_obj.addTrack(track._native_obj, None) elif isinstance(stream, list): native_objects = [s._native_obj for s in stream] sender = self._native_obj.addTrack(track._native_obj, native_objects) else: sender = self._native_obj.addTrack(track._native_obj, stream._native_obj) from webrtc import RTCRtpSender return RTCRtpSender._wrap(sender)
Adds a new :obj:`webrtc.MediaStreamTrack` to the set of tracks which will be transmitted to the other peer. Args: track (:obj:`webrtc.MediaStreamTrack`): A :obj:`webrtc.MediaStreamTrack` object representing the media track to add to the peer connection. stream (:obj:`webrtc.MediaStream` or :obj:`list` of :obj:`webrtc.MediaStream`, optional): One or more local :obj:`webrtc.MediaStream` objects to which the track should be added. Returns: :obj:`webrtc.RTCRtpSender`: The :obj:`webrtc.RTCRtpSender` object which will be used to transmit the media data.
python-webrtc/python/webrtc/interfaces/rtc_peer_connection.py
add_track
MarshalX/python-webrtc
81
python
def add_track(self, track: 'webrtc.MediaStreamTrack', stream: Optional[Union[('webrtc.MediaStream', List['webrtc.MediaStream'])]]=None) -> 'webrtc.RTCRtpSender': 'Adds a new :obj:`webrtc.MediaStreamTrack` to the set of tracks which will be transmitted to the other peer.\n\n Args:\n track (:obj:`webrtc.MediaStreamTrack`): A :obj:`webrtc.MediaStreamTrack` object representing the media track\n to add to the peer connection.\n stream (:obj:`webrtc.MediaStream` or :obj:`list` of :obj:`webrtc.MediaStream`, optional): One or more\n local :obj:`webrtc.MediaStream` objects to which the track should be added.\n\n Returns:\n :obj:`webrtc.RTCRtpSender`: The :obj:`webrtc.RTCRtpSender` object which will be used to\n transmit the media data.\n ' if (not stream): sender = self._native_obj.addTrack(track._native_obj, None) elif isinstance(stream, list): native_objects = [s._native_obj for s in stream] sender = self._native_obj.addTrack(track._native_obj, native_objects) else: sender = self._native_obj.addTrack(track._native_obj, stream._native_obj) from webrtc import RTCRtpSender return RTCRtpSender._wrap(sender)
def add_track(self, track: 'webrtc.MediaStreamTrack', stream: Optional[Union[('webrtc.MediaStream', List['webrtc.MediaStream'])]]=None) -> 'webrtc.RTCRtpSender': 'Adds a new :obj:`webrtc.MediaStreamTrack` to the set of tracks which will be transmitted to the other peer.\n\n Args:\n track (:obj:`webrtc.MediaStreamTrack`): A :obj:`webrtc.MediaStreamTrack` object representing the media track\n to add to the peer connection.\n stream (:obj:`webrtc.MediaStream` or :obj:`list` of :obj:`webrtc.MediaStream`, optional): One or more\n local :obj:`webrtc.MediaStream` objects to which the track should be added.\n\n Returns:\n :obj:`webrtc.RTCRtpSender`: The :obj:`webrtc.RTCRtpSender` object which will be used to\n transmit the media data.\n ' if (not stream): sender = self._native_obj.addTrack(track._native_obj, None) elif isinstance(stream, list): native_objects = [s._native_obj for s in stream] sender = self._native_obj.addTrack(track._native_obj, native_objects) else: sender = self._native_obj.addTrack(track._native_obj, stream._native_obj) from webrtc import RTCRtpSender return RTCRtpSender._wrap(sender)<|docstring|>Adds a new :obj:`webrtc.MediaStreamTrack` to the set of tracks which will be transmitted to the other peer. Args: track (:obj:`webrtc.MediaStreamTrack`): A :obj:`webrtc.MediaStreamTrack` object representing the media track to add to the peer connection. stream (:obj:`webrtc.MediaStream` or :obj:`list` of :obj:`webrtc.MediaStream`, optional): One or more local :obj:`webrtc.MediaStream` objects to which the track should be added. Returns: :obj:`webrtc.RTCRtpSender`: The :obj:`webrtc.RTCRtpSender` object which will be used to transmit the media data.<|endoftext|>
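Sketch of attaching local media before creating an offer. How the `MediaStream` and its tracks are obtained is deliberately left abstract, and the `get_tracks()` call mirrors the web API naming, which is an assumption about this wrapper:

# Hedged sketch: add every track of a local stream to the peer connection.
import webrtc

def attach_stream(pc: "webrtc.RTCPeerConnection", stream: "webrtc.MediaStream"):
    # pc and stream are assumed to be created elsewhere
    senders = []
    for track in stream.get_tracks():        # method name assumed
        senders.append(pc.add_track(track, stream))
    return senders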
5206a8e7ea9eac468f50b5383185d21f9c5b3b8f9f87f6fdcc7c979a05678bef
def add_transceiver(self, track_or_kind: Union[('webrtc.MediaStreamTrack', 'webrtc.MediaType')], init: Optional['webrtc.RtpTransceiverInit']=None) -> 'webrtc.RTCRtpTransceiver': "Creates a new :obj:`webrtc.RTCRtpTransceiver` and adds it to the set of transceivers associated with the\n connection. Each transceiver represents a bidirectional stream, with both an :obj:`webrtc.RTCRtpSender` and\n an :obj:`webrtc.RTCRtpReceiver` associated with it.\n\n Args:\n track_or_kind (:obj:`webrtc.MediaStreamTrack`): A :obj:`webrtc.MediaStreamTrack` to associate with the\n transceiver, or a member of :obj:`webrtc.MediaType` which is used as the kind of the receiver's track,\n and by extension of the :obj:`webrtc.RTCRtpReceiver` itself.\n init (:obj:`webrtc.RtpTransceiverInit`, optional): An object for specifying any options when creating\n the new transceiver.\n\n Returns:\n :obj:`webrtc.RTCRtpSender`: The :obj:`webrtc.RTCRtpSender` object which will be used to\n transmit the media data.\n " from webrtc import MediaStreamTrack, RTCRtpTransceiver if init: init = init._native_obj if isinstance(track_or_kind, MediaStreamTrack): transceiver = self._native_obj.addTransceiver(track_or_kind._native_obj, init) else: transceiver = self._native_obj.addTransceiver(track_or_kind, init) return RTCRtpTransceiver._wrap(transceiver)
Creates a new :obj:`webrtc.RTCRtpTransceiver` and adds it to the set of transceivers associated with the connection. Each transceiver represents a bidirectional stream, with both an :obj:`webrtc.RTCRtpSender` and an :obj:`webrtc.RTCRtpReceiver` associated with it. Args: track_or_kind (:obj:`webrtc.MediaStreamTrack`): A :obj:`webrtc.MediaStreamTrack` to associate with the transceiver, or a member of :obj:`webrtc.MediaType` which is used as the kind of the receiver's track, and by extension of the :obj:`webrtc.RTCRtpReceiver` itself. init (:obj:`webrtc.RtpTransceiverInit`, optional): An object for specifying any options when creating the new transceiver. Returns: :obj:`webrtc.RTCRtpTransceiver`: The newly created :obj:`webrtc.RTCRtpTransceiver` object that was added to the connection.
python-webrtc/python/webrtc/interfaces/rtc_peer_connection.py
add_transceiver
MarshalX/python-webrtc
81
python
def add_transceiver(self, track_or_kind: Union[('webrtc.MediaStreamTrack', 'webrtc.MediaType')], init: Optional['webrtc.RtpTransceiverInit']=None) -> 'webrtc.RTCRtpTransceiver': "Creates a new :obj:`webrtc.RTCRtpTransceiver` and adds it to the set of transceivers associated with the\n connection. Each transceiver represents a bidirectional stream, with both an :obj:`webrtc.RTCRtpSender` and\n an :obj:`webrtc.RTCRtpReceiver` associated with it.\n\n Args:\n track_or_kind (:obj:`webrtc.MediaStreamTrack`): A :obj:`webrtc.MediaStreamTrack` to associate with the\n transceiver, or a member of :obj:`webrtc.MediaType` which is used as the kind of the receiver's track,\n and by extension of the :obj:`webrtc.RTCRtpReceiver` itself.\n init (:obj:`webrtc.RtpTransceiverInit`, optional): An object for specifying any options when creating\n the new transceiver.\n\n Returns:\n :obj:`webrtc.RTCRtpSender`: The :obj:`webrtc.RTCRtpSender` object which will be used to\n transmit the media data.\n " from webrtc import MediaStreamTrack, RTCRtpTransceiver if init: init = init._native_obj if isinstance(track_or_kind, MediaStreamTrack): transceiver = self._native_obj.addTransceiver(track_or_kind._native_obj, init) else: transceiver = self._native_obj.addTransceiver(track_or_kind, init) return RTCRtpTransceiver._wrap(transceiver)
def add_transceiver(self, track_or_kind: Union[('webrtc.MediaStreamTrack', 'webrtc.MediaType')], init: Optional['webrtc.RtpTransceiverInit']=None) -> 'webrtc.RTCRtpTransceiver': "Creates a new :obj:`webrtc.RTCRtpTransceiver` and adds it to the set of transceivers associated with the\n connection. Each transceiver represents a bidirectional stream, with both an :obj:`webrtc.RTCRtpSender` and\n an :obj:`webrtc.RTCRtpReceiver` associated with it.\n\n Args:\n track_or_kind (:obj:`webrtc.MediaStreamTrack`): A :obj:`webrtc.MediaStreamTrack` to associate with the\n transceiver, or a member of :obj:`webrtc.MediaType` which is used as the kind of the receiver's track,\n and by extension of the :obj:`webrtc.RTCRtpReceiver` itself.\n init (:obj:`webrtc.RtpTransceiverInit`, optional): An object for specifying any options when creating\n the new transceiver.\n\n Returns:\n :obj:`webrtc.RTCRtpSender`: The :obj:`webrtc.RTCRtpSender` object which will be used to\n transmit the media data.\n " from webrtc import MediaStreamTrack, RTCRtpTransceiver if init: init = init._native_obj if isinstance(track_or_kind, MediaStreamTrack): transceiver = self._native_obj.addTransceiver(track_or_kind._native_obj, init) else: transceiver = self._native_obj.addTransceiver(track_or_kind, init) return RTCRtpTransceiver._wrap(transceiver)<|docstring|>Creates a new :obj:`webrtc.RTCRtpTransceiver` and adds it to the set of transceivers associated with the connection. Each transceiver represents a bidirectional stream, with both an :obj:`webrtc.RTCRtpSender` and an :obj:`webrtc.RTCRtpReceiver` associated with it. Args: track_or_kind (:obj:`webrtc.MediaStreamTrack`): A :obj:`webrtc.MediaStreamTrack` to associate with the transceiver, or a member of :obj:`webrtc.MediaType` which is used as the kind of the receiver's track, and by extension of the :obj:`webrtc.RTCRtpReceiver` itself. init (:obj:`webrtc.RtpTransceiverInit`, optional): An object for specifying any options when creating the new transceiver. Returns: :obj:`webrtc.RTCRtpSender`: The :obj:`webrtc.RTCRtpSender` object which will be used to transmit the media data.<|endoftext|>
f554251fcfd78c3ca498d9cd043b5649076b54b2c9779d10a817877988bce2a1
def get_transceivers(self) -> List['webrtc.RTCRtpTransceiver']: 'Returns a :obj:`list` of the :obj:`webrtc.RTCRtpTransceiver` objects being used to send and\n receive data on the connection.\n\n Returns:\n :obj:`list` of :obj:`webrtc.RTCRtpTransceiver`: An array of the :obj:`webrtc.RTCRtpTransceiver` objects\n representing the transceivers handling sending and receiving all media\n on the :obj:`webrtc.RTCPeerConnection`. The list is in the order in which the transceivers were\n added to the connection.\n ' from webrtc import RTCRtpTransceiver return RTCRtpTransceiver._wrap_many(self._native_obj.getTransceivers())
Returns a :obj:`list` of the :obj:`webrtc.RTCRtpTransceiver` objects being used to send and receive data on the connection. Returns: :obj:`list` of :obj:`webrtc.RTCRtpTransceiver`: An array of the :obj:`webrtc.RTCRtpTransceiver` objects representing the transceivers handling sending and receiving all media on the :obj:`webrtc.RTCPeerConnection`. The list is in the order in which the transceivers were added to the connection.
python-webrtc/python/webrtc/interfaces/rtc_peer_connection.py
get_transceivers
MarshalX/python-webrtc
81
python
def get_transceivers(self) -> List['webrtc.RTCRtpTransceiver']: 'Returns a :obj:`list` of the :obj:`webrtc.RTCRtpTransceiver` objects being used to send and\n receive data on the connection.\n\n Returns:\n :obj:`list` of :obj:`webrtc.RTCRtpTransceiver`: An array of the :obj:`webrtc.RTCRtpTransceiver` objects\n representing the transceivers handling sending and receiving all media\n on the :obj:`webrtc.RTCPeerConnection`. The list is in the order in which the transceivers were\n added to the connection.\n ' from webrtc import RTCRtpTransceiver return RTCRtpTransceiver._wrap_many(self._native_obj.getTransceivers())
def get_transceivers(self) -> List['webrtc.RTCRtpTransceiver']: 'Returns a :obj:`list` of the :obj:`webrtc.RTCRtpTransceiver` objects being used to send and\n receive data on the connection.\n\n Returns:\n :obj:`list` of :obj:`webrtc.RTCRtpTransceiver`: An array of the :obj:`webrtc.RTCRtpTransceiver` objects\n representing the transceivers handling sending and receiving all media\n on the :obj:`webrtc.RTCPeerConnection`. The list is in the order in which the transceivers were\n added to the connection.\n ' from webrtc import RTCRtpTransceiver return RTCRtpTransceiver._wrap_many(self._native_obj.getTransceivers())<|docstring|>Returns a :obj:`list` of the :obj:`webrtc.RTCRtpTransceiver` objects being used to send and receive data on the connection. Returns: :obj:`list` of :obj:`webrtc.RTCRtpTransceiver`: An array of the :obj:`webrtc.RTCRtpTransceiver` objects representing the transceivers handling sending and receiving all media on the :obj:`webrtc.RTCPeerConnection`. The list is in the order in which the transceivers were added to the connection.<|endoftext|>
ec258ce2e487ca5fac18d5d706a9be0dd95f849ca57eaeb61d82609bea5cf872
def get_senders(self) -> List['webrtc.RTCRtpSender']: "Returns an array of :obj:`webrtc.RTCRtpSender` objects, each of which represents the RTP sender responsible\n for transmitting one track's data. A sender object provides methods and properties for examining\n and controlling the encoding and transmission of the track's data.\n\n Note:\n The order of the returned :obj:`webrtc.RTCRtpSenders` is not defined by the specification, and may change\n from one call to :attr:`get_senders` to the next.\n\n Returns:\n :obj:`list` of :obj:`webrtc.RTCRtpSender`: An array of :obj:`webrtc.RTCRtpSender` objects, one for each\n track on the connection. The array is empty if there are no RTP senders on the connection.\n " from webrtc import RTCRtpSender return RTCRtpSender._wrap_many(self._native_obj.getSenders())
Returns an array of :obj:`webrtc.RTCRtpSender` objects, each of which represents the RTP sender responsible for transmitting one track's data. A sender object provides methods and properties for examining and controlling the encoding and transmission of the track's data. Note: The order of the returned :obj:`webrtc.RTCRtpSenders` is not defined by the specification, and may change from one call to :attr:`get_senders` to the next. Returns: :obj:`list` of :obj:`webrtc.RTCRtpSender`: An array of :obj:`webrtc.RTCRtpSender` objects, one for each track on the connection. The array is empty if there are no RTP senders on the connection.
python-webrtc/python/webrtc/interfaces/rtc_peer_connection.py
get_senders
MarshalX/python-webrtc
81
python
def get_senders(self) -> List['webrtc.RTCRtpSender']: "Returns an array of :obj:`webrtc.RTCRtpSender` objects, each of which represents the RTP sender responsible\n for transmitting one track's data. A sender object provides methods and properties for examining\n and controlling the encoding and transmission of the track's data.\n\n Note:\n The order of the returned :obj:`webrtc.RTCRtpSenders` is not defined by the specification, and may change\n from one call to :attr:`get_senders` to the next.\n\n Returns:\n :obj:`list` of :obj:`webrtc.RTCRtpSender`: An array of :obj:`webrtc.RTCRtpSender` objects, one for each\n track on the connection. The array is empty if there are no RTP senders on the connection.\n " from webrtc import RTCRtpSender return RTCRtpSender._wrap_many(self._native_obj.getSenders())
def get_senders(self) -> List['webrtc.RTCRtpSender']: "Returns an array of :obj:`webrtc.RTCRtpSender` objects, each of which represents the RTP sender responsible\n for transmitting one track's data. A sender object provides methods and properties for examining\n and controlling the encoding and transmission of the track's data.\n\n Note:\n The order of the returned :obj:`webrtc.RTCRtpSenders` is not defined by the specification, and may change\n from one call to :attr:`get_senders` to the next.\n\n Returns:\n :obj:`list` of :obj:`webrtc.RTCRtpSender`: An array of :obj:`webrtc.RTCRtpSender` objects, one for each\n track on the connection. The array is empty if there are no RTP senders on the connection.\n " from webrtc import RTCRtpSender return RTCRtpSender._wrap_many(self._native_obj.getSenders())<|docstring|>Returns an array of :obj:`webrtc.RTCRtpSender` objects, each of which represents the RTP sender responsible for transmitting one track's data. A sender object provides methods and properties for examining and controlling the encoding and transmission of the track's data. Note: The order of the returned :obj:`webrtc.RTCRtpSenders` is not defined by the specification, and may change from one call to :attr:`get_senders` to the next. Returns: :obj:`list` of :obj:`webrtc.RTCRtpSender`: An array of :obj:`webrtc.RTCRtpSender` objects, one for each track on the connection. The array is empty if there are no RTP senders on the connection.<|endoftext|>
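A companion sketch for get_senders(), under the same assumption that webrtc.RTCPeerConnection() creates a connection: because the sender order is not defined by the specification, the sketch collects the list in one pass instead of caching positional indices between calls.

import webrtc

pc = webrtc.RTCPeerConnection()  # assumed constructor, for illustration only

# Gather the senders in a single pass; their order may differ on the next call.
senders = pc.get_senders()
print(f'{len(senders)} RTP sender(s) on this connection')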
78512012ec9ce979cd2911ae7b9770e6df30278e3a49d0abf3f2c270dcbe1a02
def get_receivers(self) -> List['webrtc.RTCRtpReceiver']: 'Returns an array of :obj:`webrtc.RTCRtpReceiver` objects, each of which represents one RTP receiver. Each RTP\n receiver manages the reception and decoding of data for a :obj:`webrtc.MediaStreamTrack`\n on an :obj:`webrtc.RTCPeerConnection`.\n\n Note:\n The order of the returned :obj:`webrtc.RTCRtpReceiver` is not defined by the specification, and may change\n from one call to :attr:`get_receivers` to the next.\n\n Returns:\n :obj:`list` of :obj:`webrtc.RTCRtpReceiver`: An array of :obj:`webrtc.RTCRtpReceiver` objects, one for each\n track on the connection. The array is empty if there are no RTP receivers on the connection.\n ' from webrtc import RTCRtpReceiver return RTCRtpReceiver._wrap_many(self._native_obj.getReceivers())
Returns an array of :obj:`webrtc.RTCRtpReceiver` objects, each of which represents one RTP receiver. Each RTP receiver manages the reception and decoding of data for a :obj:`webrtc.MediaStreamTrack` on an :obj:`webrtc.RTCPeerConnection`. Note: The order of the returned :obj:`webrtc.RTCRtpReceiver` is not defined by the specification, and may change from one call to :attr:`get_receivers` to the next. Returns: :obj:`list` of :obj:`webrtc.RTCRtpReceiver`: An array of :obj:`webrtc.RTCRtpReceiver` objects, one for each track on the connection. The array is empty if there are no RTP receivers on the connection.
python-webrtc/python/webrtc/interfaces/rtc_peer_connection.py
get_receivers
MarshalX/python-webrtc
81
python
def get_receivers(self) -> List['webrtc.RTCRtpReceiver']: 'Returns an array of :obj:`webrtc.RTCRtpReceiver` objects, each of which represents one RTP receiver. Each RTP\n receiver manages the reception and decoding of data for a :obj:`webrtc.MediaStreamTrack`\n on an :obj:`webrtc.RTCPeerConnection`.\n\n Note:\n The order of the returned :obj:`webrtc.RTCRtpReceiver` is not defined by the specification, and may change\n from one call to :attr:`get_receivers` to the next.\n\n Returns:\n :obj:`list` of :obj:`webrtc.RTCRtpReceiver`: An array of :obj:`webrtc.RTCRtpReceiver` objects, one for each\n track on the connection. The array is empty if there are no RTP receivers on the connection.\n ' from webrtc import RTCRtpReceiver return RTCRtpReceiver._wrap_many(self._native_obj.getReceivers())
def get_receivers(self) -> List['webrtc.RTCRtpReceiver']: 'Returns an array of :obj:`webrtc.RTCRtpReceiver` objects, each of which represents one RTP receiver. Each RTP\n receiver manages the reception and decoding of data for a :obj:`webrtc.MediaStreamTrack`\n on an :obj:`webrtc.RTCPeerConnection`.\n\n Note:\n The order of the returned :obj:`webrtc.RTCRtpReceiver` is not defined by the specification, and may change\n from one call to :attr:`get_receivers` to the next.\n\n Returns:\n :obj:`list` of :obj:`webrtc.RTCRtpReceiver`: An array of :obj:`webrtc.RTCRtpReceiver` objects, one for each\n track on the connection. The array is empty if there are no RTP receivers on the connection.\n ' from webrtc import RTCRtpReceiver return RTCRtpReceiver._wrap_many(self._native_obj.getReceivers())<|docstring|>Returns an array of :obj:`webrtc.RTCRtpReceiver` objects, each of which represents one RTP receiver. Each RTP receiver manages the reception and decoding of data for a :obj:`webrtc.MediaStreamTrack` on an :obj:`webrtc.RTCPeerConnection`. Note: The order of the returned :obj:`webrtc.RTCRtpReceiver` is not defined by the specification, and may change from one call to :attr:`get_receivers` to the next. Returns: :obj:`list` of :obj:`webrtc.RTCRtpReceiver`: An array of :obj:`webrtc.RTCRtpReceiver` objects, one for each track on the connection. The array is empty if there are no RTP receivers on the connection.<|endoftext|>
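A matching sketch for the receiving side, with the same assumed constructor: get_receivers() can return an empty list, so that case is handled explicitly.

import webrtc

pc = webrtc.RTCPeerConnection()  # assumed constructor, for illustration only

receivers = pc.get_receivers()
if not receivers:
    print('no RTP receivers on this connection yet')
else:
    for receiver in receivers:
        print(receiver)  # each element wraps a native RTCRtpReceiver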
4aed5f7b741e0e4bc37f7c5b79fddc7e66d1f3040fe1d9fe312ac42ee517fe44
def remove_track(self, sender: 'webrtc.RTCRtpSender') -> None: "Tells the local end of the connection to stop sending media from the specified track, without\n actually removing the corresponding :obj:`webrtc.RTCRtpSender` from the list of senders as reported\n by :attr:`get_senders`. If the track is already stopped, or is not in the connection's senders list,\n this method has no effect." return self._native_obj.removeTrack(sender._native_obj)
Tells the local end of the connection to stop sending media from the specified track, without actually removing the corresponding :obj:`webrtc.RTCRtpSender` from the list of senders as reported by :attr:`get_senders`. If the track is already stopped, or is not in the connection's senders list, this method has no effect.
python-webrtc/python/webrtc/interfaces/rtc_peer_connection.py
remove_track
MarshalX/python-webrtc
81
python
def remove_track(self, sender: 'webrtc.RTCRtpSender') -> None: "Tells the local end of the connection to stop sending media from the specified track, without\n actually removing the corresponding :obj:`webrtc.RTCRtpSender` from the list of senders as reported\n by :attr:`get_senders`. If the track is already stopped, or is not in the connection's senders list,\n this method has no effect." return self._native_obj.removeTrack(sender._native_obj)
def remove_track(self, sender: 'webrtc.RTCRtpSender') -> None: "Tells the local end of the connection to stop sending media from the specified track, without\n actually removing the corresponding :obj:`webrtc.RTCRtpSender` from the list of senders as reported\n by :attr:`get_senders`. If the track is already stopped, or is not in the connection's senders list,\n this method has no effect." return self._native_obj.removeTrack(sender._native_obj)<|docstring|>Tells the local end of the connection to stop sending media from the specified track, without actually removing the corresponding :obj:`webrtc.RTCRtpSender` from the list of senders as reported by :attr:`get_senders`. If the track is already stopped, or is not in the connection's senders list, this method has no effect.<|endoftext|>
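A sketch of the stop-sending flow described above, again assuming webrtc.RTCPeerConnection() as the constructor: remove_track() takes a sender obtained from get_senders() and stops its transmission without removing it from the senders list; per the docstring, calling it for an already-stopped sender has no effect.

import webrtc

pc = webrtc.RTCPeerConnection()  # assumed constructor, for illustration only

senders = pc.get_senders()
if senders:
    # Stop sending media for the first sender; it still appears in
    # pc.get_senders() afterwards, it just no longer transmits.
    pc.remove_track(senders[0])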
941743d1788d6ca6ee0849fa21888c70e46a9ef03242b59743337b6eae260da5
def restart_ice(self) -> None: 'Allows you to easily request that ICE candidate gathering be redone on both ends of the connection.\n This simplifies the process by allowing the same method to be used by either the caller or the receiver\n to trigger an ICE restart.' return self._native_obj.restartIce()
Allows you to easily request that ICE candidate gathering be redone on both ends of the connection. This simplifies the process by allowing the same method to be used by either the caller or the receiver to trigger an ICE restart.
python-webrtc/python/webrtc/interfaces/rtc_peer_connection.py
restart_ice
MarshalX/python-webrtc
81
python
def restart_ice(self) -> None: 'Allows you to easily request that ICE candidate gathering be redone on both ends of the connection.\n This simplifies the process by allowing the same method to be used by either the caller or the receiver\n to trigger an ICE restart.' return self._native_obj.restartIce()
def restart_ice(self) -> None: 'Allows you to easily request that ICE candidate gathering be redone on both ends of the connection.\n This simplifies the process by allowing the same method to be used by either the caller or the receiver\n to trigger an ICE restart.' return self._native_obj.restartIce()<|docstring|>Allows you to easily request that ICE candidate gathering be redone on both ends of the connection. This simplifies the process by allowing the same method to be used by either the caller or the receiver to trigger an ICE restart.<|endoftext|>
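Finally, an ICE-restart sketch under the same assumed constructor: either peer can call restart_ice(), which only schedules a new round of candidate gathering; the follow-up offer/answer exchange that actually applies it is outside this sketch.

import webrtc

pc = webrtc.RTCPeerConnection()  # assumed constructor, for illustration only

# Request that ICE candidate gathering be redone on both ends of the connection.
pc.restart_ice()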