function: string (lengths 11 to 56k)
repo_name: string (lengths 5 to 60)
features: sequence
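Each record below follows this schema: a source-code function (flattened onto a single line in the raw dump), the repository it was mined from, and a five-element integer feature vector. As a minimal sketch of consuming such records, assuming the dump ships as JSON Lines with exactly these field names (the file name is illustrative):

import json

# Illustrative file name; the three field names come from the schema above.
with open("samples.jsonl") as f:
    for line in f:
        record = json.loads(line)
        func = record["function"]     # string, length 11 to ~56k
        repo = record["repo_name"]    # string, length 5 to 60
        feats = record["features"]    # sequence of integers
        print(repo, len(func), feats)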
def test_run_call_pipeline_if_no_model_display_name_nor_model_labels( self, mock_pipeline_service_create, mock_dataset_time_series, mock_model_service_get, sync,
googleapis/python-aiplatform
[ 306, 205, 306, 52, 1600875819 ]
def test_run_call_pipeline_if_set_additional_experiments( self, mock_pipeline_service_create, mock_dataset_time_series, mock_model_service_get, sync,
googleapis/python-aiplatform
[ 306, 205, 306, 52, 1600875819 ]
def test_run_called_twice_raises( self, mock_dataset_time_series, sync,
googleapis/python-aiplatform
[ 306, 205, 306, 52, 1600875819 ]
def test_run_raises_if_pipeline_fails( self, mock_pipeline_service_create_and_get_with_fail, mock_dataset_time_series, sync,
googleapis/python-aiplatform
[ 306, 205, 306, 52, 1600875819 ]
def test_raises_before_run_is_called(self, mock_pipeline_service_create):
    aiplatform.init(project=_TEST_PROJECT, staging_bucket=_TEST_BUCKET_NAME)
    job = AutoMLForecastingTrainingJob(
        display_name=_TEST_DISPLAY_NAME,
        optimization_objective=_TEST_TRAINING_OPTIMIZATION_OBJECTIVE_NAME,
        column_transformations=_TEST_TRAINING_COLUMN_TRANSFORMATIONS,
    )
    with pytest.raises(RuntimeError):
        job.get_model()
    with pytest.raises(RuntimeError):
        job.has_failed
    with pytest.raises(RuntimeError):
        job.state
googleapis/python-aiplatform
[ 306, 205, 306, 52, 1600875819 ]
def test_splits_fraction( self, mock_pipeline_service_create, mock_pipeline_service_get, mock_dataset_time_series, mock_model_service_get, sync,
googleapis/python-aiplatform
[ 306, 205, 306, 52, 1600875819 ]
def test_splits_predefined( self, mock_pipeline_service_create, mock_pipeline_service_get, mock_dataset_time_series, mock_model_service_get, sync,
googleapis/python-aiplatform
[ 306, 205, 306, 52, 1600875819 ]
def test_splits_default( self, mock_pipeline_service_create, mock_pipeline_service_get, mock_dataset_time_series, mock_model_service_get, sync,
googleapis/python-aiplatform
[ 306, 205, 306, 52, 1600875819 ]
def dump_connection_info(engine: Engine, fileobj: TextIO = sys.stdout) -> None:
    """
    Dumps some connection info, as an SQL comment. Obscures passwords.

    Args:
        engine: the SQLAlchemy :class:`Engine` to dump metadata information
            from
        fileobj: the file-like object (default ``sys.stdout``) to write
            information to
    """
    meta = MetaData(bind=engine)
    writeline_nl(fileobj, sql_comment(f'Database info: {meta}'))
RudolfCardinal/pythonlib
[ 10, 5, 10, 2, 1426004501 ]
def dump(querysql, *multiparams, **params):
    compsql = querysql.compile(dialect=engine.dialect)
    writeline_nl(fileobj, f"{compsql};")
RudolfCardinal/pythonlib
[ 10, 5, 10, 2, 1426004501 ]
def quick_mapper(table: Table) -> Type[DeclarativeMeta]:
    """
    Makes a new SQLAlchemy mapper for an existing table.
    See
    https://www.tylerlesmann.com/2009/apr/27/copying-databases-across-platforms-sqlalchemy/

    Args:
        table: SQLAlchemy :class:`Table` object

    Returns:
        a :class:`DeclarativeMeta` class
    """  # noqa
    # noinspection PyPep8Naming
    Base = declarative_base()

    class GenericMapper(Base):
        __table__ = table

    # noinspection PyTypeChecker
    return GenericMapper
RudolfCardinal/pythonlib
[ 10, 5, 10, 2, 1426004501 ]
def literal_processor(self, dialect: DefaultDialect) -> Callable[[Any], str]:
    super_processor = super().literal_processor(dialect)

    def process(value: Any) -> str:
        log.debug("process: {!r}", value)
        if isinstance(value, int):
            return str(value)
        if not isinstance(value, str):
            value = str(value)
        result = super_processor(value)
        if isinstance(result, bytes):
            result = result.decode(dialect.encoding)
        return result

    return process
RudolfCardinal/pythonlib
[ 10, 5, 10, 2, 1426004501 ]
def make_literal_query_fn(dialect: DefaultDialect) -> Callable[[str], str]:
    DialectClass = dialect.__class__

    # noinspection PyClassHasNoInit,PyAbstractClass
    class LiteralDialect(DialectClass):
        # https://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query  # noqa
        colspecs = {
            # prevent various encoding explosions
            String: StringLiteral,
            # teach SA about how to literalize a datetime
            DateTime: StringLiteral,
            # don't format py2 long integers to NULL
            NullType: StringLiteral,
        }

    def literal_query(statement: str) -> str:
        """
        NOTE: This is entirely insecure. DO NOT execute the resulting
        strings.
        """
        # https://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query  # noqa
        if isinstance(statement, Query):
            statement = statement.statement
        return statement.compile(
            dialect=LiteralDialect(),
            compile_kwargs={'literal_binds': True},
        ).string + ";"

    return literal_query
RudolfCardinal/pythonlib
[ 10, 5, 10, 2, 1426004501 ]
def get_literal_query(statement: Union[Query, Executable],
                      bind: Connectable = None) -> str:
    """
    Takes an SQLAlchemy statement and produces a literal SQL version, with
    values filled in.

    As per
    https://stackoverflow.com/questions/5631078/sqlalchemy-print-the-actual-query

    Notes:
    - for debugging purposes *only*
    - insecure; you should always separate queries from their values
    - please also note that this function is quite slow

    Args:
        statement: the SQL statement (a SQLAlchemy object) to use
        bind: if the statement is unbound, you will need to specify an
            object here that supports SQL execution

    Returns:
        a string literal version of the query.
    """  # noqa
    # log.debug("statement: {!r}", statement)
    # log.debug("statement.bind: {!r}", statement.bind)
    if isinstance(statement, Query):
        if bind is None:
            bind = statement.session.get_bind(statement._mapper_zero_or_none())
        statement = statement.statement
    elif bind is None:
        bind = statement.bind
    if bind is None:  # despite all that
        raise ValueError("Attempt to call get_literal_query with an unbound "
                         "statement and no 'bind' parameter")

    # noinspection PyUnresolvedReferences
    dialect = bind.dialect
    compiler = statement._compiler(dialect)

    class LiteralCompiler(compiler.__class__):
        # noinspection PyMethodMayBeStatic
        def visit_bindparam(self,
                            bindparam: BindParameter,
                            within_columns_clause: bool = False,
                            literal_binds: bool = False,
                            **kwargs) -> str:
            return super().render_literal_bindparam(
                bindparam,
                within_columns_clause=within_columns_clause,
                literal_binds=literal_binds,
                **kwargs
            )

        # noinspection PyUnusedLocal
        def render_literal_value(self, value: Any, type_) -> str:
            """
            Render the value of a bind parameter as a quoted literal.

            This is used for statement sections that do not accept bind
            parameters on the target driver/database.

            This should be implemented by subclasses using the quoting
            services of the DBAPI.
            """
            if isinstance(value, str):
                value = value.replace("'", "''")
                return "'%s'" % value
            elif value is None:
                return "NULL"
            elif isinstance(value, (float, int)):
                return repr(value)
            elif isinstance(value, decimal.Decimal):
                return str(value)
            elif (isinstance(value, datetime.datetime) or
                    isinstance(value, datetime.date) or
                    isinstance(value, datetime.time) or
                    isinstance(value, pendulum.DateTime) or
                    isinstance(value, pendulum.Date) or
                    isinstance(value, pendulum.Time)):
                # All have an isoformat() method.
                return f"'{value.isoformat()}'"
                # return (
                #     "TO_DATE('%s','YYYY-MM-DD HH24:MI:SS')"
                #     % value.strftime("%Y-%m-%d %H:%M:%S")
                # )
            else:
                raise NotImplementedError(
                    "Don't know how to literal-quote value %r" % value)

    compiler = LiteralCompiler(dialect, statement)
    return compiler.process(statement) + ";"
RudolfCardinal/pythonlib
[ 10, 5, 10, 2, 1426004501 ]
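The function above is debugging-only by its own admission; a hedged usage sketch follows, with an illustrative SQLAlchemy 1.x table and engine (only get_literal_query itself comes from the code above):

# Debugging-only, per the docstring: never execute the resulting string.
from sqlalchemy import (Column, Integer, MetaData, String, Table,
                        create_engine, select)

engine = create_engine("sqlite://")  # illustrative engine
meta = MetaData()
users = Table("users", meta, Column("id", Integer), Column("name", String))
stmt = select([users]).where(users.c.name == "alice")
print(get_literal_query(stmt, bind=engine))
# e.g.: SELECT users.id, users.name FROM users WHERE users.name = 'alice';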
def dump_database_as_insert_sql(engine: Engine,
                                fileobj: TextIO = sys.stdout,
                                include_ddl: bool = False,
                                multirow: bool = False) -> None:
    """
    Reads an entire database and writes SQL to replicate it to the output
    file-like object.

    Args:
        engine: SQLAlchemy :class:`Engine`
        fileobj: file-like object to write to
        include_ddl: if ``True``, include the DDL to create the table as well
        multirow: write multi-row ``INSERT`` statements
    """
    for tablename in get_table_names(engine):
        dump_table_as_insert_sql(
            engine=engine,
            table_name=tablename,
            fileobj=fileobj,
            include_ddl=include_ddl,
            multirow=multirow
        )
RudolfCardinal/pythonlib
[ 10, 5, 10, 2, 1426004501 ]
def bulk_insert_extras(dialect_name: str,
                       fileobj: TextIO,
                       start: bool) -> None:
    """
    Writes bulk ``INSERT`` preamble (start=True) or end (start=False).

    For MySQL, this temporarily switches off autocommit behaviour and
    index/FK checks, for speed, then re-enables them at the end and commits.

    Args:
        dialect_name: SQLAlchemy dialect name (see :class:`SqlaDialectName`)
        fileobj: file-like object to write to
        start: if ``True``, write preamble; if ``False``, write end
    """
    lines = []
    if dialect_name == SqlaDialectName.MYSQL:
        if start:
            lines = [
                "SET autocommit=0;",
                "SET unique_checks=0;",
                "SET foreign_key_checks=0;",
            ]
        else:
            lines = [
                "SET foreign_key_checks=1;",
                "SET unique_checks=1;",
                "COMMIT;",
            ]
    writelines_nl(fileobj, lines)
RudolfCardinal/pythonlib
[ 10, 5, 10, 2, 1426004501 ]
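A sketch of how the two dump helpers above might be combined, assuming an in-memory SQLite engine and using io.StringIO in place of a real output file; the dialect-name string "mysql" is assumed to mirror SqlaDialectName.MYSQL:

import io
from sqlalchemy import create_engine

engine = create_engine("sqlite://")  # illustrative source database
buf = io.StringIO()
bulk_insert_extras("mysql", buf, start=True)   # speed-up preamble (no-op for other dialects)
dump_database_as_insert_sql(engine, fileobj=buf, include_ddl=True)
bulk_insert_extras("mysql", buf, start=False)  # re-enable checks and COMMIT
print(buf.getvalue())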
def __init__(self, value=None, data=None):
    self.value = value
    if data is None:
        self.data = None
    elif isinstance(data, dict):
        self.data = data.copy()
    elif isinstance(data, list):
        self.data = data[:]
    else:
        self.data = data
openweave/happy
[ 41, 22, 41, 7, 1504117462 ]
def test1(arg1=20, arg2='name', arg3=1.23):
    print('test1')
    print('arg1', arg1)
    print('arg2', arg2)
    print('arg3', arg3)
seba-1511/randopt
[ 108, 8, 108, 13, 1477335124 ]
def test2(arg1=20, arg2='name', arg3=1.23):
    """
    The docstring serves as help when using the --help flag.

    Args:
        arg1: int
        arg2: str
        arg3: float
    """
    print('test2')
    print('arg1', arg1)
    print('arg2', arg2)
    print('arg3', arg3)
seba-1511/randopt
[ 108, 8, 108, 13, 1477335124 ]
def test_experiment1(x=2, y=3):
    return x**2 + y**2
seba-1511/randopt
[ 108, 8, 108, 13, 1477335124 ]
def test_experiment2(x=2, y=3):
    return x**2 + y**2, {'additional': 'info'}
seba-1511/randopt
[ 108, 8, 108, 13, 1477335124 ]
def test_experiment3(x=2, y=3):
    return x**2 + y**2, {'additional': 'info'}, {'attach': 'this sentence.'}
seba-1511/randopt
[ 108, 8, 108, 13, 1477335124 ]
def test_experiment4(x=2, y=4):
    exp = ro.Experiment('params_from_def', params=ro.dict_to_constants(locals()))
    exp.add_result(x**2 + y**2, data={'additional': 'as usual.'})
seba-1511/randopt
[ 108, 8, 108, 13, 1477335124 ]
def testSoftplusGrad(self):
    check_grads(nn.softplus, (1e-8,), order=4,
                rtol=1e-2 if jtu.device_under_test() == "tpu" else None)
google/jax
[ 22193, 2080, 22193, 1296, 1540502702 ]
def testSoftplusGradInf(self):
    self.assertAllClose(1., jax.grad(nn.softplus)(float('inf')))
google/jax
[ 22193, 2080, 22193, 1296, 1540502702 ]
def testSoftplusGradNan(self):
    check_grads(nn.softplus, (float('nan'),), order=1,
                rtol=1e-2 if jtu.device_under_test() == "tpu" else None)
google/jax
[ 22193, 2080, 22193, 1296, 1540502702 ]
def testSoftplusZero(self, dtype):
    self.assertEqual(jnp.log(dtype(2)), nn.softplus(dtype(0)))
google/jax
[ 22193, 2080, 22193, 1296, 1540502702 ]
def testSoftplusValue(self):
    val = nn.softplus(89.)
    self.assertAllClose(val, 89., check_dtypes=False)
google/jax
[ 22193, 2080, 22193, 1296, 1540502702 ]
def testEluGrad(self):
    check_grads(nn.elu, (1e4,), order=4, eps=1.)
google/jax
[ 22193, 2080, 22193, 1296, 1540502702 ]
def testGluValue(self):
    val = nn.glu(jnp.array([1.0, 0.0]))
    self.assertAllClose(val, jnp.array([0.5]))
google/jax
[ 22193, 2080, 22193, 1296, 1540502702 ]
def testGelu(self, approximate):
    def gelu_reference(x):
        return x * scipy.stats.norm.cdf(x)

    rng = jtu.rand_default(self.rng())
    args_maker = lambda: [rng((4, 5, 6), jnp.float32)]
    self._CheckAgainstNumpy(
        gelu_reference, partial(nn.gelu, approximate=approximate), args_maker,
        check_dtypes=False, tol=1e-3 if approximate else None)
google/jax
[ 22193, 2080, 22193, 1296, 1540502702 ]
def testDtypeMatchesInput(self, dtype, fn):
    x = jnp.zeros((), dtype=dtype)
    out = fn(x)
    self.assertEqual(out.dtype, dtype)
google/jax
[ 22193, 2080, 22193, 1296, 1540502702 ]
def testHardTanhMemory(self):
    # see https://github.com/google/jax/pull/1640
    with jax.enable_checks(False):  # with checks we materialize the array
        jax.make_jaxpr(lambda: nn.hard_tanh(jnp.ones((10 ** 12,))))  # don't oom
google/jax
[ 22193, 2080, 22193, 1296, 1540502702 ]
def testSoftmaxWhereMask(self, fn):
    x = jnp.array([5.5, 1.3, -4.2, 0.9])
    m = jnp.array([True, False, True, True])
    x_filtered = jnp.take(x, jnp.array([0, 2, 3]))

    out_masked = jnp.take(
        fn(x, where=m, initial=-jnp.inf), jnp.array([0, 2, 3]))
    out_filtered = fn(x_filtered)

    self.assertAllClose(out_masked, out_filtered)
google/jax
[ 22193, 2080, 22193, 1296, 1540502702 ]
def testOneHot(self):
    actual = nn.one_hot(jnp.array([0, 1, 2]), 3)
    expected = jnp.array([[1., 0., 0.],
                          [0., 1., 0.],
                          [0., 0., 1.]])
    self.assertAllClose(actual, expected)

    actual = nn.one_hot(jnp.array([1, 2, 0]), 3)
    expected = jnp.array([[0., 1., 0.],
                          [0., 0., 1.],
                          [1., 0., 0.]])
    self.assertAllClose(actual, expected)
google/jax
[ 22193, 2080, 22193, 1296, 1540502702 ]
def testOneHotNonArrayInput(self):
    actual = nn.one_hot([0, 1, 2], 3)
    expected = jnp.array([[1., 0., 0.],
                          [0., 1., 0.],
                          [0., 0., 1.]])
    self.assertAllClose(actual, expected)
google/jax
[ 22193, 2080, 22193, 1296, 1540502702 ]
def testOneHotConcretizationError(self):
    # https://github.com/google/jax/issues/3654
    msg = r"in jax.nn.one_hot argument `num_classes`"
    with self.assertRaisesRegex(core.ConcretizationTypeError, msg):
        jax.jit(nn.one_hot)(3, 5)
google/jax
[ 22193, 2080, 22193, 1296, 1540502702 ]
def testTanhExists(self):
    nn.tanh  # doesn't crash
google/jax
[ 22193, 2080, 22193, 1296, 1540502702 ]
def fwd():
    a = jnp.array(1.)

    def f(hx, _):
        hx = jax.nn.sigmoid(hx + a)
        return hx, None

    hx = jnp.array(0.)
    jax.lax.scan(f, hx, None, length=2)
google/jax
[ 22193, 2080, 22193, 1296, 1540502702 ]
def initializer_record(name, initializer, dtypes, min_dims=2, max_dims=4):
    shapes = [shape for shape in ALL_SHAPES
              if min_dims <= len(shape) <= max_dims]
    return InitializerRecord(name, initializer, shapes, dtypes)
google/jax
[ 22193, 2080, 22193, 1296, 1540502702 ]
def testInitializer(self, initializer, shape, dtype):
    rng = random.PRNGKey(0)
    val = initializer(rng, shape, dtype)
    self.assertEqual(shape, jnp.shape(val))
    self.assertEqual(jax.dtypes.canonicalize_dtype(dtype), jnp.dtype(val))
google/jax
[ 22193, 2080, 22193, 1296, 1540502702 ]
def testInitializerProvider(self, initializer_provider, shape, dtype):
    rng = random.PRNGKey(0)
    initializer = initializer_provider(dtype=dtype)
    val = initializer(rng, shape)
    self.assertEqual(shape, jnp.shape(val))
    self.assertEqual(jax.dtypes.canonicalize_dtype(dtype), jnp.dtype(val))
google/jax
[ 22193, 2080, 22193, 1296, 1540502702 ]
def testVarianceScalingBatchAxis(self):
    rng = random.PRNGKey(0)
    shape = (2, 3, 4, 5)
    initializer = nn.initializers.variance_scaling(
        scale=1.0, mode='fan_avg', distribution='truncated_normal',
        in_axis=0, out_axis=(2, 3), batch_axis=1)
    val = initializer(rng, shape)
    self.assertEqual(shape, jnp.shape(val))
google/jax
[ 22193, 2080, 22193, 1296, 1540502702 ]
def move1():
    g = GripperCommandGoal()
    g.command = GripperCommand()
    g.command.position = 0.15
    print(JOINT_NAME)
    client.send_goal(g)
    try:
        client.wait_for_result()
    except KeyboardInterrupt:
        client.cancel_goal()
        raise
EricssonResearch/scott-eu
[ 21, 8, 21, 1, 1498718504 ]
def setUp(self):
    self.opor = RedisOperator()
WiseDoge/ProxyPool
[ 345, 123, 345, 7, 1480910054 ]
def test_puts_and_pop(self):
    self.opor.puts('1')
    assert self.opor.pop() == '1'
    self.opor.puts(['1', '2', '3'])
    init_size = self.opor.size
    self.opor.pop()
    assert self.opor.size == init_size - 1
WiseDoge/ProxyPool
[ 345, 123, 345, 7, 1480910054 ]
def test_gets(self):
    init_size = self.opor.size
    self.opor.gets(3)
    assert self.opor.size == init_size
WiseDoge/ProxyPool
[ 345, 123, 345, 7, 1480910054 ]
def test_cpu(self):
    lgb_train, lgb_eval = self.load_datasets()

    params = {
        'task': 'train',
        'boosting_type': 'gbdt',
        'objective': 'regression',
        'metric': {'l2', 'auc'},
        'num_leaves': 31,
        'learning_rate': 0.05,
        'feature_fraction': 0.9,
        'bagging_fraction': 0.8,
        'bagging_freq': 5,
        'force_row_wise': True,
        'verbose': 0
    }

    # Run only one round for faster test
    gbm = lgb.train(params,
                    lgb_train,
                    num_boost_round=1,
                    valid_sets=lgb_eval,
                    early_stopping_rounds=1)

    self.assertEqual(1, gbm.best_iteration)
Kaggle/docker-python
[ 2096, 883, 2096, 27, 1428975938 ]
def test_gpu(self):
    lgb_train, lgb_eval = self.load_datasets()
Kaggle/docker-python
[ 2096, 883, 2096, 27, 1428975938 ]
def load_datasets(self):
    df_train = pd.read_csv('/input/tests/data/lgb_train.csv', header=None, sep='\t')
    df_test = pd.read_csv('/input/tests/data/lgb_test.csv', header=None, sep='\t')
Kaggle/docker-python
[ 2096, 883, 2096, 27, 1428975938 ]
def find_python_executable() -> str:
    """
    Find the relevant python executable that is of the given python major
    version. Will test, in decreasing priority order:

    * the current Python interpreter
    * 'pythonX' executable in PATH (with X the given major version) if available
    * 'python' executable in PATH if available
    * Windows Python launcher 'py' executable in PATH if available

    Incompatible python versions for Certbot will be evicted (e.g. Python 3
    versions less than 3.6).

    :rtype: str
    :return: the relevant python executable path
    :raise RuntimeError: if no relevant python executable path could be found
    """
    python_executable_path = None

    # First try, current python executable
    if _check_version('{0}.{1}.{2}'.format(
            sys.version_info[0], sys.version_info[1], sys.version_info[2])):
        return sys.executable

    # Second try, with python executables in path
    for one_version in ('3', '',):
        try:
            one_python = 'python{0}'.format(one_version)
            output = subprocess.check_output([one_python, '--version'],
                                             universal_newlines=True,
                                             stderr=subprocess.STDOUT)
            if _check_version(output.strip().split()[1]):
                return subprocess.check_output(
                    [one_python, '-c',
                     'import sys; sys.stdout.write(sys.executable);'],
                    universal_newlines=True)
        except (subprocess.CalledProcessError, OSError):
            pass

    # Last try, with Windows Python launcher
    try:
        output_version = subprocess.check_output(['py', '-3', '--version'],
                                                 universal_newlines=True,
                                                 stderr=subprocess.STDOUT)
        if _check_version(output_version.strip().split()[1]):
            # '-3' matches the version checked just above (the original text
            # referenced an undefined name here).
            return subprocess.check_output(
                ['py', '-3', '-c',
                 'import sys; sys.stdout.write(sys.executable);'],
                universal_newlines=True)
    except (subprocess.CalledProcessError, OSError):
        pass

    if not python_executable_path:
        raise RuntimeError('Error, no compatible Python executable for Certbot could be found.')
letsencrypt/letsencrypt
[ 29698, 3333, 29698, 543, 1415760740 ]
def subprocess_with_print(cmd, env=None, shell=False):
    if env is None:
        env = os.environ
    print('+ {0}'.format(subprocess.list2cmdline(cmd))
          if isinstance(cmd, list) else cmd)
    subprocess.check_call(cmd, env=env, shell=shell)
letsencrypt/letsencrypt
[ 29698, 3333, 29698, 543, 1415760740 ]
def get_venv_python_path(venv_path):
    python_linux = os.path.join(venv_path, 'bin/python')
    if os.path.isfile(python_linux):
        return os.path.abspath(python_linux)
    python_windows = os.path.join(venv_path, 'Scripts\\python.exe')
    if os.path.isfile(python_windows):
        return os.path.abspath(python_windows)
    raise ValueError((
        'Error, could not find python executable in venv path {0}: is it a valid venv ?'
        .format(venv_path)))
letsencrypt/letsencrypt
[ 29698, 3333, 29698, 543, 1415760740 ]
def install_packages(venv_name, pip_args):
    """Installs packages in the given venv.

    :param str venv_name: The name or path at where the virtual environment
        should be created.
    :param pip_args: Command line arguments that should be given to pip to
        install packages
    :type pip_args: `list` of `str`
    """
    # Using the python executable from venv, we ensure to execute following commands in this venv.
    py_venv = get_venv_python_path(venv_name)
    subprocess_with_print([py_venv, os.path.abspath('tools/pipstrap.py')])
    command = [py_venv, os.path.abspath('tools/pip_install.py')]
    command.extend(pip_args)
    subprocess_with_print(command)

    if os.path.isdir(os.path.join(venv_name, 'bin')):
        # Linux/OSX specific
        print('-------------------------------------------------------------------')
        print('Please run the following command to activate developer environment:')
        print('source {0}/bin/activate'.format(venv_name))
        print('-------------------------------------------------------------------')
    elif os.path.isdir(os.path.join(venv_name, 'Scripts')):
        # Windows specific
        print('---------------------------------------------------------------------------')
        print('Please run one of the following commands to activate developer environment:')
        print('{0}\\Scripts\\activate.bat (for Batch)'.format(venv_name))
        print('.\\{0}\\Scripts\\Activate.ps1 (for Powershell)'.format(venv_name))
        print('---------------------------------------------------------------------------')
    else:
        raise ValueError('Error, directory {0} is not a valid venv.'.format(venv_name))
letsencrypt/letsencrypt
[ 29698, 3333, 29698, 543, 1415760740 ]
def main(pip_args=None):
    venv_path = prepare_venv_path('venv')
    create_venv(venv_path)

    if not pip_args:
        pip_args = REQUIREMENTS

    install_packages(venv_path, pip_args)
letsencrypt/letsencrypt
[ 29698, 3333, 29698, 543, 1415760740 ]
def __init__(self, banner, ps1, fail):
    """
    Sets up the client
    """
    self._socket = None
    self._banner = banner
    self._ps1 = ps1
    self.fail = fail
    self.__wait_prompt = True
    self.__prefix = ""
isandlaTech/cohorte-3rdparty
[ 1, 1, 1, 1, 1411546128 ]
def close(self):
    """
    Close the connection
    """
    self._socket.close()
isandlaTech/cohorte-3rdparty
[ 1, 1, 1, 1, 1411546128 ]
def run_command(self, command, disconnect=False):
    """
    Runs a command on the remote shell
    """
    # Wait for the first prompt
    if self.__wait_prompt:
        self.wait_prompt()
        self.__wait_prompt = False

    # Run the command
    self._socket.send(to_bytes(command + "\n"))

    # Disconnect if required
    if disconnect:
        self.close()
        return

    # Get its result
    data = self.wait_prompt(False)
    return data.strip()
isandlaTech/cohorte-3rdparty
[ 1, 1, 1, 1, 1411546128 ]
def setUp(self):
    """
    Starts a framework and installs the shell bundles
    """
    # Start the framework
    self.framework = create_framework(('pelix.ipopo.core',
                                       'pelix.shell.core',
                                       'pelix.shell.remote'))
    self.framework.start()
    context = self.framework.get_bundle_context()

    # Get the core shell service
    svc_ref = context.get_service_reference(SERVICE_SHELL)
    self.shell = context.get_service(svc_ref)

    # Start the remote shell
    with use_ipopo(context) as ipopo:
        self.remote = ipopo.instantiate(
            FACTORY_REMOTE_SHELL, "remoteShell",
            {'pelix.shell.address': '127.0.0.1',
             'pelix.shell.port': 9000})
isandlaTech/cohorte-3rdparty
[ 1, 1, 1, 1, 1411546128 ]
def _run_local_command(self, command, *args):
    """
    Runs the given command and returns the output stream
    """
    # String output
    str_output = StringIO()

    # Format command
    if args:
        command = command.format(*args)

    # Run command
    self.shell.execute(command, stdout=str_output)
    return str_output.getvalue().strip()
isandlaTech/cohorte-3rdparty
[ 1, 1, 1, 1, 1411546128 ]
def testRemoteVsRemoteCommands(self):
    """
    Tests the output for two clients
    """
    # Create clients
    client_1 = ShellClient(self.remote.get_banner(),
                           self.remote.get_ps1(), self.fail)
    client_2 = ShellClient(self.remote.get_banner(),
                           self.remote.get_ps1(), self.fail)

    # Connect them to the remote shell
    client_1.connect(self.remote.get_access())
    client_2.connect(self.remote.get_access())

    try:
        for command in ('bl', 'bd 0', 'sl', 'sd 1'):
            # Get clients outputs
            client_1_output = client_1.run_command(command)
            client_2_output = client_2.run_command(command)

            # Compare them
            self.assertEqual(client_1_output, client_2_output)
    finally:
        # Close the clients in any case
        client_1.close()
        client_2.close()
isandlaTech/cohorte-3rdparty
[ 1, 1, 1, 1, 1411546128 ]
def testInvalidConfiguration(self):
    """
    Tests the instantiation of the remote shell with invalid port
    """
    import logging
    logging.basicConfig(level=logging.DEBUG)
    with use_ipopo(self.framework.get_bundle_context()) as ipopo:
        # Check invalid ports
        for port in (-1, 100000, '-100', '65536', 'Abc', None):
            remote = ipopo.instantiate(FACTORY_REMOTE_SHELL,
                                       "remoteShell_test",
                                       {'pelix.shell.port': port})

            # Check that the port is in a valid range
            self.assertGreater(remote.get_access()[1], 0)
            self.assertLess(remote.get_access()[1], 65536)
            ipopo.kill("remoteShell_test")

        # Check empty addresses
        for address in (None, ''):
            remote = ipopo.instantiate(FACTORY_REMOTE_SHELL,
                                       "remoteShell_test",
                                       {'pelix.shell.address': address,
                                        'pelix.shell.port': 0})

            # Check that the address has been selected anyway
            self.assertTrue(remote.get_access()[0])
            ipopo.kill("remoteShell_test")
isandlaTech/cohorte-3rdparty
[ 1, 1, 1, 1, 1411546128 ]
def cnn(self, model_input, l2_penalty=1e-8,
        num_filters=[1024, 1024, 1024], filter_sizes=[1, 2, 3],
        sub_scope="", **unused_params):
    max_frames = model_input.get_shape().as_list()[1]
    num_features = model_input.get_shape().as_list()[2]

    shift_inputs = []
    for i in range(max(filter_sizes)):
        if i == 0:
            shift_inputs.append(model_input)
        else:
            shift_inputs.append(
                tf.pad(model_input,
                       paddings=[[0, 0], [i, 0], [0, 0]])[:, :max_frames, :])

    cnn_outputs = []
    for nf, fs in zip(num_filters, filter_sizes):
        sub_input = tf.concat(shift_inputs[:fs], axis=2)
        sub_filter = tf.get_variable(
            sub_scope + "cnn-filter-len%d" % fs,
            shape=[num_features * fs, nf], dtype=tf.float32,
            initializer=tf.truncated_normal_initializer(mean=0.0, stddev=0.1),
            regularizer=tf.contrib.layers.l2_regularizer(l2_penalty))
        cnn_outputs.append(tf.einsum("ijk,kl->ijl", sub_input, sub_filter))

    cnn_output = tf.concat(cnn_outputs, axis=2)
    return cnn_output
wangheda/youtube-8m
[ 177, 61, 177, 2, 1488610618 ]
def sub_model(self, model_input, vocab_size, num_mixtures=None,
              l2_penalty=1e-8, sub_scope="", **unused_params):
    num_mixtures = num_mixtures or FLAGS.moe_num_mixtures
    gate_activations = slim.fully_connected(
        model_input,
        vocab_size * (num_mixtures + 1),
        activation_fn=None,
        biases_initializer=None,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope="gates-" + sub_scope)
    expert_activations = slim.fully_connected(
        model_input,
        vocab_size * num_mixtures,
        activation_fn=None,
        weights_regularizer=slim.l2_regularizer(l2_penalty),
        scope="experts-" + sub_scope)

    gating_distribution = tf.nn.softmax(tf.reshape(
        gate_activations,
        [-1, num_mixtures + 1]))  # (Batch * #Labels) x (num_mixtures + 1)
    expert_distribution = tf.nn.sigmoid(tf.reshape(
        expert_activations,
        [-1, num_mixtures]))  # (Batch * #Labels) x num_mixtures

    final_probabilities_by_class_and_batch = tf.reduce_sum(
        gating_distribution[:, :num_mixtures] * expert_distribution, 1)
    final_probabilities = tf.reshape(final_probabilities_by_class_and_batch,
                                     [-1, vocab_size])
    return final_probabilities
wangheda/youtube-8m
[ 177, 61, 177, 2, 1488610618 ]
def extra_method(self):
    return 'called extra method'
boto/s3transfer
[ 163, 116, 163, 48, 1452640782 ]
def on_done(self):
    pass
boto/s3transfer
[ 163, 116, 163, 48, 1452640782 ]
def on_queued(self, **kwargs):
    return kwargs
boto/s3transfer
[ 163, 116, 163, 48, 1452640782 ]
def __init__(self, arg1, arg2):
    self.arg1 = arg1
    self.arg2 = arg2
boto/s3transfer
[ 163, 116, 163, 48, 1452640782 ]
def test_can_instantiate_base_subscriber(self):
    try:
        BaseSubscriber()
    except InvalidSubscriberMethodError:
        self.fail('BaseSubscriber should be instantiable')
boto/s3transfer
[ 163, 116, 163, 48, 1452640782 ]
def test_subclass_can_have_and_call_additional_methods(self):
    subscriber = ExtraMethodsSubscriber()
    self.assertEqual(subscriber.extra_method(), 'called extra method')
boto/s3transfer
[ 163, 116, 163, 48, 1452640782 ]
def test_can_subclass_and_override_constructor_from_base_class(self):
    subscriber = OverrideConstructorSubscriber('foo', arg2='bar')
    # Make sure you can create a custom constructor.
    self.assertEqual(subscriber.arg1, 'foo')
    self.assertEqual(subscriber.arg2, 'bar')
boto/s3transfer
[ 163, 116, 163, 48, 1452640782 ]
def test_not_callable_in_subclass_subscriber_method(self):
    with self.assertRaisesRegex(
        InvalidSubscriberMethodError, 'must be callable'
    ):
        NotCallableSubscriber()
boto/s3transfer
[ 163, 116, 163, 48, 1452640782 ]
def main():
    # usage, parse parameters
    usage = "usage: %prog [options] arg"
    parser = OptionParser(usage)

    # option to debug and verbose
    parser.add_option("-v", "--verbose",
                      action="store_true", dest="verbose")

    # options to control files
    parser.add_option("-l", "--list", type="string", dest="directoryList",
                      help="list of elastix output directories")

    (options, args) = parser.parse_args()

    # Check if option -l is given
    if options.directoryList is None:
        parser.error("The option directory list (-l) should be given")

    # Use glob, this works not only on Linux
    dirList = glob.glob(options.directoryList)

    # Add everything not processed
    dirList.extend(args)

    print("directory checksum")
    for directory in dirList:
        # Equivalent to: fileName = options.directory + "/" + "elastix.log"
        fileName = os.path.join(directory, "elastix.log")

        # Read elastix.log and find last line with checksum
        try:
            f = open(fileName)
        except IOError:
            print(directory + " No elastix.log found")
            continue

        checksumFound = False
        for line in f:
            if "Registration result checksum:" in line:
                checksumline = line
                checksumFound = True

        # Extract checksum
        if checksumFound:
            checksum = checksumline.split(': ')[1].rstrip("\n")
            # Print result
            print(directory + " " + checksum)
        else:
            print(directory + " -")
        f.close()

    return 0
SuperElastix/elastix
[ 369, 101, 369, 53, 1495032082 ]
def _cmp_sample(a, b):
    """Compare two samples.

    First compare the resource ids.
    Compare the timestamps if the resource ids are the same.

    :param a: First sample
    :param b: Second sample
    :return: Result of cmp function.
    :rtype: Integer
    """
    result = cmp(a.resource_id, b.resource_id)
    if result == 0:
        result = cmp(a.timestamp, b.timestamp)
    return result
absalon-james/usage
[ 1, 1, 1, 2, 1461701508 ]
def __init__(self, client, name, max_samples=15000):
    """Init the meter.

    :param client: Ceilometer client
    :type client: ceilometerclient.client
    :param name: Name of the meter
    :type name: String
    :param max_samples: Max number of samples per query.
    :type max_samples: Integer
    """
    self.client = client
    self.name = name
    self.max_samples = max_samples
    # Extra time is 4 hours. 4 * 60 * 60 = 14400
    self._extra_time = datetime.timedelta(seconds=14400)
absalon-james/usage
[ 1, 1, 1, 2, 1461701508 ]
def _reading_generator(self, samples, start, stop):
    """Yields one reading at a time.

    Samples are grouped by resource id (already sorted by resource id) and
    then used to create a reading object.

    :param samples: List of samples sorted by resource_id and timestamp.
    :type samples: List
    :param start: Reading start time
    :type start: Datetime
    :param stop: Reading stop time
    :type stop: Datetime
    :yields: Reading objects
    """
    # Yield a reading for each resource/meter pair
    for _, g in itertools.groupby(samples, lambda x: x.resource_id):
        try:
            yield Reading(list(g), start, stop)
        except NoSamplesError:
            continue
absalon-james/usage
[ 1, 1, 1, 2, 1461701508 ]
def set_aws_keys(USERNAME, AWS_ACCESS_KEY_VAR, AWS_SECRET_ACCESS_KEY_VAR):
    global AWS_ACCESS_KEY
    global AWS_SECRET_ACCESS_KEY
    global KICKFLIP_USER_NAME

    AWS_ACCESS_KEY = AWS_ACCESS_KEY_VAR
    AWS_SECRET_ACCESS_KEY = AWS_SECRET_ACCESS_KEY_VAR
    KICKFLIP_USER_NAME = USERNAME

    return True
Kickflip/python-kickflip
[ 12, 12, 12, 2, 1400099987 ]
def upload_file(filename):
    return True
Kickflip/python-kickflip
[ 12, 12, 12, 2, 1400099987 ]
def connect():
    global connected
    global kickflip_session
    global KICKFLIP_CLIENT_ID
    global KICKFLIP_CLIENT_SECRET
    global KICKFLIP_API_URL

    if not connected:
        endpoint = KICKFLIP_BASE_URL + '/o/token/'
        payload = ({
            'client_secret': KICKFLIP_CLIENT_SECRET,
            'grant_type': 'client_credentials',
            'client_id': KICKFLIP_CLIENT_ID,
        })
        response = requests.post(endpoint, payload)
        if response.status_code != 200:
            raise Exception("Error: Couldn't connect to Kickflip...")

        token = response.json()
        client = MobileApplicationClient(KICKFLIP_CLIENT_ID)
        kickflip_session = OAuth2Session(
            KICKFLIP_CLIENT_ID,
            client=client,
            token=token
        )
        connected = True
        print("CONNECTED")

    return connected
Kickflip/python-kickflip
[ 12, 12, 12, 2, 1400099987 ]
def g(*args, **kwargs):
    if not connected:
        raise Exception("No session connected. connect() first?")
    return f(*args, **kwargs)
Kickflip/python-kickflip
[ 12, 12, 12, 2, 1400099987 ]
def set_keys(client_id, client_secret):
    global KICKFLIP_CLIENT_ID
    global KICKFLIP_CLIENT_SECRET

    KICKFLIP_CLIENT_ID = client_id
    KICKFLIP_CLIENT_SECRET = client_secret
Kickflip/python-kickflip
[ 12, 12, 12, 2, 1400099987 ]
def set_access_tokens():
    global KICKFLIP_ACCESS_TOKEN
    global KICKFLIP_SECRET_ACCESS_TOKEN

    # requests-oauth.get_tokens()
    KICKFLIP_ACCESS_TOKEN = key
    KICKFLIP_SECRET_ACCESS_TOKEN = secret_key

    return ''
Kickflip/python-kickflip
[ 12, 12, 12, 2, 1400099987 ]
def get_account_status(username):
    return ''
Kickflip/python-kickflip
[ 12, 12, 12, 2, 1400099987 ]
def create_user(username, password=""):
    """
    Uses the `/user/new` endpoint, taking the username as a parameter.
    TODO: What happens when you specify no password?

    e.g. username="banana1"
    """
    endpoint = KICKFLIP_API_URL + '/user/new'
    payload = {'username': username}
    if password:
        payload['password'] = password
    user_response = kickflip_session.post(endpoint, payload)
    return user_response.json()
Kickflip/python-kickflip
[ 12, 12, 12, 2, 1400099987 ]
def get_user_info(username):
    """
    Uses the `/user/info` endpoint, taking the username as a parameter.

    e.g. username="banana1"
    """
    endpoint = KICKFLIP_API_URL + '/user/info/'
    payload = {'username': username}
    user_response = kickflip_session.post(endpoint, payload)
    return user_response.json()
Kickflip/python-kickflip
[ 12, 12, 12, 2, 1400099987 ]
def start_stream(file_path, stream_name=None, private=False, username=''):
    """
    Uses the `/stream/start` endpoint, taking the username as a parameter.
    If you specify no username, it falls back to the default
    `KICKFLIP_USER_NAME` set in the set_aws_keys() function.

    e.g. username="banana1"
    """
    endpoint = KICKFLIP_API_URL + '/stream/start/'
    payload = {'username': KICKFLIP_USER_NAME}
    if username:
        payload['username'] = username
    user_response = kickflip_session.post(endpoint, payload)
    stream_video(file_path)
    return ''
Kickflip/python-kickflip
[ 12, 12, 12, 2, 1400099987 ]
def stop_stream():
    return ''
Kickflip/python-kickflip
[ 12, 12, 12, 2, 1400099987 ]
def get_stream_info(stream_id):
    """
    Uses the `/stream/info` endpoint, taking the stream_id as a parameter.

    e.g. stream_id="e83a515e-fe69-4b19-afba-20f30d56b719"
    """
    endpoint = KICKFLIP_API_URL + '/stream/info/'
    payload = {'stream_id': stream_id}
    response = kickflip_session.post(endpoint, payload)
    return response.json()
Kickflip/python-kickflip
[ 12, 12, 12, 2, 1400099987 ]
def get_stream_by_location(uuid, lat, lon, radius=0):
    """
    Uses the `/search/location` endpoint, taking a user uuid and coordinates
    as parameters.

    e.g. uuid="", username="bej48snvvthy"
    """
    endpoint = KICKFLIP_API_URL + '/search/location/'
    payload = ({
        'uuid': uuid,
        'lat': lat,
        'lon': lon
    })
    if radius != 0:
        payload['radius'] = radius
    response = kickflip_session.post(endpoint, payload)
    return response.json()
Kickflip/python-kickflip
[ 12, 12, 12, 2, 1400099987 ]
def get_stream_credentials(username, password):
    """
    Uses the `/user/uuid` endpoint, taking a valid username and password as
    parameters. It returns all the necessary credentials to use the API and
    the upload endpoints.

    e.g. username="bej48snvvthy", password=""
    """
    endpoint = KICKFLIP_API_URL + '/user/uuid/'
    payload = {'username': username, 'password': password}
    response = kickflip_session.post(endpoint, payload)
    return response.json()
Kickflip/python-kickflip
[ 12, 12, 12, 2, 1400099987 ]
def search_by_keyword(keyword="", uuid=""):
    """
    Uses the `/search` endpoint, taking a user uuid and a keyword.
    If you specify no `uuid`, the search will not show private streams?
    If the keyword is empty, it will return all the streams from the app.

    e.g. uuid="e9c3d27e-406b-4f4a-9b87-6d3460c60ca6", keyword=""

    reply:
    {
        u'total_items': 3,
        u'next_page_available': False,
        u'success': True,
        u'page_number': 1,
        u'streams': [...],
        u'results_per_page': 25
    }
    """
    endpoint = KICKFLIP_API_URL + '/search/'
    payload = {'uuid': uuid, 'keyword': keyword}
    response = kickflip_session.post(endpoint, payload)
    return response.json()
Kickflip/python-kickflip
[ 12, 12, 12, 2, 1400099987 ]
def process(self, event):
    """
    event.event_type
        'modified' | 'created' | 'moved' | 'deleted'
    event.is_directory
        True | False
    event.src_path
        path/to/observed/file
    """
    # Process the file there
    print(event.src_path, event.event_type)  # print for debug
    if '.m3u8' not in event.src_path:
        upload_file(event.src_path)
Kickflip/python-kickflip
[ 12, 12, 12, 2, 1400099987 ]
def on_created(self, event):
    self.process(event)
Kickflip/python-kickflip
[ 12, 12, 12, 2, 1400099987 ]
def upload_file(file_path):
    global AWS_ACCESS_KEY
    global AWS_SECRET_ACCESS_KEY

    head, tail = os.path.split(file_path)

    bucket = None
    s3 = boto.connect_s3(AWS_ACCESS_KEY, AWS_SECRET_ACCESS_KEY)
    bucket = s3.get_bucket(KICKFLIP_APP_NAME)  # , validate=False)

    k = Key(bucket)
    head, tail = os.path.split(file_path)
    k.key = KICKFLIP_USER_NAME + "/" + tail
    k.set_contents_from_filename(file_path)
    k.set_acl('public-read')

    if '.m3u8' in file_path:
        print(k.generate_url(expires_in=300))

    return k
Kickflip/python-kickflip
[ 12, 12, 12, 2, 1400099987 ]
def sample_get_data_source():
    # Create a client
    client = bigquery_datatransfer_v1.DataTransferServiceClient()

    # Initialize request argument(s)
    request = bigquery_datatransfer_v1.GetDataSourceRequest(
        name="name_value",
    )

    # Make the request
    response = client.get_data_source(request=request)

    # Handle the response
    print(response)
googleapis/python-bigquery-datatransfer
[ 72, 28, 72, 6, 1575936546 ]
def getAvgGradient(w, X, y, L, K):
    [N, D] = X.shape
    W01, b1, W12, b2, W23, b3 = parseParams(w, D, K)
jihunhamm/Crowd-ML
[ 16, 10, 16, 5, 1463416723 ]
def predict(w, X, K):
    N, D = X.shape
    W01, b1, W12, b2, W23, b3 = parseParams(w, D, K)
jihunhamm/Crowd-ML
[ 16, 10, 16, 5, 1463416723 ]
def parseParams(w, D, K):
    cnt = 0
    W01 = w[:D*nh].reshape((D, nh))
    cnt += D*nh
    b1 = w[cnt:cnt+nh].reshape((1, nh))
    cnt += nh
    W12 = w[cnt:cnt+nh*nh].reshape((nh, nh))
    cnt += nh*nh
    b2 = w[cnt:cnt+nh].reshape((1, nh))
    cnt += nh
    W23 = w[cnt:cnt+nh*K].reshape((nh, K))
    cnt += nh*K
    b3 = w[cnt:cnt+K].reshape((1, K))
    cnt += K
    if cnt != w.size:
        print('Error: wrong param size')
        exit()
jihunhamm/Crowd-ML
[ 16, 10, 16, 5, 1463416723 ]
def init(D, K):
    d = (D+1)*nh + (nh+1)*nh + (nh+1)*K
    w = 1.e-1 * np.random.normal(size=(d,))
    # w = np.zeros((d,))
    return w
jihunhamm/Crowd-ML
[ 16, 10, 16, 5, 1463416723 ]
def loss(w, X, y, L, K):
    _, l = getAvgGradient(w, X, y, L, K)
    return l
jihunhamm/Crowd-ML
[ 16, 10, 16, 5, 1463416723 ]