Dataset schema — function: string (lengths 11 to 56k); repo_name: string (lengths 5 to 60); features: sequence
def _compute_mandate_assembly_count(self):
    """
    count the number of assemblies linked to the current partner
    count the number of mandates linked to the assemblies of the current partner
    """
    for partner in self:
        assemblies = partner._get_assemblies()
        partner.ext_assembly_count = len(assemblies)
        partner.ext_mandate_count = len(assemblies._get_mandates())
mozaik-association/mozaik
[ 28, 20, 28, 4, 1421746811 ]
def __init__(self, parent=None, name=None, size=None, min_size=None, max_size=None,
             fixed_size=None, margins=None, padding=None, helptext=None, position=None,
             style=None, hexpand=None, vexpand=None, font=None, base_color=None,
             background_color=None, foreground_color=None, selection_color=None,
             border_color=None, outline_color=None, border_size=None, outline_size=None,
             position_technique=None, is_focusable=None, comment=None, scale_start=None,
             scale_end=None, step_length=None, marker_length=None, orientation=None):
    self.real_widget = fifechan.Slider(scale_start or self.DEFAULT_SCALE_START,
                                       scale_end or self.DEFAULT_SCALE_END)
    self.orientation = self.DEFAULT_ORIENTATION
    self.step_length = self.DEFAULT_STEP_LENGTH
    self.marker_length = self.DEFAULT_MARKER_LENGTH

    super(Slider, self).__init__(parent=parent, name=name, size=size, min_size=min_size,
                                 max_size=max_size, fixed_size=fixed_size, margins=margins,
                                 padding=padding, helptext=helptext, position=position,
                                 style=style, hexpand=hexpand, vexpand=vexpand, font=font,
                                 base_color=base_color, background_color=background_color,
                                 foreground_color=foreground_color,
                                 selection_color=selection_color, border_color=border_color,
                                 outline_color=outline_color, border_size=border_size,
                                 outline_size=outline_size,
                                 position_technique=position_technique,
                                 is_focusable=is_focusable, comment=comment)

    if orientation is not None:
        self.orientation = orientation
    if scale_start is not None:
        self.scale_start = scale_start
    if scale_end is not None:
        self.scale_end = scale_end
    if step_length is not None:
        self.step_length = step_length
    if marker_length is not None:
        self.marker_length = marker_length

    self.accepts_data = True
    self._realSetData = self._setValue
    self._realGetData = self._getValue
fifengine/fifengine
[ 528, 74, 528, 44, 1363191556 ]
def _setScale(self, start, end):
    """setScale(self, double scaleStart, double scaleEnd)"""
    if type(start) != float:
        raise RuntimeError("Slider expects float for start scale")
    if type(end) != float:
        raise RuntimeError("Slider expects float for end scale")
    self.real_widget.setScale(start, end)
fifengine/fifengine
[ 528, 74, 528, 44, 1363191556 ]
def _setScaleStart(self, start):
    """setScaleStart(self, double scaleStart)"""
    if type(start) != float:
        raise RuntimeError("Slider expects float for start scale")
    self.real_widget.setScaleStart(start)
fifengine/fifengine
[ 528, 74, 528, 44, 1363191556 ]
def _getScaleEnd(self):
    """getScaleEnd(self) -> double"""
    return self.real_widget.getScaleEnd()
fifengine/fifengine
[ 528, 74, 528, 44, 1363191556 ]
def _getValue(self):
    """getValue(self) -> double"""
    return self.real_widget.getValue()
fifengine/fifengine
[ 528, 74, 528, 44, 1363191556 ]
def _setMarkerLength(self, length):
    """setMarkerLength(self, int length)"""
    if type(length) != int:
        raise RuntimeError("Slider only accepts int for Marker length")
    self.real_widget.setMarkerLength(length)
fifengine/fifengine
[ 528, 74, 528, 44, 1363191556 ]
def _setOrientation(self, orientation):
    """setOrientation(self, Orientation orientation)"""
    self.real_widget.setOrientation(orientation)
fifengine/fifengine
[ 528, 74, 528, 44, 1363191556 ]
def _setStepLength(self, length):
    """setStepLength(self, double length)"""
    if type(length) != float:
        raise RuntimeError("Slider only accepts floats for step length")
    self.real_widget.setStepLength(length)
fifengine/fifengine
[ 528, 74, 528, 44, 1363191556 ]
def __init__(self, file, **settings):
    self.file = file
    self.settings = {"ifc_class": None}
    for key, value in settings.items():
        self.settings[key] = value
IfcOpenShell/IfcOpenShell
[ 1191, 546, 1191, 377, 1439197394 ]
def maybe_print(msg):
    if VERBOSE:
        print(msg)
isb-cgc/ISB-CGC-Webapp
[ 12, 9, 12, 7, 1443114166 ]
def main():
    global VERBOSE
    args = parse_args()
    oauth_flow_args = [args.noauth_local_webserver] if args.noauth_local_webserver else []
    VERBOSE = args.verbose
    maybe_print('--verbose: printing extra information')

    storage = Storage(args.storage_file)
    credentials = get_credentials(storage, oauth_flow_args)
    maybe_print('credentials stored in ' + args.storage_file)
    maybe_print('access_token: ' + credentials.access_token)
    maybe_print('refresh_token: ' + credentials.refresh_token)
isb-cgc/ISB-CGC-Webapp
[ 12, 9, 12, 7, 1443114166 ]
def __init__(self, index_name, batch_size=500):
    self.index_name = index_name
    self.buffer = []
    self.batch_size = batch_size
    self.total_size = 0
    self.connected = False
    self.client = None
commonsearch/cosr-back
[ 121, 25, 121, 42, 1455684257 ]
def index(self, _id, hit):
    """ Queue one document for indexing. """
    if not self.connected:
        self.connect()

    # https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html
    self.buffer.append('{"index":{"_id":"%s"}}\n%s\n' % (
        _id,
        json.dumps(hit)  # pylint: disable=no-member
    ))

    if len(self.buffer) >= self.batch_size:
        self.flush()
commonsearch/cosr-back
[ 121, 25, 121, 42, 1455684257 ]
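Each index() call appends an action line plus a source line in Elasticsearch's newline-delimited bulk format, which bulk_index() later POSTs in a single request. A minimal standalone sketch of what the buffer ends up holding (the document bodies here are made up for illustration):

import json

# Hypothetical documents, for illustration only.
docs = {"1": {"title": "a"}, "2": {"title": "b"}}

# Mirrors what index() appends to self.buffer for each call.
buffer = [
    '{"index":{"_id":"%s"}}\n%s\n' % (_id, json.dumps(hit))
    for _id, hit in docs.items()
]

# Joined together, this is the NDJSON body POSTed to /<index_name>/page/_bulk:
# one action line, then one source line, per document.
print("".join(buffer))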
def refresh(self):
    """ Sends a "refresh" to the ES index, forcing the actual indexing
        of what was sent up until now """
    if not self.connected:
        return
    if config["ENV"] not in ("local", "ci"):
        raise Exception("refresh() not allowed in env %s" % config["ENV"])
    self.indices().refresh(index=self.index_name)
commonsearch/cosr-back
[ 121, 25, 121, 42, 1455684257 ]
def bulk_index(self):
    """ Indexes the current buffer to Elasticsearch, bypassing the bulk()
        helper for performance """
    connection = self.client.transport.get_connection()
    bulk_url = "/%s/page/_bulk" % self.index_name
    body = "".join(self.buffer)

    # TODO retries
    # status, headers, data
    status, _, _ = connection.perform_request("POST", bulk_url, body=body)

    if status != 200:
        raise Exception("Elasticsearch returned status=%s" % status)

    # TODO: look for errors there?
    # parsed = json.loads(data)
commonsearch/cosr-back
[ 121, 25, 121, 42, 1455684257 ]
def validate_config(config):
    """Validate that the configuration is valid, throws if it isn't."""
    if config.get(CONF_MIN) >= config.get(CONF_MAX):
        raise vol.Invalid(f"'{CONF_MAX}' must be > '{CONF_MIN}'")
    return config
home-assistant/home-assistant
[ 58698, 22318, 58698, 2794, 1379402988 ]
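A minimal sketch of how this voluptuous validator behaves in isolation (the literal key names "min" and "max" below are assumptions made for the sketch):

import voluptuous as vol

CONF_MIN, CONF_MAX = "min", "max"  # assumed key names

def validate_config(config):
    """Validate that the configuration is valid, throws if it isn't."""
    if config.get(CONF_MIN) >= config.get(CONF_MAX):
        raise vol.Invalid(f"'{CONF_MAX}' must be > '{CONF_MIN}'")
    return config

print(validate_config({"min": 1, "max": 10}))  # passes, returns the config
# validate_config({"min": 5, "max": 5})        # raises vol.Invalid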
def __init__(self, hass, config, config_entry, discovery_data):
    """Initialize the MQTT Number."""
    self._config = config
    self._optimistic = False
    self._sub_state = None
    self._current_number = None

    NumberEntity.__init__(self)
    MqttEntity.__init__(self, hass, config, config_entry, discovery_data)
home-assistant/home-assistant
[ 58698, 22318, 58698, 2794, 1379402988 ]
def config_schema():
    """Return the config schema."""
    return DISCOVERY_SCHEMA
home-assistant/home-assistant
[ 58698, 22318, 58698, 2794, 1379402988 ]
def message_received(msg):
    """Handle new MQTT messages."""
    payload = self._templates[CONF_VALUE_TEMPLATE](msg.payload)
    try:
        if payload == self._config[CONF_PAYLOAD_RESET]:
            num_value = None
        elif payload.isnumeric():
            num_value = int(payload)
        else:
            num_value = float(payload)
    except ValueError:
        _LOGGER.warning("Payload '%s' is not a Number", msg.payload)
        return

    if num_value is not None and (
        num_value < self.min_value or num_value > self.max_value
    ):
        _LOGGER.error(
            "Invalid value for %s: %s (range %s - %s)",
            self.entity_id,
            num_value,
            self.min_value,
            self.max_value,
        )
        return

    self._current_number = num_value
    self.async_write_ha_state()
home-assistant/home-assistant
[ 58698, 22318, 58698, 2794, 1379402988 ]
def min_value(self) -> float:
    """Return the minimum value."""
    return self._config[CONF_MIN]
home-assistant/home-assistant
[ 58698, 22318, 58698, 2794, 1379402988 ]
def max_value(self) -> float:
    """Return the maximum value."""
    return self._config[CONF_MAX]
home-assistant/home-assistant
[ 58698, 22318, 58698, 2794, 1379402988 ]
def step(self) -> float:
    """Return the increment/decrement step."""
    return self._config[CONF_STEP]
home-assistant/home-assistant
[ 58698, 22318, 58698, 2794, 1379402988 ]
def unit_of_measurement(self) -> str | None:
    """Return the unit of measurement."""
    return self._config.get(CONF_UNIT_OF_MEASUREMENT)
home-assistant/home-assistant
[ 58698, 22318, 58698, 2794, 1379402988 ]
def value(self):
    """Return the current value."""
    return self._current_number
home-assistant/home-assistant
[ 58698, 22318, 58698, 2794, 1379402988 ]
def get_square_for_point(self, x, y) -> Optional[str]:
    pass
c3nav/c3nav
[ 137, 31, 137, 17, 1461327231 ]
def get_squares_for_bounds(self, bounds) -> Optional[str]:
    pass
c3nav/c3nav
[ 137, 31, 137, 17, 1461327231 ]
def __init__(self, rows, cols):
    rows = tuple(float(y) for y in rows)
    cols = tuple(float(x) for x in cols)
    self.rows = tuple(sorted(rows))
    self.cols = tuple(sorted(cols))

    if self.rows == rows:
        self.invert_y = False
    elif self.rows == tuple(reversed(rows)):
        self.invert_y = True
    else:
        raise ValueError('row coordinates are not ordered')

    if self.cols == cols:
        self.invert_x = False
    elif self.cols == tuple(reversed(cols)):
        self.invert_x = True
    else:
        raise ValueError('column coordinates are not ordered')
c3nav/c3nav
[ 137, 31, 137, 17, 1461327231 ]
def get_squares_for_bounds(self, bounds):
    minx, miny, maxx, maxy = bounds
    if self.invert_x:
        minx, maxx = maxx, minx
    if self.invert_y:
        miny, maxy = maxy, miny

    min_square = self.get_square_for_point(minx, miny)
    max_square = self.get_square_for_point(maxx, maxy)

    if not min_square or not max_square:
        return None

    if min_square == max_square:
        return min_square

    return '%s-%s' % (min_square, max_square)
c3nav/c3nav
[ 137, 31, 137, 17, 1461327231 ]
def get_square_for_point(self, x, y):
    return None
c3nav/c3nav
[ 137, 31, 137, 17, 1461327231 ]
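The implementation above deliberately resolves no squares. For a sense of what a working get_square_for_point could do, here is a standalone sketch; the 'A1'-style naming and the bisect-based lookup are assumptions for illustration, not c3nav's actual scheme:

from bisect import bisect

def square_for_point(cols, rows, x, y, invert_x=False, invert_y=False):
    # Locate the column/row interval containing the point; points outside
    # the outermost grid lines belong to no square.
    col = bisect(cols, float(x)) - 1
    row = bisect(rows, float(y)) - 1
    if not (0 <= col < len(cols) - 1 and 0 <= row < len(rows) - 1):
        return None
    if invert_x:
        col = len(cols) - 2 - col
    if invert_y:
        row = len(rows) - 2 - row
    # Label columns A, B, C, ... and rows 1, 2, 3, ...
    return '%s%d' % (chr(ord('A') + col), row + 1)

# A 2x2 grid with lines at 0, 10, 20 on both axes:
assert square_for_point((0.0, 10.0, 20.0), (0.0, 10.0, 20.0), 5, 15) == 'A2'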
def main():
    seed_everything(4321)

    parser = ArgumentParser(add_help=False)
    parser = Trainer.add_argparse_args(parser)
    parser.add_argument("--trainer_method", default="fit")
    parser.add_argument("--tmpdir")
    parser.add_argument("--workdir")
    parser.set_defaults(gpus=2)
    parser.set_defaults(accelerator="ddp")
    args = parser.parse_args()

    dm = ClassifDataModule()
    model = ClassificationModel()
    trainer = Trainer.from_argparse_args(args)

    if args.trainer_method == "fit":
        trainer.fit(model, datamodule=dm)
        result = None
    elif args.trainer_method == "test":
        result = trainer.test(model, datamodule=dm)
    elif args.trainer_method == "fit_test":
        trainer.fit(model, datamodule=dm)
        result = trainer.test(model, datamodule=dm)
    else:
        raise ValueError(f"Unsupported: {args.trainer_method}")

    result_ext = {"status": "complete", "method": args.trainer_method, "result": result}
    file_path = os.path.join(args.tmpdir, "ddp.result")
    torch.save(result_ext, file_path)
williamFalcon/pytorch-lightning
[ 21876, 2764, 21876, 665, 1553993157 ]
def from_service_account_info(cls, info: dict, *args, **kwargs):
    """Creates an instance of this client using the provided credentials info.

    Args:
        info (dict): The service account private key info.
        args: Additional arguments to pass to the constructor.
        kwargs: Additional arguments to pass to the constructor.

    Returns:
        BigQueryWriteAsyncClient: The constructed client.
    """
    return BigQueryWriteClient.from_service_account_info.__func__(BigQueryWriteAsyncClient, info, *args, **kwargs)  # type: ignore
googleapis/python-bigquery-storage
[ 73, 35, 73, 15, 1575936548 ]
def from_service_account_file(cls, filename: str, *args, **kwargs):
    """Creates an instance of this client using the provided credentials file.

    Args:
        filename (str): The path to the service account private key json file.
        args: Additional arguments to pass to the constructor.
        kwargs: Additional arguments to pass to the constructor.

    Returns:
        BigQueryWriteAsyncClient: The constructed client.
    """
    return BigQueryWriteClient.from_service_account_file.__func__(BigQueryWriteAsyncClient, filename, *args, **kwargs)  # type: ignore
googleapis/python-bigquery-storage
[ 73, 35, 73, 15, 1575936548 ]
def get_mtls_endpoint_and_cert_source(
    cls, client_options: Optional[ClientOptions] = None
googleapis/python-bigquery-storage
[ 73, 35, 73, 15, 1575936548 ]
def transport(self) -> BigQueryWriteTransport:
    """Returns the transport used by the client instance.

    Returns:
        BigQueryWriteTransport: The transport used by the client instance.
    """
    return self._client.transport
googleapis/python-bigquery-storage
[ 73, 35, 73, 15, 1575936548 ]
def __init__(
    self,
    *,
    credentials: ga_credentials.Credentials = None,
    transport: Union[str, BigQueryWriteTransport] = "grpc_asyncio",
    client_options: ClientOptions = None,
    client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
googleapis/python-bigquery-storage
[ 73, 35, 73, 15, 1575936548 ]
def sample_create_write_stream():
    # Create a client
    client = bigquery_storage_v1beta2.BigQueryWriteClient()

    # Initialize request argument(s)
    request = bigquery_storage_v1beta2.CreateWriteStreamRequest(
        parent="parent_value",
    )

    # Make the request
    response = client.create_write_stream(request=request)

    # Handle the response
    print(response)
googleapis/python-bigquery-storage
[ 73, 35, 73, 15, 1575936548 ]
def append_rows(
    self,
    requests: AsyncIterator[storage.AppendRowsRequest] = None,
    *,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: float = None,
    metadata: Sequence[Tuple[str, str]] = (),
googleapis/python-bigquery-storage
[ 73, 35, 73, 15, 1575936548 ]
def sample_append_rows():
    # Create a client
    client = bigquery_storage_v1beta2.BigQueryWriteClient()

    # Initialize request argument(s)
    request = bigquery_storage_v1beta2.AppendRowsRequest(
        write_stream="write_stream_value",
    )

    # This method expects an iterator which contains
    # 'bigquery_storage_v1beta2.AppendRowsRequest' objects
    # Here we create a generator that yields a single `request` for
    # demonstrative purposes.
    requests = [request]

    def request_generator():
        for request in requests:
            yield request

    # Make the request
    stream = client.append_rows(requests=request_generator())

    # Handle the response
    for response in stream:
        print(response)
googleapis/python-bigquery-storage
[ 73, 35, 73, 15, 1575936548 ]
def sample_get_write_stream():
    # Create a client
    client = bigquery_storage_v1beta2.BigQueryWriteClient()

    # Initialize request argument(s)
    request = bigquery_storage_v1beta2.GetWriteStreamRequest(
        name="name_value",
    )

    # Make the request
    response = client.get_write_stream(request=request)

    # Handle the response
    print(response)
googleapis/python-bigquery-storage
[ 73, 35, 73, 15, 1575936548 ]
def sample_finalize_write_stream():
    # Create a client
    client = bigquery_storage_v1beta2.BigQueryWriteClient()

    # Initialize request argument(s)
    request = bigquery_storage_v1beta2.FinalizeWriteStreamRequest(
        name="name_value",
    )

    # Make the request
    response = client.finalize_write_stream(request=request)

    # Handle the response
    print(response)
googleapis/python-bigquery-storage
[ 73, 35, 73, 15, 1575936548 ]
def sample_batch_commit_write_streams():
    # Create a client
    client = bigquery_storage_v1beta2.BigQueryWriteClient()

    # Initialize request argument(s)
    request = bigquery_storage_v1beta2.BatchCommitWriteStreamsRequest(
        parent="parent_value",
        write_streams=['write_streams_value_1', 'write_streams_value_2'],
    )

    # Make the request
    response = client.batch_commit_write_streams(request=request)

    # Handle the response
    print(response)
googleapis/python-bigquery-storage
[ 73, 35, 73, 15, 1575936548 ]
def sample_flush_rows():
    # Create a client
    client = bigquery_storage_v1beta2.BigQueryWriteClient()

    # Initialize request argument(s)
    request = bigquery_storage_v1beta2.FlushRowsRequest(
        write_stream="write_stream_value",
    )

    # Make the request
    response = client.flush_rows(request=request)

    # Handle the response
    print(response)
googleapis/python-bigquery-storage
[ 73, 35, 73, 15, 1575936548 ]
def test_data_iterator():
    script = os.path.join(td.PYTHON_DEMO_DIR, 'quantile_data_iterator.py')
    cmd = ['python', script]
    subprocess.check_call(cmd)
dmlc/xgboost
[ 23850, 8553, 23850, 364, 1391707683 ]
def test_categorical_demo():
    script = os.path.join(td.PYTHON_DEMO_DIR, 'categorical.py')
    cmd = ['python', script]
    subprocess.check_call(cmd)
dmlc/xgboost
[ 23850, 8553, 23850, 364, 1391707683 ]
def register(request, backend='default', template_name='registration/registration_form.html'):
    backend = get_backend(backend)

    # determine if registration is currently allowed. the ``request`` object
    # is passed which can be used to selectively disallow registration based
    # on the user-agent
    if not backend.registration_allowed(request):
        return redirect(*backend.registration_closed_redirect(request))

    form_class = backend.get_registration_form_class(request)

    if request.method == 'POST':
        form = form_class(request.POST, request.FILES)
        if form.is_valid():
            user = backend.register(request, form)
            return redirect(backend.post_registration_redirect(request, user))
    else:
        form = form_class()

    return render(request, template_name, {'form': form})
chop-dbhi/biorepo-portal
[ 6, 7, 6, 90, 1442606924 ]
def verify(request, backend='default', template_name='registration/registration_verify.html', **kwargs):
    backend = get_backend(backend)
    profile = backend.get_profile(request, **kwargs)

    if profile:
        # check to see if moderation for this profile is required and whether
        # or not it is a verified account.
        if backend.moderation_required(request, profile):
            moderation_required = True
            backend.verify(request, profile, **kwargs)
        else:
            moderation_required = False
            # attempt to activate this user
            backend.activate(request, profile, **kwargs)
    else:
        moderation_required = None

    return render(request, template_name, {
        'profile': profile,
        'moderation_required': moderation_required,
    })
chop-dbhi/biorepo-portal
[ 6, 7, 6, 90, 1442606924 ]
def moderate(request, backend='default', template_name='registration/registration_moderate.html', **kwargs):
    backend = get_backend(backend)
    profile = backend.get_profile(request, **kwargs)
    form_class = backend.get_moderation_form_class(request)

    if request.method == 'POST':
        form = form_class(request.POST)
        if form.is_valid():
            backend.moderate(request, form, profile, **kwargs)
            return redirect(backend.post_moderation_redirect(request, profile))
    else:
        form = form_class()

    return render(request, template_name, {
        'form': form,
        'profile': profile,
    })
chop-dbhi/biorepo-portal
[ 6, 7, 6, 90, 1442606924 ]
def options(opt):
    opt.add_option('--orch-config', action='store', default='orch.cfg',
                   help='Give an orchestration configuration file.')
    opt.add_option('--orch-start', action='store', default='start',
                   help='Set the section to start the orchestration')
hwaf/hwaf
[ 10, 1, 10, 14, 1355159308 ]
def build(bld):
    import orch.build
    orch.build.build(bld)
hwaf/hwaf
[ 10, 1, 10, 14, 1355159308 ]
def __init__(self, **pkgcfg):
    self._config = pkgcfg
hwaf/hwaf
[ 10, 1, 10, 14, 1355159308 ]
def get(self, name, default=None):
    return self._config.get(name, default)
hwaf/hwaf
[ 10, 1, 10, 14, 1355159308 ]
def depends_step(self, step):
    '''
    Return a list of steps that this step depends on
    '''
    d = self._config.get('depends')
    if not d:
        return list()
    ds = [x[1] for x in [s.split(':') for s in string2list(d)] if x[0] == step]
    return ds
hwaf/hwaf
[ 10, 1, 10, 14, 1355159308 ]
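The "depends" configuration value is a list of mystep:otherpackage_otherstep items; depends_step() keeps the right-hand sides whose left-hand side matches the given step. A standalone sketch of that parsing (this local string2list, splitting on commas and whitespace, is an assumption about worch's real helper):

def string2list(value):
    # Assumed behavior: split a config string on commas and whitespace.
    return [s for s in value.replace(',', ' ').split() if s]

depends = "prepare:gcc_install prepare:cmake_install build:zlib_build"
pairs = [s.split(':') for s in string2list(depends)]
print([x[1] for x in pairs if x[0] == 'prepare'])
# ['gcc_install', 'cmake_install']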
def dependencies(self):
    '''
    Return all dependencies set via "depends" configuration items

    return list of tuples: (mystep, package, package_step)
    eg: ('prepare', 'gcc', 'install')
    '''
    ret = list()
    try:
        deps = getattr(self, 'depends', None)
    except KeyError:
        return list()
    for dep in string2list(deps):
        mystep, other = dep.split(':')
        pkg, pkg_step = other.split('_', 1)
        ret.append((mystep, pkg, pkg_step))
    return ret
hwaf/hwaf
[ 10, 1, 10, 14, 1355159308 ]
def worch_hello(self):
    'Just testing'
    print("%s" % self.worch.format(
        'Hi from worch, my name is "{package}/{version}" and I am using "{dumpenv_cmd}" with extra {extra}',
        extra='spice'))
    print('My bld.env: %s' % (self.bld.env.keys(),))
    print('My all_envs: %s' % (sorted(self.bld.all_envs.keys()),))
    print('My env: %s' % (self.env.keys(),))
    print('My groups: %s' % (self.env['orch_group_dict'].keys(),))
    print('My packages: %s' % (self.env['orch_package_list'],))
hwaf/hwaf
[ 10, 1, 10, 14, 1355159308 ]
def step(self, name, rule, **kwds):
    '''
    Make a worch installation step.

    This invokes the build context on the rule with the following augmentations:

    - the given step name is prefixed with the package name
    - if the rule is a string (scriptlet) then the worch exec_command is used
    - successful execution of the rule leads to a worch control file being produced.
    '''
    step_name = '%s_%s' % (self.worch.package, name)

    # append control file as an additional output
    target = string2list(kwds.get('target', ''))
    if not isinstance(target, list):
        target = [target]
    cn = self.control_node(name)
    if cn not in target:
        target.append(cn)
    kwds['target'] = target
hwaf/hwaf
[ 10, 1, 10, 14, 1355159308 ]
def runit(t):
    rc = rulefun(t)
    if not rc:
        msg.debug('orch: successfully ran %s' % step_name)
        cn.write(time.asctime(time.localtime()) + '\n')
    return rc
hwaf/hwaf
[ 10, 1, 10, 14, 1355159308 ]
def control_node(self, step, package=None):
    '''
    Return a node for the control file given step of this package or
    optionally another package.
    '''
    if not package:
        package = self.worch.package
    filename = '%s_%s' % (package, step)
    path = self.worch.format('{control_dir}/{filename}', filename=filename)
    return self.path.find_or_declare(path)
hwaf/hwaf
[ 10, 1, 10, 14, 1355159308 ]
def make_node(self, path, parent_node=None):
    if not parent_node:
        if path.startswith('/'):
            parent_node = self.bld.root
        else:
            parent_node = self.bld.bldnode
    return parent_node.make_node(path)
hwaf/hwaf
[ 10, 1, 10, 14, 1355159308 ]
def worch_package(ctx, worch_config, *args, **kw):
    # transfer waf-specific keywords explicitly
    kw['name'] = worch_config['package']
    kw['features'] = ' '.join(string2list(worch_config['features']))
    kw['use'] = worch_config.get('use')

    # make the TaskGen object for the package
    worch = WorchConfig(**worch_config)
    tgen = ctx(*args, worch=worch, **kw)
    tgen.env = ctx.all_envs[worch.package]
    tgen.env.env = tgen.env.munged_env
    msg.debug('orch: package "%s" with features: %s' %
              (kw['name'], ', '.join(kw['features'].split())))
    return tgen
hwaf/hwaf
[ 10, 1, 10, 14, 1355159308 ]
def test_help_noargs(self):
    stdout = self.RunCommand('help', return_stdout=True)
    self.assertIn(b'Available commands', stdout)
endlessm/chromium-browser
[ 21, 16, 21, 3, 1435959644 ]
def test_help_invalid_subcommand_arg(self):
    stdout = self.RunCommand('help', ['web', 'asdf'], return_stdout=True)
    self.assertIn(b'help about one of the subcommands', stdout)
endlessm/chromium-browser
[ 21, 16, 21, 3, 1435959644 ]
def test_help_command_arg(self):
    stdout = self.RunCommand('help', ['ls'], return_stdout=True)
    self.assertIn(b'ls - List providers, buckets', stdout)
endlessm/chromium-browser
[ 21, 16, 21, 3, 1435959644 ]
def test_subcommand_help_arg(self):
    stdout = self.RunCommand('web', ['set', '--help'], return_stdout=True)
    self.assertIn(b'gsutil web set', stdout)
    self.assertNotIn(b'gsutil web get', stdout)
endlessm/chromium-browser
[ 21, 16, 21, 3, 1435959644 ]
def test_help_wrong_num_args(self):
    stderr = self.RunGsUtil(['cp'], return_stderr=True, expected_status=1)
    self.assertIn('Usage:', stderr)
endlessm/chromium-browser
[ 21, 16, 21, 3, 1435959644 ]
def bin_spike(x, l):
    """
    l is the number of points used for comparison, thus l=2 means that each
    point will be compared only against the previous and following
    measurements. l=2 is probably not a good choice, too small.

    Maybe use pstsd instead?

    Dummy way to avoid warnings when x[ini:fin] are all masked.
    Improve this in the future.
    """
    assert x.ndim == 1, "I'm not ready to deal with multidimensional x"
    assert l % 2 == 0, "l must be an even integer"

    N = len(x)
    bin = ma.masked_all(N)
    # bin_std = ma.masked_all(N)

    half_window = int(l / 2)
    idx = (i for i in range(half_window, N - half_window) if np.isfinite(x[i]))
    for i in idx:
        ini = max(0, i - half_window)
        fin = min(N, i + half_window)
        # At least 3 valid points
        if ma.compressed(x[ini:fin]).size >= 3:
            bin[i] = x[i] - ma.median(x[ini:fin])
            # bin_std[i] = (np.append(x[ini:i], x[i+1:fin+1])).std()
            bin[i] /= (np.append(x[ini:i], x[i+1:fin+1])).std()

    return bin
castelao/CoTeDe
[ 36, 15, 36, 9, 1369489700 ]
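To see what bin_spike() measures, feed it a smooth series with one outlier: the spiked point gets a large deviation from the window median, normalized by the neighbours' spread. A usage sketch (the data is made up; bin_spike is the function defined above):

import numpy as np
from numpy import ma

# A smooth profile with a single spike at index 5.
x = ma.masked_invalid([1.0, 1.1, 1.2, 1.3, 1.4, 9.0, 1.6, 1.7, 1.8, 1.9])

deviation = bin_spike(x, 4)  # each point compared within a 4-point window
print(deviation)
# Only index 5 shows a large normalized deviation; a QC test would
# flag values above some threshold of abs(deviation).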
def __init__(self, data, varname, cfg, autoflag=True):
    self.data = data
    self.varname = varname
    self.cfg = cfg

    self.set_features()
    if autoflag:
        self.test()
castelao/CoTeDe
[ 36, 15, 36, 9, 1369489700 ]
def set_features(self):
    self.features = {
        'bin_spike': bin_spike(self.data[self.varname], self.cfg['l']),
    }
castelao/CoTeDe
[ 36, 15, 36, 9, 1369489700 ]
def get_primary_properties(cls, domain):
    """
    Get slugs and human-friendly names for the properties that are
    available for filtering and/or displayed by default in the report,
    without needing to click "See More".
    """
    if domain_has_privilege(domain, privileges.APP_USER_PROFILES):
        user_data_label = _("profile or user data")
    else:
        user_data_label = _("user data")
    return {
        "username": _("username"),
        ROLE_FIELD: _("role"),
        "email": _("email"),
        DOMAIN_FIELD: _("project"),
        "is_active": _("is active"),
        "language": _("language"),
        PHONE_NUMBERS_FIELD: _("phone numbers"),
        LOCATION_FIELD: _("primary location"),
        "user_data": user_data_label,
        TWO_FACTOR_FIELD: _("two factor authentication disabled"),
        ASSIGNED_LOCATIONS_FIELD: _("assigned locations"),
    }
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def headers(self):
    h = [
        DataTablesColumn(_("Affected User"), sortable=False),
        DataTablesColumn(_("Modified by User"), sortable=False),
        DataTablesColumn(_("Action"), prop_name='action'),
        DataTablesColumn(_("Via"), prop_name='changed_via'),
        DataTablesColumn(_("Changes"), sortable=False),
        DataTablesColumn(_("Change Message"), sortable=False),
        DataTablesColumn(_("Timestamp"), prop_name='changed_at'),
    ]
    return DataTablesHeader(*h)
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def total_records(self):
    return self._get_queryset().count()
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def _get_queryset(self):
    user_slugs = self.request.GET.getlist(EMWF.slug)
    user_ids = self._get_user_ids(user_slugs)
    # return empty queryset if no matching users were found
    if user_slugs and not user_ids:
        return UserHistory.objects.none()

    changed_by_user_slugs = self.request.GET.getlist(ChangedByUserFilter.slug)
    changed_by_user_ids = self._get_user_ids(changed_by_user_slugs)
    # return empty queryset if no matching users were found
    if changed_by_user_slugs and not changed_by_user_ids:
        return UserHistory.objects.none()

    user_property = self.request.GET.get('user_property')
    actions = self.request.GET.getlist('action')
    user_upload_record_id = self.request.GET.get('user_upload_record')
    query = self._build_query(user_ids, changed_by_user_ids, user_property,
                              actions, user_upload_record_id)
    return query
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def _get_users_es_query(self, slugs):
    return EnterpriseUserFilter.user_es_query(
        self.domain,
        slugs,
        self.request.couch_user,
    )
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def _for_domains(self):
    return BillingAccount.get_account_by_domain(self.domain).get_domains()
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def _get_property_filters(user_property):
    if user_property in CHANGE_MESSAGES_FIELDS:
        query_filters = Q(change_messages__has_key=user_property)
        # to include CommCareUser creation from UI where a location can be
        # assigned as a part of user creation which is tracked only under
        # "changes" and not "change messages"
        if user_property == LOCATION_FIELD:
            query_filters = query_filters | Q(changes__has_key='location_id')
    else:
        query_filters = Q(changes__has_key=user_property)
    return query_filters
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
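Because both change logs live in JSON fields, the location case needs an OR of two has_key lookups, which Q objects compose cleanly. A standalone sketch of the same shape (the constants below are placeholders, not commcare-hq's values):

from django.db.models import Q

LOCATION_FIELD = "location"                    # placeholder
CHANGE_MESSAGES_FIELDS = {"location", "role"}  # placeholder

user_property = LOCATION_FIELD
query_filters = Q(change_messages__has_key=user_property)
if user_property == LOCATION_FIELD:
    # match rows where either JSON column recorded the location change
    query_filters = query_filters | Q(changes__has_key='location_id')

# later applied as: UserHistory.objects.filter(query_filters)
print(query_filters)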
def rows(self):
    records = self._get_queryset().order_by(self.ordering)[
        self.pagination.start:self.pagination.start + self.pagination.count
    ]
    for record in records:
        yield self._user_history_row(record, self.domain, self.timezone)
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def ordering(self):
    by, direction = list(self.get_sorting_block()[0].items())[0]
    return '-' + by if direction == 'desc' else by
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def _get_location_name(self, location_id):
    from corehq.apps.locations.models import SQLLocation
    if not location_id:
        return None
    try:
        location_object = SQLLocation.objects.get(location_id=location_id)
    except ObjectDoesNotExist:
        return None
    return location_object.display_name
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def _html_list(self, changes):
    items = []
    if isinstance(changes, dict):
        for key, value in changes.items():
            if isinstance(value, dict):
                value = self._html_list(value)
            elif isinstance(value, list):
                value = format_html(", ".join(value))
            else:
                value = format_html(str(value))
            items.append("<li>{}: {}</li>".format(key, value))
    elif isinstance(changes, list):
        items = ["<li>{}</li>".format(format_html(change)) for change in changes]
    return mark_safe(f"<ul class='list-unstyled'>{''.join(items)}</ul>")
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def __init__(self, plotly_name="rangebreaks", parent_name="layout.xaxis", **kwargs): super(RangebreaksValidator, self).__init__( plotly_name=plotly_name, parent_name=parent_name, data_class_str=kwargs.pop("data_class_str", "Rangebreak"), data_docs=kwargs.pop( "data_docs", """ bounds Sets the lower and upper bounds of this axis rangebreak. Can be used with `pattern`. dvalue Sets the size of each `values` item. The default is one day in milliseconds. enabled Determines whether this axis rangebreak is enabled or disabled. Please note that `rangebreaks` only work for "date" axis type. name When used in a template, named items are created in the output figure in addition to any items the figure already has in this array. You can modify these items in the output figure by making your own item with `templateitemname` matching this `name` alongside your modifications (including `visible: false` or `enabled: false` to hide it). Has no effect outside of a template. pattern Determines a pattern on the time line that generates breaks. If *day of week* - days of the week in English e.g. 'Sunday' or `sun` (matching is case-insensitive and considers only the first three characters), as well as Sunday-based integers between 0 and 6. If "hour" - hour (24-hour clock) as decimal numbers between 0 and 24. for more info. Examples: - { pattern: 'day of week', bounds: [6, 1] } or simply { bounds: ['sat', 'mon'] } breaks from Saturday to Monday (i.e. skips the weekends). - { pattern: 'hour', bounds: [17, 8] } breaks from 5pm to 8am (i.e. skips non-work hours). templateitemname Used to refer to a named item in this array in the template. Named items from the template will be created even without a matching item in the input figure, but you can modify one by making an item with `templateitemname` matching its `name`, alongside your modifications (including `visible: false` or `enabled: false` to hide it). If there is no template or no matching item, this item will be hidden unless you explicitly show it with `visible: true`. values Sets the coordinate values corresponding to the rangebreaks. An alternative to `bounds`. Use `dvalue` to set the size of the values along the axis.
plotly/python-api
[ 13052, 2308, 13052, 1319, 1385013188 ]
def create(kernel): result = Tangible() result.template = "object/tangible/deed/event_perk/shared_fed_dub_2x10_honorguard_deed.iff" result.attribute_template_id = 2 result.stfName("event_perk","fed_dub_2x10_honorguard_deed_name")
anhstudios/swganh
[ 62, 37, 62, 37, 1297996365 ]
def create(kernel): result = Tangible() result.template = "object/tangible/container/drum/shared_pob_ship_loot_box.iff" result.attribute_template_id = -1 result.stfName("space/space_interaction","pob_loot")
anhstudios/swganh
[ 62, 37, 62, 37, 1297996365 ]
def create(kernel):
    result = Static()

    result.template = "object/static/naboo/shared_waterfall_naboo_falls_01.iff"
    result.attribute_template_id = -1
    result.stfName("obj_n", "unknown_object")

    # the builder must hand the configured object back to the caller
    return result
anhstudios/swganh
[ 62, 37, 62, 37, 1297996365 ]
def is_locked(self):
    return self.dxf.flags & Layer.LOCK > 0
lautr3k/RepRap-iTopie
[ 71, 37, 71, 2, 1412522934 ]
def unlock(self):
    self.dxf.flags = self.dxf.flags & Layer.UNLOCK
lautr3k/RepRap-iTopie
[ 71, 37, 71, 2, 1412522934 ]
def is_on(self):
    return not self.is_off()
lautr3k/RepRap-iTopie
[ 71, 37, 71, 2, 1412522934 ]
def off(self):
    self.dxf.color = -abs(self.dxf.color)
lautr3k/RepRap-iTopie
[ 71, 37, 71, 2, 1412522934 ]
def set_color(self, color):
    color = abs(color) if self.is_on() else -abs(color)
    self.dxf.color = color
lautr3k/RepRap-iTopie
[ 71, 37, 71, 2, 1412522934 ]
def new(cls, handle, dxfattribs=None, dxffactory=None):
    if dxfattribs is not None:
        pattern = dxfattribs.pop('pattern', [0.0])
    else:
        pattern = [0.0]
    entity = super(Linetype, cls).new(handle, dxfattribs, dxffactory)
    entity._setup_pattern(pattern)
    return entity
lautr3k/RepRap-iTopie
[ 71, 37, 71, 2, 1412522934 ]
def addTemplate(core):
    mobileTemplate = MobileTemplate()
ProjectSWGCore/NGECore2
[ 23, 70, 23, 56, 1372673790 ]
def extract_type_and_constructor(properties):
    constructor = properties['type']
    args_separated = constructor.split('(', 1)
    if len(args_separated) == 1:
        return constructor, constructor

    type_no_constructor = args_separated[0]
    return type_no_constructor, constructor
endlessm/chromium-browser
[ 21, 16, 21, 3, 1435959644 ]
def is_graph_type(type):
    return type == 'RunningGraph' or type == 'RunningHistogram'
endlessm/chromium-browser
[ 21, 16, 21, 3, 1435959644 ]
def __init__(self, properties, is_graph_description=False):
    if not is_graph_description:
        self.name = properties['name']
    self.type, self.constructor = extract_type_and_constructor(properties)
    self.extract_common(properties)

    if is_graph_type(self.type):
        description_properties = properties['description']
        description_properties['type'] = 'Text'
        self.description = OverlayWidget(description_properties, True)
endlessm/chromium-browser
[ 21, 16, 21, 3, 1435959644 ]
def is_negative_coord(coords, axis, widgets_so_far):
    if isinstance(coords[axis], unicode):
        coord_split = coords[axis].split('.')
        # The coordinate is in the form other_widget.edge.mode
        # We simply need to know if other_widget's coordinate is negative or not.
        return widgets_so_far[coord_split[0]].negative_alignment[axis]

    return coords[axis] < 0
endlessm/chromium-browser
[ 21, 16, 21, 3, 1435959644 ]
def get_offset_helper(widget, axis, smaller_coord_side):
    # Assume axis is X. This function returns two values:
    # - An offset where the bounding box is placed at,
    # - Whether this offset is for the left or right edge.
    #
    # The input coordinate (widget.coord[axis]) is either:
    #
    # - a number: in this case, the offset is that number, and its sign
    #   determines whether this refers to the left or right edge of the
    #   bounding box.
    # - other_widget.edge.mode: this has multiple possibilities:
    #   * edge=left, mode=align: the offset is other_widget.left, the edge is left.
    #   * edge=left, mode=adjacent: the offset is other_widget.left, the edge is right.
    #   * edge=right, mode=align: the offset is other_widget.right, the edge is right.
    #   * edge=right, mode=adjacent: the offset is other_widget.right, the edge is left.
    #
    # The case for the Y axis is similar, with the edge values being top or bottom.
    coord = widget.coords[axis]

    if not isinstance(coord, unicode):
        is_left = coord >= 0
        return coord, is_left

    coord_split = coord.split('.')
    is_left = coord_split[1] == smaller_coord_side
    is_align = coord_split[2] == 'align'

    other_widget_coords = 'mState.mOverlayWidgets[WidgetId::' + coord_split[0] + ']->coords'
    other_widget_coord_index = axis + (0 if is_left else 2)
    offset = other_widget_coords + '[' + str(other_widget_coord_index) + ']'

    return offset, is_left == is_align
endlessm/chromium-browser
[ 21, 16, 21, 3, 1435959644 ]
def get_offset_y(widget):
    return get_offset_helper(widget, 1, 'top')
endlessm/chromium-browser
[ 21, 16, 21, 3, 1435959644 ]
def generate_widget_init_helper(widget, is_graph_description=False):
    font_size = '0'

    # Common attributes
    color = [channel / 255.0 for channel in widget.color]
    offset_x, offset_x_is_left = get_offset_x(widget)
    offset_y, offset_y_is_top = get_offset_y(widget)

    if is_text_type(widget.type):
        # Attributes derived from text properties
        font_size = widget.font
        width = str(widget.length) + ' * kFontGlyphWidths[fontSize]'
        height = 'kFontGlyphHeights[fontSize]'
    else:
        # Attributes derived from graph properties
        width = str(widget.bar_width) + ' * static_cast<uint32_t>(widget->runningValues.size())'
        height = widget.height

    is_left_aligned = not widget.negative_alignment[0]
    is_top_aligned = not widget.negative_alignment[1]

    # We have offset_x, offset_y, width and height which together determine the
    # bounding box. If offset_x_is_left, the bounding box X would be in
    # [offset_x, offset_x + width], otherwise it would be in
    # [offset_x - width, offset_x]. Similarly for y. Since we use negative
    # values to mean aligned to the right side of the screen, we need to make
    # sure that:
    #
    # - if left aligned: offset_x - width is at minimum 1
    # - if right aligned: offset_x + width is at maximum -1
    #
    # We therefore have the following combinations for the X axis:
    #
    # - offset_x_is_left && is_left_aligned: [offset_x, offset_x + width]
    # - offset_x_is_left && !is_left_aligned: [offset_x, std::min(offset_x + width, -1)]
    # - !offset_x_is_left && is_left_aligned: [std::max(1, offset_x - width), offset_x]
    # - !offset_x_is_left && !is_left_aligned: [offset_x - width, offset_x]
    #
    # Similarly for y.
    coord0, coord2 = get_bounding_box_coords('offsetX', 'width', offset_x_is_left, is_left_aligned)
    coord1, coord3 = get_bounding_box_coords('offsetY', 'height', offset_y_is_top, is_top_aligned)

    return template_init_widget.format(
        subwidget='description.' if is_graph_description else '',
        offset_x=offset_x,
        offset_y=offset_y,
        width=width,
        height=height,
        type=widget.type,
        font_size=font_size,
        coord0=coord0,
        coord1=coord1,
        coord2=coord2,
        coord3=coord3,
        color_r=color[0],
        color_g=color[1],
        color_b=color[2],
        color_a=color[3])
endlessm/chromium-browser
[ 21, 16, 21, 3, 1435959644 ]
def main():
    if len(sys.argv) == 2 and sys.argv[1] == 'inputs':
        print(in_file)
        return
    if len(sys.argv) == 2 and sys.argv[1] == 'outputs':
        print(out_file)
        return

    with open(in_file) as fin:
        layout = json.loads(fin.read())

    # Read the layouts from the json file and determine alignment of widgets
    # (as they can refer to other widgets).
    overlay_widgets = {}
    for widget_properties in layout['widgets']:
        widget = OverlayWidget(widget_properties)
        overlay_widgets[widget.name] = widget
        set_alignment_flags(widget, overlay_widgets)

    # Go over the widgets again and generate initialization code. Note that we
    # need to iterate over the widgets in order, so we can't use the
    # overlay_widgets dictionary for iteration.
    init_widgets = []
    for widget_properties in layout['widgets']:
        init_widgets.append(generate_widget_init(overlay_widgets[widget_properties['name']]))

    with open(out_file, 'w') as outfile:
        outfile.write(
            template_out_file.format(
                script_name=__file__,
                copyright_year=date.today().year,
                input_file_name=in_file,
                out_file_name=out_file,
                init_widgets='\n'.join(init_widgets)))
        outfile.close()
endlessm/chromium-browser
[ 21, 16, 21, 3, 1435959644 ]
def execute():
    # Update Social Logins in User
    run_patch()

    # Create Social Login Key(s) from Social Login Keys
    frappe.reload_doc("integrations", "doctype", "social_login_key", force=True)

    if not frappe.db.exists('DocType', 'Social Login Keys'):
        return

    social_login_keys = frappe.get_doc("Social Login Keys", "Social Login Keys")

    if social_login_keys.get("facebook_client_id") or social_login_keys.get("facebook_client_secret"):
        facebook_login_key = frappe.new_doc("Social Login Key")
        facebook_login_key.get_social_login_provider("Facebook", initialize=True)
        facebook_login_key.social_login_provider = "Facebook"
        facebook_login_key.client_id = social_login_keys.get("facebook_client_id")
        facebook_login_key.client_secret = social_login_keys.get("facebook_client_secret")
        if not (facebook_login_key.client_secret and facebook_login_key.client_id):
            facebook_login_key.enable_social_login = 0
        facebook_login_key.save()

    if social_login_keys.get("frappe_server_url"):
        frappe_login_key = frappe.new_doc("Social Login Key")
        frappe_login_key.get_social_login_provider("Frappe", initialize=True)
        frappe_login_key.social_login_provider = "Frappe"
        frappe_login_key.base_url = social_login_keys.get("frappe_server_url")
        frappe_login_key.client_id = social_login_keys.get("frappe_client_id")
        frappe_login_key.client_secret = social_login_keys.get("frappe_client_secret")
        if not (frappe_login_key.client_secret and frappe_login_key.client_id and frappe_login_key.base_url):
            frappe_login_key.enable_social_login = 0
        frappe_login_key.save()

    if social_login_keys.get("github_client_id") or social_login_keys.get("github_client_secret"):
        github_login_key = frappe.new_doc("Social Login Key")
        github_login_key.get_social_login_provider("GitHub", initialize=True)
        github_login_key.social_login_provider = "GitHub"
        github_login_key.client_id = social_login_keys.get("github_client_id")
        github_login_key.client_secret = social_login_keys.get("github_client_secret")
        if not (github_login_key.client_secret and github_login_key.client_id):
            github_login_key.enable_social_login = 0
        github_login_key.save()

    if social_login_keys.get("google_client_id") or social_login_keys.get("google_client_secret"):
        google_login_key = frappe.new_doc("Social Login Key")
        google_login_key.get_social_login_provider("Google", initialize=True)
        google_login_key.social_login_provider = "Google"
        google_login_key.client_id = social_login_keys.get("google_client_id")
        google_login_key.client_secret = social_login_keys.get("google_client_secret")
        if not (google_login_key.client_secret and google_login_key.client_id):
            google_login_key.enable_social_login = 0
        google_login_key.save()

    frappe.delete_doc("DocType", "Social Login Keys")
frappe/frappe
[ 4495, 2418, 4495, 1493, 1307520856 ]
def insert_user_social_login(user, modified_by, provider, idx, userid=None, username=None):
    source_cols = get_standard_cols()
    creation_time = frappe.utils.get_datetime_str(frappe.utils.get_datetime())
    values = [
        frappe.generate_hash(length=10),
        creation_time,
        creation_time,
        user,
        modified_by,
        user,
        "User",
        "social_logins",
        cstr(idx),
        provider
    ]

    if userid:
        source_cols.append("userid")
        values.append(userid)

    if username:
        source_cols.append("username")
        values.append(username)

    query = """INSERT INTO `tabUser Social Login`
        (`{source_cols}`)
        VALUES ({values})
    """.format(
        source_cols="`, `".join(source_cols),
        values=", ".join([frappe.db.escape(d) for d in values])
    )
    frappe.db.sql(query)
frappe/frappe
[ 4495, 2418, 4495, 1493, 1307520856 ]