code (string, 4 to 4.48k chars) | docstring (string, 1 to 6.45k chars) | _id (string, 24 chars) |
---|---|---|
class Simple(FitnessTuple): <NEW_LINE> <INDENT> def __new__(self, value: Union[Number, Sequence[Number]]): <NEW_LINE> <INDENT> if isinstance(value, Number): <NEW_LINE> <INDENT> return Lexicographic([value]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Lexicographic(value) | The simplest possible fitness: a single value | 625990664e4d562566373b6c |
class NcVariable(netCDF4.Variable, NcChunkIteratorMixIn): <NEW_LINE> <INDENT> def __new__(cls, group, varname, datatype, **kwargs): <NEW_LINE> <INDENT> var = netCDF4.Variable.__new__(cls, group, varname, datatype, **kwargs) <NEW_LINE> group.variables[varname] = var <NEW_LINE> return var <NEW_LINE> <DEDENT> @property <NEW_LINE> def varname(self): <NEW_LINE> <INDENT> return self._name | Extends the netCDF4.Variable class by defining additional local methods
(just varname at present) and by inheriting iteration functionality from
the NcChunkIteratorMixIn mix-in class. | 625990663617ad0b5ee078b6 |
class TrayIcon(gtk.StatusIcon, BaseTray): <NEW_LINE> <INDENT> NAME = 'Tray Icon' <NEW_LINE> DESCRIPTION = 'The gtk tray icon' <NEW_LINE> AUTHOR = 'Mariano Guerra' <NEW_LINE> WEBSITE = 'www.emesene.org' <NEW_LINE> def __init__(self, handler, main_window=None): <NEW_LINE> <INDENT> BaseTray.__init__(self) <NEW_LINE> gtk.StatusIcon.__init__(self) <NEW_LINE> self.handler = handler <NEW_LINE> self.main_window = main_window <NEW_LINE> self.last_new_message = None <NEW_LINE> self.connect('activate', self._on_activate) <NEW_LINE> self.connect('popup-menu', self._on_popup) <NEW_LINE> self.set_login() <NEW_LINE> self.set_visible(True) <NEW_LINE> self.set_tooltip("emesene") <NEW_LINE> <DEDENT> def set_login(self): <NEW_LINE> <INDENT> self.menu = LoginMenu(self.handler, self.main_window) <NEW_LINE> self.menu.show_all() <NEW_LINE> self.set_from_file(self.handler.theme.logo) <NEW_LINE> <DEDENT> def set_main(self, session): <NEW_LINE> <INDENT> self.handler.session = session <NEW_LINE> self.handler.session.signals.status_change_succeed.subscribe( self._on_change_status) <NEW_LINE> self.handler.session.signals.conv_message.subscribe(self._on_message) <NEW_LINE> self.handler.session.signals.message_read.subscribe(self._on_read) <NEW_LINE> self.menu = MainMenu(self.handler, self.main_window) <NEW_LINE> self.menu.show_all() <NEW_LINE> self.set_tooltip("emesene - " + self.handler.session.account.account) <NEW_LINE> self._on_change_status(self.handler.session.account.status) <NEW_LINE> <DEDENT> def _on_message(self, cid, account, msgobj, cedict={}): <NEW_LINE> <INDENT> conv_manager = self._get_conversation_manager(cid, account) <NEW_LINE> if conv_manager and not conv_manager.is_active(): <NEW_LINE> <INDENT> self.set_blinking(True) <NEW_LINE> self.last_new_message = cid <NEW_LINE> <DEDENT> <DEDENT> def _on_read(self, conv): <NEW_LINE> <INDENT> self.set_blinking(False) <NEW_LINE> self.last_new_message = None <NEW_LINE> <DEDENT> def _on_activate(self, trayicon): <NEW_LINE> <INDENT> if self.last_new_message is not None and self.get_blinking(): <NEW_LINE> <INDENT> cid = self.last_new_message <NEW_LINE> conv_manager = self._get_conversation_manager(cid) <NEW_LINE> if conv_manager: <NEW_LINE> <INDENT> conversation = conv_manager.conversations[cid] <NEW_LINE> conv_manager.present(conversation) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.handler.on_hide_show_mainwindow(self.main_window) <NEW_LINE> <DEDENT> <DEDENT> def _on_change_status(self, stat): <NEW_LINE> <INDENT> if stat not in status.ALL or stat == -1: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.set_from_file(self.handler.theme.status_icons_panel[stat]) <NEW_LINE> <DEDENT> def _on_popup(self, trayicon, button, activate_time): <NEW_LINE> <INDENT> position = None <NEW_LINE> if os.name == 'posix': <NEW_LINE> <INDENT> position = gtk.status_icon_position_menu <NEW_LINE> <DEDENT> self.menu.popup(None, None, position, button, activate_time, trayicon) | A widget that implements the tray icon of emesene for gtk | 625990668a43f66fc4bf38f6 |
class BatchOp(UpdateCallback): <NEW_LINE> <INDENT> title = 'Untitled operation' <NEW_LINE> description = 'This operation needs to be described' <NEW_LINE> def __init__(self, db, callback): <NEW_LINE> <INDENT> UpdateCallback.__init__(self, callback) <NEW_LINE> self.db = db <NEW_LINE> self.prepared = False <NEW_LINE> <DEDENT> def build_config(self): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def build_confirm_text(self): <NEW_LINE> <INDENT> text = _( 'The following action is to be performed:\n\n' 'Operation:\t%s') % self.title.replace('_','') <NEW_LINE> return text <NEW_LINE> <DEDENT> def build_path_list(self): <NEW_LINE> <INDENT> self.prepare() <NEW_LINE> return self.path_list <NEW_LINE> <DEDENT> def run_tool(self): <NEW_LINE> <INDENT> self.db.disable_signals() <NEW_LINE> with DbTxn(self.title, self.db, batch=True) as self.trans: <NEW_LINE> <INDENT> success = self._run() <NEW_LINE> <DEDENT> self.db.enable_signals() <NEW_LINE> self.db.request_rebuild() <NEW_LINE> return success <NEW_LINE> <DEDENT> def _run(self): <NEW_LINE> <INDENT> print("This method needs to be written.") <NEW_LINE> print("Running BatchOp tool... done.") <NEW_LINE> return True <NEW_LINE> <DEDENT> def prepare(self): <NEW_LINE> <INDENT> self.handle_list = [] <NEW_LINE> self.path_list = [] <NEW_LINE> self._prepare() <NEW_LINE> self.prepared = True <NEW_LINE> <DEDENT> def _prepare(self): <NEW_LINE> <INDENT> print("This method needs to be written.") <NEW_LINE> print("Preparing BatchOp tool... done.") | Base class for the sub-tools. | 625990665166f23b2e244b36 |
class Attendance(models.Model): <NEW_LINE> <INDENT> yearmonth = models.ForeignKey( YearMonth, on_delete=models.CASCADE) <NEW_LINE> date = models.DateField() <NEW_LINE> stt_time = models.TimeField(default='00:00:00') <NEW_LINE> end_time = models.TimeField(default='00:00:00') <NEW_LINE> break_time = models.TimeField(default='00:00:00') <NEW_LINE> created_at = models.DateTimeField(auto_now_add=True, null=True) <NEW_LINE> updated_at = models.DateTimeField(auto_now=True, null=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> unique_together = (('yearmonth', 'date'),) <NEW_LINE> ordering = ['yearmonth', 'date'] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.date) <NEW_LINE> <DEDENT> @property <NEW_LINE> def str_operating_time(self): <NEW_LINE> <INDENT> m, s = divmod(self.operating_time.seconds, 60) <NEW_LINE> h, m = divmod(m, 60) <NEW_LINE> return "%02d:%02d" % (h, m) <NEW_LINE> <DEDENT> @property <NEW_LINE> def operating_time(self): <NEW_LINE> <INDENT> s = timedelta( hours=self.stt_time.hour, minutes=self.stt_time.minute, seconds=self.end_time.second) <NEW_LINE> e = timedelta( hours=self.end_time.hour, minutes=self.end_time.minute, seconds=self.end_time.second) <NEW_LINE> t = timedelta( hours=self.break_time.hour, minutes=self.break_time.minute, seconds=self.end_time.second) <NEW_LINE> if s > e: <NEW_LINE> <INDENT> e += timedelta(days=1) <NEW_LINE> <DEDENT> return (e-s-t) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_query_set_Attendances(cls, yearmonth): <NEW_LINE> <INDENT> return Attendance.objects.filter(yearmonth=yearmonth).order_by('date') <NEW_LINE> <DEDENT> def save_attendance(self, stt, end, break_time): <NEW_LINE> <INDENT> self.stt_time = stt <NEW_LINE> self.end_time = end <NEW_LINE> self.break_time = break_time <NEW_LINE> self.save() <NEW_LINE> <DEDENT> @receiver(post_save, sender=YearMonth) <NEW_LINE> def create_attendances(sender, instance, created, **kwargs): <NEW_LINE> <INDENT> if created: <NEW_LINE> <INDENT> import calendar <NEW_LINE> _, cnt = calendar.monthrange(instance.year, instance.month) <NEW_LINE> for i in range(1, cnt+1): <NEW_LINE> <INDENT> d = date(instance.year, instance.month, i) <NEW_LINE> Attendance.objects.create(yearmonth=instance, date=d) | Attendance
| 6259906692d797404e389710 |
class GithubApi: <NEW_LINE> <INDENT> def __init__(self, instance, repo_url, settings, timeout=30): <NEW_LINE> <INDENT> parsed_repo_url = urlparse(repo_url) <NEW_LINE> repo = parsed_repo_url.path.strip("/") <NEW_LINE> secret_reader = SecretReader(settings=settings) <NEW_LINE> token = secret_reader.read(instance["token"]) <NEW_LINE> git_cli = github.Github(token, base_url=GH_BASE_URL, timeout=timeout) <NEW_LINE> self.repo = git_cli.get_repo(repo) <NEW_LINE> <DEDENT> def get_repository_tree(self, ref="master"): <NEW_LINE> <INDENT> tree_items = [] <NEW_LINE> for item in self.repo.get_git_tree(sha=ref, recursive=True).tree: <NEW_LINE> <INDENT> tree_item = {"path": item.path, "name": Path(item.path).name} <NEW_LINE> tree_items.append(tree_item) <NEW_LINE> <DEDENT> return tree_items <NEW_LINE> <DEDENT> @retry() <NEW_LINE> def get_file(self, path, ref="master"): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.repo.get_contents(path, ref).decoded_content <NEW_LINE> <DEDENT> except github.UnknownObjectException: <NEW_LINE> <INDENT> return None | Github client implementing the common interfaces used in
the qontract-reconcile integrations.
:param instance: the Github instance, as provided
by the app-interface
:param repo_url: the Github repository URL
:param settings: the app-interface settings
:type instance: dict
:type repo_url: str
:type settings: dict | 625990667d847024c075db3e |
class DescribeAutoSnapshotPoliciesRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.AutoSnapshotPolicyIds = None <NEW_LINE> self.Filters = None <NEW_LINE> self.Limit = None <NEW_LINE> self.Offset = None <NEW_LINE> self.Order = None <NEW_LINE> self.OrderField = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.AutoSnapshotPolicyIds = params.get("AutoSnapshotPolicyIds") <NEW_LINE> if params.get("Filters") is not None: <NEW_LINE> <INDENT> self.Filters = [] <NEW_LINE> for item in params.get("Filters"): <NEW_LINE> <INDENT> obj = Filter() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.Filters.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.Limit = params.get("Limit") <NEW_LINE> self.Offset = params.get("Offset") <NEW_LINE> self.Order = params.get("Order") <NEW_LINE> self.OrderField = params.get("OrderField") | DescribeAutoSnapshotPolicies request parameter structure
| 62599066adb09d7d5dc0bcd0 |
class SuperChip: <NEW_LINE> <INDENT> def __init__(self, array): <NEW_LINE> <INDENT> self.rom = numpy.array(array,numpy.uint8) <NEW_LINE> self.romSlice = self.rom[:0x1000] <NEW_LINE> self.banks = len(self.rom) / 0x1000 <NEW_LINE> if self.banks == 2: <NEW_LINE> <INDENT> self.access = self.accessF8 <NEW_LINE> <DEDENT> elif self.banks == 4: <NEW_LINE> <INDENT> self.access = self.accessF6 <NEW_LINE> <DEDENT> elif self.banks == 8: <NEW_LINE> <INDENT> self.access = self.accessF4 <NEW_LINE> <DEDENT> <DEDENT> def accessF8(self, address,value): <NEW_LINE> <INDENT> if address >= 0x1000 and address < 0x1080: <NEW_LINE> <INDENT> self.romSlice[address & 0x7F] = value <NEW_LINE> <DEDENT> elif address >= 0x1080 and address < 0x1100: <NEW_LINE> <INDENT> return self.romSlice[address & 0x7F] <NEW_LINE> <DEDENT> elif address == 0x1FF8 or address == 0x1FF9: <NEW_LINE> <INDENT> bank = address & 0x1 << 12 <NEW_LINE> self.romSlice[0x80:0x1000] = self.rom[ bank + 0x80 : bank + 0x1000 ] <NEW_LINE> <DEDENT> return self.romSlice[ address & 0xFFF ] <NEW_LINE> <DEDENT> def accessF6(self, address,value): <NEW_LINE> <INDENT> if address >= 0x1000 and address < 0x1080: <NEW_LINE> <INDENT> self.romSlice[address & 0x7F] = value <NEW_LINE> <DEDENT> elif address >= 0x1080 and address < 0x1100: <NEW_LINE> <INDENT> return self.romSlice[address & 0x7F] <NEW_LINE> <DEDENT> elif address >= 0x1FF6 and address <= 0x1FF9: <NEW_LINE> <INDENT> bank = address - 0x1FF6 << 12 <NEW_LINE> self.romSlice[0x80:0x1000] = self.rom[ bank + 0x80 : bank + 0x1000 ] <NEW_LINE> <DEDENT> return self.romSlice[ address & 0xFFF ] <NEW_LINE> <DEDENT> def accessF4(self, address,value): <NEW_LINE> <INDENT> if address >= 0x1000 and address < 0x1080: <NEW_LINE> <INDENT> self.romSlice[address & 0x7F] = value <NEW_LINE> <DEDENT> elif address >= 0x1080 and address < 0x1100: <NEW_LINE> <INDENT> return self.romSlice[address & 0x7F] <NEW_LINE> <DEDENT> elif address >= 0x1FF4 and address <= 0x1FFB: <NEW_LINE> <INDENT> bank = (address - 0x1FF4) << 12 <NEW_LINE> self.romSlice[0x80:0x1000] = self.rom[ bank + 0x80 : bank + 0x1000 ] <NEW_LINE> <DEDENT> return self.romSlice[ address & 0xFFF ] <NEW_LINE> <DEDENT> def getRam(self): <NEW_LINE> <INDENT> return [('SuperChip 128 bytes', self.romSlice[:0x80])] <NEW_LINE> <DEDENT> def getRom(self): <NEW_LINE> <INDENT> return self.romSlice | Atari Super-Chip | 6259906621bff66bcd7243cc |
class Role(object): <NEW_LINE> <INDENT> def __init__(self, name, permissions): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.permissions = permissions <NEW_LINE> <DEDENT> def has_permission(self, resource, action): <NEW_LINE> <INDENT> return any([resource == perm.resource and action == perm.action for perm in self.permissions]) | Role object to group users and permissions.
Attributes:
- name: The name of the role.
- permissions: A list of permissions. | 625990668e7ae83300eea7f4 |
class Comment(models.Model): <NEW_LINE> <INDENT> user = models.ForeignKey(User, blank=False, related_name="%(app_label)s_%(class)s_user_related") <NEW_LINE> reply_to = models.ForeignKey('self', blank=True, null=True, related_name="%(app_label)s_%(class)s_replyto_related") <NEW_LINE> comment = models.CharField(max_length=2048, blank=False) <NEW_LINE> add_date = models.DateTimeField(auto_now_add=True) <NEW_LINE> edit_date = models.DateTimeField(blank=True, null=True) <NEW_LINE> edit_user = models.ForeignKey(User, blank=True, null=True, related_name="%(app_label)s_%(class)s_edituser_related") <NEW_LINE> edit_reason = models.CharField(max_length=128, blank=True, null=True) <NEW_LINE> socialvote = models.ForeignKey(SocialVote, blank=True, null=True) | Comments are the simplest form of expression
for users on Titles, Releases, etc. Comment objects
can be added to any model to enable comments.
Each row is one comment.
It is recommended to add the following to models
in which comments are enabled:
open_comments = models.BooleanField(default=True)
Thanks to this option, comments can be enabled and
disabled for regular users. | 62599066cc0a2c111447c683 |
@view_auth_classes() <NEW_LINE> class GradeViewMixin(DeveloperErrorViewMixin): <NEW_LINE> <INDENT> def _get_course(self, course_key_string, user, access_action): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> course_key = CourseKey.from_string(course_key_string) <NEW_LINE> <DEDENT> except InvalidKeyError: <NEW_LINE> <INDENT> raise self.api_error( status_code=status.HTTP_404_NOT_FOUND, developer_message='The provided course key cannot be parsed.', error_code='invalid_course_key' ) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return courses.get_course_with_access( user, access_action, course_key, check_if_enrolled=True, ) <NEW_LINE> <DEDENT> except Http404: <NEW_LINE> <INDENT> log.info('Course with ID "%s" not found', course_key_string) <NEW_LINE> <DEDENT> except CourseAccessRedirect: <NEW_LINE> <INDENT> log.info('User %s does not have access to course with ID "%s"', user.username, course_key_string) <NEW_LINE> <DEDENT> raise self.api_error( status_code=status.HTTP_404_NOT_FOUND, developer_message='The user, the course or both do not exist.', error_code='user_or_course_does_not_exist', ) <NEW_LINE> <DEDENT> def _get_effective_user(self, request, course): <NEW_LINE> <INDENT> if 'username' in request.GET: <NEW_LINE> <INDENT> username = request.GET.get('username') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> username = request.user.username <NEW_LINE> <DEDENT> if request.user.username == username: <NEW_LINE> <INDENT> return request.user <NEW_LINE> <DEDENT> if not has_access(request.user, CourseStaffRole.ROLE, course): <NEW_LINE> <INDENT> log.info( 'User %s tried to access the grade for user %s.', request.user.username, username ) <NEW_LINE> raise self.api_error( status_code=status.HTTP_403_FORBIDDEN, developer_message='The user requested does not match the logged in user.', error_code='user_mismatch' ) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return USER_MODEL.objects.get(username=username) <NEW_LINE> <DEDENT> except USER_MODEL.DoesNotExist: <NEW_LINE> <INDENT> raise self.api_error( status_code=status.HTTP_404_NOT_FOUND, developer_message='The user matching the requested username does not exist.', error_code='user_does_not_exist' ) <NEW_LINE> <DEDENT> <DEDENT> def perform_authentication(self, request): <NEW_LINE> <INDENT> super(GradeViewMixin, self).perform_authentication(request) <NEW_LINE> if request.user.is_anonymous: <NEW_LINE> <INDENT> raise AuthenticationFailed | Mixin class for Grades related views. | 62599066009cb60464d02c9f |
class span(parameter): <NEW_LINE> <INDENT> pass | Wing span
b
:Unit: [m] | 6259906616aa5153ce401c41 |
class SwitchSchema: <NEW_LINE> <INDENT> CONF_STATE_ADDRESS = CONF_STATE_ADDRESS <NEW_LINE> DEFAULT_NAME = "KNX Switch" <NEW_LINE> SCHEMA = vol.Schema( { vol.Required(CONF_ADDRESS): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_STATE_ADDRESS): cv.string, } ) | Voluptuous schema for KNX switches. | 62599066a219f33f346c7f6d |
class Settings: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.screen_width = 1200 <NEW_LINE> self.screen_height = 800 <NEW_LINE> self.bg_color = (230, 230, 230) <NEW_LINE> self.ship_speed_factor = 1.5 <NEW_LINE> self.ship_limit = 3 <NEW_LINE> self.blaster_speed_factor = 4 <NEW_LINE> self.blaster_width = 3 <NEW_LINE> self.blaster_height = 15 <NEW_LINE> self.blaster_color = 100, 10, 5 <NEW_LINE> self.blaster_rays_allowed = 6 <NEW_LINE> self.alien_speed_factor = 1 <NEW_LINE> self.fleet_drop_speed = 10 <NEW_LINE> self.fleet_direction = 1 | A class to store all settings for Alien Invasion | 6259906645492302aabfdc43 |
class GnmplutostatsPluginURL(Plugin): <NEW_LINE> <INDENT> implements(IPluginURL) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.name = "Gnmplutostats App" <NEW_LINE> self.urls = 'portal.plugins.gnmplutostats.urls' <NEW_LINE> self.urlpattern = r'^gnmplutostats/' <NEW_LINE> self.namespace = r'gnmplutostats' <NEW_LINE> self.plugin_guid = '227c2ab9-e299-4537-99b4-c313a35d1b2e' <NEW_LINE> log.debug("Initiated Gnmplutostats App") | Adds a plugin handler which creates url handler for the index page | 625990664f6381625f19a058 |
class AccelerationControlWidget(FigureCanvas): <NEW_LINE> <INDENT> def __init__(self, params, parent=None): <NEW_LINE> <INDENT> self.figure = Figure(facecolor=(1., 1., 1.)) <NEW_LINE> super(AccelerationControlWidget, self).__init__(self.figure) <NEW_LINE> self.setParent(parent) <NEW_LINE> self.axes = self.figure.add_subplot(111) <NEW_LINE> self.axes.set_autoscale_on(True) <NEW_LINE> self.canvas = self.figure.canvas <NEW_LINE> self.blade = params.blade <NEW_LINE> self.menu = None <NEW_LINE> <DEDENT> def contextMenuEvent(self, event): <NEW_LINE> <INDENT> if self.menu is None: <NEW_LINE> <INDENT> self.menu = QMenu(self) <NEW_LINE> self.reset = QAction("Reset", self.menu) <NEW_LINE> self.reset.triggered[()].connect(self.blade.resetAcceleration) <NEW_LINE> self.menu.addAction(self.reset) <NEW_LINE> self.reset.setCheckable(False) <NEW_LINE> <DEDENT> self.action = self.menu.exec_(self.mapToGlobal(event.pos())) | CLASS - Defines the graphics area for plotting the acceleration control bezier curve | 62599066460517430c432c08 |
class ExecveGoal(FunctionGoal): <NEW_LINE> <INDENT> def __init__(self, name, address, arguments): <NEW_LINE> <INDENT> super(ExecveGoal, self).__init__(name, address, []) <NEW_LINE> for arg in arguments: <NEW_LINE> <INDENT> if type(arg) not in [int, long]: <NEW_LINE> <INDENT> arg = str(arg) <NEW_LINE> <DEDENT> self.arguments.append(arg) | This class represents a call to execve to start another program | 625990664428ac0f6e659c99 |
class FromFields(object): <NEW_LINE> <INDENT> def __init__(self, api_endpoint: str, api_key: str, x_domain: str = None, x_time_zone: str = None): <NEW_LINE> <INDENT> self._getresponse_client = GetresponseClient(api_endpoint=api_endpoint, api_key=api_key, x_domain=x_domain, x_time_zone=x_time_zone) <NEW_LINE> <DEDENT> def get_from_fields(self, query: list = None, **kwargs): <NEW_LINE> <INDENT> url = '/from-fields?' <NEW_LINE> if query: <NEW_LINE> <INDENT> for item in query: <NEW_LINE> <INDENT> query_data = str(item).split('=') <NEW_LINE> url = url + 'query[' + query_data[0] + ']=' + query_data[1] + '&' <NEW_LINE> <DEDENT> <DEDENT> for key, value in kwargs.items(): <NEW_LINE> <INDENT> if key == 'sort': <NEW_LINE> <INDENT> url = url + key + '[createdOn]=' + value + '&' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> url = url + key + '=' + value + '&' <NEW_LINE> <DEDENT> <DEDENT> url = url[:-1] <NEW_LINE> r = self._getresponse_client.get(url) <NEW_LINE> return r <NEW_LINE> <DEDENT> def get_from_field(self, field_id: str, fields: str = None): <NEW_LINE> <INDENT> url = '/from-fields/' + field_id <NEW_LINE> if fields: <NEW_LINE> <INDENT> url += '?fields=' + fields <NEW_LINE> <DEDENT> r = self._getresponse_client.get(url) <NEW_LINE> return r <NEW_LINE> <DEDENT> def post_from_field(self, name: str, email: str): <NEW_LINE> <INDENT> url = '/from-fields' <NEW_LINE> data = {'name': name, 'email': email} <NEW_LINE> r = self._getresponse_client.post(url, data=json.dumps(data)) <NEW_LINE> return r <NEW_LINE> <DEDENT> def delete_or_replace_from_field(self, from_field_id: str, replace_id: str = None): <NEW_LINE> <INDENT> url = '/from-fields/' + from_field_id <NEW_LINE> if replace_id: <NEW_LINE> <INDENT> data = {'fromFieldIdToReplaceWith': replace_id} <NEW_LINE> r = self._getresponse_client.delete(url, data=json.dumps(data)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> r = self._getresponse_client.delete(url) <NEW_LINE> <DEDENT> return r <NEW_LINE> <DEDENT> def make_default(self, from_field_id: str): <NEW_LINE> <INDENT> url = '/from-fields/' + from_field_id + '/default' <NEW_LINE> r = self._getresponse_client.post(url, data=None) <NEW_LINE> return r | Class represents From fields section of API
http://apidocs.getresponse.com/v3/resources/fromfields | 6259906667a9b606de547655 |
class VirtualHubRouteTable(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'routes': {'key': 'routes', 'type': '[VirtualHubRoute]'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(VirtualHubRouteTable, self).__init__(**kwargs) <NEW_LINE> self.routes = kwargs.get('routes', None) | VirtualHub route table.
:param routes: List of all routes.
:type routes: list[~azure.mgmt.network.v2019_04_01.models.VirtualHubRoute] | 6259906676e4537e8c3f0cea |
class ProxyBase(IProxy): <NEW_LINE> <INDENT> clib_support = False <NEW_LINE> def __enter__(self): <NEW_LINE> <INDENT> threading.local()._orig_socket = socket.socket <NEW_LINE> try: <NEW_LINE> <INDENT> self.negotiate() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> socket.socket = threading.local()._orig_socket <NEW_LINE> raise <NEW_LINE> <DEDENT> <DEDENT> def __exit__(self, *args): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.cleanup() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> socket.socket = threading.local()._orig_socket <NEW_LINE> <DEDENT> <DEDENT> def negotiate(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> pass | base class of proxy
| 625990664e4d562566373b6e |
class Kosten(CustomModel): <NEW_LINE> <INDENT> name = models.CharField( verbose_name="Name", max_length=500, default="Zusätzliche Kosten", ) <NEW_LINE> preis = models.FloatField( verbose_name="Preis (exkl. MwSt)", default=0.0, ) <NEW_LINE> mwstsatz = models.FloatField( verbose_name="MwSt-Satz", choices=constants.MWSTSETS, default=7.7, ) <NEW_LINE> @property <NEW_LINE> def mengenbezeichnung(self): <NEW_LINE> <INDENT> return "[:de]Stück[:fr]Pièce[:it]Pezzo[:en]Piece[:]" <NEW_LINE> <DEDENT> @admin.display(description="Name", ordering="name") <NEW_LINE> def clean_name(self): <NEW_LINE> <INDENT> return clean(self.name) <NEW_LINE> <DEDENT> @admin.display(description="Kosten") <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return f'{ self.clean_name() } ({ self.preis } CHF' + (f' + {self.mwstsatz}% MwSt' if self.mwstsatz else '') + f') ({self.pk})' <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = "Kosten" <NEW_LINE> verbose_name_plural = "Kosten" <NEW_LINE> <DEDENT> objects = models.Manager() | Model representing additional costs | 625990663539df3088ecda06 |
class Transactions(command.Command): <NEW_LINE> <INDENT> required = ['addr', 'pwd'] <NEW_LINE> def handle(self, *args, **kwargs): <NEW_LINE> <INDENT> addr = self.data['addr'] <NEW_LINE> pwd = self.data['pwd'] <NEW_LINE> if not mongo.db.addresses.find_one({"addr": addr, "pwd": pwd}): <NEW_LINE> <INDENT> self.error("Your address or password was invalid") <NEW_LINE> return <NEW_LINE> <DEDENT> payload = {"transactions": []} <NEW_LINE> for t in mongo.db.transactions.find({"to": addr}): <NEW_LINE> <INDENT> payload['transactions'].append({ "from": t['from'], "to": addr, "amount": t['amount'] }) <NEW_LINE> <DEDENT> for t in mongo.db.transactions.find({"from": addr}): <NEW_LINE> <INDENT> payload['transactions'].append({ "from": addr, "to": t['to'], "amount": t['amount'] }) <NEW_LINE> <DEDENT> self.success(payload) | Gives the user a list of their transactions,
allowing clients to display changes of coins
to and from the given address.
fingerprint: {"cmd": "transactions", "addr": _, "pwd": _} | 62599066435de62698e9d571 |
class StartPage(tk.Frame): <NEW_LINE> <INDENT> def __init__(self,parent,controller): <NEW_LINE> <INDENT> tk.Frame.__init__(self,parent) <NEW_LINE> label = tk.Label(self,text="Start Page",font=LARGE_FONT) <NEW_LINE> label.pack(pady = 10,padx = 10) <NEW_LINE> button1 = ttk.Button(self,text="Page One", command=lambda:controller.show_frame(PageOne)) <NEW_LINE> button2 = ttk.Button(self, text="Tabels", command=lambda: controller.show_frame(PageTwo)) <NEW_LINE> button3 = ttk.Button(self, text="Today", command=lambda: controller.show_frame(Today)) <NEW_LINE> button1.pack() <NEW_LINE> button2.pack() <NEW_LINE> button3.pack() | Class for Initial Page which contains
a few buttons to navigate between other pages | 625990668a43f66fc4bf38f8 |
class Table(grid.GridTableBase): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> grid.GridTableBase.__init__(self) <NEW_LINE> <DEDENT> def GetNumberRows(self): <NEW_LINE> <INDENT> return len(data) <NEW_LINE> <DEDENT> def GetNumberCols(self): <NEW_LINE> <INDENT> return len(identifiers) <NEW_LINE> <DEDENT> def IsEmptyCell(self, row, col): <NEW_LINE> <INDENT> key = identifiers[col] <NEW_LINE> return not data[row][key] <NEW_LINE> <DEDENT> def GetValue(self, row, col): <NEW_LINE> <INDENT> key = identifiers[col] <NEW_LINE> return str(data[row][key]) <NEW_LINE> <DEDENT> def SetValue(self, row, col, value): <NEW_LINE> <INDENT> key = identifiers[col] <NEW_LINE> data[row][key] = value <NEW_LINE> <DEDENT> def GetColLabelValue(self, col): <NEW_LINE> <INDENT> identifier = identifiers[col] <NEW_LINE> return col_labels[identifier] <NEW_LINE> <DEDENT> def GetRowLabelValue(self, row): <NEW_LINE> <INDENT> return row_labels[row] <NEW_LINE> <DEDENT> def move_column(self, mover_col, index_col): <NEW_LINE> <INDENT> def change_table(): <NEW_LINE> <INDENT> old = identifiers[mover_col] <NEW_LINE> del identifiers[mover_col] <NEW_LINE> if index_col > mover_col: <NEW_LINE> <INDENT> identifiers.insert(index_col - 1, old) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> identifiers.insert(index_col, old) <NEW_LINE> <DEDENT> <DEDENT> def change_grid(): <NEW_LINE> <INDENT> view.BeginBatch() <NEW_LINE> msg = grid.GridTableMessage(self, grid.GRIDTABLE_NOTIFY_COLS_DELETED, mover_col, 1) <NEW_LINE> view.ProcessTableMessage(msg) <NEW_LINE> msg = grid.GridTableMessage(self, grid.GRIDTABLE_NOTIFY_COLS_INSERTED, index_col, 1) <NEW_LINE> view.ProcessTableMessage(msg) <NEW_LINE> view.EndBatch() <NEW_LINE> <DEDENT> view = self.GetView() <NEW_LINE> if view: <NEW_LINE> <INDENT> change_table() <NEW_LINE> change_grid() <NEW_LINE> <DEDENT> <DEDENT> def move_row(self, mover_row, index_row): <NEW_LINE> <INDENT> def change_table(): <NEW_LINE> <INDENT> old_label = row_labels[mover_row] <NEW_LINE> old_data = data[mover_row] <NEW_LINE> del row_labels[mover_row] <NEW_LINE> del data[mover_row] <NEW_LINE> if index_row > mover_row: <NEW_LINE> <INDENT> row_labels.insert(index_row - 1, old_label) <NEW_LINE> data.insert(index_row - 1, old_data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> row_labels.insert(index_row, old_label) <NEW_LINE> data.insert(index_row, old_data) <NEW_LINE> <DEDENT> <DEDENT> def change_grid(): <NEW_LINE> <INDENT> view.BeginBatch() <NEW_LINE> msg = grid.GridTableMessage(self, grid.GRIDTABLE_NOTIFY_ROWS_DELETED, mover_row, 1) <NEW_LINE> view.ProcessTableMessage(msg) <NEW_LINE> msg = grid.GridTableMessage(self, grid.GRIDTABLE_NOTIFY_ROWS_INSERTED, index_row, 1) <NEW_LINE> view.ProcessTableMessage(msg) <NEW_LINE> view.EndBatch() <NEW_LINE> <DEDENT> view = self.GetView() <NEW_LINE> if view: <NEW_LINE> <INDENT> change_table() <NEW_LINE> change_grid() | Holds the data. Usually wx.grid.GridStringTable is used but gridmovers requires customisation of base class. | 62599066aad79263cf42ff20 |
class PostUpdateView(LoginRequiredMixin, UserPassesTestMixin, UpdateView): <NEW_LINE> <INDENT> model = Post <NEW_LINE> fields = ['content'] <NEW_LINE> template_name = 'blog/post_new.html' <NEW_LINE> success_url = '/' <NEW_LINE> def form_valid(self, form): <NEW_LINE> <INDENT> form.instance.author = self.request.user <NEW_LINE> return super().form_valid(form) <NEW_LINE> <DEDENT> def test_func(self): <NEW_LINE> <INDENT> return is_users(self.get_object().author, self.request.user) <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> data = super().get_context_data(**kwargs) <NEW_LINE> data['tag_line'] = 'Edit a post' <NEW_LINE> return data | Comment update view. | 625990667d847024c075db40 |
class TN(_RowMeasure): <NEW_LINE> <INDENT> name, label = 'tn', 'TN' <NEW_LINE> def __call__(self): <NEW_LINE> <INDENT> return self.cm.sum() - (TP(self.cm)() + FP(self.cm)() + FN(self.cm)()) | A row measure that computes the true negatives of each class. | 62599066aad79263cf42ff21 |
class CloudbillingBillingAccountsGetRequest(_messages.Message): <NEW_LINE> <INDENT> name = _messages.StringField(1, required=True) | A CloudbillingBillingAccountsGetRequest object.
Fields:
name: The resource name of the billing account to retrieve. For example,
`billingAccounts/012345-567890-ABCDEF`. | 6259906632920d7e50bc77ae |
class IPAddress(IPBase): <NEW_LINE> <INDENT> INET_TYPE = None <NEW_LINE> def __init__(self, address): <NEW_LINE> <INDENT> self.address = address <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> address_string = socket.inet_ntop(self.INET_TYPE, self.address) <NEW_LINE> return address_string <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.address == other.address <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self.address) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_string(string): <NEW_LINE> <INDENT> if is_ipv6(string): <NEW_LINE> <INDENT> return IP6Address.build_from_string(string) <NEW_LINE> <DEDENT> return IP4Address.build_from_string(string) | Abstract base class for IP addresses | 6259906632920d7e50bc77af |
class TaskAttachmentViewHandler(FileHandler): <NEW_LINE> <INDENT> @tornado.web.authenticated <NEW_LINE> @actual_phase_required(0, 3) <NEW_LINE> @multi_contest <NEW_LINE> def get(self, task_name, filename): <NEW_LINE> <INDENT> task = self.get_task(task_name) <NEW_LINE> if task is None: <NEW_LINE> <INDENT> raise tornado.web.HTTPError(404) <NEW_LINE> <DEDENT> if filename not in task.attachments: <NEW_LINE> <INDENT> raise tornado.web.HTTPError(404) <NEW_LINE> <DEDENT> attachment = task.attachments[filename].digest <NEW_LINE> self.sql_session.close() <NEW_LINE> mimetype = get_type_for_file_name(filename) <NEW_LINE> if mimetype is None: <NEW_LINE> <INDENT> mimetype = 'application/octet-stream' <NEW_LINE> <DEDENT> self.fetch(attachment, mimetype, filename) | Shows an attachment file of a task in the contest.
| 6259906656ac1b37e6303899 |
class SearcherTest(RailroadServer): <NEW_LINE> <INDENT> def test(self, n): <NEW_LINE> <INDENT> nodes = list(self.nodes.keys()) <NEW_LINE> for i in range(n): <NEW_LINE> <INDENT> source = choice(nodes) <NEW_LINE> destination = choice(nodes) <NEW_LINE> astar, dijkstra = searcher.run(source, destination, self.paths.values(), self.nodes.values()) <NEW_LINE> if astar[0] != dijkstra[0]: <NEW_LINE> <INDENT> print('Test failed:\n\tsource: ' + source + '\n\tdestination: ' + destination + '\n\tastar: ' + str(astar[0]) + '\n\tdijkstra: ' + str(dijkstra[0])) <NEW_LINE> exit(1) | Tests the searcher file by generating random source and destination nodes, verifying that the shortest paths found by both A* and Dijkstra are equal. | 6259906616aa5153ce401c43 |
class LocalOscillatorTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self) -> None: <NEW_LINE> <INDENT> addr = '/dev/ttyUSB0' <NEW_LINE> self.device = kuhne_electronic.LocalOscillator(addr) <NEW_LINE> self.device.initialize() <NEW_LINE> <DEDENT> def tearDown(self) -> None: <NEW_LINE> <INDENT> self.device.close() <NEW_LINE> <DEDENT> def test_idn(self): <NEW_LINE> <INDENT> result = self.device.idn <NEW_LINE> self.assertIsInstance(result, str) | For testing the Kuhne Electronic Local Oscillator class. | 625990664a966d76dd5f065f |
class MultiClipAlgorithm(GeoAlgorithm): <NEW_LINE> <INDENT> OUTPUT_LAYER = 'OUTPUT_LAYER' <NEW_LINE> INPUT_LAYER = 'INPUT_LAYER' <NEW_LINE> def defineCharacteristics(self): <NEW_LINE> <INDENT> self.name = 'Clip Layer by Other Layer' <NEW_LINE> self.group = 'Vector Algorithms' <NEW_LINE> self.addParameter(ParameterVector(self.INPUT_LAYER, self.tr('Input layer'), [ParameterVector.VECTOR_TYPE_ANY], False)) <NEW_LINE> self.addOutput(OutputVector(self.OUTPUT_LAYER, self.tr('Output layer with selected features'))) <NEW_LINE> <DEDENT> def processAlgorithm(self, progress): <NEW_LINE> <INDENT> inputFilename = self.getParameterValue(self.INPUT_LAYER) <NEW_LINE> output = self.getOutputValue(self.OUTPUT_LAYER) <NEW_LINE> vectorLayer = dataobjects.getObjectFromUri(inputFilename) <NEW_LINE> settings = QSettings() <NEW_LINE> systemEncoding = settings.value('/UI/encoding', 'System') <NEW_LINE> provider = vectorLayer.dataProvider() <NEW_LINE> writer = QgsVectorFileWriter(output, systemEncoding, provider.fields(), provider.geometryType(), provider.crs()) <NEW_LINE> features = vector.features(vectorLayer) <NEW_LINE> for f in features: <NEW_LINE> <INDENT> writer.addFeature(f) | This is an example algorithm that takes a vector layer and
creates a new one with just those features of the input
layer that are selected.
It is meant to be used as an example of how to create your own
algorithms and explain methods and variables used to do it. An
algorithm like this will be available in all elements, and there
is no need for additional work.
All Processing algorithms should extend the GeoAlgorithm class. | 62599066d7e4931a7ef3d73a |
class SentencePieceUnigramTokenizer(BaseTokenizer): <NEW_LINE> <INDENT> def __init__( self, replacement = "_", add_prefix_space: bool = True, unk_token: Union[str, AddedToken] = "<unk>", eos_token: Union[str, AddedToken] = "</s>", pad_token: Union[str, AddedToken] = "<pad>", ): <NEW_LINE> <INDENT> self.special_tokens = { "pad": {"id": 0, "token": pad_token}, "eos": {"id": 1, "token": eos_token}, "unk": {"id": 2, "token": unk_token}, } <NEW_LINE> self.special_tokens_list = [None] * len(self.special_tokens) <NEW_LINE> for token_dict in self.special_tokens.values(): <NEW_LINE> <INDENT> self.special_tokens_list[token_dict["id"]] = token_dict["token"] <NEW_LINE> <DEDENT> tokenizer = Tokenizer(Unigram()) <NEW_LINE> tokenizer.normalizer = normalizers.Sequence( [ normalizers.Nmt(), normalizers.NFKC(), normalizers.Replace(Regex(" {2,}"), " "), normalizers.Lowercase(), ] ) <NEW_LINE> tokenizer.pre_tokenizer = pre_tokenizers.Sequence( [ pre_tokenizers.Metaspace( replacement=replacement, add_prefix_space=add_prefix_space ), pre_tokenizers.Digits(individual_digits=True), pre_tokenizers.Punctuation(), ] ) <NEW_LINE> tokenizer.decoder = decoders.Metaspace( replacement=replacement, add_prefix_space=add_prefix_space ) <NEW_LINE> tokenizer.post_processor = TemplateProcessing( single=f"$A {self.special_tokens['eos']['token']}", special_tokens=[ (self.special_tokens["eos"]["token"], self.special_tokens["eos"]["id"]) ], ) <NEW_LINE> parameters = { "model": "SentencePieceUnigram", "replacement": replacement, "add_prefix_space": add_prefix_space, } <NEW_LINE> super().__init__(tokenizer, parameters) <NEW_LINE> <DEDENT> def train( self, files: Union[str, List[str]], vocab_size: int = 8000, show_progress: bool = True, ): <NEW_LINE> <INDENT> trainer = trainers.UnigramTrainer( vocab_size=vocab_size, special_tokens=self.special_tokens_list, show_progress=show_progress, ) <NEW_LINE> if isinstance(files, str): <NEW_LINE> <INDENT> files = [files] <NEW_LINE> <DEDENT> self._tokenizer.train(files, trainer=trainer) <NEW_LINE> self.add_unk_id() <NEW_LINE> <DEDENT> def train_from_iterator( self, iterator: Union[Iterator[str], Iterator[Iterator[str]]], vocab_size: int = 8000, show_progress: bool = True, ): <NEW_LINE> <INDENT> trainer = trainers.UnigramTrainer( vocab_size=vocab_size, special_tokens=self.special_tokens_list, show_progress=show_progress, ) <NEW_LINE> self._tokenizer.train_from_iterator(iterator, trainer=trainer) <NEW_LINE> self.add_unk_id() <NEW_LINE> <DEDENT> def add_unk_id(self): <NEW_LINE> <INDENT> tokenizer_json = json.loads(self._tokenizer.to_str()) <NEW_LINE> tokenizer_json["model"]["unk_id"] = self.special_tokens["unk"]["id"] <NEW_LINE> self._tokenizer = Tokenizer.from_str(json.dumps(tokenizer_json)) | This class is a copy of `DeDLOC's tokenizer implementation <https://github.com/yandex-research/DeDLOC/blob/main/sahajbert/tokenizer/tokenizer_model.py>`
Custom SentencePiece Unigram Tokenizer with NMT, NKFC, spaces and lower-casing characters normalization
Represents the Unigram algorithm, with the pretokenization used by SentencePiece | 625990663539df3088ecda07 |
class ConfigBase(object): <NEW_LINE> <INDENT> def get(self,key,default=None): <NEW_LINE> <INDENT> if hasattr(self,key): <NEW_LINE> <INDENT> return getattr(self,key) <NEW_LINE> <DEDENT> return default <NEW_LINE> <DEDENT> def __init__(self, config_dict=None): <NEW_LINE> <INDENT> if config_dict: <NEW_LINE> <INDENT> self.set_dict(config_dict) <NEW_LINE> <DEDENT> <DEDENT> def set_dict(self, config_dict): <NEW_LINE> <INDENT> for k,v in config_dict.items(): <NEW_LINE> <INDENT> setattr(self, k, v) <NEW_LINE> <DEDENT> <DEDENT> headers = True <NEW_LINE> header_style = "color: #ffffff; font-family: arial; background-color: #0000B3; font-size: 12pt; text-align: center" <NEW_LINE> freeze_col = 0 <NEW_LINE> freeze_row = 1 <NEW_LINE> row_styles = ( "color: #000000; font-family: arial; background-color: #666666; border-color: #ff0000", "color: #000000; font-family: arial; background-color: #FFFFFF" ) <NEW_LINE> adjust_all_col_width = True <NEW_LINE> datetime_format = 'M/D/YY h:mm:ss' <NEW_LINE> date_format = 'M/D/YY' <NEW_LINE> time_format = "h:mm:ss" | Holds the configuration | 62599066435de62698e9d572 |
@ddt <NEW_LINE> class frontlogin(myunit.MyTest): <NEW_LINE> <INDENT> @data(*get_csv_data('D:\projectTestCase\iwebshop\iwebshop\data\logindata.csv')) <NEW_LINE> @unpack <NEW_LINE> def test_blogin(self, username, password): <NEW_LINE> <INDENT> browes = login(self.driver, "http://127.0.0.1/iwebshop/") <NEW_LINE> browes.userLogin(username, password) <NEW_LINE> result = browes.get_anquantuichu() <NEW_LINE> self.assertEqual(result, u"安全退出") | Front-end login functionality test | 625990662c8b7c6e89bd4f4e |
class TaskActionTimer: <NEW_LINE> <INDENT> __slots__ = ["ctx", "delays", "num", "delay", "timeout", "is_waiting"] <NEW_LINE> def __init__(self, ctx=None, delays=None, num=0, delay=None, timeout=None): <NEW_LINE> <INDENT> self.ctx = ctx <NEW_LINE> self.delays = None <NEW_LINE> self.set_delays(delays) <NEW_LINE> self.num = int(num) <NEW_LINE> if delay is not None: <NEW_LINE> <INDENT> delay = float(delay) <NEW_LINE> <DEDENT> self.delay = delay <NEW_LINE> if timeout is not None: <NEW_LINE> <INDENT> timeout = float(timeout) <NEW_LINE> <DEDENT> self.timeout = timeout <NEW_LINE> self.is_waiting = False <NEW_LINE> <DEDENT> def delay_timeout_as_str(self): <NEW_LINE> <INDENT> return r"%s (after %s)" % ( get_seconds_as_interval_string(self.delay), get_time_string_from_unix_time(self.timeout)) <NEW_LINE> <DEDENT> def is_delay_done(self, now=None): <NEW_LINE> <INDENT> if self.timeout is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if now is None: <NEW_LINE> <INDENT> now = time() <NEW_LINE> <DEDENT> return now > self.timeout <NEW_LINE> <DEDENT> def is_timeout_set(self): <NEW_LINE> <INDENT> return self.timeout is not None <NEW_LINE> <DEDENT> def next(self, no_exhaust=False): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.delay = self.delays[self.num] <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> if not no_exhaust: <NEW_LINE> <INDENT> self.delay = None <NEW_LINE> <DEDENT> <DEDENT> if self.delay is not None: <NEW_LINE> <INDENT> self.timeout = time() + self.delay <NEW_LINE> self.num += 1 <NEW_LINE> <DEDENT> return self.delay <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.num = 0 <NEW_LINE> self.delay = None <NEW_LINE> self.timeout = None <NEW_LINE> self.is_waiting = False <NEW_LINE> <DEDENT> def set_delays(self, delays=None): <NEW_LINE> <INDENT> if delays is None: <NEW_LINE> <INDENT> self.delays = [float(0)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.delays = [float(delay) for delay in delays] <NEW_LINE> <DEDENT> <DEDENT> def set_waiting(self): <NEW_LINE> <INDENT> self.delay = None <NEW_LINE> self.is_waiting = True <NEW_LINE> self.timeout = None <NEW_LINE> <DEDENT> def unset_waiting(self): <NEW_LINE> <INDENT> self.is_waiting = False | A timer with delays for task actions. | 625990663d592f4c4edbc647 |
class DataLoader: <NEW_LINE> <INDENT> cleaner = SentenceCleaner() <NEW_LINE> def __init__(self, path, vocabulary=None, do_shuffle = True, is_partial=False): <NEW_LINE> <INDENT> print("Reading data from {} ".format(path)) <NEW_LINE> self.load_data(path, vocabulary, is_partial) <NEW_LINE> self.shuffle = do_shuffle <NEW_LINE> <DEDENT> def load_data(self, path, vocabulary, partial_sentence): <NEW_LINE> <INDENT> list = [] <NEW_LINE> with open(path) as file: <NEW_LINE> <INDENT> for line in file: <NEW_LINE> <INDENT> list.append(DataLoader.cleaner.prepare_sentence(line, is_partial=partial_sentence)) <NEW_LINE> <DEDENT> <DEDENT> self.data = np.array(list) <NEW_LINE> print("Start translation") <NEW_LINE> if vocabulary is not None: <NEW_LINE> <INDENT> voc = vocabulary.get_vocabulary_as_dict() <NEW_LINE> self.data_num = np.array([voc.get(w, voc["<unk>"]) for w in self.data.reshape([-1])]).reshape(self.data.shape) <NEW_LINE> <DEDENT> <DEDENT> def batch_iterator(self, num_epochs, batch_size): <NEW_LINE> <INDENT> num_samples = self.data_num.shape[0] <NEW_LINE> batches_per_epoch = int((num_samples - 1) / batch_size) + 1 <NEW_LINE> for epoch in range(num_epochs): <NEW_LINE> <INDENT> if self.shuffle: <NEW_LINE> <INDENT> shuffle_indices = np.random.permutation(np.arange(num_samples)) <NEW_LINE> shuffled_data = self.data_num[shuffle_indices] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> shuffled_data = self.data_num <NEW_LINE> <DEDENT> for b in range(batches_per_epoch): <NEW_LINE> <INDENT> start_index = b * batch_size <NEW_LINE> end_index = min((b + 1) * batch_size, num_samples) <NEW_LINE> yield shuffled_data[start_index:end_index] | loads the trainings data and creates a generator for data batches to serve to Neural Networks | 62599066009cb60464d02ca2 |
class SingleThreadedDownloader(object): <NEW_LINE> <INDENT> def __init__(self, writer): <NEW_LINE> <INDENT> self._articles = [] <NEW_LINE> self._writer = writer <NEW_LINE> <DEDENT> def queue_article(self, article): <NEW_LINE> <INDENT> self._articles.append(article) <NEW_LINE> <DEDENT> def process_all(self): <NEW_LINE> <INDENT> for article in self._articles: <NEW_LINE> <INDENT> result = process(article) <NEW_LINE> if result: <NEW_LINE> <INDENT> self._writer.write(result) <NEW_LINE> <DEDENT> <DEDENT> self._articles = [] | Class for downloading and parsing links in a single (current) thread. | 625990666e29344779b01dba |
class FloatProperty(_Property, FloatVariable): <NEW_LINE> <INDENT> __attributes__ = [] | Implements a float property | 6259906699fddb7c1ca63984 |
class CategoryActions(BaseActions): <NEW_LINE> <INDENT> def __get_form(self): <NEW_LINE> <INDENT> self.form = CaseCategoryForm(self.request.REQUEST) <NEW_LINE> self.form.populate(product_id=self.product_id) <NEW_LINE> return self.form <NEW_LINE> <DEDENT> def __check_form_validation(self): <NEW_LINE> <INDENT> form = self.__get_form() <NEW_LINE> if not form.is_valid(): <NEW_LINE> <INDENT> return 0, self.render_ajax(forms.errors_to_list(form)) <NEW_LINE> <DEDENT> return 1, form <NEW_LINE> <DEDENT> def __check_perms(self, perm): <NEW_LINE> <INDENT> return 1, True <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> is_valid, perm = self.__check_perms('change') <NEW_LINE> if not is_valid: <NEW_LINE> <INDENT> return perm <NEW_LINE> <DEDENT> is_valid, form = self.__check_form_validation() <NEW_LINE> if not is_valid: <NEW_LINE> <INDENT> return form <NEW_LINE> <DEDENT> category_pk = self.request.REQUEST.get('o_category') <NEW_LINE> category = TestCaseCategory.objects.get(pk=category_pk) <NEW_LINE> tcs = self.get_testcases() <NEW_LINE> for tc in tcs: <NEW_LINE> <INDENT> tc.category = category <NEW_LINE> tc.save() <NEW_LINE> <DEDENT> return self.render_ajax(self.ajax_response) <NEW_LINE> <DEDENT> def render_ajax(self, response): <NEW_LINE> <INDENT> return HttpResponse(simplejson.dumps(self.ajax_response)) <NEW_LINE> <DEDENT> def render_form(self): <NEW_LINE> <INDENT> form = CaseCategoryForm(initial={ 'product': self.product_id, 'category': self.request.REQUEST.get('o_category'), }) <NEW_LINE> form.populate(product_id=self.product_id) <NEW_LINE> return HttpResponse(form.as_p()) | Category actions used by view function `category` | 62599066f7d966606f74946f |
class RawHtmlPostprocessor(Postprocessor): <NEW_LINE> <INDENT> BLOCK_LEVEL_REGEX = re.compile(r'^\<\/?([^ >]+)') <NEW_LINE> def run(self, text): <NEW_LINE> <INDENT> replacements = OrderedDict() <NEW_LINE> for i in range(self.md.htmlStash.html_counter): <NEW_LINE> <INDENT> html = self.stash_to_string(self.md.htmlStash.rawHtmlBlocks[i]) <NEW_LINE> if self.isblocklevel(html): <NEW_LINE> <INDENT> replacements["<p>{}</p>".format( self.md.htmlStash.get_placeholder(i))] = html <NEW_LINE> <DEDENT> replacements[self.md.htmlStash.get_placeholder(i)] = html <NEW_LINE> <DEDENT> def substitute_match(m): <NEW_LINE> <INDENT> key = m.group(0) <NEW_LINE> if key not in replacements: <NEW_LINE> <INDENT> if key[3:-4] in replacements: <NEW_LINE> <INDENT> return f'<p>{ replacements[key[3:-4]] }</p>' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return key <NEW_LINE> <DEDENT> <DEDENT> return replacements[key] <NEW_LINE> <DEDENT> if replacements: <NEW_LINE> <INDENT> base_placeholder = util.HTML_PLACEHOLDER % r'([0-9]+)' <NEW_LINE> pattern = re.compile(f'<p>{ base_placeholder }</p>|{ base_placeholder }') <NEW_LINE> processed_text = pattern.sub(substitute_match, text) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return text <NEW_LINE> <DEDENT> if processed_text == text: <NEW_LINE> <INDENT> return processed_text <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.run(processed_text) <NEW_LINE> <DEDENT> <DEDENT> def isblocklevel(self, html): <NEW_LINE> <INDENT> m = self.BLOCK_LEVEL_REGEX.match(html) <NEW_LINE> if m: <NEW_LINE> <INDENT> if m.group(1)[0] in ('!', '?', '@', '%'): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.md.is_block_level(m.group(1)) <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def stash_to_string(self, text): <NEW_LINE> <INDENT> return str(text) | Restore raw html to the document. | 6259906632920d7e50bc77b0 |
@gin.configurable <NEW_LINE> class LSTMEncodingNetwork(network.Network): <NEW_LINE> <INDENT> def __init__( self, input_tensor_spec, preprocessing_layers=None, preprocessing_combiner=None, conv_layer_params=None, input_fc_layer_params=(75, 40), lstm_size=(40,), output_fc_layer_params=(75, 40), activation_fn=tf.keras.activations.relu, dtype=tf.float32, name='LSTMEncodingNetwork', ): <NEW_LINE> <INDENT> kernel_initializer = tf.compat.v1.variance_scaling_initializer( scale=2.0, mode='fan_in', distribution='truncated_normal') <NEW_LINE> input_encoder = encoding_network.EncodingNetwork( input_tensor_spec, preprocessing_layers=preprocessing_layers, preprocessing_combiner=preprocessing_combiner, conv_layer_params=conv_layer_params, fc_layer_params=input_fc_layer_params, activation_fn=activation_fn, kernel_initializer=kernel_initializer, dtype=dtype) <NEW_LINE> if len(lstm_size) == 1: <NEW_LINE> <INDENT> cell = tf.keras.layers.LSTMCell(lstm_size[0], dtype=dtype) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cell = tf.keras.layers.StackedRNNCells( [tf.keras.layers.LSTMCell(size, dtype=dtype) for size in lstm_size]) <NEW_LINE> <DEDENT> output_encoder = ([ tf.keras.layers.Dense( num_units, activation=activation_fn, kernel_initializer=kernel_initializer, dtype=dtype, name='/'.join([name, 'dense'])) for num_units in output_fc_layer_params ]) <NEW_LINE> state_spec = tf.nest.map_structure( functools.partial( tensor_spec.TensorSpec, dtype=dtype, name='network_state_spec'), cell.state_size) <NEW_LINE> super(LSTMEncodingNetwork, self).__init__( input_tensor_spec=input_tensor_spec, state_spec=state_spec, name=name) <NEW_LINE> self._conv_layer_params = conv_layer_params <NEW_LINE> self._input_encoder = input_encoder <NEW_LINE> self._dynamic_unroll = dynamic_unroll_layer.DynamicUnroll(cell) <NEW_LINE> self._output_encoder = output_encoder <NEW_LINE> <DEDENT> def call(self, observation, step_type, network_state=None): <NEW_LINE> <INDENT> num_outer_dims = nest_utils.get_outer_rank(observation, self.input_tensor_spec) <NEW_LINE> if num_outer_dims not in (1, 2): <NEW_LINE> <INDENT> raise ValueError( 'Input observation must have a batch or batch x time outer shape.') <NEW_LINE> <DEDENT> has_time_dim = num_outer_dims == 2 <NEW_LINE> if not has_time_dim: <NEW_LINE> <INDENT> observation = tf.nest.map_structure(lambda t: tf.expand_dims(t, 1), observation) <NEW_LINE> step_type = tf.nest.map_structure(lambda t: tf.expand_dims(t, 1), step_type) <NEW_LINE> <DEDENT> state, network_state = self._input_encoder( observation, step_type, network_state) <NEW_LINE> with tf.name_scope('reset_mask'): <NEW_LINE> <INDENT> reset_mask = tf.equal(step_type, time_step.StepType.FIRST) <NEW_LINE> <DEDENT> state, network_state = self._dynamic_unroll( state, reset_mask, initial_state=network_state) <NEW_LINE> for layer in self._output_encoder: <NEW_LINE> <INDENT> state = layer(state) <NEW_LINE> <DEDENT> if not has_time_dim: <NEW_LINE> <INDENT> state = tf.squeeze(state, [1]) <NEW_LINE> <DEDENT> return state, network_state | Recurrent network. | 625990667c178a314d78e7a1 |
class MM(Layer): <NEW_LINE> <INDENT> def __init__(self, trans_a=False, trans_b=False, bigdl_type="float"): <NEW_LINE> <INDENT> super(MM, self).__init__(None, bigdl_type, trans_a, trans_b) | Module to perform matrix multiplication on two mini-batch inputs, producing a mini-batch.
:param trans_a: specifying whether or not transpose the first input matrix
:param trans_b: specifying whether or not transpose the second input matrix
>>> mM = MM(True, True)
creating: createMM | 625990668e71fb1e983bd230 |
class UserViewSet(viewsets.ReadOnlyModelViewSet): <NEW_LINE> <INDENT> queryset=User.objects.all() <NEW_LINE> serializer_class=UserSerializer | This class takes care of list and detail actions
| 62599066a219f33f346c7f71 |
class VerifyEmailView(APIView): <NEW_LINE> <INDENT> def put(self,request): <NEW_LINE> <INDENT> token = request.query_params.get("token") <NEW_LINE> if not token: <NEW_LINE> <INDENT> return Response({'message':"缺少token"},status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> user = User.check_verify_email_token(token) <NEW_LINE> if user is None: <NEW_LINE> <INDENT> return Response({'message': '链接信息无效'}, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> user.email_active = True <NEW_LINE> user.save() <NEW_LINE> return Response({'message': 'OK'}) | User email verification
1. Get the token (encrypted user information) and validate it (token is required; check that it is valid)
2. Set the user's email verification flag to True
3. Return a response indicating that email verification succeeded
| 625990664f88993c371f10d4 |
class QuantileEncoder(base.BaseEstimator): <NEW_LINE> <INDENT> def __init__(self, n_label=10, sample=100000, random_state=42): <NEW_LINE> <INDENT> self.n_label = n_label <NEW_LINE> self.sample = sample <NEW_LINE> self.random_state = random_state <NEW_LINE> self.is_fitted = False <NEW_LINE> <DEDENT> def fit(self, X, y=None): <NEW_LINE> <INDENT> def _calculate_ecdf(x): <NEW_LINE> <INDENT> return ECDF(x[~np.isnan(x)]) <NEW_LINE> <DEDENT> if self.sample >= X.shape[0]: <NEW_LINE> <INDENT> self.ecdfs = X.apply(_calculate_ecdf, axis=0) <NEW_LINE> <DEDENT> elif self.sample > 1: <NEW_LINE> <INDENT> self.ecdfs = X.sample(n=self.sample, random_state=self.random_state).apply( _calculate_ecdf, axis=0 ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.ecdfs = X.sample( frac=self.sample, random_state=self.random_state ).apply(_calculate_ecdf, axis=0) <NEW_LINE> <DEDENT> self.is_fitted = True <NEW_LINE> return self <NEW_LINE> <DEDENT> def fit_transform(self, X, y=None): <NEW_LINE> <INDENT> self.fit(X, y) <NEW_LINE> return self.transform(X) <NEW_LINE> <DEDENT> def transform(self, X): <NEW_LINE> <INDENT> assert ( self.is_fitted ), "fit() or fit_transform() must be called before transform()." <NEW_LINE> X = X.copy() <NEW_LINE> for i, col in enumerate(X.columns): <NEW_LINE> <INDENT> X.loc[:, col] = self._transform_col(X[col], i) <NEW_LINE> <DEDENT> return X <NEW_LINE> <DEDENT> def _transform_col(self, x, i): <NEW_LINE> <INDENT> rv = np.ones_like(x) * -1 <NEW_LINE> filt = ~np.isnan(x) <NEW_LINE> rv[filt] = np.floor((self.ecdfs[i](x[filt]) * 0.998 + 0.001) * self.n_label) <NEW_LINE> return rv | QuantileEncoder encodes numerical features to quantile values.
Attributes:
ecdfs (list of empirical CDF): empirical CDFs for columns
n_label (int): the number of labels to be created. | 6259906623849d37ff852820 |
class EndpointException(Exception): <NEW_LINE> <INDENT> pass | raised in case endpoint url construction fails | 62599066d268445f2663a712 |
class CreateView(generics.ListCreateAPIView): <NEW_LINE> <INDENT> queryset = File.objects.all() <NEW_LINE> serializer_class = FileSerializer <NEW_LINE> def perform_create(self, serializer): <NEW_LINE> <INDENT> serializer.save() | This class defines the create behavior of our rest api. | 6259906666673b3332c31b68 |
class ComputeForwardingRulesDeleteRequest(_messages.Message): <NEW_LINE> <INDENT> forwardingRule = _messages.StringField(1, required=True) <NEW_LINE> project = _messages.StringField(2, required=True) <NEW_LINE> region = _messages.StringField(3, required=True) <NEW_LINE> requestId = _messages.StringField(4) | A ComputeForwardingRulesDeleteRequest object.
Fields:
forwardingRule: Name of the ForwardingRule resource to delete.
project: Project ID for this request.
region: Name of the region scoping this request.
requestId: begin_interface: MixerMutationRequestBuilder Request ID to
support idempotency. | 62599066cb5e8a47e493cd3a |
class PaletteColor(Widget): <NEW_LINE> <INDENT> color = ListProperty() <NEW_LINE> selected = BooleanProperty(False) <NEW_LINE> def on_touch_down(self, touch): <NEW_LINE> <INDENT> if self.collide_point(*touch.pos): <NEW_LINE> <INDENT> touch.grab(self) <NEW_LINE> if knspace.colors_screen.mode == 'normal': <NEW_LINE> <INDENT> clock_event = Clock.schedule_once(self.trigger_selection, 1) <NEW_LINE> touch.ud['trigger_selection'] = clock_event <NEW_LINE> <DEDENT> elif knspace.colors_screen.mode == 'selection': <NEW_LINE> <INDENT> self.parent.select_with_touch(self, touch) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def on_touch_move(self, touch): <NEW_LINE> <INDENT> if touch.grab_current is self and not self.collide_point(*touch.pos): <NEW_LINE> <INDENT> Clock.unschedule(touch.ud.get('trigger_selection')) <NEW_LINE> <DEDENT> if knspace.colors_screen.mode == 'selection': <NEW_LINE> <INDENT> if touch.grab_current is not self and self.collide_point(*touch.pos): <NEW_LINE> <INDENT> if not self.selected and self.parent.drag_mode == 'select': <NEW_LINE> <INDENT> self.parent.select_with_touch(self, touch) <NEW_LINE> <DEDENT> elif self.selected and self.parent.drag_mode == 'deselect': <NEW_LINE> <INDENT> self.parent.select_with_touch(self, touch) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def on_touch_up(self, touch): <NEW_LINE> <INDENT> Clock.unschedule(touch.ud.get('trigger_selection')) <NEW_LINE> if touch.grab_current is self and self.collide_point(*touch.pos): <NEW_LINE> <INDENT> if knspace.colors_screen.mode == 'normal': <NEW_LINE> <INDENT> screen_manager = App.get_running_app().root <NEW_LINE> knspace.lookup_screen.set_color(self.color) <NEW_LINE> screen_manager.current = 'lookup' <NEW_LINE> <DEDENT> <DEDENT> elif knspace.colors_screen.mode == 'selection': <NEW_LINE> <INDENT> if not self.parent.selected_nodes: <NEW_LINE> <INDENT> knspace.colors_screen.previous() <NEW_LINE> <DEDENT> <DEDENT> touch.ungrab(self) <NEW_LINE> <DEDENT> def on_selected(self, instance, selected): <NEW_LINE> <INDENT> if selected: <NEW_LINE> <INDENT> self.border_color = (0.8, 0.8, 0.8, 1) <NEW_LINE> self.border_width = 3 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.border_color = (0.3, 0.3, 0.3, 1) <NEW_LINE> self.border_width = 2 <NEW_LINE> <DEDENT> <DEDENT> def trigger_selection(self, dt): <NEW_LINE> <INDENT> knspace.colors_screen.mode = 'selection' <NEW_LINE> self.parent.select_with_touch(self) | Represents a color in a `Palette`. | 62599066f548e778e596ccf6 |
class TypeList(abc_type_objects.TypeList, osid_objects.OsidList): <NEW_LINE> <INDENT> def get_next_type(self): <NEW_LINE> <INDENT> return self.next() <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> from .primitives import Type <NEW_LINE> return self._get_next_object(Type) <NEW_LINE> <DEDENT> next_type = property(fget=get_next_type) <NEW_LINE> @utilities.arguments_not_none <NEW_LINE> def get_next_types(self, n): <NEW_LINE> <INDENT> return self._get_next_n(n) | Like all ``OsidLists,`` ``TypeList`` provides a means for accessing ``Type`` elements sequentially either one at
a time
or many at a time.
Examples: while (tl.hasNext()) { Type type = tl.getNextType(); }
or
while (tl.hasNext()) {
Type[] types = tl.getNextTypes(tl.available());
} | 62599066b7558d5895464ae5 |
class WriteDescriptorTests(SynchronousTestCase): <NEW_LINE> <INDENT> def test_kernelBufferFull(self): <NEW_LINE> <INDENT> descriptor = MemoryFile() <NEW_LINE> descriptor.write(b"hello, world") <NEW_LINE> self.assertIs(None, descriptor.doWrite()) | Tests for L{FileDescriptor}'s implementation of L{IWriteDescriptor}. | 6259906699fddb7c1ca63985 |
class ConvBertTokenizer(BertTokenizer): <NEW_LINE> <INDENT> vocab_files_names = VOCAB_FILES_NAMES <NEW_LINE> pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP <NEW_LINE> max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES <NEW_LINE> pretrained_init_configuration = PRETRAINED_INIT_CONFIGURATION | Construct a ConvBERT tokenizer. :class:`~transformers.ConvBertTokenizer` is identical to
:class:`~transformers.BertTokenizer` and runs end-to-end tokenization: punctuation splitting and wordpiece. Refer
to superclass :class:`~transformers.BertTokenizer` for usage examples and documentation concerning parameters. | 625990668e7ae83300eea7fa |
class UsersModuleError(AppError): <NEW_LINE> <INDENT> message = "Erro do serviço de usuários" | Base users module error | 625990669c8ee82313040d3e |
class Maze(object): <NEW_LINE> <INDENT> def __init__(self, maze_lines): <NEW_LINE> <INDENT> self.height = len(maze_lines) <NEW_LINE> self.width = len(maze_lines[0]) <NEW_LINE> self.maze_lines = maze_lines.copy() <NEW_LINE> for m in self.maze_lines: <NEW_LINE> <INDENT> assert len(m) == self.width, "Maze line wasn't the same width as the first line?" <NEW_LINE> <DEDENT> self.path_points = self._find_path_points() <NEW_LINE> <DEDENT> def _find_path_points(self): <NEW_LINE> <INDENT> path_points = {} <NEW_LINE> for y, m in enumerate(self.maze_lines): <NEW_LINE> <INDENT> for x, c in enumerate(m): <NEW_LINE> <INDENT> if str.isdigit(c): <NEW_LINE> <INDENT> path_points[int(c)] = (x, y) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return path_points <NEW_LINE> <DEDENT> def is_wall_or_out_of_bounds(self, x, y): <NEW_LINE> <INDENT> return x < 0 or x >= self.width or y < 0 or y >= self.height or self.maze_lines[y][x] == '#' | Class holding the maze data. Mazes are described as a list of strings (from top to bottom)
containing the characters '#' for walls, '0' to '9' for points to visit (and zero the start
position), and any other character meaning that block is accessible. | 62599066d486a94d0ba2d72b |
class Region(Base): <NEW_LINE> <INDENT> name_std = models.CharField(max_length=200) <NEW_LINE> country = models.ForeignKey(Country, related_name="regions", on_delete = models.CASCADE) <NEW_LINE> geoname_code = models.CharField(max_length=50, null=True, blank=True, db_index=True) <NEW_LINE> is_subregion = models.BooleanField(default=False) <NEW_LINE> parent = models.ForeignKey("self", null=True, default=None, related_name="subregions") <NEW_LINE> objects = RegionManager() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _('region/state') <NEW_LINE> verbose_name_plural = _('regions/states') <NEW_LINE> ordering = ['name'] <NEW_LINE> <DEDENT> def get_display_name(self): <NEW_LINE> <INDENT> return u'%s, %s' % (self.name, self.country.name) <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> return { "id": self.pk, "name": self.name, "slug": self.slug, "geoname_id": self.geoname_id, } | Region/State model.
Can search regions or subregions with some helper
methods introduced to the related manager.
Examples::
>>> c = Country.objects.get(code2="ES")
>>> c.regions.only_subregions()
[<Region: Ceuta>, <Region: Melilla>, <Region: Murcia>, <Region: Provincia de Castello>, '...']
>>> c.regions.only_regions()
[<Region: Andalusia>, <Region: Aragon>, <Region: Asturias>, <Region: Balearic Islands>, '...'] | 62599066cc0a2c111447c686 |
class ResponseContainerPagedSavedSearch(object): <NEW_LINE> <INDENT> swagger_types = { 'response': 'PagedSavedSearch', 'status': 'ResponseStatus' } <NEW_LINE> attribute_map = { 'response': 'response', 'status': 'status' } <NEW_LINE> def __init__(self, response=None, status=None, _configuration=None): <NEW_LINE> <INDENT> if _configuration is None: <NEW_LINE> <INDENT> _configuration = Configuration() <NEW_LINE> <DEDENT> self._configuration = _configuration <NEW_LINE> self._response = None <NEW_LINE> self._status = None <NEW_LINE> self.discriminator = None <NEW_LINE> if response is not None: <NEW_LINE> <INDENT> self.response = response <NEW_LINE> <DEDENT> self.status = status <NEW_LINE> <DEDENT> @property <NEW_LINE> def response(self): <NEW_LINE> <INDENT> return self._response <NEW_LINE> <DEDENT> @response.setter <NEW_LINE> def response(self, response): <NEW_LINE> <INDENT> self._response = response <NEW_LINE> <DEDENT> @property <NEW_LINE> def status(self): <NEW_LINE> <INDENT> return self._status <NEW_LINE> <DEDENT> @status.setter <NEW_LINE> def status(self, status): <NEW_LINE> <INDENT> if self._configuration.client_side_validation and status is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `status`, must not be `None`") <NEW_LINE> <DEDENT> self._status = status <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(ResponseContainerPagedSavedSearch, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, ResponseContainerPagedSavedSearch): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, ResponseContainerPagedSavedSearch): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict() | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62599066f548e778e596ccf7 |
class AbstractRewriter(object): <NEW_LINE> <INDENT> __metaclass__ = abc.ABCMeta <NEW_LINE> def __init__(self, nodes, params): <NEW_LINE> <INDENT> self.nodes = nodes <NEW_LINE> self.params = params <NEW_LINE> self.timings = OrderedDict() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> state = State(self.nodes) <NEW_LINE> self._pipeline(state) <NEW_LINE> self._summary() <NEW_LINE> return state <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def _pipeline(self, state): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def _summary(self): <NEW_LINE> <INDENT> row = "%s [elapsed: %.2f]" <NEW_LINE> out = " >>\n ".join(row % ("".join(filter(lambda c: not c.isdigit(), k[1:])), v) for k, v in self.timings.items()) <NEW_LINE> elapsed = sum(self.timings.values()) <NEW_LINE> dle("%s\n [Total elapsed: %.2f s]" % (out, elapsed)) | Transform Iteration/Expression trees to generate high performance C.
This is just an abstract class. Actual transformers should implement the
abstract method ``_pipeline``, which performs a sequence of AST transformations. | 625990661f037a2d8b9e5421 |
class MainPage(webapp2.RequestHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> params = parse_params(self.request) <NEW_LINE> template = JINJA_ENVIRONMENT.get_template('index.html') <NEW_LINE> image_url = '/zombies/draw_simulation?{}'.format(urllib.urlencode(params)) <NEW_LINE> params['image_url'] = image_url <NEW_LINE> self.response.write(template.render(params)) | A handler for showing the simulator form. | 62599066a8370b77170f1b3c |
class ValidationError(object): <NEW_LINE> <INDENT> def __init__(self, obj, msg, type='error'): <NEW_LINE> <INDENT> self.obj = obj <NEW_LINE> self.msg = msg <NEW_LINE> self.type = type <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_warning(self): <NEW_LINE> <INDENT> return self.type == 'warning' <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_error(self): <NEW_LINE> <INDENT> return self.type == 'error' <NEW_LINE> <DEDENT> @property <NEW_LINE> def path(self): <NEW_LINE> <INDENT> if isinstance(self.obj, odml.value.Value): <NEW_LINE> <INDENT> return self.obj.parent.get_path() <NEW_LINE> <DEDENT> return self.obj.get_path() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<ValidationError(%s):%s \"%s\">" % (self.type, self.obj, self.msg) | Represents an error found in the validation process
The error is bound to an odML-object (*obj*) or a list of those
and contains a message and a type which may be one of:
'error', 'warning', 'info' | 625990667b25080760ed8898 |
class TestGameplay: <NEW_LINE> <INDENT> def _run_gameplay_test(self, moves, result): <NEW_LINE> <INDENT> game = Game() <NEW_LINE> game.reset() <NEW_LINE> for move in moves: <NEW_LINE> <INDENT> move_made = game.move(move) <NEW_LINE> assert move_made <NEW_LINE> <DEDENT> assert game.game_over() <NEW_LINE> assert game.result == result <NEW_LINE> <DEDENT> def test_that_white_can_win(self): <NEW_LINE> <INDENT> moves = [ "26-23", "6-10", "23-18", "10-14", "27-23", "7-11", "23-19", "11-15", "25-22", "2-6", "28-24", "3-7", "18-9", "9-2", "19-10", "10-3", "3-12", "22-18", "1-6", "2-9", "24-19", "5-14", "4-8", "18-9", "12-3", ] <NEW_LINE> self._run_gameplay_test(moves, "white") <NEW_LINE> <DEDENT> def test_that_stalemate_can_occur_after_forty_moves(self): <NEW_LINE> <INDENT> game = Game() <NEW_LINE> game.reset() <NEW_LINE> moves = [ "26-23", "6-9", "23-18", "9-13", "30-26", "2-6", "26-23", "6-9", "18-14", "13-17", "14-10", "17-22", "10-6", "22-26", "6-2", "26-30", ] <NEW_LINE> for move in moves: <NEW_LINE> <INDENT> move_made = game.move(move) <NEW_LINE> assert move_made <NEW_LINE> <DEDENT> repeated_moves = ["2-6", "30-26", "6-2", "26-30"] <NEW_LINE> for _ in range(6): <NEW_LINE> <INDENT> for move in repeated_moves: <NEW_LINE> <INDENT> move_made = game.move(move) <NEW_LINE> assert move_made <NEW_LINE> <DEDENT> <DEDENT> assert game.game_over() <NEW_LINE> assert game.result == "draw" | The class contains tests running through entire games.
The purpose of this test class is to check that macro functionality, such as
moving, promotion, taking, stalemates, etc., works in practice.
class CapturedTrace: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.buffer = StringIO() <NEW_LINE> self.handler = logging.StreamHandler(self.buffer) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self._handlers = logger.handlers <NEW_LINE> self.buffer = StringIO() <NEW_LINE> logger.handlers = [logging.StreamHandler(self.buffer)] <NEW_LINE> <DEDENT> def __exit__(self, type, value, traceback): <NEW_LINE> <INDENT> logger.handlers = self._handlers <NEW_LINE> <DEDENT> def getvalue(self): <NEW_LINE> <INDENT> log = self.buffer.getvalue() <NEW_LINE> log = log.replace(__name__ + '.','') <NEW_LINE> return log | Capture the trace temporarily for validation. | 625990663539df3088ecda0c |
class PlantVillage(tfds.core.GeneratorBasedBuilder): <NEW_LINE> <INDENT> VERSION = tfds.core.Version("1.0.0") <NEW_LINE> def _info(self): <NEW_LINE> <INDENT> return tfds.core.DatasetInfo( builder=self, description=_DESCRIPTION, features=tfds.features.FeaturesDict({ "image": tfds.features.Image(), "image/filename": tfds.features.Text(), "label": tfds.features.ClassLabel(names=_LABELS) }), supervised_keys=("image", "label"), homepage="https://arxiv.org/abs/1511.08060", citation=_CITATION, ) <NEW_LINE> <DEDENT> def _split_generators(self, dl_manager): <NEW_LINE> <INDENT> path = dl_manager.download_and_extract(_URL) <NEW_LINE> return [ tfds.core.SplitGenerator( name=tfds.Split.TRAIN, gen_kwargs={"datapath": path}) ] <NEW_LINE> <DEDENT> def _generate_examples(self, datapath): <NEW_LINE> <INDENT> for label in _LABELS: <NEW_LINE> <INDENT> fuzzy_label = label.replace(" ", "[_ ]").replace(",", "[_,]") <NEW_LINE> glob_path = os.path.join( datapath, "Plant_leave_diseases_dataset_without_augmentation", fuzzy_label, "*.[jJ][pP][gG]") <NEW_LINE> for fpath in tf.io.gfile.glob(glob_path): <NEW_LINE> <INDENT> fname = os.path.basename(fpath) <NEW_LINE> record = { "image": fpath, "image/filename": fname, "label": label, } <NEW_LINE> yield "{}/{}".format(label, fname), record | The PlantVillage dataset of healthy and unhealthy leaves. | 62599066009cb60464d02ca6 |
class ClusterTestThingy(Test): <NEW_LINE> <INDENT> @cluster(num_nodes=10) <NEW_LINE> def test_bad_num_nodes(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @cluster(num_nodes=0) <NEW_LINE> def test_good_num_nodes(self): <NEW_LINE> <INDENT> pass | Fake ducktape test class | 625990663317a56b869bf0f9 |
class InstanceNorm1d(torch.nn.InstanceNorm1d): <NEW_LINE> <INDENT> def __init__(self, num_features, weight, bias, scale, zero_point, eps=1e-5, momentum=0.1, affine=False, track_running_stats=False): <NEW_LINE> <INDENT> super(InstanceNorm1d, self).__init__( num_features, eps, momentum, affine, track_running_stats) <NEW_LINE> self.weight = weight <NEW_LINE> self.bias = bias <NEW_LINE> self.register_buffer('scale', torch.tensor(scale)) <NEW_LINE> self.register_buffer('zero_point', torch.tensor(zero_point)) <NEW_LINE> <DEDENT> def forward(self, input): <NEW_LINE> <INDENT> return torch.ops.quantized.instance_norm( input, self.weight, self.bias, self.eps, self.scale, self.zero_point) <NEW_LINE> <DEDENT> def _get_name(self): <NEW_LINE> <INDENT> return 'QuantizedInstanceNorm1d' <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_float(cls, mod): <NEW_LINE> <INDENT> scale, zero_point = mod.activation_post_process.calculate_qparams() <NEW_LINE> new_mod = cls( mod.num_features, mod.weight, mod.bias, float(scale), int(zero_point), mod.eps, mod.affine) <NEW_LINE> return new_mod | This is the quantized version of :class:`~torch.nn.InstanceNorm1d`.
Additional args:
* **scale** - quantization scale of the output, type: double.
* **zero_point** - quantization zero point of the output, type: long. | 625990667047854f46340b22 |
class EchoWithData(base_tests.SimpleProtocol): <NEW_LINE> <INDENT> def runTest(self): <NEW_LINE> <INDENT> logging.info("Running EchoWithData test") <NEW_LINE> logging.info("Sending Echo With Data ...") <NEW_LINE> request = ofp.message.echo_request() <NEW_LINE> request.data = 'OpenFlow Will Rule The World' <NEW_LINE> self.controller.message_send(request) <NEW_LINE> logging.info("Waiting for Echo Reply with data field copied from Echo Request") <NEW_LINE> (response, pkt) = self.controller.poll(exp_msg=ofp.OFPT_ECHO_REPLY, timeout=1) <NEW_LINE> self.assertTrue(response is not None, "Did not get echo reply (with data)") <NEW_LINE> self.assertEqual(response.type, ofp.OFPT_ECHO_REPLY, 'Response is not echo_reply') <NEW_LINE> self.assertEqual(request.xid, response.xid, 'Response xid does not match the request Xid') <NEW_LINE> self.assertEqual(request.data, response.data, 'Response data does not match request data') | Verify if OFPT_ECHO_REQUEST has data field,
switch responds back with OFPT_ECHO_REPLY with data field copied into it. | 625990665fdd1c0f98e5f6f2 |
class RequestBody: <NEW_LINE> <INDENT> def __init__(self, name: str) -> None: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> def to_request(self) -> Dict[str, Any]: <NEW_LINE> <INDENT> return request(self.name) | Base class for RPC request data. | 625990667d847024c075db46 |
class SEMScaleBar(WorkingScaleBar): <NEW_LINE> <INDENT> ref_width = 114e-3 <NEW_LINE> def __init__(self, mag=0, num_px=128, width=5, brush=None, pen=None, offset=None): <NEW_LINE> <INDENT> frame_size = self.ref_width/mag <NEW_LINE> val = frame_size/5 <NEW_LINE> ord = math.log10(val) <NEW_LINE> val = 10**math.floor(ord) * round(10**(round((ord - math.floor(ord)) * 10) / 10)) <NEW_LINE> size = val*num_px/frame_size <NEW_LINE> if mag < 1: <NEW_LINE> <INDENT> mag = 1 <NEW_LINE> suffix = 'px' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> suffix = 'm' <NEW_LINE> <DEDENT> WorkingScaleBar.__init__(self, size, val=val, width=width, pen=pen, brush=brush, suffix=suffix, offset=offset) | pyqtgraph.ScaleBar which scales properly and chooses a bar size based on
a given image size and magnification.
See https://github.com/pyqtgraph/pyqtgraph/issues/437 for details on the
scaling problem with pyqtgraph.ScaleBar. Credit to user sjmvm for the
solution.
Attributes:
ref_width (float): Size in meters of the reference used for
magnification calculation. Set to Polaroid 545 width
of 11.4 cm.
size (float): The width of the scalebar in view pixels.
_width (int): The width of the scale bar.
brush
pen
offset | 625990666e29344779b01dbe |
class DuplicateKeyError(BuildCacheBaseException): <NEW_LINE> <INDENT> def __init__(self, fromFile, lineNo, keyType, keyValue, prevLineNo): <NEW_LINE> <INDENT> super().__init__(fromFile, lineNo, "Second occurrance of {keytype} \"{keyval}\", " "previous entry at line {prev}.".format( keytype=keyType, keyval=keyValue, prev=prevLineNo )) | Raised when an item is being redefined. | 62599066fff4ab517ebcef89 |
class CondaEnvExistsError(CondaError): <NEW_LINE> <INDENT> pass | Conda environment already exists | 625990669c8ee82313040d3f |
class rwalk(): <NEW_LINE> <INDENT> def __init__(self,steps,walkers): <NEW_LINE> <INDENT> self.steps=steps <NEW_LINE> self.walkers=walkers <NEW_LINE> self.x=np.zeros(int(self.steps)) <NEW_LINE> self.t=np.linspace(-self.steps,self.steps,2*self.steps+1) <NEW_LINE> self.x2=[[0]*(2*self.steps+1) for i in range(2)] <NEW_LINE> self.x3=0 <NEW_LINE> <DEDENT> def calculate(self): <NEW_LINE> <INDENT> for n in range(int(self.walkers)): <NEW_LINE> <INDENT> self.x3=0 <NEW_LINE> for i in range(int(self.steps)): <NEW_LINE> <INDENT> temp1=random.uniform(0,1) <NEW_LINE> if temp1<=0.5: <NEW_LINE> <INDENT> self.x3=self.x3-1 <NEW_LINE> <DEDENT> if temp1>0.5: <NEW_LINE> <INDENT> self.x3=self.x3+1 <NEW_LINE> <DEDENT> <DEDENT> y=self.x3 <NEW_LINE> self.x2[1][int(self.steps+y)]+=1 <NEW_LINE> <DEDENT> x4=[] <NEW_LINE> for n in range(len(self.x2[1])): <NEW_LINE> <INDENT> x4.append(self.x2[1][n]/self.walkers) <NEW_LINE> <DEDENT> return [self.t,x4] | random-walker distributions in one dimension | 6259906676e4537e8c3f0cf1 |
class rule_file_seq_equal(rule_seq_match): <NEW_LINE> <INDENT> def __init__(self, line, text): <NEW_LINE> <INDENT> rule_seq_match.__init__(self, line, re.escape(text)) <NEW_LINE> <DEDENT> def runs_to_redact(self, fi): <NEW_LINE> <INDENT> return fi.byte_runs() | Redacts any file containing a sequence that equals the given string | 625990660c0af96317c57916
class InvalidThirdPositionLetterValidationError(PostcodeValidationError): <NEW_LINE> <INDENT> pass | Given postcode third position letter is invalid | 6259906691f36d47f2231a46 |
class EnquiryRequestor(BaseContent): <NEW_LINE> <INDENT> security = ClassSecurityInfo() <NEW_LINE> implements(IEnquiryRequestor) <NEW_LINE> archetype_name = 'EnquiryRequestor' <NEW_LINE> meta_type = 'EnquiryRequestor' <NEW_LINE> portal_type = 'EnquiryRequestor' <NEW_LINE> allowed_content_types = [] <NEW_LINE> filter_content_types = 0 <NEW_LINE> global_allow = 0 <NEW_LINE> immediate_view = 'base_view' <NEW_LINE> default_view = 'base_view' <NEW_LINE> suppl_views = () <NEW_LINE> typeDescription = "EnquiryRequestor" <NEW_LINE> typeDescMsgId = 'description_edit_enquiryrequestor' <NEW_LINE> _at_rename_after_creation = True <NEW_LINE> schema = EnquiryRequestor_schema <NEW_LINE> security.declarePublic('getPassword') <NEW_LINE> def getPassword(self): <NEW_LINE> <INDENT> return self.schema['password'].get(self) | Requestor
| 62599066a219f33f346c7f75 |
class GoogleSearch: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.base = 'https://www.google.com.br/search?q=' <NEW_LINE> self.mode = "&sorce=lmns" <NEW_LINE> self.soup = None <NEW_LINE> self.__limit = 3 <NEW_LINE> <DEDENT> def search(self, text): <NEW_LINE> <INDENT> snippets = [] <NEW_LINE> try: <NEW_LINE> <INDENT> r = requests.get(self.base + text.replace(" ", "+") + self.mode) <NEW_LINE> self.soup = BeautifulSoup(r.text, "lxml") <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return text <NEW_LINE> <DEDENT> cont_results = self.soup.body.find(id="resultStats").text <NEW_LINE> snippets = self.__get_snippets() <NEW_LINE> return cont_results, snippets <NEW_LINE> <DEDENT> def __get_snippets(self): <NEW_LINE> <INDENT> snnipets = [] <NEW_LINE> results = self.soup.body.find_all("div", class_="g") <NEW_LINE> for index, value in enumerate(results): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> aux = Snippet(index + 30, value.find("a").text, value.find("span", class_="st").text, URL_REGEX.findall(value.find("a")["href"])[0]) <NEW_LINE> snnipets.append(aux) <NEW_LINE> if len(snnipets) == self.__limit: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> return snnipets | Class to make searches on Google and get some results on the first page. | 62599066d268445f2663a714
class ArticleRevision(BaseRevisionMixin, models.Model): <NEW_LINE> <INDENT> article = models.ForeignKey('Article', on_delete=models.CASCADE, verbose_name=_(u'article')) <NEW_LINE> content = models.TextField(blank=True, verbose_name=_(u'article contents')) <NEW_LINE> title = models.CharField( max_length=512, verbose_name=_(u'article title'), null=False, blank=False, help_text=_(u'Each revision contains a title field that must be filled out, ' u'even if the title has not changed') ) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return "%s (%d)" % (self.title, self.revision_number) <NEW_LINE> <DEDENT> def inherit_predecessor(self, article): <NEW_LINE> <INDENT> predecessor = article.current_revision <NEW_LINE> self.article = predecessor.article <NEW_LINE> self.content = predecessor.content <NEW_LINE> self.title = predecessor.title <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> if (not self.id and not self.previous_revision and self.article and self.article.current_revision and self.article.current_revision != self): <NEW_LINE> <INDENT> self.previous_revision = self.article.current_revision <NEW_LINE> <DEDENT> if not self.revision_number: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> previous_revision = self.article.articlerevision_set.latest() <NEW_LINE> self.revision_number = previous_revision.revision_number + 1 <NEW_LINE> <DEDENT> except ArticleRevision.DoesNotExist: <NEW_LINE> <INDENT> self.revision_number = 1 <NEW_LINE> <DEDENT> <DEDENT> super(ArticleRevision, self).save(*args, **kwargs) <NEW_LINE> if not self.article.current_revision: <NEW_LINE> <INDENT> self.article.current_revision = self <NEW_LINE> self.article.save() <NEW_LINE> <DEDENT> <DEDENT> class Meta: <NEW_LINE> <INDENT> app_label = settings.APP_LABEL <NEW_LINE> get_latest_by = 'revision_number' <NEW_LINE> ordering = ('created',) <NEW_LINE> unique_together = ('article', 'revision_number') | This is where main revision data is stored. To make it easier to
copy, do NEVER create m2m relationships. | 62599066d7e4931a7ef3d73d |
class IndexExpression(object): <NEW_LINE> <INDENT> openapi_types = { 'type': 'str', 'array': 'Expression', 'index': 'Expression' } <NEW_LINE> attribute_map = { 'type': 'type', 'array': 'array', 'index': 'index' } <NEW_LINE> def __init__(self, type=None, array=None, index=None): <NEW_LINE> <INDENT> self._type = None <NEW_LINE> self._array = None <NEW_LINE> self._index = None <NEW_LINE> self.discriminator = None <NEW_LINE> if type is not None: <NEW_LINE> <INDENT> self.type = type <NEW_LINE> <DEDENT> if array is not None: <NEW_LINE> <INDENT> self.array = array <NEW_LINE> <DEDENT> if index is not None: <NEW_LINE> <INDENT> self.index = index <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def type(self): <NEW_LINE> <INDENT> return self._type <NEW_LINE> <DEDENT> @type.setter <NEW_LINE> def type(self, type): <NEW_LINE> <INDENT> self._type = type <NEW_LINE> <DEDENT> @property <NEW_LINE> def array(self): <NEW_LINE> <INDENT> return self._array <NEW_LINE> <DEDENT> @array.setter <NEW_LINE> def array(self, array): <NEW_LINE> <INDENT> self._array = array <NEW_LINE> <DEDENT> @property <NEW_LINE> def index(self): <NEW_LINE> <INDENT> return self._index <NEW_LINE> <DEDENT> @index.setter <NEW_LINE> def index(self, index): <NEW_LINE> <INDENT> self._index = index <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, IndexExpression): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually. | 625990661f037a2d8b9e5422 |
class Uninterrupt: <NEW_LINE> <INDENT> def __init__(self, sigs=(signal.SIGINT, signal.SIGTERM), verbose=False): <NEW_LINE> <INDENT> self.sigs = sigs <NEW_LINE> self.verbose = verbose <NEW_LINE> self.interrupted = False <NEW_LINE> self.orig_handlers = None <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> if self.orig_handlers is not None: <NEW_LINE> <INDENT> raise ValueError("Can only enter `Uninterrupt` once!") <NEW_LINE> <DEDENT> self.interrupted = False <NEW_LINE> self.orig_handlers = [signal.getsignal(sig) for sig in self.sigs] <NEW_LINE> def handler(signum, frame): <NEW_LINE> <INDENT> del signum <NEW_LINE> del frame <NEW_LINE> self.release() <NEW_LINE> self.interrupted = True <NEW_LINE> if self.verbose: <NEW_LINE> <INDENT> print("Interruption scheduled...", flush=True) <NEW_LINE> <DEDENT> <DEDENT> for sig in self.sigs: <NEW_LINE> <INDENT> signal.signal(sig, handler) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, type_, value, tb): <NEW_LINE> <INDENT> self.release() <NEW_LINE> <DEDENT> def release(self): <NEW_LINE> <INDENT> if self.orig_handlers is not None: <NEW_LINE> <INDENT> for sig, orig in zip(self.sigs, self.orig_handlers): <NEW_LINE> <INDENT> signal.signal(sig, orig) <NEW_LINE> <DEDENT> self.orig_handlers = None | Context manager to gracefully handle interrupts.
Use as:
with Uninterrupt() as u:
while not u.interrupted:
# train | 6259906676e4537e8c3f0cf2 |
class AuthenticateApiHandlerTests(VideoXBlockTestBase): <NEW_LINE> <INDENT> @patch.object(VideoXBlock, 'authenticate_video_api') <NEW_LINE> def test_auth_video_api_handler_delegates_call(self, auth_video_api_mock): <NEW_LINE> <INDENT> request_mock = arrange_request_mock('"test-token-123"') <NEW_LINE> auth_video_api_mock.return_value = {}, '' <NEW_LINE> result_response = self.xblock.authenticate_video_api_handler(request_mock) <NEW_LINE> result = result_response.body <NEW_LINE> self.assertEqual( result, bytes(json.dumps({'success_message': 'Successfully authenticated to the video platform.'}), 'utf-8') ) <NEW_LINE> auth_video_api_mock.assert_called_once_with('test-token-123') | Test cases for `VideoXBlock.authenticate_video_api_handler`. | 6259906699cbb53fe6832653 |
class UNet(nn.Module): <NEW_LINE> <INDENT> def __init__(self, num_channels: int=3, num_classes: int=2): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.num_channels = num_channels <NEW_LINE> self.num_classes = num_classes <NEW_LINE> self.in_conv = nn.Sequential( ConvBlock(num_channels, 64), ConvBlock(64, 64) ) <NEW_LINE> self.down1 = _DownBlock(64, 128) <NEW_LINE> self.down2 = _DownBlock(128, 256) <NEW_LINE> self.down3 = _DownBlock(256, 512) <NEW_LINE> self.center = nn.Sequential( _DownBlock(512, 1024), nn.ConvTranspose2d(1024, 512, 2, stride=2) ) <NEW_LINE> self.up1 = _UpBlock(512, 256) <NEW_LINE> self.up2 = _UpBlock(256, 128) <NEW_LINE> self.up3 = _UpBlock(128, 64) <NEW_LINE> self.out_conv = nn.Sequential( ConvBlock(128, 64), ConvBlock(64, 64), nn.Conv2d(64, num_classes, kernel_size=1, padding=0) ) <NEW_LINE> <DEDENT> def forward(self, x: Tensor): <NEW_LINE> <INDENT> x1 = self.in_conv(x) <NEW_LINE> x2 = self.down1(x1) <NEW_LINE> x3 = self.down2(x2) <NEW_LINE> x4 = self.down3(x3) <NEW_LINE> x = self.center(x4) <NEW_LINE> x = self.up1(x, x4) <NEW_LINE> x = self.up2(x, x3) <NEW_LINE> x = self.up3(x, x2) <NEW_LINE> z = torch.cat((x1, x), dim=1) <NEW_LINE> out = self.out_conv(z) <NEW_LINE> return out | The U-Net architecture.
See https://arxiv.org/pdf/1505.04597.pdf | 6259906671ff763f4b5e8f14 |
class DisambiguationTest(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.client = Client() <NEW_LINE> self.a = Space.objects.create(name='a', slug='a') <NEW_LINE> self.b = Space.objects.create(name='b', slug='b') <NEW_LINE> Property.objects.create(name='a', slug='a') <NEW_LINE> self.c = Property.objects.create(name='c', slug='c') <NEW_LINE> <DEDENT> def test_single_objects(self): <NEW_LINE> <INDENT> response = self.client.get('/brubeck/%s/' % self.b.slug) <NEW_LINE> self.assertRedirects(response, self.b.get_absolute_url()) <NEW_LINE> response = self.client.get('/brubeck/%s/' % self.c.slug) <NEW_LINE> self.assertRedirects(response, self.c.get_absolute_url()) <NEW_LINE> <DEDENT> def test_non_existent(self): <NEW_LINE> <INDENT> response = self.client.get('/brubeck/non-existent-slug/') <NEW_LINE> self.assertEqual(response.status_code, 404) <NEW_LINE> <DEDENT> def test_disambiguation(self): <NEW_LINE> <INDENT> response = self.client.get('/brubeck/%s/' % self.a.slug) <NEW_LINE> self.assertTemplateUsed(response, 'brubeck/disambiguation.html') | Tests that the slug disambiguation view works. Note that (by necessity)
ambiguous urls won't be produced by reversing, so this test assumes
that brubeck urls are set up under '/brubeck/' | 625990663539df3088ecda0e |
class Error(Model): <NEW_LINE> <INDENT> _validation = { 'code': {'required': True}, 'sub_code': {'readonly': True}, 'message': {'required': True}, 'more_details': {'readonly': True}, 'parameter': {'readonly': True}, 'value': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'sub_code': {'key': 'subCode', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'more_details': {'key': 'moreDetails', 'type': 'str'}, 'parameter': {'key': 'parameter', 'type': 'str'}, 'value': {'key': 'value', 'type': 'str'}, } <NEW_LINE> def __init__(self, *, message: str, code="None", **kwargs) -> None: <NEW_LINE> <INDENT> super(Error, self).__init__(**kwargs) <NEW_LINE> self.code = code <NEW_LINE> self.sub_code = None <NEW_LINE> self.message = message <NEW_LINE> self.more_details = None <NEW_LINE> self.parameter = None <NEW_LINE> self.value = None | Defines the error that occurred.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:param code: Required. The error code that identifies the category of
error. Possible values include: 'None', 'ServerError', 'InvalidRequest',
'RateLimitExceeded', 'InvalidAuthorization', 'InsufficientAuthorization'.
Default value: "None" .
:type code: str or
~azure.cognitiveservices.search.websearch.models.ErrorCode
:ivar sub_code: The error code that further helps to identify the error.
Possible values include: 'UnexpectedError', 'ResourceError',
'NotImplemented', 'ParameterMissing', 'ParameterInvalidValue',
'HttpNotAllowed', 'Blocked', 'AuthorizationMissing',
'AuthorizationRedundancy', 'AuthorizationDisabled', 'AuthorizationExpired'
:vartype sub_code: str or
~azure.cognitiveservices.search.websearch.models.ErrorSubCode
:param message: Required. A description of the error.
:type message: str
:ivar more_details: A description that provides additional information
about the error.
:vartype more_details: str
:ivar parameter: The parameter in the request that caused the error.
:vartype parameter: str
:ivar value: The parameter's value in the request that was not valid.
:vartype value: str | 625990667047854f46340b24 |
class AssignChore(APIView): <NEW_LINE> <INDENT> permission_classes = (permissions.IsAuthenticated, IsAccountActivated) <NEW_LINE> def get_object(self, pk): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return Chore.objects.get(pk=pk) <NEW_LINE> <DEDENT> except Chore.DoesNotExist: <NEW_LINE> <INDENT> raise Http404 <NEW_LINE> <DEDENT> <DEDENT> def put(self, request, pk, format=None): <NEW_LINE> <INDENT> chore = self.get_object(pk) <NEW_LINE> if chore.assigned is True: <NEW_LINE> <INDENT> return Response('"'+chore.name+'"'+' is already assigned', status=status.HTTP_412_PRECONDITION_FAILED) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> seri = ChoreSerializer(chore) <NEW_LINE> serializer = ChoreSerializer(chore, data=seri.data) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> serializer.save(assigned_to=self.request.user, assigned=True) <NEW_LINE> return Response(serializer.data) <NEW_LINE> <DEDENT> return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) | Assign a Chore | 625990665fdd1c0f98e5f6f4 |
class TestCrossBox(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.rectangle = Mock(x=1, y=2, width=3, height=4) <NEW_LINE> self.expected_vertices = ( 1, 2, 4, 2, 1, 2, 1, 6, 4, 2, 1, 6, 1, 6, 4, 6, 4, 6, 1, 2, 4, 6, 4, 2) <NEW_LINE> self.vertex_count = (len(self.expected_vertices) // 2) <NEW_LINE> self.default_color = (0, 0, 0) * self.vertex_count <NEW_LINE> self.new_coordinates = (2, 4) <NEW_LINE> self.new_expected_vertices = [ 2, 4, 5, 4, 2, 4, 2, 8, 5, 4, 2, 8, 2, 8, 5, 8, 5, 8, 2, 4, 5, 8, 5, 4] <NEW_LINE> <DEDENT> @patch('engine.graphics.cross_box.vertex_list') <NEW_LINE> def test_creates_vertex_list_without_batch(self, mock_vertex_list): <NEW_LINE> <INDENT> CrossBox(self.rectangle) <NEW_LINE> mock_vertex_list.assert_called_once_with( self.vertex_count, ('v2i', self.expected_vertices), ('c3B', self.default_color)) <NEW_LINE> <DEDENT> @patch('engine.graphics.cross_box.vertex_list') <NEW_LINE> def test_sets_color_of_vertex_list(self, mock_vertex_list): <NEW_LINE> <INDENT> color = (1, 2, 3) <NEW_LINE> expected_colors = color * self.vertex_count <NEW_LINE> CrossBox(self.rectangle, color) <NEW_LINE> mock_vertex_list.assert_called_once_with( self.vertex_count, ('v2i', self.expected_vertices), ('c3B', expected_colors)) <NEW_LINE> <DEDENT> @patch('engine.graphics.cross_box.vertex_list') <NEW_LINE> def test_repositions_vertex_list(self, mock_vertex_list): <NEW_LINE> <INDENT> mock_vertex_list.return_value.vertices = self.expected_vertices <NEW_LINE> cross_box = CrossBox(self.rectangle) <NEW_LINE> cross_box.set_position(self.new_coordinates) <NEW_LINE> self.assertEqual( self.new_expected_vertices, mock_vertex_list.return_value.vertices) <NEW_LINE> <DEDENT> def test_uses_batch(self): <NEW_LINE> <INDENT> batch = Mock() <NEW_LINE> CrossBox(self.rectangle, batch=batch) <NEW_LINE> batch.add.assert_called_once_with( self.vertex_count, GL_LINES, None, ('v2i', self.expected_vertices), ('c3B', self.default_color)) <NEW_LINE> <DEDENT> def test_sets_color_with_batch(self): <NEW_LINE> <INDENT> color = (1, 2, 3) <NEW_LINE> expected_colors = color * self.vertex_count <NEW_LINE> batch = Mock() <NEW_LINE> CrossBox(self.rectangle, color, batch) <NEW_LINE> batch.add.assert_called_once_with( self.vertex_count, GL_LINES, None, ('v2i', self.expected_vertices), ('c3B', expected_colors)) <NEW_LINE> <DEDENT> def test_repositions_batch(self): <NEW_LINE> <INDENT> batch = Mock() <NEW_LINE> batch.add.return_value.vertices = self.expected_vertices <NEW_LINE> cross_box = CrossBox(self.rectangle, batch=batch) <NEW_LINE> cross_box.set_position(self.new_coordinates) <NEW_LINE> self.assertEqual( self.new_expected_vertices, batch.add.return_value.vertices) | Test rendering of cross box graphics. | 625990667d43ff2487427fc8 |
class TauClient(Tau): <NEW_LINE> <INDENT> def __init__(self, host='localhost', port=6283): <NEW_LINE> <INDENT> self._backend = ServerBackend(host, port) | Shortcut for Tau(ServerBackend(...)). | 6259906656b00c62f0fb403d |
class TaskFinished: <NEW_LINE> <INDENT> def __init__(self, request: Request, task: WebsaunaTask): <NEW_LINE> <INDENT> self.request = request <NEW_LINE> self.task = task | This task is fired when a Celery task finishes, regardless of whether the task failed or not.
Intended to be used to clean up thread current context sensitive data.
This is called before ``request._process_finished_callbacks()`` is called. This is **not** called when tasks are executed eagerly. | 62599066627d3e7fe0e085fa |
class Me(Item): <NEW_LINE> <INDENT> def __call__(self, **kwargs): <NEW_LINE> <INDENT> return self.get(**kwargs) <NEW_LINE> <DEDENT> URL = Item.prepare_url('me') <NEW_LINE> def get(self, **kwargs): <NEW_LINE> <INDENT> kwargs['url'] = self.URL <NEW_LINE> return self.transport.set_method("GET").request(**kwargs) | Get private information
Required scope - "private_data"|"private_data_email"|"private_data_phone" | 6259906644b2445a339b7518 |
@tvm._ffi.register_object("auto_scheduler.ProgramBuilder") <NEW_LINE> class ProgramBuilder(Object): <NEW_LINE> <INDENT> def build(self, measure_inputs, verbose=1): <NEW_LINE> <INDENT> return _ffi_api.ProgramBuilderBuild(self, measure_inputs, verbose) | The base class of ProgramBuilders. | 625990668e7ae83300eea7ff |
class EquipmentManagerBase: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def getPossibleEquipment(): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def setPossibleEquipment( equipmentList ): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def insertEquipment( equipmentName ): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def removeEquipment( equipmentName ): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def removalIsPossible( equipmentName ): <NEW_LINE> <INDENT> pass | Generic equipment manager, Data Access Layer independent.
It manages the list of rooms' possible equipment.
Equipment is just a string.
The list of possible equipment is defined by CRBS admin.
This list is used mainly for generating the user web interface.
(The system must know what equipment a user may ask for in order to generate [v] checkboxes) | 62599066462c4b4f79dbd177
class FCLSTMDeterministicPolicy(ContinuousDeterministicPolicy): <NEW_LINE> <INDENT> def __init__(self, n_input_channels, n_hidden_layers, n_hidden_channels, action_size, min_action=None, max_action=None, bound_action=True, nonlinearity=F.relu, last_wscale=1.): <NEW_LINE> <INDENT> self.n_input_channels = n_input_channels <NEW_LINE> self.n_hidden_layers = n_hidden_layers <NEW_LINE> self.n_hidden_channels = n_hidden_channels <NEW_LINE> self.action_size = action_size <NEW_LINE> self.min_action = min_action <NEW_LINE> self.max_action = max_action <NEW_LINE> self.bound_action = bound_action <NEW_LINE> if self.bound_action: <NEW_LINE> <INDENT> def action_filter(x): <NEW_LINE> <INDENT> return bound_by_tanh( x, self.min_action, self.max_action) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> action_filter = None <NEW_LINE> <DEDENT> model = chainer.Chain( fc=MLP(self.n_input_channels, n_hidden_channels, (self.n_hidden_channels,) * self.n_hidden_layers, nonlinearity=nonlinearity, ), lstm=L.LSTM(n_hidden_channels, n_hidden_channels), out=L.Linear(n_hidden_channels, action_size, initialW=LeCunNormal(last_wscale)), ) <NEW_LINE> def model_call(model, x): <NEW_LINE> <INDENT> h = nonlinearity(model.fc(x)) <NEW_LINE> h = model.lstm(h) <NEW_LINE> h = model.out(h) <NEW_LINE> return h <NEW_LINE> <DEDENT> super().__init__( model=model, model_call=model_call, action_filter=action_filter) | Fully-connected deterministic policy with LSTM.
Args:
n_input_channels (int): Number of input channels.
n_hidden_layers (int): Number of hidden layers.
n_hidden_channels (int): Number of hidden channels.
action_size (int): Size of actions.
min_action (ndarray or None): Minimum action. Used only if bound_action
is set to True.
max_action (ndarray or None): Maximum action. Used only if bound_action
is set to True.
bound_action (bool): If set to True, actions are bounded to
[min_action, max_action] by tanh.
nonlinearity (callable): Nonlinearity between layers. It must accept a
Variable as an argument and return a Variable with the same shape.
Nonlinearities with learnable parameters such as PReLU are not
supported.
last_wscale (float): Scale of weight initialization of the last layer.
| 6259906632920d7e50bc77b7 |
class StudentTestCases(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.Student = std.Student('Bob', 'Billy', 'Billy bobbing', 4.0) <NEW_LINE> <DEDENT> '''Method to delete a test object''' <NEW_LINE> def tearDown(self): <NEW_LINE> <INDENT> del self.Student <NEW_LINE> <DEDENT> '''Test to ensure attributes are being properly applied, and optional attribute not required''' <NEW_LINE> def test_object_created_required_attributes(self): <NEW_LINE> <INDENT> self.assertEqual(self.Student._last_name, 'Bob') <NEW_LINE> self.assertEqual(self.Student._first_name, 'Billy') <NEW_LINE> self.assertEqual(self.Student._major, 'Billy bobbing') <NEW_LINE> <DEDENT> '''Test to ensure attributes are being properly applied and optional attribute also applied correctly''' <NEW_LINE> def test_object_created_all_attributes(self): <NEW_LINE> <INDENT> self.assertEqual(self.Student._last_name, 'Bob') <NEW_LINE> self.assertEqual(self.Student._first_name, 'Billy') <NEW_LINE> self.assertEqual(self.Student._major, 'Billy bobbing') <NEW_LINE> self.assertEqual(self.Student._gpa, 4.0) <NEW_LINE> <DEDENT> '''Test to ensure str() returns proper string''' <NEW_LINE> def test_student_str(self): <NEW_LINE> <INDENT> self.assertEqual(str(self.Student), 'Bob, Billy is majoring in Billy bobbing with a GPA of 4.0') <NEW_LINE> <DEDENT> '''Test to ensure ValueError exception is thrown with incorrect lname input''' <NEW_LINE> def test_object_not_created_error_last_name(self): <NEW_LINE> <INDENT> with self.assertRaises(ValueError): <NEW_LINE> <INDENT> test = std.Student('0000', 'Billy', 'Billy bobbing') <NEW_LINE> <DEDENT> <DEDENT> '''Test to ensure ValueError exception is thrown with incorrect fname input''' <NEW_LINE> def test_object_not_created_error_first_name(self): <NEW_LINE> <INDENT> with self.assertRaises(ValueError): <NEW_LINE> <INDENT> test = std.Student('Bob', '0000', 'Billy bobbing') <NEW_LINE> <DEDENT> <DEDENT> '''Test to ensure ValueError exception is thrown with incorrect major input''' <NEW_LINE> def test_object_not_created_error_major(self): <NEW_LINE> <INDENT> with self.assertRaises(ValueError): <NEW_LINE> <INDENT> test = std.Student('Bob', 'Billy', '0000') <NEW_LINE> <DEDENT> <DEDENT> '''Test to ensure ValueError exception is thrown with incorrect GPA input''' <NEW_LINE> def test_object_not_created_error_gpa(self): <NEW_LINE> <INDENT> with self.assertRaises(ValueError): <NEW_LINE> <INDENT> test = std.Student('Bob', 'Billy', 'Billy bobbing', 'abcd') <NEW_LINE> test = std.Student('Bob', 'Billy', 'Billy bobbing', 5.0) <NEW_LINE> test = std.Student('Bob', 'Billy', 'Billy bobbing', -1) | Method to set up a test object | 62599066fff4ab517ebcef8c |
class BlankPageSet(page_set_module.PageSet): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(BlankPageSet, self).__init__() <NEW_LINE> self.AddPage(BlankPage('file://blank_page/blank_page.html', self)) | A single blank page. | 62599066e76e3b2f99fda171 |
class LALR(LR0): <NEW_LINE> <INDENT> def __init__(self, sym_production, alternative, position): <NEW_LINE> <INDENT> LR0.__init__(self, sym_production, alternative, position) <NEW_LINE> self.lookaheads = set() <NEW_LINE> self.new_lookaheads = set() <NEW_LINE> self.subscribeds = set() <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> assert self.lookaheads <NEW_LINE> assert not self.new_lookaheads <NEW_LINE> del self.new_lookaheads <NEW_LINE> del self.subscribeds <NEW_LINE> <DEDENT> def self_factory(self, sym_production, alternative, position): <NEW_LINE> <INDENT> return LALR(sym_production, alternative, position) <NEW_LINE> <DEDENT> def add_new(self, lookahead): <NEW_LINE> <INDENT> if lookahead not in self.lookaheads: <NEW_LINE> <INDENT> self.new_lookaheads.add(lookahead) <NEW_LINE> <DEDENT> <DEDENT> def add_news(self, lookaheads): <NEW_LINE> <INDENT> self.new_lookaheads.update(lookaheads) <NEW_LINE> self.new_lookaheads -= self.lookaheads <NEW_LINE> <DEDENT> def subscribe(self, item_lalr): <NEW_LINE> <INDENT> self.subscribeds.add(item_lalr) <NEW_LINE> <DEDENT> def propagate(self): <NEW_LINE> <INDENT> if self.new_lookaheads: <NEW_LINE> <INDENT> for subscribed in self.subscribeds: <NEW_LINE> <INDENT> subscribed.add_news(self.new_lookaheads) <NEW_LINE> <DEDENT> self.lookaheads.update(self.new_lookaheads) <NEW_LINE> self.new_lookaheads.clear() <NEW_LINE> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def followers(self, grammar): <NEW_LINE> <INDENT> assert not hasattr(self, 'new_lookaheads') and not hasattr(self, 'subscribed') <NEW_LINE> return frozenset(self.lookaheads) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return LR0.__hash__(self) | See __init__ | 625990664428ac0f6e659ca3 |
@dataclass_json <NEW_LINE> @dataclass <NEW_LINE> class ImportProduct(Product): <NEW_LINE> <INDENT> mirror_from: Optional[Url] = None <NEW_LINE> base_iris: Optional[List[str]] = None <NEW_LINE> is_large: bool = False <NEW_LINE> module_type : Optional[str] = None <NEW_LINE> module_type_slme : str = "BOT" <NEW_LINE> annotation_properties : List[str] = field(default_factory=lambda: ['rdfs:label', 'IAO:0000115']) <NEW_LINE> slme_individuals : str = "include" <NEW_LINE> use_base: bool = False <NEW_LINE> make_base: bool = False <NEW_LINE> use_gzipped: bool = False | Represents an individual import
Examples: 'uberon' (in go)
Imports are typically built from an upstream source, but this can be configured | 62599066a17c0f6771d5d75f |
@ddt.ddt <NEW_LINE> class CourseBlocksSignalTest(ModuleStoreTestCase): <NEW_LINE> <INDENT> ENABLED_SIGNALS = ['course_deleted', 'course_published'] <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> super(CourseBlocksSignalTest, self).setUp() <NEW_LINE> self.course = CourseFactory.create() <NEW_LINE> self.course_usage_key = self.store.make_course_usage_key(self.course.id) <NEW_LINE> <DEDENT> def test_course_update(self): <NEW_LINE> <INDENT> test_display_name = "Lightsabers 101" <NEW_LINE> bs_manager = get_block_structure_manager(self.course.id) <NEW_LINE> orig_block_structure = bs_manager.get_collected() <NEW_LINE> self.assertTrue(is_course_in_block_structure_cache(self.course.id, self.store)) <NEW_LINE> self.assertNotEqual( test_display_name, orig_block_structure.get_xblock_field(self.course_usage_key, 'display_name') ) <NEW_LINE> self.course.display_name = test_display_name <NEW_LINE> self.store.update_item(self.course, self.user.id) <NEW_LINE> updated_block_structure = bs_manager.get_collected() <NEW_LINE> self.assertEqual( test_display_name, updated_block_structure.get_xblock_field(self.course_usage_key, 'display_name') ) <NEW_LINE> <DEDENT> @ddt.data(True, False) <NEW_LINE> @patch('openedx.core.djangoapps.content.block_structure.manager.BlockStructureManager.clear') <NEW_LINE> def test_cache_invalidation(self, invalidate_cache_enabled, mock_bs_manager_clear): <NEW_LINE> <INDENT> test_display_name = "Jedi 101" <NEW_LINE> with waffle().override(INVALIDATE_CACHE_ON_PUBLISH, active=invalidate_cache_enabled): <NEW_LINE> <INDENT> self.course.display_name = test_display_name <NEW_LINE> self.store.update_item(self.course, self.user.id) <NEW_LINE> <DEDENT> self.assertEqual(mock_bs_manager_clear.called, invalidate_cache_enabled) <NEW_LINE> <DEDENT> def test_course_delete(self): <NEW_LINE> <INDENT> bs_manager = get_block_structure_manager(self.course.id) <NEW_LINE> self.assertIsNotNone(bs_manager.get_collected()) <NEW_LINE> self.assertTrue(is_course_in_block_structure_cache(self.course.id, self.store)) <NEW_LINE> self.store.delete_course(self.course.id, self.user.id) <NEW_LINE> with self.assertRaises(ItemNotFoundError): <NEW_LINE> <INDENT> bs_manager.get_collected() <NEW_LINE> <DEDENT> self.assertFalse(is_course_in_block_structure_cache(self.course.id, self.store)) <NEW_LINE> <DEDENT> @ddt.data( (CourseLocator(org='org', course='course', run='run'), True), (LibraryLocator(org='org', library='course'), False), ) <NEW_LINE> @ddt.unpack <NEW_LINE> @patch('openedx.core.djangoapps.content.block_structure.tasks.update_course_in_cache_v2.apply_async') <NEW_LINE> def test_update_only_for_courses(self, key, expect_update_called, mock_update): <NEW_LINE> <INDENT> update_block_structure_on_course_publish(sender=None, course_key=key) <NEW_LINE> self.assertEqual(mock_update.called, expect_update_called) | Tests for the Course Blocks signal | 625990662ae34c7f260ac859 |
class SingleComponentResponseOfDestinyInventoryComponent(object): <NEW_LINE> <INDENT> swagger_types = { 'data': 'ComponentsschemasDestinyEntitiesInventoryDestinyInventoryComponent', 'privacy': 'ComponentsschemasComponentsComponentPrivacySetting' } <NEW_LINE> attribute_map = { 'data': 'data', 'privacy': 'privacy' } <NEW_LINE> def __init__(self, data=None, privacy=None): <NEW_LINE> <INDENT> self._data = None <NEW_LINE> self._privacy = None <NEW_LINE> self.discriminator = None <NEW_LINE> if data is not None: <NEW_LINE> <INDENT> self.data = data <NEW_LINE> <DEDENT> if privacy is not None: <NEW_LINE> <INDENT> self.privacy = privacy <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return self._data <NEW_LINE> <DEDENT> @data.setter <NEW_LINE> def data(self, data): <NEW_LINE> <INDENT> self._data = data <NEW_LINE> <DEDENT> @property <NEW_LINE> def privacy(self): <NEW_LINE> <INDENT> return self._privacy <NEW_LINE> <DEDENT> @privacy.setter <NEW_LINE> def privacy(self, privacy): <NEW_LINE> <INDENT> self._privacy = privacy <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, SingleComponentResponseOfDestinyInventoryComponent): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62599066aad79263cf42ff2a |
class TimeFilter(WebFilter): <NEW_LINE> <INDENT> SECONDS_MARGIN = 30 <NEW_LINE> def filter_response_params(self, params): <NEW_LINE> <INDENT> http_date = params.get('Date') <NEW_LINE> if http_date: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> host_time = datetime.fromtimestamp( email.utils.mktime_tz( email.utils.parsedate_tz(http_date))) <NEW_LINE> seconds_off = timedelta_to_seconds(host_time - now()) <NEW_LINE> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> self.log.debug('bad http_date: {}\n{}'. format(http_date, traceback.format_exc())) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if abs(seconds_off) > self.SECONDS_MARGIN: <NEW_LINE> <INDENT> self.log.warning( 'host time off {} seconds from local time'. format(seconds_off)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.log.debug('missing date') <NEW_LINE> <DEDENT> return params | Track time as reported by http servers.
When the time is significantly off, it may indicate packet staining. | 6259906644b2445a339b7519 |
class TestClient(object): <NEW_LINE> <INDENT> def __init__(self, host, port): <NEW_LINE> <INDENT> self.conn = http.client.HTTPConnection(host, port) <NEW_LINE> <DEDENT> def query(self, url, method='GET', params=None, headers={}): <NEW_LINE> <INDENT> if params: <NEW_LINE> <INDENT> params = urllib.parse.urlencode(params) <NEW_LINE> <DEDENT> self.conn.request(method, url, params, headers=headers) <NEW_LINE> r = self.conn.getresponse() <NEW_LINE> r.body = r.read().decode("utf-8") <NEW_LINE> return r | Helper to make requests to the rainfall app.
Created automatically by RainfallTestCase. | 62599066fff4ab517ebcef8d |
class UndirectedNode(_Node): <NEW_LINE> <INDENT> def __init__(self, g): <NEW_LINE> <INDENT> super().__init__(g) <NEW_LINE> self.__edges = {} <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self.nb_neighbors <NEW_LINE> <DEDENT> @property <NEW_LINE> def nb_neighbors(self): <NEW_LINE> <INDENT> return len(self.__edges) <NEW_LINE> <DEDENT> @property <NEW_LINE> def neighbors(self): <NEW_LINE> <INDENT> return iter(self.__edges.keys()) <NEW_LINE> <DEDENT> def is_neighbor_of(self, v): <NEW_LINE> <INDENT> return v in self.__edges <NEW_LINE> <DEDENT> def _remove_neighbor(self, v): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> del self.__edges[v] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> if isinstance(v, UndirectedNode): <NEW_LINE> <INDENT> raise NodeError(self._graph, self, 'The node ' + str(v) + ' is not a neighbor of this node.') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def incident_edges(self): <NEW_LINE> <INDENT> return iter(self.__edges.values()) <NEW_LINE> <DEDENT> def get_incident_edge(self, v): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.__edges[v] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> if isinstance(v, UndirectedNode): <NEW_LINE> <INDENT> raise NodeError(self._graph, self, str(v) + " is not a neighbor of the node.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def is_incident_to(self, e): <NEW_LINE> <INDENT> return e in self.__edges.values() <NEW_LINE> <DEDENT> def _add_incident_edge(self, e): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> v = e.neighbor(self) <NEW_LINE> if v is not None: <NEW_LINE> <INDENT> self.__edges[v] = e <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not isinstance(e, Edge): <NEW_LINE> <INDENT> raise TypeError() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise LinkError(self._graph, e, str(self) + ' is not one of the extremities.') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> if not isinstance(e, Edge): <NEW_LINE> <INDENT> raise TypeError() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _remove_incident_edge(self, e): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> v = e.neighbor(self) <NEW_LINE> if v is not None: <NEW_LINE> <INDENT> self._remove_neighbor(v) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not isinstance(e, Edge): <NEW_LINE> <INDENT> raise TypeError() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise LinkError(self._graph, e, str(self) + ' is not one of the extremities.') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> if not isinstance(e, Edge): <NEW_LINE> <INDENT> raise TypeError() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise | Vertice (or node) of an undirected graph.
This class represents any node of an undirected graph. This class should not be manually instantiated. Use
the method `UndirectedGraph.add_node` instead; otherwise, unexpected behaviour may occur.
With this class, it is possible to access all the incident edges and the corresponding neighbors of the node.
- the property `nb_neighbors` and `len(self)` return the number of neighbors.
- the property `neighbors` and the method `is_neighbor_of` give access to the neighbors.
- the property `incident_edges` and the methods `get_incident_edge` and `is_incident_to` give access to the
incident_edges.
Moreover, a node has a unique index that can be used to easily identify that node and that is used when the node
is printed with `str`. That index is accessible with the property `index`. | 625990668e7ae83300eea800 |
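
A minimal usage sketch of the neighbor-access API described above. `UndirectedGraph`, `add_node`, and `add_edge` are assumed from the docstring (they are not part of this row) and their exact signatures may differ in the actual library.

# Sketch only: graph construction helpers are hypothetical.
g = UndirectedGraph()
u = g.add_node()
v = g.add_node()
e = g.add_edge(u, v)                 # hypothetical; registers the incident edge on both nodes

print(u.nb_neighbors, len(u))        # both report the number of neighbors (1 here)
print(u.is_neighbor_of(v))           # True
print(u.is_incident_to(e))           # True
print(u.get_incident_edge(v) is e)   # True: the edge joining u and v
for edge in u.incident_edges:        # iterate over all incident edges
    print(edge)
print(u.index)                       # unique index, also used when printing the node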
class Package(Command): <NEW_LINE> <INDENT> description = "Run wheels for dependencies and submodules dependencies" <NEW_LINE> user_options = [] <NEW_LINE> def __init__(self, dist): <NEW_LINE> <INDENT> Command.__init__(self, dist) <NEW_LINE> <DEDENT> def initialize_options(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def finalize_options(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def localize_requirements(): <NEW_LINE> <INDENT> dependencies = filter(None, open("requirements.txt").read().split("\n")) <NEW_LINE> local_dependencies = [] <NEW_LINE> for dependency in dependencies: <NEW_LINE> <INDENT> if dependency: <NEW_LINE> <INDENT> if "egg=" in dependency: <NEW_LINE> <INDENT> pkg_name = dependency.split("egg=")[-1] <NEW_LINE> local_dependencies.append(pkg_name) <NEW_LINE> <DEDENT> elif "git+" in dependency: <NEW_LINE> <INDENT> pkg_name = dependency.split("/")[-1].split(".")[0] <NEW_LINE> local_dependencies.append(pkg_name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> local_dependencies.append(dependency) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> print("local packages in wheel: %s" % local_dependencies) <NEW_LINE> os.rename("requirements.txt", "requirements.orig") <NEW_LINE> with open("requirements.txt", "w") as requirements_file: <NEW_LINE> <INDENT> requirements_file.write("\n".join(local_dependencies)) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def execute(command, capture_output=False): <NEW_LINE> <INDENT> print("Running shell command: %s" % command) <NEW_LINE> if capture_output: <NEW_LINE> <INDENT> return subprocess.check_output(shlex.split(command)) <NEW_LINE> <DEDENT> process = subprocess.Popen(shlex.split(command), stdout=subprocess.PIPE) <NEW_LINE> while True: <NEW_LINE> <INDENT> output = process.stdout.readline() <NEW_LINE> if output == b'' and process.poll() is not None: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if output: <NEW_LINE> <INDENT> print(output.strip()) <NEW_LINE> <DEDENT> <DEDENT> return_code = process.poll() <NEW_LINE> if return_code != 0: <NEW_LINE> <INDENT> print("Error running command %s - exit code: %s" % (command, return_code)) <NEW_LINE> raise IOError("Shell Command Failed") <NEW_LINE> <DEDENT> return return_code <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def restore_requirements_txt(): <NEW_LINE> <INDENT> if os.path.exists("requirements.orig"): <NEW_LINE> <INDENT> print("Restoring original requirements.txt file") <NEW_LINE> os.remove("requirements.txt") <NEW_LINE> os.rename("requirements.orig", "requirements.txt") <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> print("recreate {0} directory".format(WHEELHOUSE)) <NEW_LINE> shutil.rmtree(WHEELHOUSE, ignore_errors=True) <NEW_LINE> os.makedirs(WHEELHOUSE) <NEW_LINE> print("Packing dependencies in requirements.txt into wheelhouse") <NEW_LINE> self.execute("pip wheel --wheel-dir={dir} -r requirements.txt".format(dir=WHEELHOUSE)) <NEW_LINE> print("Generating local requirements.txt") <NEW_LINE> self.localize_requirements() <NEW_LINE> print("Packing code and wheelhouse into dist") <NEW_LINE> self.run_command("sdist") <NEW_LINE> self.restore_requirements_txt() | Package Code and Dependencies into wheelhouse | 625990668da39b475be0495c |
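
A hedged sketch of how a custom command like this is typically wired into a project; the project name, version, and module layout below are placeholders, not values from the original row.

# Hypothetical setup.py wiring; metadata is illustrative only.
from setuptools import setup, find_packages

setup(
    name="example-project",          # placeholder name
    version="0.1.0",                 # placeholder version
    packages=find_packages(),
    cmdclass={"package": Package},   # exposes `python setup.py package`
)

With this wiring, `python setup.py package` would rebuild the wheelhouse from requirements.txt, rewrite the file with local package names, run sdist, and then restore the original requirements.txt, as the run() method above describes.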