Dataset columns:

    column                   dtype           range
    body_hash                stringlengths   64–64
    body                     stringlengths   23–109k
    docstring                stringlengths   1–57k
    path                     stringlengths   4–198
    name                     stringlengths   1–115
    repository_name          stringlengths   7–111
    repository_stars         float64         0–191k
    lang                     stringclasses   1 value
    body_without_docstring   stringlengths   14–108k
    unified                  stringlengths   45–133k
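Each record below carries these fields for one Python function. As a quick orientation, here is a minimal sketch of loading and filtering a dump with this schema via the Hugging Face `datasets` library; the dataset identifier is a hypothetical placeholder, not the real one for this dump:

    from datasets import load_dataset

    # Hypothetical dataset id; substitute the actual identifier of this dump.
    ds = load_dataset('someuser/python-functions-with-docstrings', split='train')

    # Each row pairs a function body with its extracted docstring,
    # plus repository metadata that is handy for filtering.
    row = ds[0]
    print(row['path'], row['name'], row['repository_name'])
    print(row['body'])

    # Example: keep only functions from repositories with at least 100 stars.
    popular = ds.filter(lambda r: r['repository_stars'] >= 100)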
body_hash: 17d739dbbe8728fb06c24aad2d1d32bee5060406583af13f2e3fa0b39ba7ecea
body:
    @crprofileset.command(name='verify_url', pass_context=True)
    async def crprofileset_verify_url(self, ctx, url):
        """Set player verification endpoint"""
        self.model.verify_url = url
        await self.bot.say('Verification URL updated.')
        await self.bot.delete_message(ctx.message)
docstring: Set player verification endpoint
path: crprofile/crprofile.py
name: crprofileset_verify_url
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: c71772f86032381cc9d3d26b2ac1cdb4f3e50773979b05f7a23a0a2f6f3b3365
body:
    @crprofileset.command(name='auth', pass_context=True)
    async def crprofileset_auth(self, ctx, token):
        """Set auth header"""
        self.model.auth = token
        await self.bot.say('Auth updated.')
        await self.bot.delete_message(ctx.message)
docstring: Set auth header
path: crprofile/crprofile.py
name: crprofileset_auth
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: a391ebc29b0aae59da9652089beec45936eeae7bdde6be1a3207d7c051e0aff9
body:
    @crprofileset.command(name='official_auth', pass_context=True)
    async def crprofileset_official_auth(self, ctx, token):
        """Set auth header"""
        self.model.official_auth = token
        await self.bot.say('Auth updated.')
        await self.bot.delete_message(ctx.message)
docstring: Set auth header
path: crprofile/crprofile.py
name: crprofileset_official_auth
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: 86971ccf05a0398bddc91461b2dcf40d61c4d6ca85f8625a7ca2a5535b8bdb9c
body:
    @crprofileset.command(name='initserver', pass_context=True)
    async def crprofileset_initserver(self, ctx):
        """Init CR Profile: server settings."""
        server = ctx.message.server
        self.model.init_server(server)
        await self.bot.say('Server settings initialized.')
docstring: Init CR Profile: server settings.
path: crprofile/crprofile.py
name: crprofileset_initserver
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: 60f239d586f55da8508e28fca1af5bc704af1aab7a21600ce0710ee81554a0fe
body:
    @crprofileset.command(name='initplayers', pass_context=True)
    async def crprofileset_initplayers(self, ctx):
        """Init CR Profile: players settings."""
        server = ctx.message.server
        self.model.init_players(server)
        await self.bot.say('Player settings initialized.')
docstring: Init CR Profile: players settings.
path: crprofile/crprofile.py
name: crprofileset_initplayers
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: c3e0f632d0ddfff7f762f5408acf5a908bd96660dd74ad501b3dafcf46931851
body:
    @crprofileset.command(name='badgeurl', pass_context=True)
    async def crprofileset_badgeurl(self, ctx, url):
        """Badge URL base.

        Format:
        If path is http://domain.com/path/LQQ
        Enter http://domain.com/path/
        """
        self.model.badge_url = url
        await self.bot.say('Badge URL updated.')
docstring: Badge URL base. Format: If path is http://domain.com/path/LQQ, enter http://domain.com/path/
path: crprofile/crprofile.py
name: crprofileset_badgeurl
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: 4b98e76512518c1b3057c3edeeb546477fd5231793e6e73cc03e752fe07fc650
body:
    @crprofileset.command(name='apitoken', pass_context=True)
    async def crprofileset_apiauth(self, ctx, token):
        """API Authentication token."""
        self.model.profile_api_token = token
        await self.bot.say('API token saved.')
docstring: API Authentication token.
path: crprofile/crprofile.py
name: crprofileset_apiauth
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: 4bbc19232e68a55af4a01b248db7f04cab94fb900e00494db8f190c37956b734
body:
    @crprofileset.command(name='api_provider', pass_context=True)
    async def crprofileset_api_provider(self, ctx, value):
        """API Provider.

        Accepted values:
        cr-api
        official
        """
        if value in ('cr-api', 'official'):
            self.model.api_provider = value
            await self.bot.say('API provider saved.')
        else:
            await self.bot.say('Not a valid provider.')
docstring: API Provider. Accepted values: cr-api, official
path: crprofile/crprofile.py
name: crprofileset_api_provider
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: a8aa16fac31020a284773c96e42d1ebf1b26a0510c811c8fc7a86bfe7d59477f
body:
    @crprofileset.command(name='rmmembertag', pass_context=True)
    async def crprofileset_rm_member_tag(self, ctx, member: discord.Member):
        """Remove player tag of a user."""
        server = ctx.message.server
        self.model.rm_player_tag(server, member=member)
        await self.bot.say('Removed player tag for {}'.format(member))
docstring: Remove player tag of a user.
path: crprofile/crprofile.py
name: crprofileset_rm_member_tag
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: e5ffb64fdae9d0de8e516b7223b9fe3026576e2ef7fc98834c2fe37c24d0c145
body:
    @checks.mod_or_permissions()
    @crprofileset.command(name='rmtag', pass_context=True)
    async def crprofileset_rm_tag(self, ctx, tag):
        """Remove player tag of a user."""
        server = ctx.message.server
        self.model.rm_player_tag(server, tag=tag)
        await self.bot.say('Removed player tag {} from associated member'.format(tag))
docstring: Remove player tag of a user.
path: crprofile/crprofile.py
name: crprofileset_rm_tag
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: 12814ec72dd281090bc7ad509a97ec3ee2881f34cf6415a9da283a7aa6520a3a
body:
    @commands.group(pass_context=True, no_pm=True)
    async def crprofile(self, ctx):
        """Clash Royale Player Profile."""
        if self.model.auth is None:
            await self.bot.say(
                'You must have a cr-api.com developer key to run this command. '
                'Please visit http://docs.cr-api.com/#/authentication to learn how to obtain one, '
                'then run `!crprofileset auth insert_developer_key` to set it.')
            return
        if ctx.invoked_subcommand is None:
            await self.bot.send_cmd_help(ctx)
docstring: Clash Royale Player Profile.
path: crprofile/crprofile.py
name: crprofile
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: d27cacd0865413498a9d5df45120d5bee702b1dc94e0894f5c4bd1b81c6c4690
body:
    @crprofile.command(name='settag', pass_context=True, no_pm=True)
    async def crprofile_settag(self, ctx, playertag, member: discord.Member=None):
        """Set playertag to discord member.

        Setting tag for yourself:
        !crprofile settag C0G20PR2

        Setting tag for others (requires Bot Commander role):
        !crprofile settag C0G20PR2 SML
        !crprofile settag C0G20PR2 @SML
        !crprofile settag C0G20PR2 @SML#6443
        """
        server = ctx.message.server
        author = ctx.message.author
        sctag = SCTag(playertag)
        if not sctag.valid:
            await self.bot.say(sctag.invalid_error_msg)
            return
        allowed = False
        if member is None:
            allowed = True
        elif member.id == author.id:
            allowed = True
        else:
            botcommander_roles = [discord.utils.get(server.roles, name=r) for r in BOTCOMMANDER_ROLES]
            botcommander_roles = set(botcommander_roles)
            author_roles = set(author.roles)
            if len(author_roles.intersection(botcommander_roles)):
                allowed = True
        if not allowed:
            await self.bot.say('Only Bot Commanders can set tags for others.')
            return
        if member is None:
            member = ctx.message.author
        self.model.set_player(server, member, sctag.tag)
        await self.bot.say('Associated player tag #{} with Discord Member {}.'.format(sctag.tag, member.display_name))
docstring: Set playertag to discord member. Setting tag for yourself: !crprofile settag C0G20PR2 Setting tag for others (requires Bot Commander role): !crprofile settag C0G20PR2 SML !crprofile settag C0G20PR2 @SML !crprofile settag C0G20PR2 @SML#6443
path: crprofile/crprofile.py
name: crprofile_settag
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: 11c0d0981681c84872230169e98ab76a1e1ea664d16df67b7691be828c87e9e7
body:
    @crprofile.command(name='gettag', pass_context=True, no_pm=True)
    async def crprofile_gettag(self, ctx, member: discord.Member=None):
        """Get playertag from Discord member."""
        server = ctx.message.server
        author = ctx.message.author
        if member is None:
            member = author
        tag = await self.model.member2tag(server, member)
        if tag is None:
            await self.bot.say('Cannot find associated player tag.')
            return
        await self.bot.say('Player tag for {} is #{}'.format(member.display_name, tag))
docstring: Get playertag from Discord member.
path: crprofile/crprofile.py
name: crprofile_gettag
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: 77490a0440a6cf932bccae82476a03820475fd9dd28e89c846dfd053fafe11c9
body:
    @crprofile.command(name='tag', pass_context=True, no_pm=True)
    async def crprofile_tag(self, ctx, tag):
        """Player profile by tag

        Display player info
        """
        await self.bot.type()
        sctag = SCTag(tag)
        if not sctag.valid:
            await self.bot.say(sctag.invalid_error_msg)
            return
        await self.display_profile(ctx, tag)
docstring: Player profile by tag Display player info
path: crprofile/crprofile.py
name: crprofile_tag
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: c413b38892e0085628b4f5b5414850c8b083591ab8a9903671ad75f4dc2f9563
body:
    async def get_profile(self, ctx, member: discord.Member=None, **kwargs):
        """Logic for profile"""
        await self.bot.type()
        author = ctx.message.author
        server = ctx.message.server
        if member is None:
            member = author
        tag = await self.model.member2tag(server, member)
        if tag is None:
            await self.bot.say('{} has not set player tag with the bot yet.'.format(member.display_name))
            racf_cog = self.bot.get_cog('RACF')
            if racf_cog is None:
                await self.bot.say('Please run `{}crprofile settag` to set your player tag.'.format(ctx.prefix))
            else:
                await self.bot.say('Please run `{}crsettag` to set your player tag.'.format(ctx.prefix))
            return
        await self.display_profile(ctx, tag, **kwargs)
docstring: Logic for profile
path: crprofile/crprofile.py
name: get_profile
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: b444a0989c36550efeb363aa064d222974fe1d6e58355a607334ffaec34e41c9
body:
    @crprofile.command(name='get', pass_context=True, no_pm=True)
    async def crprofile_get(self, ctx, member: discord.Member=None):
        """Player profile

        if member is not entered, retrieve own profile
        """
        try:
            await self.get_profile(ctx, member, sections=['overview', 'stats'])
        except APIError as e:
            await self.bot.say('API Error {status} {message}'.format(status=e.status, message=e.message))
docstring: Player profile if member is not entered, retrieve own profile
path: crprofile/crprofile.py
name: crprofile_get
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: 940ee8528da20413fd57725e095bc3c8f5ca117cc457ce0d0752a877aa18ee45
body:
    @crprofile.command(name='cards', pass_context=True, no_pm=True)
    async def crprofile_cards(self, ctx, member: discord.Member=None):
        """Card collection."""
        try:
            await self.get_profile(ctx, member, sections=['cards'])
        except APIError as e:
            await self.bot.say('API Error {status} {message}'.format(status=e.status, message=e.message))
docstring: Card collection.
path: crprofile/crprofile.py
name: crprofile_cards
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: 74a3ba387580ace7487d20b4659928eba14c0b75bea1692aae4e8ba452667067
body:
    @crprofile.command(name='trade', pass_context=True, no_pm=True)
    async def crprofile_trade(self, ctx, member: discord.Member=None):
        """Tradeable cards."""
        try:
            await self.get_profile(ctx, member, sections=['trade'])
        except APIError as e:
            await self.bot.say('API Error {status} {message}'.format(status=e.status, message=e.message))
docstring: Tradeable cards.
path: crprofile/crprofile.py
name: crprofile_trade
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: a3fe98d114cc72860be71b50b3f5c1fe95a235463d672191fda79439ad8ba2f4
body:
    @crprofile.command(name='tradetag', pass_context=True, no_pm=True)
    async def crprofile_tradetag(self, ctx, tag):
        """Tradeable cards by tag."""
        tag = clean_tag(tag)
        try:
            await self.display_profile(ctx, tag, sections=['trade'])
        except APIError as e:
            await self.bot.say('API Error {status} {message}'.format(status=e.status, message=e.message))
docstring: Tradeable cards by tag.
path: crprofile/crprofile.py
name: crprofile_tradetag
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: 8f8f3c34f7fa7be9fc85af3af7351176d54aff203618c637f6e5ced05ea1d39a
body:
    @crprofile.command(name='chests', pass_context=True, no_pm=True)
    async def crprofile_chests(self, ctx, member: discord.Member=None):
        """Upcoming chests."""
        try:
            await self.get_profile(ctx, member, sections=['chests'])
        except APIError as e:
            await self.bot.say('API Error {status} {message}'.format(status=e.status, message=e.message))
docstring: Upcoming chests.
path: crprofile/crprofile.py
name: crprofile_chests
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: fe8a601d218fe97a1d75c84a66fbb8a6bcd00c974b90636751e010f15d30ee45
body:
    @crprofile.command(name='deck', pass_context=True, no_pm=True)
    async def crprofile_deck(self, ctx, member: discord.Member=None):
        """Current deck."""
        try:
            await self.get_profile(ctx, member, sections=['deck'])
        except APIError as e:
            await self.bot.say('API Error {status} {message}'.format(status=e.status, message=e.message))
docstring: Current deck.
path: crprofile/crprofile.py
name: crprofile_deck
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: c1574c30be111726e331c01c0b9f4e707e209d3ac02fb29c77070a7220e41574
body:
    @crprofile.command(name='tagdeck', pass_context=True, no_pm=True)
    async def crprofile_tagdeck(self, ctx, tag):
        """Current deck of player tag."""
        try:
            await self.display_profile(ctx, tag, sections=['deck'])
        except APIError as e:
            await self.bot.say('API Error {status} {message}'.format(status=e.status, message=e.message))
docstring: Current deck of player tag.
path: crprofile/crprofile.py
name: crprofile_tagdeck
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: a2435975d09dc196b33849f9725888a5d8e6c6835facb44b85ea1f56a4ee111e
body:
    @crprofile.command(name='mini', pass_context=True, no_pm=True)
    async def crprofile_mini(self, ctx, member: discord.Member=None):
        """Player profile

        if member is not entered, retrieve own profile
        """
        try:
            await self.get_profile(ctx, member, sections=['overview'])
        except APIError as e:
            await self.bot.say('API Error {status} {message}'.format(status=e.status, message=e.message))
docstring: Player profile if member is not entered, retrieve own profile
path: crprofile/crprofile.py
name: crprofile_mini
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: 3feb0c6392176d9d847a270a99debe87d3d4321f70908a3974b02f0bdc86a4d6
body:
    @crprofile.command(name='minitag', pass_context=True, no_pm=True)
    async def crprofile_mini(self, ctx, tag):
        """Player profile

        if member is not entered, retrieve own profile
        """
        await self.bot.type()
        sctag = SCTag(tag)
        if not sctag.valid:
            await self.bot.say(sctag.invalid_error_msg)
            return
        try:
            await self.display_profile(ctx, tag, sections=['overview'])
        except APIError as e:
            await self.bot.say('API Error {status} {message}'.format(status=e.status, message=e.message))
docstring: Player profile if member is not entered, retrieve own profile
path: crprofile/crprofile.py
name: crprofile_mini
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: 086ab6ad0a344e6ce38dbf67cbe2b8d70a27d7fceb4e46e5107241a43a9e2bd0
body:
    async def display_profile(self, ctx, tag, **kwargs):
        """Display profile."""
        sctag = SCTag(tag)
        if not sctag.valid:
            await self.bot.say(sctag.invalid_error_msg)
            return
        try:
            player_data = await self.model.player_data(sctag.tag)
        except json.decoder.JSONDecodeError:
            player_data = self.model.cached_player_data(tag)
        except asyncio.TimeoutError:
            player_data = self.model.cached_player_data(tag)
        if player_data is None:
            await self.bot.send_message(ctx.message.channel, 'Unable to load from API.')
            return
        if player_data.is_cache:
            await self.bot.send_message(
                ctx.message.channel,
                'Unable to load from API. Showing cached data from: {}.'.format(
                    self.model.cached_player_data_timestamp(tag)))
        server = ctx.message.server
        for em in self.embeds_profile(player_data, server=server, **kwargs):
            try:
                await self.bot.say(embed=em)
            except discord.DiscordException as e:
                await self.bot.say('Discord error: {e}'.format(e=e))
docstring: Display profile.
path: crprofile/crprofile.py
name: display_profile
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: 65bd0a6f4fb328cc4a59f83c3ee077efd6773e7253da504622ae0ed9c038a9b5
body:
    def embed_profile_overview(self, player: CRPlayerModel, server=None, color=None):
        """Discord Embed: profile overview."""
        bem = self.bot_emoji.name
        member = self.model.tag2member(server, player.tag)
        mention = '_'
        if member is not None:
            mention = member.mention
        profile_url = 'http://RoyaleAPI.com/player/{}'.format(player.tag.lstrip('#'))
        if player.clan_tag:
            clan_url = 'http://RoyaleAPI.com/clan/{}'.format(player.clan_tag.lstrip('#'))
        else:
            clan_url = None
        title = player.name
        roles = {'member': 'Member', 'elder': 'Elder', 'coleader': 'Co-Leader', 'leader': 'Leader', 'n/a': 'N/A'}
        clan_role = player.clan_role
        description = '[{player_tag}]({profile_url})\n**[{clan_name}]({clan_url})**\n[{clan_tag}]({clan_url})\n{clan_role}'.format(
            player_tag=player.tag,
            profile_url=profile_url,
            clan_name=player.clan_name,
            clan_tag=player.clan_tag,
            clan_url=clan_url,
            clan_role=roles.get(player.clan_role.lower(), 'N/A'))
        em = discord.Embed(title=title, description=description, color=color, url=profile_url)
        em.set_thumbnail(url=player.arena_url)
        header = {
            'Trophies': player.trophy_value(bem('trophy')),
            player.arena_text: '{} {}'.format(player.arena_subtitle, player.arena_emoji(self.bot_emoji)),
            'Rank': player.rank_str(self.bot_emoji),
            'Discord': mention,
        }
        for k, v in header.items():
            em.add_field(name=k, value=v)
        em.set_footer(text=profile_url, icon_url='https://smlbiobot.github.io/img/cr-api/cr-api-logo.png')
        return em
docstring: Discord Embed: profile overview.
path: crprofile/crprofile.py
name: embed_profile_overview
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: 6b808a4c52af042074617ec6cba61df9b6ecc730852ef57b001f6b29aed5f2be
body:
    def embed_profile_stats(self, player: CRPlayerModel, color=None):
        """Discord Embed: profile stats."""
        em = discord.Embed(title=' ', color=color)
        bem = self.bot_emoji.name

        def fmt(num, emoji_name):
            emoji = self.bot_emoji.name(emoji_name)
            if emoji is not None:
                return '{:,} {}'.format(num, emoji)

        if player.tourney_cards_per_game is None:
            tourney_cards_per_game = 'N/A'
        else:
            tourney_cards_per_game = '{:.3f}'.format(player.tourney_cards_per_game)
        stats = OrderedDict([
            ('Ladder Wins / Losses', player.win_losses(bem('battle'))),
            ('Ladder Win Percentage', '{:.3%} {}'.format(player.win_ratio, bem('battle'))),
            ('Total Games', fmt(player.total_games, 'battle')),
            ('Challenge Max Wins', fmt(player.challenge_max_wins, 'tournament')),
            ('Challenge Cards Won', fmt(player.challenge_cards_won, 'tournament')),
            ('Three-Crown Wins', fmt(player.three_crown_wins, 'crownblue')),
            ('Tourney Cards Won', fmt(player.tourney_cards_won, 'tournament')),
            ('Tourney Games', fmt(player.tourney_games, 'tournament')),
            ('Tourney Cards/Game', '{} {}'.format(tourney_cards_per_game, bem('tournament'))),
            ('Cards Found', fmt(player.cards_found, 'cards')),
            ('Total Donations', fmt(player.total_donations, 'cards')),
            ('Level', fmt(player.level, 'experience')),
            ('Favorite Card', player.fave_card(self.bot_emoji)),
        ])
        for k, v in stats.items():
            em.add_field(name=k, value=v)
        em.add_field(name='Chests', value=player.chest_list(self.bot_emoji), inline=False)
        em.add_field(name='Deck', value=player.deck_list(self.bot_emoji), inline=False)
        return em
docstring: Discord Embed: profile stats.
path: crprofile/crprofile.py
name: embed_profile_stats
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: 2615bd807a442e08d8bd376f891cd2826c809f592a33ea5a12fb7e9cf5f29ec1
body:
    def embed_profile_cards(self, player: CRPlayerModel, color=None):
        """Card Collection."""
        profile_url = 'http://RoyaleAPI.com/player/{}/cards'.format(player.tag.lstrip('#'))
        em = discord.Embed(title='{} #{}'.format(player.name, player.tag), color=color, url=profile_url)
        cards = player.card_collection(self.bot_emoji)
        cards.sort(key=lambda x: (-normalized_card_level(x), x.get('type_sort', 0), -x.get('elixir')))
        for rarity in ['Common', 'Rare', 'Epic', 'Legendary']:
            value = []
            for card in cards:
                if card is not None:
                    if card['rarity'] == rarity:
                        value.append('{}{}'.format(card['emoji'], normalized_card_level(card)))
            if value:
                em.add_field(name=rarity, value=' '.join(value), inline=False)
            else:
                em.add_field(name=rarity, value='None', inline=False)
        em.set_footer(text=profile_url, icon_url='https://smlbiobot.github.io/img/cr-api/cr-api-logo.png')
        return em
docstring: Card Collection.
path: crprofile/crprofile.py
name: embed_profile_cards
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: 9aa58a6f633a868223efc154ea35590be42f22061eab36b1a9e1ff15568fc4de
body:
    def embed_profile_chests(self, player: CRPlayerModel, color=None):
        """Upcoming chests"""
        profile_url = 'http://RoyaleAPI.com/player/{}'.format(player.tag.lstrip('#'))
        em = discord.Embed(title='{} #{}: Chest Cycle'.format(player.name, player.tag), color=color, url=profile_url)
        em.add_field(name='Chests', value=player.chest_list(self.bot_emoji), inline=False)
        em.set_footer(text=profile_url, icon_url='https://smlbiobot.github.io/img/cr-api/cr-api-logo.png')
        return em
docstring: Upcoming chests
path: crprofile/crprofile.py
name: embed_profile_chests
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: f91916684b8b2b819f61b6ed0abcc15962b03e0c9e80969dbbfc17e39c45f0e7
body:
    def embed_profile_deck(self, player: CRPlayerModel, color=None):
        """Current deck."""
        profile_url = 'https://RoyaleAPI.com/player/{}'.format(player.tag.lstrip('#'))
        desc_copy = '[Copy deck]({url})'.format(url=player.deck_link)
        desc_stats = '[Deck stats]({url})'.format(url=player.deck_stats_url)
        desc_log = '[Battle Log](https://RoyaleAPI.com/player/{tag})'.format(tag=player.tag.lstrip('#'))
        desc = ' • '.join([desc_copy, desc_stats, desc_log])
        em = discord.Embed(title='{} #{}'.format(player.name, player.tag), description=desc, color=color)
        em.add_field(name='Deck', value=player.deck_list(self.bot_emoji), inline=False)
        em.set_footer(text=profile_url, icon_url='https://smlbiobot.github.io/img/cr-api/cr-api-logo.png')
        return em
docstring: Current deck.
path: crprofile/crprofile.py
name: embed_profile_deck
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: 63511c16a84801aa03086191cdab4f7b91f350f6d05866139a50be81e516246f
body:
    def embed_profile_trade(self, player: CRPlayerModel, color=None):
        """Tradeable cards."""
        decklink_url = player.deck_link
        profile_url = 'http://RoyaleAPI.com/player/{}'.format(player.tag.lstrip('#'))
        em = discord.Embed(title='{} #{}'.format(player.name, player.tag), color=color, url=decklink_url)
        trade_list = player.trade_list(self.bot_emoji)
        for rarity in ['Legendary', 'Epic', 'Rare', 'Common']:
            value = ' '.join(trade_list[rarity])
            value = value.strip()
            if value:
                em.add_field(name='Trade: {}'.format(rarity), value=value, inline=False)
        if len(em.fields) == 0:
            em.add_field(name='No tradable cards', value='N/A')
        em.set_footer(text=profile_url, icon_url='https://smlbiobot.github.io/img/cr-api/cr-api-logo.png')
        return em
docstring: Tradeable cards.
path: crprofile/crprofile.py
name: embed_profile_trade
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: 4bbeccb1515bf344eea3a01b8d6a73b0d4a5fb8362260d305552474b024d7660
body:
    def embeds_profile(self, player: CRPlayerModel, server=None, sections=('overview', 'stats')):
        """Return Discord Embed of player profile."""
        embeds = []
        color = random_discord_color()
        if 'overview' in sections:
            embeds.append(self.embed_profile_overview(player, server=server, color=color))
        if 'stats' in sections:
            embeds.append(self.embed_profile_stats(player, color=color))
        if 'cards' in sections:
            embeds.append(self.embed_profile_cards(player, color=color))
        if 'chests' in sections:
            embeds.append(self.embed_profile_chests(player, color=color))
        if 'deck' in sections:
            embeds.append(self.embed_profile_deck(player, color=color))
        if 'trade' in sections:
            embeds.append(self.embed_profile_trade(player, color=color))
        return embeds
docstring: Return Discord Embed of player profile.
path: crprofile/crprofile.py
name: embeds_profile
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: a7d228ac4d320a6b1e74d8b772422c7a1f1e447355aabfc2a550000c320f797c
body:
    async def on_message(self, msg):
        """Do transforms."""
        if self.bot.user.name != 'R2.Dev':
            await self.transform_friendlink(msg)
docstring: Do transforms.
path: crprofile/crprofile.py
name: on_message
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
body_hash: 7eeb60676f66cea81751dd93763583be0a2f5a5bf553160b4bb85fd7c3bb56b5
body:
    async def transform_friendlink(self, msg):
        """Convert friend invite links to embeds.

        https://link.clashroyale.com/invite/friend/en?tag={tag}&token={token}&platform={platform}
        """
        m = re.search(
            r'https://link.clashroyale.com/invite/friend/..\?tag=([A-Z0-9]+)&token=([a-z0-9]+)&platform=([A-Za-z0-9]+)',
            msg.content)
        if not m:
            return
        url = m.group(0)
        player_tag = m.group(1)
        try:
            p = await self.model.player_data(player_tag)
        except APIError as e:
            return
        profile = 'https://royaleapi.com/player/{}'.format(p.tag)
        battles = 'https://royaleapi.com/player/{}/battles'.format(p.tag)
        decks = 'https://royaleapi.com/player/{}/decks'.format(p.tag)
        links = '[Profile]({}) • [Battles]({}) • [Decks]({})'.format(profile, battles, decks)
        misc = 'Level {} • {} total games'.format(p.level, p.total_games)
        trophies = '{} / {} PB'.format(p.trophy_current, p.trophy_highest)
        challenge = '{} max wins / {} cards'.format(p.challenge_max_wins, p.challenge_cards_won)
        if not p.not_in_clan:
            clan = '{}, [{}]({})'.format(
                p.clan_role.title(), p.clan_name,
                'https://royaleapi.com/clan/{}'.format(p.clan_tag))
        else:
            clan = 'Not in clan'
        author = 'Posted by {}'.format(msg.author.mention)
        em = discord.Embed(
            title='Friend Request - Clash Royale',
            description='**{name}** #{tag}\n{clan}\n{trophies}\n{misc}\n{challenge}\n{links}\n{author}'.format(
                name=p.name, tag=p.tag, links=links, misc=misc, trophies=trophies,
                clan=clan, challenge=challenge, author=author),
            url=url,
            color=discord.Color.blue())
        em.add_field(name='Current Deck', value=p.deck_list(self.bot_emoji), inline=False)
        em.set_footer(text=p.profile_url, icon_url='https://smlbiobot.github.io/img/cr-api/cr-api-logo.png')
        await self.bot.send_message(msg.channel, embed=em)
        try:
            await self.bot.delete_message(msg)
        except Exception as e:
            pass
docstring: Convert friend invite links to embeds. https://link.clashroyale.com/invite/friend/en?tag={tag}&token={token}&platform={platform}
path: crprofile/crprofile.py
name: transform_friendlink
repository_name: zodpixel/SML-Cogs
repository_stars: 17
lang: python
async def transform_friendlink(self, msg): 'Convert friend invite links to embeds.\n\n https://link.clashroyale.com/invite/friend/en?tag={tag}&token={token}&platform={platform}\n ' m = re.search('https://link.clashroyale.com/invite/friend/..\\?tag=([A-Z0-9]+)&token=([a-z0-9]+)&platform=([A-Za-z0-9]+)', msg.content) if (not m): return url = m.group(0) player_tag = m.group(1) try: p = (await self.model.player_data(player_tag)) except APIError as e: return profile = 'https://royaleapi.com/player/{}'.format(p.tag) battles = 'https://royaleapi.com/player/{}/battles'.format(p.tag) decks = 'https://royaleapi.com/player/{}/decks'.format(p.tag) links = '[Profile]({}) • [Battles]({}) • [Decks]({})'.format(profile, battles, decks) misc = 'Level {} • {} total games'.format(p.level, p.total_games) trophies = '{} / {} PB'.format(p.trophy_current, p.trophy_highest) challenge = '{} max wins / {} cards'.format(p.challenge_max_wins, p.challenge_cards_won) if (not p.not_in_clan): clan = '{}, [{}]({})'.format(p.clan_role.title(), p.clan_name, 'https://royaleapi.com/clan/{}'.format(p.clan_tag)) else: clan = 'Not in clan' author = 'Posted by {}'.format(msg.author.mention) em = discord.Embed(title='Friend Request - Clash Royale', description='**{name}** #{tag}\n{clan}\n{trophies}\n{misc}\n{challenge}\n{links}\n{author}'.format(name=p.name, tag=p.tag, links=links, misc=misc, trophies=trophies, clan=clan, challenge=challenge, author=author), url=url, color=discord.Color.blue()) em.add_field(name='Current Deck', value=p.deck_list(self.bot_emoji), inline=False) em.set_footer(text=p.profile_url, icon_url='https://smlbiobot.github.io/img/cr-api/cr-api-logo.png') (await self.bot.send_message(msg.channel, embed=em)) try: (await self.bot.delete_message(msg)) except Exception as e: pass
async def transform_friendlink(self, msg): 'Convert friend invite links to embeds.\n\n https://link.clashroyale.com/invite/friend/en?tag={tag}&token={token}&platform={platform}\n ' m = re.search('https://link.clashroyale.com/invite/friend/..\\?tag=([A-Z0-9]+)&token=([a-z0-9]+)&platform=([A-Za-z0-9]+)', msg.content) if (not m): return url = m.group(0) player_tag = m.group(1) try: p = (await self.model.player_data(player_tag)) except APIError as e: return profile = 'https://royaleapi.com/player/{}'.format(p.tag) battles = 'https://royaleapi.com/player/{}/battles'.format(p.tag) decks = 'https://royaleapi.com/player/{}/decks'.format(p.tag) links = '[Profile]({}) • [Battles]({}) • [Decks]({})'.format(profile, battles, decks) misc = 'Level {} • {} total games'.format(p.level, p.total_games) trophies = '{} / {} PB'.format(p.trophy_current, p.trophy_highest) challenge = '{} max wins / {} cards'.format(p.challenge_max_wins, p.challenge_cards_won) if (not p.not_in_clan): clan = '{}, [{}]({})'.format(p.clan_role.title(), p.clan_name, 'https://royaleapi.com/clan/{}'.format(p.clan_tag)) else: clan = 'Not in clan' author = 'Posted by {}'.format(msg.author.mention) em = discord.Embed(title='Friend Request - Clash Royale', description='**{name}** #{tag}\n{clan}\n{trophies}\n{misc}\n{challenge}\n{links}\n{author}'.format(name=p.name, tag=p.tag, links=links, misc=misc, trophies=trophies, clan=clan, challenge=challenge, author=author), url=url, color=discord.Color.blue()) em.add_field(name='Current Deck', value=p.deck_list(self.bot_emoji), inline=False) em.set_footer(text=p.profile_url, icon_url='https://smlbiobot.github.io/img/cr-api/cr-api-logo.png') (await self.bot.send_message(msg.channel, embed=em)) try: (await self.bot.delete_message(msg)) except Exception as e: pass<|docstring|>Convert friend invite links to embeds. https://link.clashroyale.com/invite/friend/en?tag={tag}&token={token}&platform={platform}<|endoftext|>
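A quick, self-contained check of the invite-link pattern used above; the tag, token, and platform values below are hypothetical, for illustration only:

import re

# Same pattern as transform_friendlink; the two dots match the locale segment ('en', 'de', ...).
pattern = re.compile('https://link.clashroyale.com/invite/friend/..\\?tag=([A-Z0-9]+)&token=([a-z0-9]+)&platform=([A-Za-z0-9]+)')

text = 'join me: https://link.clashroyale.com/invite/friend/en?tag=ABC123&token=a1b2c3&platform=iOS'
m = pattern.search(text)
print(m.group(0))  # the full invite link
print(m.group(1))  # 'ABC123', the player tag fed to player_data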
f621ba1c7430e34b3fc6e33a77ce3f35409b38f7dc6827f1a560a43b097a0a66
def _get_updated_endpoints(original_end_points): "Adds the keys 'logits' and 'probs' to the\n end points dictionary of ResNet50-v2.\n\n Args:\n original_end_points (dict): Original dictionary of end points\n\n Returns:\n dict: Dictionary of end points with the new keys.\n " end_points = dict(original_end_points) end_points['logits'] = tf.squeeze(end_points['resnet_v2_50/logits'], [1, 2]) end_points['probs'] = tf.nn.softmax(end_points['logits']) return end_points
Adds the keys 'logits' and 'probs' to the end points dictionary of ResNet50-v2. Args: original_end_points (dict): Original dictionary of end points Returns: dict: Dictionary of end points with the new keys.
shield/models/resnet_50_v2.py
_get_updated_endpoints
yfor1008/jpeg-defense
0
python
def _get_updated_endpoints(original_end_points): "Adds the keys 'logits' and 'probs' to the\n end points dictionary of ResNet50-v2.\n\n Args:\n original_end_points (dict): Original dictionary of end points\n\n Returns:\n dict: Dictionary of end points with the new keys.\n " end_points = dict(original_end_points) end_points['logits'] = tf.squeeze(end_points['resnet_v2_50/logits'], [1, 2]) end_points['probs'] = tf.nn.softmax(end_points['logits']) return end_points
def _get_updated_endpoints(original_end_points): "Adds the keys 'logits' and 'probs' to the\n end points dictionary of ResNet50-v2.\n\n Args:\n original_end_points (dict): Original dictionary of end points\n\n Returns:\n dict: Dictionary of end points with the new keys.\n " end_points = dict(original_end_points) end_points['logits'] = tf.squeeze(end_points['resnet_v2_50/logits'], [1, 2]) end_points['probs'] = tf.nn.softmax(end_points['logits']) return end_points<|docstring|>Adds the keys 'logits' and 'probs' to the end points dictionary of ResNet50-v2. Args: original_end_points (dict): Original dictionary of end points Returns: dict: Dictionary of end points with the new keys.<|endoftext|>
366c8c3b7a81da3d5dfa2eeab171251b21b42444be7eb016cb3a24384984d640
def __init__(self, x, num_classes=15, is_training=False): 'Initializes the tensorflow graph for the ResNet50-v2 model.\n\n Args:\n x (tf.Variable): The variable in the tensorflow graph\n that feeds into the model nodes.\n num_classes (int):\n Number of predicted classes for classification tasks.\n If 0 or None, the features before the logit layer are returned.\n is_training (bool): Whether batch_norm layers are in training mode.\n ' super(ResNet50v2, self).__init__() self.x = x self.num_classes = num_classes print(type(resnet_arg_scope())) with slim.arg_scope(resnet_arg_scope()): (net, end_points) = resnet_v2_50(x, num_classes=num_classes, is_training=is_training, reuse=None) self.end_points = _get_updated_endpoints(end_points) self.variables_to_restore = slim.get_variables_to_restore(exclude=[])
Initializes the tensorflow graph for the ResNet50-v2 model. Args: x (tf.Variable): The variable in the tensorflow graph that feeds into the model nodes. num_classes (int): Number of predicted classes for classification tasks. If 0 or None, the features before the logit layer are returned. is_training (bool): Whether batch_norm layers are in training mode.
shield/models/resnet_50_v2.py
__init__
yfor1008/jpeg-defense
0
python
def __init__(self, x, num_classes=15, is_training=False): 'Initializes the tensorflow graph for the ResNet50-v2 model.\n\n Args:\n x (tf.Variable): The variable in the tensorflow graph\n that feeds into the model nodes.\n num_classes (int):\n Number of predicted classes for classification tasks.\n If 0 or None, the features before the logit layer are returned.\n is_training (bool): Whether batch_norm layers are in training mode.\n ' super(ResNet50v2, self).__init__() self.x = x self.num_classes = num_classes print(type(resnet_arg_scope())) with slim.arg_scope(resnet_arg_scope()): (net, end_points) = resnet_v2_50(x, num_classes=num_classes, is_training=is_training, reuse=None) self.end_points = _get_updated_endpoints(end_points) self.variables_to_restore = slim.get_variables_to_restore(exclude=[])
def __init__(self, x, num_classes=15, is_training=False): 'Initializes the tensorflow graph for the ResNet50-v2 model.\n\n Args:\n x (tf.Variable): The variable in the tensorflow graph\n that feeds into the model nodes.\n num_classes (int):\n Number of predicted classes for classification tasks.\n If 0 or None, the features before the logit layer are returned.\n is_training (bool): Whether batch_norm layers are in training mode.\n ' super(ResNet50v2, self).__init__() self.x = x self.num_classes = num_classes print(type(resnet_arg_scope())) with slim.arg_scope(resnet_arg_scope()): (net, end_points) = resnet_v2_50(x, num_classes=num_classes, is_training=is_training, reuse=None) self.end_points = _get_updated_endpoints(end_points) self.variables_to_restore = slim.get_variables_to_restore(exclude=[])<|docstring|>Initializes the tensorflow graph for the ResNet50-v2 model. Args: x (tf.Variable): The variable in the tensorflow graph that feeds into the model nodes. num_classes (int): Number of predicted classes for classification tasks. If 0 or None, the features before the logit layer are returned. is_training (bool): Whether batch_norm layers are in training mode.<|endoftext|>
6056fa0e5e01d95be165096bb123bb343087cae7f8f68dd46b75d1ecb17798fc
def load_weights(self, checkpoint_path, sess=None): 'Load weights from a checkpoint file into the tensorflow graph.\n\n Args:\n checkpoint_path (str): Path to the checkpoint file.\n sess (tf.Session): The tensorflow session holding the model graph.\n ' if (sess is None): sess = tf.get_default_session() assert (sess is not None) saver = tf.train.Saver(self.variables_to_restore) saver.restore(sess, checkpoint_path)
Load weights from a checkpoint file into the tensorflow graph. Args: checkpoint_path (str): Path to the checkpoint file. sess (tf.Session): The tensorflow session holding the model graph.
shield/models/resnet_50_v2.py
load_weights
yfor1008/jpeg-defense
0
python
def load_weights(self, checkpoint_path, sess=None): 'Load weights from a checkpoint file into the tensorflow graph.\n\n Args:\n checkpoint_path (str): Path to the checkpoint file.\n sess (tf.Session): The tensorflow session holding the model graph.\n ' if (sess is None): sess = tf.get_default_session() assert (sess is not None) saver = tf.train.Saver(self.variables_to_restore) saver.restore(sess, checkpoint_path)
def load_weights(self, checkpoint_path, sess=None): 'Load weights from a checkpoint file into the tensorflow graph.\n\n Args:\n checkpoint_path (str): Path to the checkpoint file.\n sess (tf.Session): The tensorflow session holding the model graph.\n ' if (sess is None): sess = tf.get_default_session() assert (sess is not None) saver = tf.train.Saver(self.variables_to_restore) saver.restore(sess, checkpoint_path)<|docstring|>Load weights from a checkpoint file into the tensorflow graph. Args: checkpoint_path (str): Path to the checkpoint file. sess (tf.Session): The tensorflow session holding the model graph.<|endoftext|>
902a93f2b2c564abcffe48695341a297121a25b140f9e92b4f973407ac341c68
def get_params(self): "Lists the model's parameters.\n\n Returns:\n list: A list of the model's parameters.\n " return None
Lists the model's parameters. Returns: list: A list of the model's parameters.
shield/models/resnet_50_v2.py
get_params
yfor1008/jpeg-defense
0
python
def get_params(self): "Lists the model's parameters.\n\n Returns:\n list: A list of the model's parameters.\n " return None
def get_params(self): "Lists the model's parameters.\n\n Returns:\n list: A list of the model's parameters.\n " return None<|docstring|>Lists the model's parameters. Returns: list: A list of the model's parameters.<|endoftext|>
7e60feacda828fe18c636f74784a8f15227c6611ebc46735283b1704a6f3408f
def fprop(self, x): 'Exposes all the layers of the model.\n\n Args:\n x (tf.Variable): Tensor which is input to the model.\n\n Returns:\n dict: A dictionary mapping layer names to the corresponding\n node in the tensorflow graph.\n ' if (x is self.x): return self.end_points else: with slim.arg_scope(resnet_arg_scope()): (net, end_points) = resnet_v2_50(x, num_classes=self.num_classes, is_training=False, reuse=tf.AUTO_REUSE) return _get_updated_endpoints(end_points)
Exposes all the layers of the model. Args: x (tf.Variable): Tensor which is input to the model. Returns: dict: A dictionary mapping layer names to the corresponding node in the tensorflow graph.
shield/models/resnet_50_v2.py
fprop
yfor1008/jpeg-defense
0
python
def fprop(self, x): 'Exposes all the layers of the model.\n\n Args:\n x (tf.Variable): Tensor which is input to the model.\n\n Returns:\n dict: A dictionary mapping layer names to the corresponding\n node in the tensorflow graph.\n ' if (x is self.x): return self.end_points else: with slim.arg_scope(resnet_arg_scope()): (net, end_points) = resnet_v2_50(x, num_classes=self.num_classes, is_training=False, reuse=tf.AUTO_REUSE) return _get_updated_endpoints(end_points)
def fprop(self, x): 'Exposes all the layers of the model.\n\n Args:\n x (tf.Variable): Tensor which is input to the model.\n\n Returns:\n dict: A dictionary mapping layer names to the corresponding\n node in the tensorflow graph.\n ' if (x is self.x): return self.end_points else: with slim.arg_scope(resnet_arg_scope()): (net, end_points) = resnet_v2_50(x, num_classes=self.num_classes, is_training=False, reuse=tf.AUTO_REUSE) return _get_updated_endpoints(end_points)<|docstring|>Exposes all the layers of the model. Args: x (tf.Variable): Tensor which is input to the model. Returns: dict: A dictionary mapping layer names to the corresponding node in the tensorflow graph.<|endoftext|>
3000f1d88d18c6360fff732db5491fc9d37fa74789e57c659e3dde92e23f36bb
def finalize_options(self): 'Abstract method that is required to be overwritten'
Abstract method that is required to be overwritten
setup.py
finalize_options
mpavlase/nginx-config-builder
149
python
def finalize_options(self): pass
def finalize_options(self): pass<|docstring|>Abstract method that is required to be overwritten<|endoftext|>
22d06d819c11bd70f87fba9923c4366111478aa18202381e277b79529f6abaf7
def getHint(self, secret, guess): '\n :type secret: str\n :type guess: str\n :rtype: str\n ' cnt = defaultdict(int) A = 0 B = 0 for c in secret: cnt[c] += 1 for (i, v) in enumerate(guess): if (v == secret[i]): A += 1 cnt[v] -= 1 if (cnt[v] < 0): assert (cnt[v] == (- 1)) B -= 1 cnt[v] = 0 elif (cnt[v] > 0): B += 1 cnt[v] -= 1 return ('%dA%dB' % (A, B))
:type secret: str :type guess: str :rtype: str
299 Bulls and Cows.py
getHint
ChiFire/legend_LeetCode
1
python
def getHint(self, secret, guess): '\n :type secret: str\n :type guess: str\n :rtype: str\n ' cnt = defaultdict(int) A = 0 B = 0 for c in secret: cnt[c] += 1 for (i, v) in enumerate(guess): if (v == secret[i]): A += 1 cnt[v] -= 1 if (cnt[v] < 0): assert (cnt[v] == (- 1)) B -= 1 cnt[v] = 0 elif (cnt[v] > 0): B += 1 cnt[v] -= 1 return ('%dA%dB' % (A, B))
def getHint(self, secret, guess): '\n :type secret: str\n :type guess: str\n :rtype: str\n ' cnt = defaultdict(int) A = 0 B = 0 for c in secret: cnt[c] += 1 for (i, v) in enumerate(guess): if (v == secret[i]): A += 1 cnt[v] -= 1 if (cnt[v] < 0): assert (cnt[v] == (- 1)) B -= 1 cnt[v] = 0 elif (cnt[v] > 0): B += 1 cnt[v] -= 1 return ('%dA%dB' % (A, B))<|docstring|>:type secret: str :type guess: str :rtype: str<|endoftext|>
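A standalone rerun of the same counting logic (renamed get_hint and pulled out of its class) against the classic Bulls and Cows examples; both asserts pass:

from collections import defaultdict

def get_hint(secret, guess):
    cnt = defaultdict(int)
    bulls = cows = 0
    for c in secret:
        cnt[c] += 1
    for i, v in enumerate(guess):
        if v == secret[i]:
            bulls += 1
            cnt[v] -= 1
            if cnt[v] < 0:
                # this bull was already counted as a cow earlier; take the cow back
                cows -= 1
                cnt[v] = 0
        elif cnt[v] > 0:
            cows += 1
            cnt[v] -= 1
    return '%dA%dB' % (bulls, cows)

assert get_hint('1807', '7810') == '1A3B'
assert get_hint('1123', '0111') == '1A1B'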
e9d100e4ca0766691773a21748317edff1e72f272b73c70493106a731b7545f8
def test_anbieter_mapping(self): '\n Test that all mappings point to correct fields\n ' for field in set(Anbieter.FIELD_NAME_MAPPING.values()): if (field is None): continue else: Anbieter._meta.get_field(field)
Test that all mappings point to correct fields
anbieter/tests.py
test_anbieter_mapping
CarliJoy/RoWoOekostromDB
0
python
def test_anbieter_mapping(self): '\n \n ' for field in set(Anbieter.FIELD_NAME_MAPPING.values()): if (field is None): continue else: Anbieter._meta.get_field(field)
def test_anbieter_mapping(self): '\n \n ' for field in set(Anbieter.FIELD_NAME_MAPPING.values()): if (field is None): continue else: Anbieter._meta.get_field(field)<|docstring|>Test that all mappings point to correct fields<|endoftext|>
89bef0cb915d5b31719d8fe8042a66e020419c481c64a06c1cb88e51606573cd
def test_homepage_kriterium_mapping(self): '\n Test that all mappings point to correct fields\n ' for field in set(HomepageKriterium.FIELD_NAME_MAPPING.values()): HomepageKriterium._meta.get_field(field)
Test that all mappings point to correct fields
anbieter/tests.py
test_homepage_kriterium_mapping
CarliJoy/RoWoOekostromDB
0
python
def test_homepage_kriterium_mapping(self): '\n \n ' for field in set(HomepageKriterium.FIELD_NAME_MAPPING.values()): HomepageKriterium._meta.get_field(field)
def test_homepage_kriterium_mapping(self): '\n \n ' for field in set(HomepageKriterium.FIELD_NAME_MAPPING.values()): HomepageKriterium._meta.get_field(field)<|docstring|>Test that all mappings point to correct fields<|endoftext|>
0fdf527f6c7b9ba1342f660a94d40e9931a31ab34ead6dde51c5e91eac198841
def np_arr_to_poly(np_arr): ' Using numpy 2d array ([0]-h, [1]-w) to construct polygon.\n\n Parameters\n -------\n np_arr : np.array\n contour with standard numpy 2d array format\n\n Returns\n -------\n poly : Polygon\n contour with shapely polygon format\n\n ' np_arr = swap_wh(np_arr) point_list = np_arr_to_point_list(np_arr) poly = Polygon(point_list) return poly
Using numpy 2d array ([0]-h, [1]-w) to construct polygon. Parameters ------- np_arr : np.array contour with standard numpy 2d array format Returns ------- poly : Polygon contour with shapely polygon format
pycontour/poly_transform.py
np_arr_to_poly
PingjunChen/pycontour
8
python
def np_arr_to_poly(np_arr): ' Using numpy 2d array ([0]-h, [1]-w) to construct polygon.\n\n Parameters\n -------\n np_arr : np.array\n contour with standard numpy 2d array format\n\n Returns\n -------\n poly : Polygon\n contour with shapely polygon format\n\n ' np_arr = swap_wh(np_arr) point_list = np_arr_to_point_list(np_arr) poly = Polygon(point_list) return poly
def np_arr_to_poly(np_arr): ' Using numpy 2d array ([0]-h, [1]-w) to construct polygon.\n\n Parameters\n -------\n np_arr : np.array\n contour with standard numpy 2d array format\n\n Returns\n -------\n poly : Polygon\n contour with shapely polygon format\n\n ' np_arr = swap_wh(np_arr) point_list = np_arr_to_point_list(np_arr) poly = Polygon(point_list) return poly<|docstring|>Using numpy 2d array ([0]-h, [1]-w) to construct polygon. Parameters ------- np_arr : np.array contour with standard numpy 2d array format Returns ------- poly : Polygon contour with shapely polygon format<|endoftext|>
63c70674d96ecdbab3a394e90310ec7b05b7b869b9bfed54aedafe055975310a
def point_list_to_poly(point_list): ' Using point list to construct polygon.\n\n Parameters\n -------\n point_list : list\n list of point set ([0]-h, [1]-w)\n\n Returns\n -------\n poly : Polygon\n contour with shapely polygon format\n\n ' wh_point_list = [] for ind in np.arange(len(point_list)): wh_point_list.append((point_list[ind][1], point_list[ind][0])) poly = Polygon(wh_point_list) return poly
Using point list to construct polygon. Parameters ------- point_list : list list of point set ([0]-h, [1]-w) Returns ------- poly : Polygon contour with shapely polygon format
pycontour/poly_transform.py
point_list_to_poly
PingjunChen/pycontour
8
python
def point_list_to_poly(point_list): ' Using point list to construct polygon.\n\n Parameters\n -------\n point_list : list\n list of point set ([0]-h, [1]-w)\n\n Returns\n -------\n poly : Polygon\n contour with shapely polygon format\n\n ' wh_point_list = [] for ind in np.arange(len(point_list)): wh_point_list.append((point_list[ind][1], point_list[ind][0])) poly = Polygon(wh_point_list) return poly
def point_list_to_poly(point_list): ' Using point list to construct polygon.\n\n Parameters\n -------\n point_list : list\n list of point set ([0]-h, [1]-w)\n\n Returns\n -------\n poly : Polygon\n contour with shapely polygon format\n\n ' wh_point_list = [] for ind in np.arange(len(point_list)): wh_point_list.append((point_list[ind][1], point_list[ind][0])) poly = Polygon(wh_point_list) return poly<|docstring|>Using point list to construct polygon. Parameters ------- point_list : list list of point set ([0]-h, [1]-w) Returns ------- poly : Polygon contour with shapely polygon format<|endoftext|>
33d9cb29b3dad2f45841ce9563deb3a7d285499f027b81801afcf3cf3ecf6247
def bbox_to_poly(min_h, min_w, max_h, max_w): ' Using bounding box to construct polygon.\n\n Parameters\n -------\n min_h : int\n minimum y coordinate of polygon\n min_w : int\n minimum x coordinate of polygon\n max_h : int\n maximum y coordinate of polygon\n max_w : int\n maximum x coordinate of polygon\n\n Returns\n -------\n poly : Polygon\n contour with shapely polygon format\n\n ' poly = box(min_w, min_h, max_w, max_h) return poly
Using bounding box to construct polygon. Parameters ------- min_h : int minimum y coordinate of polygon min_w : int minimum x coordinate of polygon max_h : int maximum y coordinate of polygon max_w : int maximum x coordinate of polygon Returns ------- poly : Polygon contour with shapely polygon format
pycontour/poly_transform.py
bbox_to_poly
PingjunChen/pycontour
8
python
def bbox_to_poly(min_h, min_w, max_h, max_w): ' Using bounding box to construct polygon.\n\n Parameters\n -------\n min_h : int\n minimum y coordinate of polygon\n min_w : int\n minimum x coordinate of polygon\n max_h : int\n maximum y coordinate of polygon\n max_w : int\n maximum x coordinate of polygon\n\n Returns\n -------\n poly : Polygon\n contour with shapely polygon format\n\n ' poly = box(min_w, min_h, max_w, max_h) return poly
def bbox_to_poly(min_h, min_w, max_h, max_w): ' Using bounding box to construct polygon.\n\n Parameters\n -------\n min_h : int\n minimum y coordinate of polygon\n min_w : int\n minimum x coordinate of polygon\n max_h : int\n maximum y coordinate of polygon\n max_w : int\n maximum x coordinate of polygon\n\n Returns\n -------\n poly : Polygon\n contour with shapely polygon format\n\n ' poly = box(min_w, min_h, max_w, max_h) return poly<|docstring|>Using bounding box to construct polygon. Parameters ------- min_h : int minimum y coordinate of polygon min_w : int minimum x coordinate of polygon max_h : int maximum y coordinate of polygon max_w : int maximum x coordinate of polygon Returns ------- poly : Polygon contour with shapely polygon format<|endoftext|>
4b63b3d6162334c541330e756fcfb39d3efd36b1fa65d57d77e804f356fc84a3
def poly_to_np_arr(poly): ' Convert shapely Polygon to numpy 2d array ([0]-h, [1]-w).\n\n Parameters\n -------\n poly : Polygon\n contour with shapely polygon format\n\n Returns\n -------\n cnt_arr : np.array\n contour with standard numpy 2d array format\n\n ' (x_coors, y_coors) = poly.exterior.coords.xy x_coors = x_coors[:(- 1)].tolist() y_coors = y_coors[:(- 1)].tolist() point_list = [(y, x) for (x, y) in zip(x_coors, y_coors)] cnt_arr = point_list_to_np_arr(point_list) return cnt_arr
Convert shapely Polygon to numpy 2d array ([0]-h, [1]-w). Parameters ------- poly : Polygon contour with shapely polygon format Returns ------- cnt_arr : np.array contour with standard numpy 2d array format
pycontour/poly_transform.py
poly_to_np_arr
PingjunChen/pycontour
8
python
def poly_to_np_arr(poly): ' Convert shapely Polygon to numpy 2d array ([0]-h, [1]-w).\n\n Parameters\n -------\n poly : Polygon\n contour with shapely polygon format\n\n Returns\n -------\n cnt_arr : np.array\n contour with standard numpy 2d array format\n\n ' (x_coors, y_coors) = poly.exterior.coords.xy x_coors = x_coors[:(- 1)].tolist() y_coors = y_coors[:(- 1)].tolist() point_list = [(y, x) for (x, y) in zip(x_coors, y_coors)] cnt_arr = point_list_to_np_arr(point_list) return cnt_arr
def poly_to_np_arr(poly): ' Convert shapely Polygon to numpy 2d array ([0]-h, [1]-w).\n\n Parameters\n -------\n poly : Polygon\n contour with shapely polygon format\n\n Returns\n -------\n cnt_arr : np.array\n contour with standard numpy 2d array format\n\n ' (x_coors, y_coors) = poly.exterior.coords.xy x_coors = x_coors[:(- 1)].tolist() y_coors = y_coors[:(- 1)].tolist() point_list = [(y, x) for (x, y) in zip(x_coors, y_coors)] cnt_arr = point_list_to_np_arr(point_list) return cnt_arr<|docstring|>Convert shapely Polygon to numpy 2d array ([0]-h, [1]-w). Parameters ------- poly : Polygon contour with shapely polygon format Returns ------- cnt_arr : np.array contour with standard numpy 2d array format<|endoftext|>
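For orientation, a minimal round trip written directly against shapely (numpy and shapely assumed installed), showing the (h, w) <-> (x, y) swap these helpers encapsulate:

import numpy as np
from shapely.geometry import Polygon

hw_points = [(0, 0), (0, 4), (3, 0)]  # (h, w) points of a small triangle

# Build the polygon in shapely's (x, y) = (w, h) order, as point_list_to_poly does.
poly = Polygon([(w, h) for (h, w) in hw_points])

# Read the exterior back, dropping the repeated closing vertex, as poly_to_np_arr does.
(xs, ys) = poly.exterior.coords.xy
cnt_arr = np.array([(y, x) for (x, y) in zip(xs[:-1], ys[:-1])])
print(cnt_arr)  # rows are (h, w) points again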
bb9bc7de05f6be28311bc5f0dd01a4ac108a94b2e158f2448954b54b79bd6b3b
def reshape(a, recshape=None): 'Convert a nested, non-string iterable into a flat generator and its shape.\n Raggedly shaped data will be processed recursively by calling recshape.\n If recshape is None, then ragged data will raise ShapeError.\n To leave ragged data unshaped, pass recshape=unshape.\n ' if (isinstance(a, Shaped) and (not isinstance(a, View))): return (a.data, a.shape) elif (isinstance(a, Iterable) and (not isinstance(a, str))): data = [] shape = False ragged = False shapes = None for elt in a: (subtensor, subshape) = reshape(elt, recshape=recshape) data.append(subtensor) if (shape is False): shape = subshape elif ragged: shapes.append(subshape) elif (shape != subshape): if (recshape is None): raise ShapeError(f'array has ragged shape: expecting {shape!r}, got {subshape!r}') ragged = True shapes = ([shape] * (len(data) - 1)) shapes.append(subshape) if ragged: return (((recshape(subtensor, subshape) if subshape else subtensor) for (subtensor, subshape) in zip(data, shapes)), (len(data),)) elif shape: return ((elt for subtensor in data for elt in subtensor), (len(data), *shape)) else: return (data, (len(data),)) else: return (a, ())
Convert a nested, non-string iterable into a flat generator and its shape. Raggedly shaped data will be processed recursively by calling recshape. If recshape is None, then ragged data will raise ShapeError. To leave ragged data unshaped, pass recshape=unshape.
titanfp/titanic/ndarray.py
reshape
billzorn/fpunreal
4
python
def reshape(a, recshape=None): 'Convert a nested, non-string iterable into a flat generator and its shape.\n Raggedly shaped data will be processed recursively by calling recshape.\n If recshape is None, then ragged data will raise ShapeError.\n To leave ragged data unshaped, pass recshape=unshape.\n ' if (isinstance(a, Shaped) and (not isinstance(a, View))): return (a.data, a.shape) elif (isinstance(a, Iterable) and (not isinstance(a, str))): data = [] shape = False ragged = False shapes = None for elt in a: (subtensor, subshape) = reshape(elt, recshape=recshape) data.append(subtensor) if (shape is False): shape = subshape elif ragged: shapes.append(subshape) elif (shape != subshape): if (recshape is None): raise ShapeError(f'array has ragged shape: expecting {shape!r}, got {subshape!r}') ragged = True shapes = ([shape] * (len(data) - 1)) shapes.append(subshape) if ragged: return (((recshape(subtensor, subshape) if subshape else subtensor) for (subtensor, subshape) in zip(data, shapes)), (len(data),)) elif shape: return ((elt for subtensor in data for elt in subtensor), (len(data), *shape)) else: return (data, (len(data),)) else: return (a, ())
def reshape(a, recshape=None): 'Convert a nested, non-string iterable into a flat generator and its shape.\n Raggedly shaped data will be processed recursively by calling recshape.\n If recshape is None, then ragged data will raise ShapeError.\n To leave ragged data unshaped, pass recshape=unshape.\n ' if (isinstance(a, Shaped) and (not isinstance(a, View))): return (a.data, a.shape) elif (isinstance(a, Iterable) and (not isinstance(a, str))): data = [] shape = False ragged = False shapes = None for elt in a: (subtensor, subshape) = reshape(elt, recshape=recshape) data.append(subtensor) if (shape is False): shape = subshape elif ragged: shapes.append(subshape) elif (shape != subshape): if (recshape is None): raise ShapeError(f'array has ragged shape: expecting {shape!r}, got {subshape!r}') ragged = True shapes = ([shape] * (len(data) - 1)) shapes.append(subshape) if ragged: return (((recshape(subtensor, subshape) if subshape else subtensor) for (subtensor, subshape) in zip(data, shapes)), (len(data),)) elif shape: return ((elt for subtensor in data for elt in subtensor), (len(data), *shape)) else: return (data, (len(data),)) else: return (a, ())<|docstring|>Convert a nested, non-string iterable into a flat generator and its shape. Raggedly shaped data will be processed recursively by calling recshape. If recshape is None, then ragged data will raise ShapeError. To leave ragged data unshaped, pass recshape=unshape.<|endoftext|>
1e76c5e350fc96dd064a1bb4a23e45451df67666d104a81444ddbb95a5272332
def unshape_tuple(data, shape): 'Expand a flat iterable and its shape into a nested tuple.\n ' a = data for dim in reversed(shape[1:]): a = zip(*([iter(a)] * dim)) return tuple(a)
Expand a flat iterable and its shape into a nested tuple.
titanfp/titanic/ndarray.py
unshape_tuple
billzorn/fpunreal
4
python
def unshape_tuple(data, shape): '\n ' a = data for dim in reversed(shape[1:]): a = zip(*([iter(a)] * dim)) return tuple(a)
def unshape_tuple(data, shape): '\n ' a = data for dim in reversed(shape[1:]): a = zip(*([iter(a)] * dim)) return tuple(a)<|docstring|>Expand a flat iterable and its shape into a nested tuple.<|endoftext|>
dde06d46b03b19604d824d010d787a2d5640b0da6c4759c1aac6825b3b621797
def unshape_list(data, shape): 'Expand a flat list and its shape into a nested list.\n ' a = data for dim in reversed(shape[1:]): a = [a[(chunk * dim):((chunk + 1) * dim)] for chunk in range((len(a) // dim))] return a
Expand a flat list and its shape into a nested list.
titanfp/titanic/ndarray.py
unshape_list
billzorn/fpunreal
4
python
def unshape_list(data, shape): '\n ' a = data for dim in reversed(shape[1:]): a = [a[(chunk * dim):((chunk + 1) * dim)] for chunk in range((len(a) // dim))] return a
def unshape_list(data, shape): '\n ' a = data for dim in reversed(shape[1:]): a = [a[(chunk * dim):((chunk + 1) * dim)] for chunk in range((len(a) // dim))] return a<|docstring|>Expand a flat list and its shape into a nested list.<|endoftext|>
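Since unshape_list is pure, the flat-data-plus-shape convention is easy to see in action; a standalone copy and a quick demo:

def unshape_list(data, shape):
    # Fold a flat list into nested lists, innermost dimension first.
    a = data
    for dim in reversed(shape[1:]):
        a = [a[(chunk * dim):((chunk + 1) * dim)] for chunk in range(len(a) // dim)]
    return a

flat = list(range(12))
print(unshape_list(flat, (2, 3, 2)))
# [[[0, 1], [2, 3], [4, 5]], [[6, 7], [8, 9], [10, 11]]]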
978ba36717f50178de977e87b75fb243d3be6aee795806ddcff6bd3181d6ca9c
def unshape_gen(data, shape): 'Expand a flat list and its shape into a nested generator.\n ' a = data for dim in reversed(shape[1:]): a = (a[(chunk * dim):((chunk + 1) * dim)] for chunk in range((len(a) // dim))) return a
Expand a flat list and its shape into a nested generator.
titanfp/titanic/ndarray.py
unshape_gen
billzorn/fpunreal
4
python
def unshape_gen(data, shape): '\n ' a = data for dim in reversed(shape[1:]): a = (a[(chunk * dim):((chunk + 1) * dim)] for chunk in range((len(a) // dim))) return a
def unshape_gen(data, shape): '\n ' a = data for dim in reversed(shape[1:]): a = (a[(chunk * dim):((chunk + 1) * dim)] for chunk in range((len(a) // dim))) return a<|docstring|>Expand a flat list and its shape into a nested generator.<|endoftext|>
a327ba1f1f3e87d84a764dc061728171f3aed8ca83a8c08a9830691da4a84eba
def describe(a, descr=repr, sep=', ', lparen='(', rparen=')'): 'Convert a shaped or unshaped iterable into a one-line string,\n using the provided printing method and separators.\n ' if (isinstance(a, Iterable) and (not isinstance(a, str))): return ''.join([lparen, sep.join((describe(elt, descr=descr, sep=sep, lparen=lparen, rparen=rparen) for elt in a)), rparen]) else: return descr(a)
Convert a shaped or unshaped iterable into a one-line string, using the provided printing method and separators.
titanfp/titanic/ndarray.py
describe
billzorn/fpunreal
4
python
def describe(a, descr=repr, sep=', ', lparen='(', rparen=')'): 'Convert a shaped or unshaped iterable into a one-line string,\n using the provided printing method and separators.\n ' if (isinstance(a, Iterable) and (not isinstance(a, str))): return ''.join([lparen, sep.join((describe(elt, descr=descr, sep=sep, lparen=lparen, rparen=rparen) for elt in a)), rparen]) else: return descr(a)
def describe(a, descr=repr, sep=', ', lparen='(', rparen=')'): 'Convert a shaped or unshaped iterable into a one-line string,\n using the provided printing method and separators.\n ' if (isinstance(a, Iterable) and (not isinstance(a, str))): return ''.join([lparen, sep.join((describe(elt, descr=descr, sep=sep, lparen=lparen, rparen=rparen) for elt in a)), rparen]) else: return descr(a)<|docstring|>Convert a shaped or unshaped iterable into a one-line string, using the provided printing method and separators.<|endoftext|>
16ec861dc4a0c96367d3b44ad1ed28d809cd7b7a3c4860f644edbd2348e56a0a
def describe_nd(a, descr=repr, dimsep=dimsep_array, lparen='(', rparen=')', depth=0): 'Convert a shaped or unshaped iterable into a string and a count of dimensions,\n using the provided printing method and separators.\n dimsep is a function that computes the separator given a logical depth and height\n from the top and bottom of the data structure, and the parentheses.\n ' if (isinstance(a, Iterable) and (not isinstance(a, str))): if a: (rows, heights) = zip(*(describe_nd(elt, descr=descr, dimsep=dimsep, lparen=lparen, rparen=rparen, depth=(depth + 1)) for elt in a)) height = (max(heights) + 1) sep = dimsep(depth, height, lparen, rparen) return (''.join([lparen, sep.join(rows), rparen]), height) else: return ((lparen + rparen), 1) else: return (descr(a), 0)
Convert a shaped or unshaped iterable into a string and a count of dimensions, using the provided printing method and separators. dimsep is a function that computes the separator given a logical depth and height from the top and bottom of the data structure, and the parentheses.
titanfp/titanic/ndarray.py
describe_nd
billzorn/fpunreal
4
python
def describe_nd(a, descr=repr, dimsep=dimsep_array, lparen='(', rparen=')', depth=0): 'Convert a shaped or unshaped iterable into a string and a count of dimensions,\n using the provided printing method and separators.\n dimsep is a function that computes the separator given a logical depth and height\n from the top and bottom of the data structure, and the parentheses.\n ' if (isinstance(a, Iterable) and (not isinstance(a, str))): if a: (rows, heights) = zip(*(describe_nd(elt, descr=descr, dimsep=dimsep, lparen=lparen, rparen=rparen, depth=(depth + 1)) for elt in a)) height = (max(heights) + 1) sep = dimsep(depth, height, lparen, rparen) return (''.join([lparen, sep.join(rows), rparen]), height) else: return ((lparen + rparen), 1) else: return (descr(a), 0)
def describe_nd(a, descr=repr, dimsep=dimsep_array, lparen='(', rparen=')', depth=0): 'Convert a shaped or unshaped iterable into a string and a count of dimensions,\n using the provided printing method and separators.\n dimsep is a function that computes the separator given a logical depth and height\n from the top and bottom of the data structure, and the parentheses.\n ' if (isinstance(a, Iterable) and (not isinstance(a, str))): if a: (rows, heights) = zip(*(describe_nd(elt, descr=descr, dimsep=dimsep, lparen=lparen, rparen=rparen, depth=(depth + 1)) for elt in a)) height = (max(heights) + 1) sep = dimsep(depth, height, lparen, rparen) return (''.join([lparen, sep.join(rows), rparen]), height) else: return ((lparen + rparen), 1) else: return (descr(a), 0)<|docstring|>Convert a shaped or unshaped iterable into a string and a count of dimensions, using the provided printing method and separators. dimsep is a function that computes the separator given a logical depth and height from the top and bottom of the data structure, and the parentheses.<|endoftext|>
b711967069254a36e269af2ac96d111c8e0e3b0591b603401fa7328eb1e22e20
def locate(shape, pos): 'Given a shape and a position vector, return the index of that position in the flat array.\n ' idx = 0 scale = 1 for (dim, coord) in zip(reversed(shape), reversed(pos)): idx += (coord * scale) scale *= dim return idx
Given a shape and a position vector, return the index of that position in the flat array.
titanfp/titanic/ndarray.py
locate
billzorn/fpunreal
4
python
def locate(shape, pos): '\n ' idx = 0 scale = 1 for (dim, coord) in zip(reversed(shape), reversed(pos)): idx += (coord * scale) scale *= dim return idx
def locate(shape, pos): '\n ' idx = 0 scale = 1 for (dim, coord) in zip(reversed(shape), reversed(pos)): idx += (coord * scale) scale *= dim return idx<|docstring|>Given a shape and a position vector, return the index of that position in the flat array.<|endoftext|>
7f060b6f522785774e35cd6a883950cd93448eacfe8bc4eca5080ad9cd72ea55
def position(shape, idx): 'Given a shape and a flat index, return the corresponding position vector.\n ' quot = idx pos = [] for dim in reversed(shape): (quot, rem) = divmod(quot, dim) pos.append(rem) return tuple(reversed(pos))
Given a shape and a flat index, return the corresponding position vector.
titanfp/titanic/ndarray.py
position
billzorn/fpunreal
4
python
def position(shape, idx): '\n ' quot = idx pos = [] for dim in reversed(shape): (quot, rem) = divmod(quot, dim) pos.append(rem) return tuple(reversed(pos))
def position(shape, idx): '\n ' quot = idx pos = [] for dim in reversed(shape): (quot, rem) = divmod(quot, dim) pos.append(rem) return tuple(reversed(pos))<|docstring|>Given a shape and a flat index, return the corresponding position vector.<|endoftext|>
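locate and position are inverses of each other; a standalone copy of both and a round trip on a 2x3x4 shape:

def locate(shape, pos):
    # Row-major flat index of a position vector.
    idx = 0
    scale = 1
    for (dim, coord) in zip(reversed(shape), reversed(pos)):
        idx += coord * scale
        scale *= dim
    return idx

def position(shape, idx):
    # Inverse of locate: recover the position vector from a flat index.
    quot = idx
    pos = []
    for dim in reversed(shape):
        (quot, rem) = divmod(quot, dim)
        pos.append(rem)
    return tuple(reversed(pos))

shape = (2, 3, 4)
idx = locate(shape, (1, 2, 3))
print(idx)                   # 23 == 1*12 + 2*4 + 3*1
print(position(shape, idx))  # (1, 2, 3)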
569b0acbcc486ba568b1b9d987ab4c2d39bc113c3f20c021cc282468b4f3b76e
def check_bounds(shape, pos): 'Given a shape, check if a position vector is in bounds for that shape.\n Raises IndexError if the position is out of bounds.\n ' for (dim, coord) in zip(shape, pos): if ((coord < 0) or (dim <= coord)): raise IndexError(f'{pos!r} out of range for shape {shape!r}')
Given a shape, check if a position vector is in bounds for that shape. Raises IndexError if the position is out of bounds.
titanfp/titanic/ndarray.py
check_bounds
billzorn/fpunreal
4
python
def check_bounds(shape, pos): 'Given a shape, check if a position vector is in bounds for that shape.\n Raises IndexError if the position is out of bounds.\n ' for (dim, coord) in zip(shape, pos): if ((coord < 0) or (dim <= coord)): raise IndexError(f'{pos!r} out of range for shape {shape!r}')
def check_bounds(shape, pos): 'Given a shape, check if a position vector is in bounds for that shape.\n Raises IndexError if the position is out of bounds.\n ' for (dim, coord) in zip(shape, pos): if ((coord < 0) or (dim <= coord)): raise IndexError(f'{pos!r} out of range for shape {shape!r}')<|docstring|>Given a shape, check if a position vector is in bounds for that shape. Raises IndexError if the position is out of bounds.<|endoftext|>
7a9589bf9970c99b7df2179a92ec0082f0dba5d45ef5792b8600fcbe212fa479
def calc_size(shape): 'Compute the size of a shape (the len of the backing flat array).\n ' if shape: scale = 1 for dim in shape: scale *= dim return scale else: return 0
Compute the size of a shape (the len of the backing flat array).
titanfp/titanic/ndarray.py
calc_size
billzorn/fpunreal
4
python
def calc_size(shape): '\n ' if shape: scale = 1 for dim in shape: scale *= dim return scale else: return 0
def calc_size(shape): '\n ' if shape: scale = 1 for dim in shape: scale *= dim return scale else: return 0<|docstring|>Compute the size of a shape (the len of the backing flat array).<|endoftext|>
3f9c2878f8db67a835460edb6bb0eafdaf380a4015fdb904a811febd67cebc49
def check_size(data, shape): 'Given a shape and a flat sequence, check if the sequence has the expected length.\n Raises ShapeError if the length is wrong.\n ' if shape: scale = 1 for dim in shape: scale *= dim else: scale = 0 if (len(data) != scale): raise ShapeError(f'shape {shape!r} should have total size {scale!s}, got {len(data)!s}')
Given a shape and a flat sequence, check if the sequence has the expected length. Raises ShapeError if the length is wrong.
titanfp/titanic/ndarray.py
check_size
billzorn/fpunreal
4
python
def check_size(data, shape): 'Given a shape and a flat sequence, check if the sequence has the expected length.\n Raises ShapeError if the length is wrong.\n ' if shape: scale = 1 for dim in shape: scale *= dim else: scale = 0 if (len(data) != scale): raise ShapeError(f'shape {shape!r} should have total size {scale!s}, got {len(data)!s}')
def check_size(data, shape): 'Given a shape and a flat sequence, check if the sequence has the expected length.\n Raises ShapeError if the length is wrong.\n ' if shape: scale = 1 for dim in shape: scale *= dim else: scale = 0 if (len(data) != scale): raise ShapeError(f'shape {shape!r} should have total size {scale!s}, got {len(data)!s}')<|docstring|>Given a shape and a flat sequence, check if the sequence has the expected length. Raises ShapeError if the length is wrong.<|endoftext|>
bf52dc4f02b85a272299c58ce5f70f8f961532c60454187bf53123b7c477d24f
def calc_strides(shape): 'Calculate stride values for a shape.\n Returns the computed strides, and the overall size of the shape.\n ' if shape: scale = 1 strides = [] for dim in reversed(shape): strides.append(scale) scale *= dim return (tuple(reversed(strides)), scale) else: return ((), 0)
Calculate stride values for a shape. Returns the computed strides, and the overall size of the shape.
titanfp/titanic/ndarray.py
calc_strides
billzorn/fpunreal
4
python
def calc_strides(shape): 'Calculate stride values for a shape.\n Returns the computed strides, and the overall size of the shape.\n ' if shape: scale = 1 strides = [] for dim in reversed(shape): strides.append(scale) scale *= dim return (tuple(reversed(strides)), scale) else: return ((), 0)
def calc_strides(shape): 'Calculate stride values for a shape.\n Returns the computed strides, and the overall size of the shape.\n ' if shape: scale = 1 strides = [] for dim in reversed(shape): strides.append(scale) scale *= dim return (tuple(reversed(strides)), scale) else: return ((), 0)<|docstring|>Calculate stride values for a shape. Returns the computed strides, and the overall size of the shape.<|endoftext|>
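Concretely, for a 2x3x4 shape the last axis is contiguous and each earlier stride covers everything to its right; a standalone copy for a quick run:

def calc_strides(shape):
    # Row-major strides plus the total size, as in the record above.
    if shape:
        scale = 1
        strides = []
        for dim in reversed(shape):
            strides.append(scale)
            scale *= dim
        return (tuple(reversed(strides)), scale)
    else:
        return ((), 0)

print(calc_strides((2, 3, 4)))  # ((12, 4, 1), 24)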
a61234325afe805fce013a5a8b379c33fe5a48db64feca185f16d9b7b2cfe8e1
def calc_offset(shape, strides, lookup): 'Given a shape with strides and a lookup, calculate a start offset and new strides.\n Returns the start offset, the new shape, the new strides, and the rest of the lookup.\n ' new_shape = [] new_strides = [] start = 0 fused = 0 for (dim, stride, query) in zip(shape, strides, lookup): if isinstance(query, int): if (query < 0): query = (dim + query) if ((query < 0) or (dim <= query)): raise IndexError(f'index {query!r} out of range for dimension {fused!s} of shape {shape!r}') start += (stride * query) elif isinstance(query, slice): raise NotImplementedError('slicing currently not supported') (q_start, q_stop, q_stride) = query.indices(dim) extent = (q_stop - q_start) new_shape.append(max(0, (extent // q_stride))) new_strides.append((stride * q_stride)) start += (q_start * stride) else: raise TypeError(f'index for dimension {fused!s} must be integer or slice, got {query!r}') fused += 1 return (start, (*new_shape, *shape[fused:]), (*new_strides, *strides[fused:]), tuple(lookup[fused:]))
Given a shape with strides and a lookup, calculate a start offset and new strides. Returns the start offset, the new shape, the new strides, and the rest of the lookup.
titanfp/titanic/ndarray.py
calc_offset
billzorn/fpunreal
4
python
def calc_offset(shape, strides, lookup): 'Given a shape with strides and a lookup, calculate a start offset and new strides.\n Returns the start offset, the new shape, the new strides, and the rest of the lookup.\n ' new_shape = [] new_strides = [] start = 0 fused = 0 for (dim, stride, query) in zip(shape, strides, lookup): if isinstance(query, int): if (query < 0): query = (dim + query) if ((query < 0) or (dim <= query)): raise IndexError(f'index {query!r} out of range for dimension {fused!s} of shape {shape!r}') start += (stride * query) elif isinstance(query, slice): raise NotImplementedError('slicing currently not supported') (q_start, q_stop, q_stride) = query.indices(dim) extent = (q_stop - q_start) new_shape.append(max(0, (extent // q_stride))) new_strides.append((stride * q_stride)) start += (q_start * stride) else: raise TypeError(f'index for dimension {fused!s} must be integer or slice, got {query!r}') fused += 1 return (start, (*new_shape, *shape[fused:]), (*new_strides, *strides[fused:]), tuple(lookup[fused:]))
def calc_offset(shape, strides, lookup): 'Given a shape with strides and a lookup, calculate a start offset and new strides.\n Returns the start offset, the new shape, the new strides, and the rest of the lookup.\n ' new_shape = [] new_strides = [] start = 0 fused = 0 for (dim, stride, query) in zip(shape, strides, lookup): if isinstance(query, int): if (query < 0): query = (dim + query) if ((query < 0) or (dim <= query)): raise IndexError(f'index {query!r} out of range for dimension {fused!s} of shape {shape!r}') start += (stride * query) elif isinstance(query, slice): raise NotImplementedError('slicing currently not supported') (q_start, q_stop, q_stride) = query.indices(dim) extent = (q_stop - q_start) new_shape.append(max(0, (extent // q_stride))) new_strides.append((stride * q_stride)) start += (q_start * stride) else: raise TypeError(f'index for dimension {fused!s} must be integer or slice, got {query!r}') fused += 1 return (start, (*new_shape, *shape[fused:]), (*new_strides, *strides[fused:]), tuple(lookup[fused:]))<|docstring|>Given a shape with strides and a lookup, calculate a start offset and new strides. Returns the start offset, the new shape, the new strides, and the rest of the lookup.<|endoftext|>
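A small worked case, assuming calc_offset from the record above is in scope: indexing the leading dimension of a 2x3 shape (strides (3, 1)) with the integer 1 consumes that dimension and moves the start one row forward:

(start, new_shape, new_strides, rest) = calc_offset((2, 3), (3, 1), (1,))
print(start, new_shape, new_strides, rest)  # 3 (3,) (1,) ()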
9611c8697ea2d4f35185dd0c1a0e3580d7c7a231859cd0142d47d1f815c4be1c
def check_offset(data, shape, start, strides): 'Check if a shape with a start offset and given strides is in bounds for some backing list.\n Raises ShapeError if the shape does not fit within the data.\n ' min_offset = 0 max_offset = 0 for (dim, stride) in zip(shape, strides): offset = (max(0, (dim - 1)) * stride) if (offset < 0): min_offset += offset else: max_offset += offset if (((start + min_offset) < 0) or (len(data) <= (start + max_offset))): raise ShapeError(f'shape {shape!r} with strides {strides!r} extends from {(start + min_offset)!s} to {(start + max_offset)!s}, out of bounds for data with length {len(data)!s}')
Check if a shape with a start offset and given strides is in bounds for some backing list. Raises ShapeError if the shape does not fit within the data.
titanfp/titanic/ndarray.py
check_offset
billzorn/fpunreal
4
python
def check_offset(data, shape, start, strides): 'Check if a shape with a start offset and given strides is in bounds for some backing list.\n Raises ShapeError if the shape does not fit within the data.\n ' min_offset = 0 max_offset = 0 for (dim, stride) in zip(shape, strides): offset = (max(0, (dim - 1)) * stride) if (offset < 0): min_offset += offset else: max_offset += offset if (((start + min_offset) < 0) or (len(data) <= (start + max_offset))): raise ShapeError(f'shape {shape!r} with strides {strides!r} extends from {(start + min_offset)!s} to {(start + max_offset)!s}, out of bounds for data with length {len(data)!s}')
def check_offset(data, shape, start, strides): 'Check if a shape with a start offset and given strides is in bounds for some backing list.\n Raises ShapeError if the shape does not fit within the data.\n ' min_offset = 0 max_offset = 0 for (dim, stride) in zip(shape, strides): offset = (max(0, (dim - 1)) * stride) if (offset < 0): min_offset += offset else: max_offset += offset if (((start + min_offset) < 0) or (len(data) <= (start + max_offset))): raise ShapeError(f'shape {shape!r} with strides {strides!r} extends from {(start + min_offset)!s} to {(start + max_offset)!s}, out of bounds for data with length {len(data)!s}')<|docstring|>Check if a shape with a start offset and given strides is in bounds for some backing list. Raises ShapeError if the shape does not fit within the data.<|endoftext|>
c5c06fdf353370c38c0b5d92a14073625142b2e8ed240064b1b97c84dbe6f1df
def _mk_view(cls): "Create a new view type from an existing n-dimensional sequence type.\n\n Due to the way inheritance works with assigning to __class__\n it is necessary that the derived view type inherit directly from the base sequence type\n or we won't be able to reify due to differing object layout.\n\n The only way to implement this inheritance pattern\n without duplicating the full implementation of the view type\n is to create each view type dynamically.\n " class NewView(cls, View): 'An offset view of an n-dimensional sequence.' @property def data(self): self.reify() return self._data @property def shape(self): self.reify() return self._shape @property def size(self): self.reify() return self._size @property def strides(self): self.reify() return self._strides @property def start(self): return self._start def reify(self): cls = self.real_type (data_gen, shape) = reshape(self, recshape=cls) self._data = self.backing_type(data_gen) self._shape = shape (self._strides, self._size) = calc_strides(self._shape) del self._start self.__class__ = cls def __init__(self, data, shape, start=0, strides=None): self._data = data self._shape = shape self._start = start if strides: self._strides = strides self._size = calc_size(self._shape) else: (self._strides, self._size) = calc_strides(self._shape) check_offset(self._data, self._shape, self._start, self._strides) _data_size_abs_threshold = 64 _data_size_rel_threshold = 0.5 def __repr__(self): dlen = len(self._data) if ((dlen <= self._data_size_abs_threshold) or ((self._size / dlen) >= self._data_size_rel_threshold)): dstr = repr(self._data) else: dstr = f"'{type(self._data).__name__}' object of length {dlen!s}" return f'{type(self).__name__}({dstr}, {self._shape!r}, start={self._start!s}, strides={self._strides!r})' NewView.__name__ = (cls.__name__ + 'View') return NewView
Create a new view type from an existing n-dimensional sequence type. Due to the way inheritance works with assigning to __class__ it is necessary that the derived view type inherit directly from the base sequence type or we won't be able to reify due to differing object layout. The only way to implement this inheritance pattern without duplicating the full implementation of the view type is to create each view type dynamically.
titanfp/titanic/ndarray.py
_mk_view
billzorn/fpunreal
4
python
def _mk_view(cls): "Create a new view type from an existing n-dimensional sequence type.\n\n Due to the way inheritance works with assigning to __class__\n it is necessary that the derived view type inherit directly from the base sequence type\n or we won't be able to reify due to differing object layout.\n\n The only way to implement this inheritance pattern\n without duplicating the full implementation of the view type\n is to create each view type dynamically.\n " class NewView(cls, View): 'An offset view of an n-dimensional sequence.' @property def data(self): self.reify() return self._data @property def shape(self): self.reify() return self._shape @property def size(self): self.reify() return self._size @property def strides(self): self.reify() return self._strides @property def start(self): return self._start def reify(self): cls = self.real_type (data_gen, shape) = reshape(self, recshape=cls) self._data = self.backing_type(data_gen) self._shape = shape (self._strides, self._size) = calc_strides(self._shape) del self._start self.__class__ = cls def __init__(self, data, shape, start=0, strides=None): self._data = data self._shape = shape self._start = start if strides: self._strides = strides self._size = calc_size(self._shape) else: (self._strides, self._size) = calc_strides(self._shape) check_offset(self._data, self._shape, self._start, self._strides) _data_size_abs_threshold = 64 _data_size_rel_threshold = 0.5 def __repr__(self): dlen = len(self._data) if ((dlen <= self._data_size_abs_threshold) or ((self._size / dlen) >= self._data_size_rel_threshold)): dstr = repr(self._data) else: dstr = f"'{type(self._data).__name__}' object of length {dlen!s}" return f'{type(self).__name__}({dstr}, {self._shape!r}, start={self._start!s}, strides={self._strides!r})' NewView.__name__ = (cls.__name__ + 'View') return NewView
def _mk_view(cls): "Create a new view type from an existing n-dimensional sequence type.\n\n Due to the way inheritance works with assigning to __class__\n it is necessary that the derived view type inherit directly from the base sequence type\n or we won't be able to reify due to differing object layout.\n\n The only way to implement this inheritance pattern\n without duplicating the full implementation of the view type\n is to create each view type dynamically.\n " class NewView(cls, View): 'An offset view of an n-dimensional sequence.' @property def data(self): self.reify() return self._data @property def shape(self): self.reify() return self._shape @property def size(self): self.reify() return self._size @property def strides(self): self.reify() return self._strides @property def start(self): return self._start def reify(self): cls = self.real_type (data_gen, shape) = reshape(self, recshape=cls) self._data = self.backing_type(data_gen) self._shape = shape (self._strides, self._size) = calc_strides(self._shape) del self._start self.__class__ = cls def __init__(self, data, shape, start=0, strides=None): self._data = data self._shape = shape self._start = start if strides: self._strides = strides self._size = calc_size(self._shape) else: (self._strides, self._size) = calc_strides(self._shape) check_offset(self._data, self._shape, self._start, self._strides) _data_size_abs_threshold = 64 _data_size_rel_threshold = 0.5 def __repr__(self): dlen = len(self._data) if ((dlen <= self._data_size_abs_threshold) or ((self._size / dlen) >= self._data_size_rel_threshold)): dstr = repr(self._data) else: dstr = f"'{type(self._data).__name__}' object of length {dlen!s}" return f'{type(self).__name__}({dstr}, {self._shape!r}, start={self._start!s}, strides={self._strides!r})' NewView.__name__ = (cls.__name__ + 'View') return NewView<|docstring|>Create a new view type from an existing n-dimensional sequence type. Due to the way inheritance works with assigning to __class__ it is necessary that the derived view type inherit directly from the base sequence type or we won't be able to reify due to differing object layout. The only way to implement this inheritance pattern without duplicating the full implementation of the view type is to create each view type dynamically.<|endoftext|>
7ddf55fb2fb9ae22f49e46e736a54f7764b7636e45260500b2666d50800b0963
def forward(self, input_ids, attention_mask): '\n\t\tInputs:\n\t\t\t-input_ids : Tensor of shape [B, T] containing token ids of sequences\n\t\t\t-attention_mask : Tensor of shape [B, T] containing attention masks to be used to avoid contribution of PAD tokens\n\t\t\t(where B is the batch size and T is the input length)\n\t\t' (reps, _) = self.bert(input_ids=input_ids, attention_mask=attention_mask) cls_reps = reps[:, 0] logits = self.cls_layer(cls_reps) return logits
Inputs: -input_ids : Tensor of shape [B, T] containing token ids of sequences -attention_mask : Tensor of shape [B, T] containing attention masks to be used to avoid contribution of PAD tokens (where B is the batch size and T is the input length)
language/models.py
forward
MadryLab/DebuggableDeepNetworks
32
python
def forward(self, input_ids, attention_mask): '\n\t\tInputs:\n\t\t\t-input_ids : Tensor of shape [B, T] containing token ids of sequences\n\t\t\t-attention_mask : Tensor of shape [B, T] containing attention masks to be used to avoid contribution of PAD tokens\n\t\t\t(where B is the batch size and T is the input length)\n\t\t' (reps, _) = self.bert(input_ids=input_ids, attention_mask=attention_mask) cls_reps = reps[:, 0] logits = self.cls_layer(cls_reps) return logits
def forward(self, input_ids, attention_mask): '\n\t\tInputs:\n\t\t\t-input_ids : Tensor of shape [B, T] containing token ids of sequences\n\t\t\t-attention_mask : Tensor of shape [B, T] containing attention masks to be used to avoid contribution of PAD tokens\n\t\t\t(where B is the batch size and T is the input length)\n\t\t' (reps, _) = self.bert(input_ids=input_ids, attention_mask=attention_mask) cls_reps = reps[:, 0] logits = self.cls_layer(cls_reps) return logits<|docstring|>Inputs: -input_ids : Tensor of shape [B, T] containing token ids of sequences -attention_mask : Tensor of shape [B, T] containing attention masks to be used to avoid contribution of PAD tokens (where B is the batch size and T is the input length)<|endoftext|>
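A hedged sketch of how this forward pass is typically driven; the SentimentClassifier wrapper below is a hypothetical minimal reconstruction (only the forward method appears in the record) and assumes torch and transformers are installed:

import torch.nn as nn
from transformers import BertModel, BertTokenizer

class SentimentClassifier(nn.Module):
    def __init__(self, name='bert-base-uncased', num_classes=2):
        super().__init__()
        self.bert = BertModel.from_pretrained(name)
        self.cls_layer = nn.Linear(self.bert.config.hidden_size, num_classes)

    def forward(self, input_ids, attention_mask):
        # return_dict=False yields the (sequence_output, pooled_output) tuple
        # that the record above unpacks.
        (reps, _) = self.bert(input_ids=input_ids, attention_mask=attention_mask, return_dict=False)
        cls_reps = reps[:, 0]  # representation of the [CLS] token
        return self.cls_layer(cls_reps)

tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
batch = tokenizer(['great movie', 'terrible movie'], padding=True, return_tensors='pt')
model = SentimentClassifier()
logits = model(batch['input_ids'], batch['attention_mask'])
print(logits.shape)  # torch.Size([2, 2])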
c13d0da99f854fbc3612926c761c086e9678a58aa41a46502dc525cd17d6308f
def memoized_parse_block(code): 'Memoized version of parse_block.' try: result = parse_block_memo[code] except KeyError: try: parsed = COMPILER.parse_block(code) except Exception as err: result = err else: result = parsed parse_block_memo[code] = result if isinstance(result, Exception): raise result else: return result
Memoized version of parse_block.
coconut/icoconut/root.py
memoized_parse_block
CS121Fresh/runner
0
python
def memoized_parse_block(code): try: result = parse_block_memo[code] except KeyError: try: parsed = COMPILER.parse_block(code) except Exception as err: result = err else: result = parsed parse_block_memo[code] = result if isinstance(result, Exception): raise result else: return result
def memoized_parse_block(code): try: result = parse_block_memo[code] except KeyError: try: parsed = COMPILER.parse_block(code) except Exception as err: result = err else: result = parsed parse_block_memo[code] = result if isinstance(result, Exception): raise result else: return result<|docstring|>Memoized version of parse_block.<|endoftext|>
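The caching idea above, restated as a self-contained sketch with a stand-in parser in place of Coconut's COMPILER; note that raised exceptions are memoized and re-raised just like successful results.

    _memo = {}

    def parse(code):
        if code not in _memo:
            try:
                if not code.strip():
                    raise ValueError('empty program')   # stand-in parse failure
                _memo[code] = code.strip()              # stand-in parse result
            except ValueError as err:
                _memo[code] = err
        result = _memo[code]
        if isinstance(result, Exception):
            raise result
        return result

    assert parse(' x = 1 ') == 'x = 1'
    assert parse(' x = 1 ') == 'x = 1'  # second call is served from the cache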
a33334254ba87823bedf4ba1057dc0a111e0be6434295bb9eb5768e192447918
def memoized_parse_sys(code): 'Memoized version of parse_sys.' return COMPILER.header_proc(memoized_parse_block(code), header='sys', initial='none')
Memoized version of parse_sys.
coconut/icoconut/root.py
memoized_parse_sys
CS121Fresh/runner
0
python
def memoized_parse_sys(code): return COMPILER.header_proc(memoized_parse_block(code), header='sys', initial='none')
def memoized_parse_sys(code): return COMPILER.header_proc(memoized_parse_block(code), header='sys', initial='none')<|docstring|>Memoized version of parse_sys.<|endoftext|>
635ebafe3bb7654d5161aa82bc527ec30a8f8af4acdf370fe2d9cfd5d59da34e
def ast_parse(self, source, *args, **kwargs): 'Version of ast_parse that compiles Coconut code first.' try: compiled = memoized_parse_sys(source) except CoconutException as err: raise err.syntax_err() else: return super(CoconutCompiler, self).ast_parse(compiled, *args, **kwargs)
Version of ast_parse that compiles Coconut code first.
coconut/icoconut/root.py
ast_parse
CS121Fresh/runner
0
python
def ast_parse(self, source, *args, **kwargs): try: compiled = memoized_parse_sys(source) except CoconutException as err: raise err.syntax_err() else: return super(CoconutCompiler, self).ast_parse(compiled, *args, **kwargs)
def ast_parse(self, source, *args, **kwargs): try: compiled = memoized_parse_sys(source) except CoconutException as err: raise err.syntax_err() else: return super(CoconutCompiler, self).ast_parse(compiled, *args, **kwargs)<|docstring|>Version of ast_parse that compiles Coconut code first.<|endoftext|>
f6394d331642d6000f480ff0dc1d89d8c9c6906f7125cb94e28f6f81b3254f19
def cache(self, code, *args, **kwargs): 'Version of cache that compiles Coconut code first.' try: compiled = memoized_parse_sys(code) except CoconutException: traceback.print_exc() return None else: return super(CoconutCompiler, self).cache(compiled, *args, **kwargs)
Version of cache that compiles Coconut code first.
coconut/icoconut/root.py
cache
CS121Fresh/runner
0
python
def cache(self, code, *args, **kwargs): try: compiled = memoized_parse_sys(code) except CoconutException: traceback.print_exc() return None else: return super(CoconutCompiler, self).cache(compiled, *args, **kwargs)
def cache(self, code, *args, **kwargs): try: compiled = memoized_parse_sys(code) except CoconutException: traceback.print_exc() return None else: return super(CoconutCompiler, self).cache(compiled, *args, **kwargs)<|docstring|>Version of cache that compiles Coconut code first.<|endoftext|>
9b3df0f3d55d643327cc78f030f7493e62130a909eaf1d2f93532f96c1e944e7
def __init__(self, *args, **kwargs): 'Version of __init__ that sets up Coconut code compilation.' super(CoconutSplitter, self).__init__(*args, **kwargs) self._compile = self._coconut_compile
Version of __init__ that sets up Coconut code compilation.
coconut/icoconut/root.py
__init__
CS121Fresh/runner
0
python
def __init__(self, *args, **kwargs): super(CoconutSplitter, self).__init__(*args, **kwargs) self._compile = self._coconut_compile
def __init__(self, *args, **kwargs): super(CoconutSplitter, self).__init__(*args, **kwargs) self._compile = self._coconut_compile<|docstring|>Version of __init__ that sets up Coconut code compilation.<|endoftext|>
1aae8dc460325507b4d05e801c5dfec61755b35e8747c9676dd5c146db8316cb
def _coconut_compile(self, source, *args, **kwargs): 'Version of _compile that checks Coconut code.\n None means that the code should not be run as is.\n Any other value means that it can.' if source.endswith('\n\n'): return True elif should_indent(source): return None else: return True
Version of _compile that checks Coconut code. None means that the code should not be run as is. Any other value means that it can.
coconut/icoconut/root.py
_coconut_compile
CS121Fresh/runner
0
python
def _coconut_compile(self, source, *args, **kwargs): 'Version of _compile that checks Coconut code.\n None means that the code should not be run as is.\n Any other value means that it can.' if source.endswith('\n\n'): return True elif should_indent(source): return None else: return True
def _coconut_compile(self, source, *args, **kwargs): 'Version of _compile that checks Coconut code.\n None means that the code should not be run as is.\n Any other value means that it can.' if source.endswith('\n\n'): return True elif should_indent(source): return None else: return True<|docstring|>Version of _compile that checks Coconut code. None means that the code should not be run as is. Any other value means that it can.<|endoftext|>
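A runnable sketch of the three-way contract this method implements; should_indent() is approximated here by a trailing-colon check, which is an invented simplification.

    def compile_check(source):
        if source.endswith('\n\n'):
            return True                       # blank line: force execution
        elif source.rstrip().endswith(':'):   # crude stand-in for should_indent()
            return None                       # ask the shell for more input
        else:
            return True                       # complete statement: run as-is

    assert compile_check('x = 1\n') is True
    assert compile_check('def f():\n') is None
    assert compile_check('def f():\n\n') is True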
e8ea3996ab99a37aa6214678ed998f803482de2751e5a79c19be75f7da1d9ba6
def init_instance_attrs(self): 'Version of init_instance_attrs that uses CoconutCompiler.' super(CoconutShell, self).init_instance_attrs() self.compile = CoconutCompiler()
Version of init_instance_attrs that uses CoconutCompiler.
coconut/icoconut/root.py
init_instance_attrs
CS121Fresh/runner
0
python
def init_instance_attrs(self): super(CoconutShell, self).init_instance_attrs() self.compile = CoconutCompiler()
def init_instance_attrs(self): super(CoconutShell, self).init_instance_attrs() self.compile = CoconutCompiler()<|docstring|>Version of init_instance_attrs that uses CoconutCompiler.<|endoftext|>
5a37a0170132e7deabb7d8e165acd6e0e1e785b8165197bd6e27d47f134339d3
def init_create_namespaces(self, *args, **kwargs): 'Version of init_create_namespaces that adds Coconut built-ins to globals.' super(CoconutShell, self).init_create_namespaces(*args, **kwargs) RUNNER.update_vars(self.user_global_ns)
Version of init_create_namespaces that adds Coconut built-ins to globals.
coconut/icoconut/root.py
init_create_namespaces
CS121Fresh/runner
0
python
def init_create_namespaces(self, *args, **kwargs): super(CoconutShell, self).init_create_namespaces(*args, **kwargs) RUNNER.update_vars(self.user_global_ns)
def init_create_namespaces(self, *args, **kwargs): super(CoconutShell, self).init_create_namespaces(*args, **kwargs) RUNNER.update_vars(self.user_global_ns)<|docstring|>Version of init_create_namespaces that adds Coconut built-ins to globals.<|endoftext|>
0ffea3c0220e52118ecaeca5c68c34c827c2e7f7a67da7cd01616ecdf321493b
def run_cell(self, raw_cell, store_history=False, silent=False, shell_futures=None): 'Version of run_cell that always uses shell_futures.' return super(CoconutShell, self).run_cell(raw_cell, store_history, silent, shell_futures=True)
Version of run_cell that always uses shell_futures.
coconut/icoconut/root.py
run_cell
CS121Fresh/runner
0
python
def run_cell(self, raw_cell, store_history=False, silent=False, shell_futures=None): return super(CoconutShell, self).run_cell(raw_cell, store_history, silent, shell_futures=True)
def run_cell(self, raw_cell, store_history=False, silent=False, shell_futures=None): return super(CoconutShell, self).run_cell(raw_cell, store_history, silent, shell_futures=True)<|docstring|>Version of run_cell that always uses shell_futures.<|endoftext|>
50b6c461784b75457aff90a953924b397292eaa4c698bf9595ef6ca7d3e88ca9
def user_expressions(self, expressions): 'Version of user_expressions that compiles Coconut code first.' compiled_expressions = {} for (key, expr) in expressions.items(): try: compiled_expressions[key] = COMPILER.parse_eval(expr) except CoconutException: compiled_expressions[key] = expr return super(CoconutShell, self).user_expressions(compiled_expressions)
Version of user_expressions that compiles Coconut code first.
coconut/icoconut/root.py
user_expressions
CS121Fresh/runner
0
python
def user_expressions(self, expressions): compiled_expressions = {} for (key, expr) in expressions.items(): try: compiled_expressions[key] = COMPILER.parse_eval(expr) except CoconutException: compiled_expressions[key] = expr return super(CoconutShell, self).user_expressions(compiled_expressions)
def user_expressions(self, expressions): compiled_expressions = {} for (key, expr) in expressions.items(): try: compiled_expressions[key] = COMPILER.parse_eval(expr) except CoconutException: compiled_expressions[key] = expr return super(CoconutShell, self).user_expressions(compiled_expressions)<|docstring|>Version of user_expressions that compiles Coconut code first.<|endoftext|>
fba6cdc8881867e39db7354c2174658c645fac328154160e3b35e90c925739b6
def test_purge_old_personal_api_key_events_rejects_invalid_arguments(self): 'The purge_old_personal_api_key_events command should reject invalid arguments' event = PersonApiKeyEventFactory(time=(datetime.datetime.now() - datetime.timedelta(days=30))) with self.assertRaises(CommandError): self._call_command('purge_old_personal_api_key_events') with self.assertRaises(CommandError): self._call_command('purge_old_personal_api_key_events', '-15') with self.assertRaises(CommandError): self._call_command('purge_old_personal_api_key_events', '15.3') with self.assertRaises(CommandError): self._call_command('purge_old_personal_api_key_events', '15', '15') with self.assertRaises(CommandError): self._call_command('purge_old_personal_api_key_events', 'abc', '15') self.assertCountEqual(PersonApiKeyEvent.objects.all(), [event])
The purge_old_personal_api_key_events command should reject invalid arguments
ietf/person/management/commands/tests.py
test_purge_old_personal_api_key_events_rejects_invalid_arguments
Spectre17/datatracker
25
python
def test_purge_old_personal_api_key_events_rejects_invalid_arguments(self): event = PersonApiKeyEventFactory(time=(datetime.datetime.now() - datetime.timedelta(days=30))) with self.assertRaises(CommandError): self._call_command('purge_old_personal_api_key_events') with self.assertRaises(CommandError): self._call_command('purge_old_personal_api_key_events', '-15') with self.assertRaises(CommandError): self._call_command('purge_old_personal_api_key_events', '15.3') with self.assertRaises(CommandError): self._call_command('purge_old_personal_api_key_events', '15', '15') with self.assertRaises(CommandError): self._call_command('purge_old_personal_api_key_events', 'abc', '15') self.assertCountEqual(PersonApiKeyEvent.objects.all(), [event])
def test_purge_old_personal_api_key_events_rejects_invalid_arguments(self): event = PersonApiKeyEventFactory(time=(datetime.datetime.now() - datetime.timedelta(days=30))) with self.assertRaises(CommandError): self._call_command('purge_old_personal_api_key_events') with self.assertRaises(CommandError): self._call_command('purge_old_personal_api_key_events', '-15') with self.assertRaises(CommandError): self._call_command('purge_old_personal_api_key_events', '15.3') with self.assertRaises(CommandError): self._call_command('purge_old_personal_api_key_events', '15', '15') with self.assertRaises(CommandError): self._call_command('purge_old_personal_api_key_events', 'abc', '15') self.assertCountEqual(PersonApiKeyEvent.objects.all(), [event])<|docstring|>The purge_old_personal_api_key_events command should reject invalid arguments<|endoftext|>
d9495b538d491cc13f8bfc95daec225606cb90643f5e73427285d954c9265177
def push_monitor(model, name, transfer_experience=False, save_records=False): '\n When you load a model in a yaml file and you want to store its\n old monitor under a different name and start a new monitor, wrap\n the model in this function call.\n\n\n Parameters\n ----------\n model : pylearn2.models.model.Model\n The model you loaded\n name : str\n Will save the old monitor to model.name\n transfer_experience : bool\n If True, the new monitor will start with its epochs seen,\n batches seen, and examples seen set to where the old monitor\n left off. This is nice for stitching together learning curves\n across multiple stages of learning.\n save_records : bool\n If True, val_record, batch_record, example_record, epoch_record,\n and time_record of the new monitor will be initialized with the\n records of the old monitor.\n\n Returns\n -------\n model : WRITEME\n Returns the model itself so you can use an !obj:push_monitor\n call as the definition of a model in a YAML file.\n ' assert hasattr(model, 'monitor') old_monitor = model.monitor setattr(model, name, old_monitor) del model.monitor if transfer_experience: monitor = Monitor.get_monitor(model) assert (monitor is not old_monitor) monitor._num_batches_seen = old_monitor._num_batches_seen monitor._examples_seen = old_monitor._examples_seen monitor._epochs_seen = old_monitor._epochs_seen if save_records: monitor.on_channel_conflict = 'copy_history' monitor.channels = copy.copy(old_monitor.channels) for (key, value) in list(monitor.channels.items()): value.prereqs = None return model
When you load a model in a yaml file and you want to store its old monitor under a different name and start a new monitor, wrap the model in this function call. Parameters ---------- model : pylearn2.models.model.Model The model you loaded name : str Will save the old monitor to model.name transfer_experience : bool If True, the new monitor will start with its epochs seen, batches seen, and examples seen set to where the old monitor left off. This is nice for stitching together learning curves across multiple stages of learning. save_records : bool If True, val_record, batch_record, example_record, epoch_record, and time_record of the new monitor will be initialized with the records of the old monitor. Returns ------- model : WRITEME Returns the model itself so you can use an !obj:push_monitor call as the definition of a model in a YAML file.
pylearn2/monitor.py
push_monitor
fxyu/pylearn2
2,045
python
def push_monitor(model, name, transfer_experience=False, save_records=False): '\n When you load a model in a yaml file and you want to store its\n old monitor under a different name and start a new monitor, wrap\n the model in this function call.\n\n\n Parameters\n ----------\n model : pylearn2.models.model.Model\n The model you loaded\n name : str\n Will save the old monitor to model.name\n transfer_experience : bool\n If True, the new monitor will start with its epochs seen,\n batches seen, and examples seen set to where the old monitor\n left off. This is nice for stitching together learning curves\n across multiple stages of learning.\n save_records : bool\n If True, val_record, batch_record, example_record, epoch_record,\n and time_record of the new monitor will be initialized with the\n records of the old monitor.\n\n Returns\n -------\n model : WRITEME\n Returns the model itself so you can use an !obj:push_monitor\n call as the definition of a model in a YAML file.\n ' assert hasattr(model, 'monitor') old_monitor = model.monitor setattr(model, name, old_monitor) del model.monitor if transfer_experience: monitor = Monitor.get_monitor(model) assert (monitor is not old_monitor) monitor._num_batches_seen = old_monitor._num_batches_seen monitor._examples_seen = old_monitor._examples_seen monitor._epochs_seen = old_monitor._epochs_seen if save_records: monitor.on_channel_conflict = 'copy_history' monitor.channels = copy.copy(old_monitor.channels) for (key, value) in list(monitor.channels.items()): value.prereqs = None return model
def push_monitor(model, name, transfer_experience=False, save_records=False): '\n When you load a model in a yaml file and you want to store its\n old monitor under a different name and start a new monitor, wrap\n the model in this function call.\n\n\n Parameters\n ----------\n model : pylearn2.models.model.Model\n The model you loaded\n name : str\n Will save the old monitor to model.name\n transfer_experience : bool\n If True, the new monitor will start with its epochs seen,\n batches seen, and examples seen set to where the old monitor\n left off. This is nice for stitching together learning curves\n across multiple stages of learning.\n save_records : bool\n If True, val_record, batch_record, example_record, epoch_record,\n and time_record of the new monitor will be initialized with the\n records of the old monitor.\n\n Returns\n -------\n model : WRITEME\n Returns the model itself so you can use an !obj:push_monitor\n call as the definition of a model in a YAML file.\n ' assert hasattr(model, 'monitor') old_monitor = model.monitor setattr(model, name, old_monitor) del model.monitor if transfer_experience: monitor = Monitor.get_monitor(model) assert (monitor is not old_monitor) monitor._num_batches_seen = old_monitor._num_batches_seen monitor._examples_seen = old_monitor._examples_seen monitor._epochs_seen = old_monitor._epochs_seen if save_records: monitor.on_channel_conflict = 'copy_history' monitor.channels = copy.copy(old_monitor.channels) for (key, value) in list(monitor.channels.items()): value.prereqs = None return model<|docstring|>When you load a model in a yaml file and you want to store its old monitor under a different name and start a new monitor, wrap the model in this function call. Parameters ---------- model : pylearn2.models.model.Model The model you loaded name : str Will save the old monitor to model.name transfer_experience : bool If True, the new monitor will start with its epochs seen, batches seen, and examples seen set to where the old monitor left off. This is nice for stitching together learning curves across multiple stages of learning. save_records : bool If True, val_record, batch_record, example_record, epoch_record, and time_record of the new monitor will be initialized with the records of the old monitor. Returns ------- model : WRITEME Returns the model itself so you can use an !obj:push_monitor call as the definition of a model in a YAML file.<|endoftext|>
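A hedged sketch of the Python-side usage the docstring describes; the pickle path is made up and a pylearn2 installation is assumed.

    from pylearn2.utils import serial
    from pylearn2.monitor import push_monitor

    old_model = serial.load('stage1_best.pkl')  # made-up path to a trained model
    model = push_monitor(old_model, 'first_monitor', transfer_experience=True)
    # The old Monitor now lives on as model.first_monitor, while
    # Monitor.get_monitor(model) has built a fresh one with the old counters.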
43600a31e18dc27fed663bc7f1de5eb152522f8bca165bcc159bd7bcada3f03c
def read_channel(model, channel_name, monitor_name='monitor'): '\n Returns the last value recorded in a channel.\n\n Parameters\n ----------\n model : Model\n The model to read the channel from\n channel_name : str\n The name of the channel to read from\n monitor_name : str, optional\n The name of the Monitor to read from\n (In case you want to read from an old Monitor moved by\n `push_monitor`)\n\n Returns\n -------\n value : float\n The last value recorded in this monitoring channel\n ' return getattr(model, monitor_name).channels[channel_name].val_record[(- 1)]
Returns the last value recorded in a channel. Parameters ---------- model : Model The model to read the channel from channel_name : str The name of the channel to read from monitor_name : str, optional The name of the Monitor to read from (In case you want to read from an old Monitor moved by `push_monitor`) Returns ------- value : float The last value recorded in this monitoring channel
pylearn2/monitor.py
read_channel
fxyu/pylearn2
2,045
python
def read_channel(model, channel_name, monitor_name='monitor'): '\n Returns the last value recorded in a channel.\n\n Parameters\n ----------\n model : Model\n The model to read the channel from\n channel_name : str\n The name of the channel to read from\n monitor_name : str, optional\n The name of the Monitor to read from\n (In case you want to read from an old Monitor moved by\n `push_monitor`)\n\n Returns\n -------\n value : float\n The last value recorded in this monitoring channel\n ' return getattr(model, monitor_name).channels[channel_name].val_record[(- 1)]
def read_channel(model, channel_name, monitor_name='monitor'): '\n Returns the last value recorded in a channel.\n\n Parameters\n ----------\n model : Model\n The model to read the channel from\n channel_name : str\n The name of the channel to read from\n monitor_name : str, optional\n The name of the Monitor to read from\n (In case you want to read from an old Monitor moved by\n `push_monitor`)\n\n Returns\n -------\n value : float\n The last value recorded in this monitoring channel\n ' return getattr(model, monitor_name).channels[channel_name].val_record[(- 1)]<|docstring|>Returns the last value recorded in a channel. Parameters ---------- model : Model The model to read the channel from channel_name : str The name of the channel to read from monitor_name : str, optional The name of the Monitor to read from (In case you want to read from an old Monitor moved by `push_monitor`) Returns ------- value : float The last value recorded in this monitoring channel<|endoftext|>
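A fully self-contained illustration of the attribute path read_channel walks, using SimpleNamespace stand-ins for the model, monitor, and channel.

    from types import SimpleNamespace

    channel = SimpleNamespace(val_record=[0.31, 0.12, 0.09])
    model = SimpleNamespace(monitor=SimpleNamespace(channels={'valid_y_misclass': channel}))
    # read_channel(model, 'valid_y_misclass') resolves to the last recorded value:
    assert getattr(model, 'monitor').channels['valid_y_misclass'].val_record[-1] == 0.09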
851c2766bd9c79f35ea4853dd2377b2cd7b34b6a763cc60763a46300104fbbc6
def get_channel(model, dataset, channel, cost, batch_size): "\n Make a temporary monitor and return the value of a channel in it.\n\n Parameters\n ----------\n model : pylearn2.models.model.Model\n Will evaluate the channel for this Model.\n dataset : pylearn2.datasets.Dataset\n The Dataset to run on\n channel : str\n A string identifying the channel name to evaluate\n cost : pylearn2.costs.Cost\n The Cost to setup for monitoring\n batch_size : int\n The size of the batch to use when running the monitor\n\n Returns\n -------\n value : WRITEME\n The value of the requested channel.\n\n Notes\n -----\n This doesn't modify the model (unless some of the channel prereqs do).\n In particular, it does not change model.monitor.\n " monitor = Monitor(model) monitor.setup(dataset=dataset, cost=cost, batch_size=batch_size) monitor() channels = monitor.channels channel = channels[channel] val_record = channel.val_record (value,) = val_record return value
Make a temporary monitor and return the value of a channel in it. Parameters ---------- model : pylearn2.models.model.Model Will evaluate the channel for this Model. dataset : pylearn2.datasets.Dataset The Dataset to run on channel : str A string identifying the channel name to evaluate cost : pylearn2.costs.Cost The Cost to setup for monitoring batch_size : int The size of the batch to use when running the monitor Returns ------- value : WRITEME The value of the requested channel. Notes ----- This doesn't modify the model (unless some of the channel prereqs do). In particular, it does not change model.monitor.
pylearn2/monitor.py
get_channel
fxyu/pylearn2
2,045
python
def get_channel(model, dataset, channel, cost, batch_size): "\n Make a temporary monitor and return the value of a channel in it.\n\n Parameters\n ----------\n model : pylearn2.models.model.Model\n Will evaluate the channel for this Model.\n dataset : pylearn2.datasets.Dataset\n The Dataset to run on\n channel : str\n A string identifying the channel name to evaluate\n cost : pylearn2.costs.Cost\n The Cost to setup for monitoring\n batch_size : int\n The size of the batch to use when running the monitor\n\n Returns\n -------\n value : WRITEME\n The value of the requested channel.\n\n Notes\n -----\n This doesn't modify the model (unless some of the channel prereqs do).\n In particular, it does not change model.monitor.\n " monitor = Monitor(model) monitor.setup(dataset=dataset, cost=cost, batch_size=batch_size) monitor() channels = monitor.channels channel = channels[channel] val_record = channel.val_record (value,) = val_record return value
def get_channel(model, dataset, channel, cost, batch_size): "\n Make a temporary monitor and return the value of a channel in it.\n\n Parameters\n ----------\n model : pylearn2.models.model.Model\n Will evaluate the channel for this Model.\n dataset : pylearn2.datasets.Dataset\n The Dataset to run on\n channel : str\n A string identifying the channel name to evaluate\n cost : pylearn2.costs.Cost\n The Cost to setup for monitoring\n batch_size : int\n The size of the batch to use when running the monitor\n\n Returns\n -------\n value : WRITEME\n The value of the requested channel.\n\n Notes\n -----\n This doesn't modify the model (unless some of the channel prereqs do).\n In particular, it does not change model.monitor.\n " monitor = Monitor(model) monitor.setup(dataset=dataset, cost=cost, batch_size=batch_size) monitor() channels = monitor.channels channel = channels[channel] val_record = channel.val_record (value,) = val_record return value<|docstring|>Make a temporary monitor and return the value of a channel in it. Parameters ---------- model : pylearn2.models.model.Model Will evaluate the channel for this Model. dataset : pylearn2.datasets.Dataset The Dataset to run on channel : str A string identifying the channel name to evaluate cost : pylearn2.costs.Cost The Cost to setup for monitoring batch_size : int The size of the batch to use when running the monitor Returns ------- value : WRITEME The value of the requested channel. Notes ----- This doesn't modify the model (unless some of the channel prereqs do). In particular, it does not change model.monitor.<|endoftext|>
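The call shape, hedged: `model`, `valid_set`, and `cost` are assumed to be a pylearn2 Model, Dataset, and Cost respectively, and are not constructed here.

    # nll = get_channel(model, dataset=valid_set, channel='nll',
    #                   cost=cost, batch_size=100)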
bccbeabe18d6f6ee001b319bcec8c081b137fa402f593befed215d442b80e3cc
def get_monitor_doc(var): '\n Returns the __doc__ field of var or None. This field is used on\n theano Variables to document the meaning of monitor channels.\n\n Parameters\n ----------\n var : theano.gof.Variable\n The variable to get the documentation of\n\n Returns\n -------\n doc : str or None\n var.__doc__ if var has an instance-level doc, otherwise None\n ' doc = None if (var.__doc__ is not var.__class__.__doc__): doc = var.__doc__ return doc
Returns the __doc__ field of var or None. This field is used on theano Variables to document the meaning of monitor channels. Parameters ---------- var : theano.gof.Variable The variable to get the documentation of Returns ------- doc : str or None var.__doc__ if var has an instance-level doc, otherwise None
pylearn2/monitor.py
get_monitor_doc
fxyu/pylearn2
2,045
python
def get_monitor_doc(var): '\n Returns the __doc__ field of var or None. This field is used on\n theano Variables to document the meaning of monitor channels.\n\n Parameters\n ----------\n var : theano.gof.Variable\n The variable to get the documentation of\n\n Returns\n -------\n doc : str or None\n var.__doc__ if var has an instance-level doc, otherwise None\n ' doc = None if (var.__doc__ is not var.__class__.__doc__): doc = var.__doc__ return doc
def get_monitor_doc(var): '\n Returns the __doc__ field of var or None. This field is used on\n theano Variables to document the meaning of monitor channels.\n\n Parameters\n ----------\n var : theano.gof.Variable\n The variable to get the documentation of\n\n Returns\n -------\n doc : str or None\n var.__doc__ if var has an instance-level doc, otherwise None\n ' doc = None if (var.__doc__ is not var.__class__.__doc__): doc = var.__doc__ return doc<|docstring|>Returns the __doc__ field of var or None. This field is used on theano Variables to document the meaning of monitor channels. Parameters ---------- var : theano.gof.Variable The variable to get the documentation of Returns ------- doc : str or None var.__doc__ if var has an instance-level doc, otherwise None<|endoftext|>
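A runnable illustration of the instance-versus-class __doc__ check above, with a plain class standing in for a theano Variable.

    class Var:
        """class-level docstring"""

    def doc_of(var):
        return var.__doc__ if var.__doc__ is not var.__class__.__doc__ else None

    v = Var()
    assert doc_of(v) is None            # __doc__ is inherited from the class
    v.__doc__ = 'mean of the weights'   # instance-level doc, as on theano Variables
    assert doc_of(v) == 'mean of the weights'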
0f50bc99d8fda887284d17e123df0b8646c0fbb239d40ed413a2ad2cb0e8b106
def _build_data_specs(self): '\n Computes a nested data_specs for input and all channels\n\n Also computes the mapping to flatten it. This function is\n called from redo_theano.\n ' (m_space, m_source) = self.model.get_monitoring_data_specs() input_spaces = [m_space] input_sources = [m_source] for channel in self.channels.values(): space = channel.data_specs[0] assert isinstance(space, Space) input_spaces.append(space) input_sources.append(channel.data_specs[1]) nested_space = CompositeSpace(input_spaces) nested_source = tuple(input_sources) self._nested_data_specs = (nested_space, nested_source) self._data_specs_mapping = DataSpecsMapping(self._nested_data_specs) flat_space = self._data_specs_mapping.flatten(nested_space, return_tuple=True) flat_source = self._data_specs_mapping.flatten(nested_source, return_tuple=True) self._flat_data_specs = (CompositeSpace(flat_space), flat_source)
Computes a nested data_specs for input and all channels Also computes the mapping to flatten it. This function is called from redo_theano.
pylearn2/monitor.py
_build_data_specs
fxyu/pylearn2
2,045
python
def _build_data_specs(self): '\n Computes a nested data_specs for input and all channels\n\n Also computes the mapping to flatten it. This function is\n called from redo_theano.\n ' (m_space, m_source) = self.model.get_monitoring_data_specs() input_spaces = [m_space] input_sources = [m_source] for channel in self.channels.values(): space = channel.data_specs[0] assert isinstance(space, Space) input_spaces.append(space) input_sources.append(channel.data_specs[1]) nested_space = CompositeSpace(input_spaces) nested_source = tuple(input_sources) self._nested_data_specs = (nested_space, nested_source) self._data_specs_mapping = DataSpecsMapping(self._nested_data_specs) flat_space = self._data_specs_mapping.flatten(nested_space, return_tuple=True) flat_source = self._data_specs_mapping.flatten(nested_source, return_tuple=True) self._flat_data_specs = (CompositeSpace(flat_space), flat_source)
def _build_data_specs(self): '\n Computes a nested data_specs for input and all channels\n\n Also computes the mapping to flatten it. This function is\n called from redo_theano.\n ' (m_space, m_source) = self.model.get_monitoring_data_specs() input_spaces = [m_space] input_sources = [m_source] for channel in self.channels.values(): space = channel.data_specs[0] assert isinstance(space, Space) input_spaces.append(space) input_sources.append(channel.data_specs[1]) nested_space = CompositeSpace(input_spaces) nested_source = tuple(input_sources) self._nested_data_specs = (nested_space, nested_source) self._data_specs_mapping = DataSpecsMapping(self._nested_data_specs) flat_space = self._data_specs_mapping.flatten(nested_space, return_tuple=True) flat_source = self._data_specs_mapping.flatten(nested_source, return_tuple=True) self._flat_data_specs = (CompositeSpace(flat_space), flat_source)<|docstring|>Computes a nested data_specs for input and all channels Also computes the mapping to flatten it. This function is called from redo_theano.<|endoftext|>
d175c435015a096df301f692e54598154c07fb1fba3deec2069f8efd57874cca
def set_theano_function_mode(self, mode): '\n .. todo::\n\n WRITEME\n\n Parameters\n ----------\n mode : theano.compile.Mode\n Theano functions for the monitoring channels will be\n compiled and run using this mode.\n ' if (self.theano_function_mode != mode): self._dirty = True self.theano_function_mode = mode
.. todo:: WRITEME Parameters ---------- mode : theano.compile.Mode Theano functions for the monitoring channels will be compiled and run using this mode.
pylearn2/monitor.py
set_theano_function_mode
fxyu/pylearn2
2,045
python
def set_theano_function_mode(self, mode): '\n .. todo::\n\n WRITEME\n\n Parameters\n ----------\n mode : theano.compile.Mode\n Theano functions for the monitoring channels will be\n compiled and run using this mode.\n ' if (self.theano_function_mode != mode): self._dirty = True self.theano_function_mode = mode
def set_theano_function_mode(self, mode): '\n .. todo::\n\n WRITEME\n\n Parameters\n ----------\n mode : theano.compile.Mode\n Theano functions for the monitoring channels will be\n compiled and run using this mode.\n ' if (self.theano_function_mode != mode): self._dirty = True self.theano_function_mode = mode<|docstring|>.. todo:: WRITEME Parameters ---------- mode : theano.compile.Mode Theano functions for the monitoring channels will be compiled and run using this mode.<|endoftext|>
8888bee1e642db432aeb77e71edaf1a97f935736f3071e3c7ef4e00e7e5fce97
def add_dataset(self, dataset, mode='sequential', batch_size=None, num_batches=None, seed=None): "\n Determines the data used to calculate the values of each channel.\n\n Parameters\n ----------\n dataset : object\n A `pylearn2.datasets.Dataset` object.\n mode : str or object, optional\n Iteration mode; see the docstring of the `iterator` method\n on `pylearn2.datasets.Dataset` for details.\n batch_size : int, optional\n The size of an individual batch. Optional if `mode` is\n 'sequential' and `num_batches` is specified (batch size\n will be calculated based on full dataset size).\n num_batches : int, optional\n The total number of batches. Unnecessary if `mode` is\n 'sequential' and `batch_size` is specified (number of\n batches will be calculated based on full dataset size).\n seed : int, optional\n Optional. The seed to be used for random iteration modes.\n " if (not isinstance(dataset, list)): dataset = [dataset] if (not isinstance(mode, list)): mode = [mode] if (not isinstance(batch_size, list)): batch_size = [batch_size] if (not isinstance(num_batches, list)): num_batches = [num_batches] if (seed is None): seed = ([None] * len(dataset)) if (not isinstance(seed, list)): seed = [seed] if (len(mode) != len(dataset)): raise ValueError((((('Received ' + str(len(dataset))) + ' dataset but ') + str(len(mode))) + ' modes.')) if any([(len(l) != len(dataset)) for l in [batch_size, seed]]): raise ValueError(('make sure each dataset has its iteration ' + 'batch size and number of batches.')) for (d, m, b, n, sd) in safe_izip(dataset, mode, batch_size, num_batches, seed): try: it = d.iterator(mode=m, batch_size=b, num_batches=n, data_specs=self._flat_data_specs, return_tuple=True, rng=sd) except ValueError as exc: reraise_as(ValueError((('invalid iteration parameters in ' + 'Monitor.add_dataset: ') + str(exc)))) if it.stochastic: if (sd is None): raise TypeError(('Monitor requires a seed when using ' + 'stochastic iteration modes.')) if (not isinstance(sd, (list, tuple, int))): raise TypeError((('Monitor requires a seed (not a random ' + 'number generator) when using ') + 'stochastic iteration modes.')) else: assert (sd is None) if (d not in self._datasets): self._datasets.append(d) self._iteration_mode.append(m) self._batch_size.append(b) self._num_batches.append(n) self._rng_seed.append(sd)
Determines the data used to calculate the values of each channel. Parameters ---------- dataset : object A `pylearn2.datasets.Dataset` object. mode : str or object, optional Iteration mode; see the docstring of the `iterator` method on `pylearn2.datasets.Dataset` for details. batch_size : int, optional The size of an individual batch. Optional if `mode` is 'sequential' and `num_batches` is specified (batch size will be calculated based on full dataset size). num_batches : int, optional The total number of batches. Unnecessary if `mode` is 'sequential' and `batch_size` is specified (number of batches will be calculated based on full dataset size). seed : int, optional Optional. The seed to be used for random iteration modes.
pylearn2/monitor.py
add_dataset
fxyu/pylearn2
2,045
python
def add_dataset(self, dataset, mode='sequential', batch_size=None, num_batches=None, seed=None): "\n Determines the data used to calculate the values of each channel.\n\n Parameters\n ----------\n dataset : object\n A `pylearn2.datasets.Dataset` object.\n mode : str or object, optional\n Iteration mode; see the docstring of the `iterator` method\n on `pylearn2.datasets.Dataset` for details.\n batch_size : int, optional\n The size of an individual batch. Optional if `mode` is\n 'sequential' and `num_batches` is specified (batch size\n will be calculated based on full dataset size).\n num_batches : int, optional\n The total number of batches. Unnecessary if `mode` is\n 'sequential' and `batch_size` is specified (number of\n batches will be calculated based on full dataset size).\n seed : int, optional\n Optional. The seed to be used for random iteration modes.\n " if (not isinstance(dataset, list)): dataset = [dataset] if (not isinstance(mode, list)): mode = [mode] if (not isinstance(batch_size, list)): batch_size = [batch_size] if (not isinstance(num_batches, list)): num_batches = [num_batches] if (seed is None): seed = ([None] * len(dataset)) if (not isinstance(seed, list)): seed = [seed] if (len(mode) != len(dataset)): raise ValueError((((('Received ' + str(len(dataset))) + ' dataset but ') + str(len(mode))) + ' modes.')) if any([(len(l) != len(dataset)) for l in [batch_size, seed]]): raise ValueError(('make sure each dataset has its iteration ' + 'batch size and number of batches.')) for (d, m, b, n, sd) in safe_izip(dataset, mode, batch_size, num_batches, seed): try: it = d.iterator(mode=m, batch_size=b, num_batches=n, data_specs=self._flat_data_specs, return_tuple=True, rng=sd) except ValueError as exc: reraise_as(ValueError((('invalid iteration parameters in ' + 'Monitor.add_dataset: ') + str(exc)))) if it.stochastic: if (sd is None): raise TypeError(('Monitor requires a seed when using ' + 'stochastic iteration modes.')) if (not isinstance(sd, (list, tuple, int))): raise TypeError((('Monitor requires a seed (not a random ' + 'number generator) when using ') + 'stochastic iteration modes.')) else: assert (sd is None) if (d not in self._datasets): self._datasets.append(d) self._iteration_mode.append(m) self._batch_size.append(b) self._num_batches.append(n) self._rng_seed.append(sd)
def add_dataset(self, dataset, mode='sequential', batch_size=None, num_batches=None, seed=None): "\n Determines the data used to calculate the values of each channel.\n\n Parameters\n ----------\n dataset : object\n A `pylearn2.datasets.Dataset` object.\n mode : str or object, optional\n Iteration mode; see the docstring of the `iterator` method\n on `pylearn2.datasets.Dataset` for details.\n batch_size : int, optional\n The size of an individual batch. Optional if `mode` is\n 'sequential' and `num_batches` is specified (batch size\n will be calculated based on full dataset size).\n num_batches : int, optional\n The total number of batches. Unnecessary if `mode` is\n 'sequential' and `batch_size` is specified (number of\n batches will be calculated based on full dataset size).\n seed : int, optional\n Optional. The seed to be used for random iteration modes.\n " if (not isinstance(dataset, list)): dataset = [dataset] if (not isinstance(mode, list)): mode = [mode] if (not isinstance(batch_size, list)): batch_size = [batch_size] if (not isinstance(num_batches, list)): num_batches = [num_batches] if (seed is None): seed = ([None] * len(dataset)) if (not isinstance(seed, list)): seed = [seed] if (len(mode) != len(dataset)): raise ValueError((((('Received ' + str(len(dataset))) + ' dataset but ') + str(len(mode))) + ' modes.')) if any([(len(l) != len(dataset)) for l in [batch_size, seed]]): raise ValueError(('make sure each dataset has its iteration ' + 'batch size and number of batches.')) for (d, m, b, n, sd) in safe_izip(dataset, mode, batch_size, num_batches, seed): try: it = d.iterator(mode=m, batch_size=b, num_batches=n, data_specs=self._flat_data_specs, return_tuple=True, rng=sd) except ValueError as exc: reraise_as(ValueError((('invalid iteration parameters in ' + 'Monitor.add_dataset: ') + str(exc)))) if it.stochastic: if (sd is None): raise TypeError(('Monitor requires a seed when using ' + 'stochastic iteration modes.')) if (not isinstance(sd, (list, tuple, int))): raise TypeError((('Monitor requires a seed (not a random ' + 'number generator) when using ') + 'stochastic iteration modes.')) else: assert (sd is None) if (d not in self._datasets): self._datasets.append(d) self._iteration_mode.append(m) self._batch_size.append(b) self._num_batches.append(n) self._rng_seed.append(sd)<|docstring|>Determines the data used to calculate the values of each channel. Parameters ---------- dataset : object A `pylearn2.datasets.Dataset` object. mode : str or object, optional Iteration mode; see the docstring of the `iterator` method on `pylearn2.datasets.Dataset` for details. batch_size : int, optional The size of an individual batch. Optional if `mode` is 'sequential' and `num_batches` is specified (batch size will be calculated based on full dataset size). num_batches : int, optional The total number of batches. Unnecessary if `mode` is 'sequential' and `batch_size` is specified (number of batches will be calculated based on full dataset size). seed : int, optional Optional. The seed to be used for random iteration modes.<|endoftext|>
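A hedged sketch of registering two monitoring datasets with parallel per-dataset settings; `monitor` (from Monitor.get_monitor(model)), `train_set`, and `valid_set` are assumed to exist. No seed is needed because 'sequential' iteration is deterministic.

    # monitor.add_dataset(dataset=[train_set, valid_set],
    #                     mode=['sequential', 'sequential'],
    #                     batch_size=[100, 100],
    #                     num_batches=[None, None])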
c5b759dc09539dfe6c014add9d030cbba2d13f741f7f9e4b54b545ea1dd80298
def __call__(self): '\n Runs the model on the monitoring dataset in order to add one\n data point to each of the channels.\n ' if self._dirty: self.redo_theano() datasets = self._datasets self.begin_record_entry() for (d, i, b, n, a, sd, ne) in safe_izip(datasets, self._iteration_mode, self._batch_size, self._num_batches, self.accum, self._rng_seed, self.num_examples): if isinstance(d, six.string_types): d = yaml_parse.load(d) raise NotImplementedError() myiterator = d.iterator(mode=i, batch_size=b, num_batches=n, data_specs=self._flat_data_specs, return_tuple=True, rng=sd) if (len(self._flat_data_specs[1]) == 0): X = () self.run_prereqs(X, d) a(*X) else: actual_ne = 0 for X in myiterator: self.run_prereqs(X, d) a(*X) actual_ne += self._flat_data_specs[0].np_batch_size(X) if (actual_ne != ne): raise RuntimeError(('At compile time, your iterator said it had %d examples total, but at runtime it gave us %d.' % (ne, actual_ne))) log.info('Monitoring step:') log.info(('\tEpochs seen: %d' % self._epochs_seen)) log.info(('\tBatches seen: %d' % self._num_batches_seen)) log.info(('\tExamples seen: %d' % self._examples_seen)) t = (time.time() - self.t0) for channel_name in sorted(self.channels.keys(), key=number_aware_alphabetical_key): channel = self.channels[channel_name] channel.time_record.append(t) channel.batch_record.append(self._num_batches_seen) channel.example_record.append(self._examples_seen) channel.epoch_record.append(self._epochs_seen) val = channel.val_shared.get_value() channel.val_record.append(val) if (abs(val) < 10000.0): val_str = str(val) else: val_str = ('%.3e' % val) log.info(('\t%s: %s' % (channel_name, val_str)))
Runs the model on the monitoring dataset in order to add one data point to each of the channels.
pylearn2/monitor.py
__call__
fxyu/pylearn2
2,045
python
def __call__(self): '\n Runs the model on the monitoring dataset in order to add one\n data point to each of the channels.\n ' if self._dirty: self.redo_theano() datasets = self._datasets self.begin_record_entry() for (d, i, b, n, a, sd, ne) in safe_izip(datasets, self._iteration_mode, self._batch_size, self._num_batches, self.accum, self._rng_seed, self.num_examples): if isinstance(d, six.string_types): d = yaml_parse.load(d) raise NotImplementedError() myiterator = d.iterator(mode=i, batch_size=b, num_batches=n, data_specs=self._flat_data_specs, return_tuple=True, rng=sd) if (len(self._flat_data_specs[1]) == 0): X = () self.run_prereqs(X, d) a(*X) else: actual_ne = 0 for X in myiterator: self.run_prereqs(X, d) a(*X) actual_ne += self._flat_data_specs[0].np_batch_size(X) if (actual_ne != ne): raise RuntimeError(('At compile time, your iterator said it had %d examples total, but at runtime it gave us %d.' % (ne, actual_ne))) log.info('Monitoring step:') log.info(('\tEpochs seen: %d' % self._epochs_seen)) log.info(('\tBatches seen: %d' % self._num_batches_seen)) log.info(('\tExamples seen: %d' % self._examples_seen)) t = (time.time() - self.t0) for channel_name in sorted(self.channels.keys(), key=number_aware_alphabetical_key): channel = self.channels[channel_name] channel.time_record.append(t) channel.batch_record.append(self._num_batches_seen) channel.example_record.append(self._examples_seen) channel.epoch_record.append(self._epochs_seen) val = channel.val_shared.get_value() channel.val_record.append(val) if (abs(val) < 10000.0): val_str = str(val) else: val_str = ('%.3e' % val) log.info(('\t%s: %s' % (channel_name, val_str)))
def __call__(self): '\n Runs the model on the monitoring dataset in order to add one\n data point to each of the channels.\n ' if self._dirty: self.redo_theano() datasets = self._datasets self.begin_record_entry() for (d, i, b, n, a, sd, ne) in safe_izip(datasets, self._iteration_mode, self._batch_size, self._num_batches, self.accum, self._rng_seed, self.num_examples): if isinstance(d, six.string_types): d = yaml_parse.load(d) raise NotImplementedError() myiterator = d.iterator(mode=i, batch_size=b, num_batches=n, data_specs=self._flat_data_specs, return_tuple=True, rng=sd) if (len(self._flat_data_specs[1]) == 0): X = () self.run_prereqs(X, d) a(*X) else: actual_ne = 0 for X in myiterator: self.run_prereqs(X, d) a(*X) actual_ne += self._flat_data_specs[0].np_batch_size(X) if (actual_ne != ne): raise RuntimeError(('At compile time, your iterator said it had %d examples total, but at runtime it gave us %d.' % (ne, actual_ne))) log.info('Monitoring step:') log.info(('\tEpochs seen: %d' % self._epochs_seen)) log.info(('\tBatches seen: %d' % self._num_batches_seen)) log.info(('\tExamples seen: %d' % self._examples_seen)) t = (time.time() - self.t0) for channel_name in sorted(self.channels.keys(), key=number_aware_alphabetical_key): channel = self.channels[channel_name] channel.time_record.append(t) channel.batch_record.append(self._num_batches_seen) channel.example_record.append(self._examples_seen) channel.epoch_record.append(self._epochs_seen) val = channel.val_shared.get_value() channel.val_record.append(val) if (abs(val) < 10000.0): val_str = str(val) else: val_str = ('%.3e' % val) log.info(('\t%s: %s' % (channel_name, val_str)))<|docstring|>Runs the model on the monitoring dataset in order to add one data point to each of the channels.<|endoftext|>
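For reference, the format strings in this method produce per-step log output shaped like the following (the numbers and channel name are illustrative):

    Monitoring step:
        Epochs seen: 3
        Batches seen: 1500
        Examples seen: 150000
        valid_y_misclass: 0.09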
70b9adf5bc2e2a53ef8eea7216717735bbcc2ca440e1fb5fedd8d78f9e205fc7
def run_prereqs(self, data, dataset): '\n Runs all "prerequisite functions" on a batch of data. Always\n called right before computing the monitoring channels on that\n batch.\n\n Parameters\n ----------\n data : tuple or Variable\n a member of the Space used as input to the monitoring\n functions\n dataset : Dataset\n the Dataset the data was drawn from\n ' if (dataset not in self.prereqs): return for prereq in self.prereqs[dataset]: prereq(*data)
Runs all "prerequisite functions" on a batch of data. Always called right before computing the monitoring channels on that batch. Parameters ---------- data : tuple or Variable a member of the Space used as input to the monitoring functions dataset : Dataset the Dataset the data was drawn from
pylearn2/monitor.py
run_prereqs
fxyu/pylearn2
2,045
python
def run_prereqs(self, data, dataset): '\n Runs all "prerequisite functions" on a batch of data. Always\n called right before computing the monitoring channels on that\n batch.\n\n Parameters\n ----------\n data : tuple or Variable\n a member of the Space used as input to the monitoring\n functions\n dataset : Dataset\n the Dataset the data was drawn from\n ' if (dataset not in self.prereqs): return for prereq in self.prereqs[dataset]: prereq(*data)
def run_prereqs(self, data, dataset): '\n Runs all "prerequisite functions" on a batch of data. Always\n called right before computing the monitoring channels on that\n batch.\n\n Parameters\n ----------\n data : tuple or Variable\n a member of the Space used as input to the monitoring\n functions\n dataset : Dataset\n the Dataset the data was drawn from\n ' if (dataset not in self.prereqs): return for prereq in self.prereqs[dataset]: prereq(*data)<|docstring|>Runs all "prerequisite functions" on a batch of data. Always called right before computing the monitoring channels on that batch. Parameters ---------- data : tuple or Variable a member of the Space used as input to the monitoring functions dataset : Dataset the Dataset the data was drawn from<|endoftext|>
9fa27de96b858fdb362bc46dabac9c8a3aa831461488aa2d2d74860315e0e469
def get_batches_seen(self): '\n Returns the number of batches the model has learned on\n (assuming that the learning code has been calling\n Monitor.report_batch correctly).\n ' return self._num_batches_seen
Returns the number of batches the model has learned on (assuming that the learning code has been calling Monitor.report_batch correctly).
pylearn2/monitor.py
get_batches_seen
fxyu/pylearn2
2,045
python
def get_batches_seen(self): '\n Returns the number of batches the model has learned on\n (assuming that the learning code has been calling\n Monitor.report_batch correctly).\n ' return self._num_batches_seen
def get_batches_seen(self): '\n Returns the number of batches the model has learned on\n (assuming that the learning code has been calling\n Monitor.report_batch correctly).\n ' return self._num_batches_seen<|docstring|>Returns the number of batches the model has learned on (assuming that the learning code has been calling Monitor.report_batch correctly).<|endoftext|>
e990849ac81538ee1aea3960e995caadfa369ded69d3810aab0d56a2a40c4efc
def get_epochs_seen(self): '\n .. todo::\n\n WRITEME\n\n Returns\n -------\n epochs_seen : int\n The number of epochs the model has been trained on.\n One "epoch" is one pass through Dataset.iterator.\n ' return self._epochs_seen
.. todo:: WRITEME Returns ------- epochs_seen : int The number of epochs the model has been trained on. One "epoch" is one pass through Dataset.iterator.
pylearn2/monitor.py
get_epochs_seen
fxyu/pylearn2
2,045
python
def get_epochs_seen(self): '\n .. todo::\n\n WRITEME\n\n Returns\n -------\n epochs_seen : int\n The number of epochs the model has been trained on.\n One "epoch" is one pass through Dataset.iterator.\n ' return self._epochs_seen
def get_epochs_seen(self): '\n .. todo::\n\n WRITEME\n\n Returns\n -------\n epochs_seen : int\n The number of epochs the model has been trained on.\n One "epoch" is one pass through Dataset.iterator.\n ' return self._epochs_seen<|docstring|>.. todo:: WRITEME Returns ------- epochs_seen : int The number of epochs the model has been trained on. One "epoch" is one pass through Dataset.iterator.<|endoftext|>
3bbb7d946908a296a329b60cf8798651678d7236e96aed2dc30e4322c72647b2
def get_examples_seen(self): '\n .. todo::\n\n WRITEME\n\n Returns\n -------\n examples_seen : int\n The number of examples the model has learned on (assuming\n that the learning code has been calling Monitor.report_batch\n correctly)\n ' return self._examples_seen
.. todo:: WRITEME Returns ------- examples_seen : int The number of examples the model has learned on (assuming that the learning code has been calling Monitor.report_batch correctly)
pylearn2/monitor.py
get_examples_seen
fxyu/pylearn2
2,045
python
def get_examples_seen(self): '\n .. todo::\n\n WRITEME\n\n Returns\n -------\n examples_seen : int\n The number of examples the model has learned on (assuming\n that the learning code has been calling Monitor.report_batch\n correctly)\n ' return self._examples_seen
def get_examples_seen(self): '\n .. todo::\n\n WRITEME\n\n Returns\n -------\n examples_seen : int\n The number of examples the model has learned on (assuming\n that the learning code has been calling Monitor.report_batch\n correctly)\n ' return self._examples_seen<|docstring|>.. todo:: WRITEME Returns ------- examples_seen : int The number of examples the model has learned on (assuming that the learning code has been calling Monitor.report_batch correctly)<|endoftext|>
6e1befab375da5db76d3bfa3ec8682efcd395fbacfa95bd8e6bcb597dbc51752
def report_batch(self, num_examples): '\n Call this whenever the model has learned on another batch of\n examples. Report how many examples were learned on.\n\n Parameters\n ----------\n num_examples : int\n The number of examples learned on in this minibatch.\n ' self._examples_seen += num_examples self._num_batches_seen += 1
Call this whenever the model has learned on another batch of examples. Report how many examples were learned on. Parameters ---------- num_examples : int The number of examples learned on in this minibatch.
pylearn2/monitor.py
report_batch
fxyu/pylearn2
2,045
python
def report_batch(self, num_examples): '\n Call this whenever the model has learned on another batch of\n examples. Report how many examples were learned on.\n\n Parameters\n ----------\n num_examples : int\n The number of examples learned on in this minibatch.\n ' self._examples_seen += num_examples self._num_batches_seen += 1
def report_batch(self, num_examples): '\n Call this whenever the model has learned on another batch of\n examples. Report how many examples were learned on.\n\n Parameters\n ----------\n num_examples : int\n The number of examples learned on in this minibatch.\n ' self._examples_seen += num_examples self._num_batches_seen += 1<|docstring|>Call this whenever the model has learned on another batch of examples. Report how many examples were learned on. Parameters ---------- num_examples : int The number of examples learned on in this minibatch.<|endoftext|>
c22fc100b771bae982dd2c5a6629008476c6402395e5ea224d6c9780fa52c541
def report_epoch(self): '\n Call this whenever the model has completed another "epoch" of\n learning. We regard one pass through Dataset.iterator as one\n epoch.\n ' self._epochs_seen += 1
Call this whenever the model has completed another "epoch" of learning. We regard one pass through Dataset.iterator as one epoch.
pylearn2/monitor.py
report_epoch
fxyu/pylearn2
2,045
python
def report_epoch(self): '\n Call this whenever the model has completed another "epoch" of\n learning. We regard one pass through Dataset.iterator as one\n epoch.\n ' self._epochs_seen += 1
def report_epoch(self): '\n Call this whenever the model has completed another "epoch" of\n learning. We regard one pass through Dataset.iterator as one\n epoch.\n ' self._epochs_seen += 1<|docstring|>Call this whenever the model has completed another "epoch" of learning. We regard one pass through Dataset.iterator as one epoch.<|endoftext|>
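A hedged sketch of how a training loop is expected to drive these counters; `monitor` (from Monitor.get_monitor(model)), `num_epochs`, `train_iterator`, and `batch_size` are assumed to exist.

    # for epoch in range(num_epochs):
    #     for batch in train_iterator:
    #         ...  # one SGD step on `batch`
    #         monitor.report_batch(batch_size)
    #     monitor.report_epoch()
    #     monitor()  # appends one value to every channel's val_record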
11685c2c0a79e8e40375b55b1cfc74bc005cf1ed72fcc1a01db335b6294dd611
def redo_theano(self): '\n Recompiles Theano functions used by this monitor.\n\n This is called any time we need to evaluate the channels and\n the channel definitions have changed since last we called it,\n or if the theano functions are unavailable for any other reason\n (first time they are needed after construction or\n deserialization, etc.)\n\n All channels are compiled as part of the same theano function\n so that the theano optimizations can eliminate subexpressions\n that are shared between multiple channels.\n ' self._dirty = False self._build_data_specs() init_names = dir(self) self.prereqs = OrderedDict() for channel in self.channels.values(): if (channel.prereqs is not None): dataset = channel.dataset if (dataset not in self.prereqs): self.prereqs[dataset] = [] prereqs = self.prereqs[dataset] for prereq in channel.prereqs: if (prereq not in prereqs): prereqs.append(prereq) updates = OrderedDict() for channel in self.channels.values(): updates[channel.val_shared] = np.cast[config.floatX](0.0) with log_timing(log, 'compiling begin_record_entry'): self.begin_record_entry = function(inputs=[], updates=updates, mode=self.theano_function_mode, name='Monitor.begin_record_entry') updates = OrderedDict() givens = OrderedDict() batch_names = [('monitoring_%s' % s) for s in self._flat_data_specs[1]] theano_args = self._flat_data_specs[0].make_theano_batch(batch_names) batch_size = self._flat_data_specs[0].batch_size(theano_args) nested_theano_args = self._data_specs_mapping.nest(theano_args) if (not isinstance(nested_theano_args, tuple)): nested_theano_args = (nested_theano_args,) assert (len(nested_theano_args) == (len(self.channels) + 1)) log.info('Monitored channels: ') for key in sorted(self.channels.keys()): mode = self.theano_function_mode if ((mode is not None) and hasattr(mode, 'record')): mode.record.handle_line(((('compiling monitor including ' + 'channel ') + key) + '\n')) log.info(('\t%s' % key)) it = [] for (d, i, n, b) in safe_izip(self._datasets, self._iteration_mode, self._num_batches, self._batch_size): it.append(d.iterator(mode=i, num_batches=n, batch_size=b, data_specs=self._flat_data_specs, return_tuple=True)) self.num_examples = [i.num_examples for i in it] givens = [OrderedDict() for d in self._datasets] updates = [OrderedDict() for d in self._datasets] for (i, channel) in enumerate(self.channels.values()): index = self._datasets.index(channel.dataset) d = self._datasets[index] g = givens[index] cur_num_examples = self.num_examples[index] u = updates[index] c_mapping = DataSpecsMapping(channel.data_specs) channel_inputs = c_mapping.flatten(channel.graph_input, return_tuple=True) inputs = c_mapping.flatten(nested_theano_args[(i + 1)], return_tuple=True) for (channel_X, X) in safe_izip(channel_inputs, inputs): assert ((channel_X not in g) or (g[channel_X] is X)) assert (channel_X.type == X.type), (channel_X.type, X.type) g[channel_X] = X if (batch_size == 0): assert (len(self._flat_data_specs[1]) == 0) val = channel.val else: if (n == 0): raise ValueError(('Iterating over 0 examples results in ' + 'divide by 0')) val = T.cast(((channel.val * T.cast(batch_size, 'float64')) / cur_num_examples), config.floatX) u[channel.val_shared] = (channel.val_shared + val) with log_timing(log, 'Compiling accum'): for up in updates: for key in up: if (key.dtype != up[key].dtype): raise TypeError((((((('Monitoring channel shared variable ' + key.name) + ' has dtype ') + key.dtype) + ' but is driven by an expression ') + 'with type ') + up[key].dtype)) self.accum = [] for (idx, packed) in enumerate(safe_izip(givens, updates)): (g, u) = packed mode = self.theano_function_mode if ((mode is not None) and hasattr(mode, 'record')): for elem in g: mode.record.handle_line((('g key ' + var_descriptor(elem)) + '\n')) mode.record.handle_line((('g val ' + var_descriptor(g[elem])) + '\n')) for elem in u: mode.record.handle_line((('u key ' + var_descriptor(elem)) + '\n')) mode.record.handle_line((('u val ' + var_descriptor(u[elem])) + '\n')) function_name = ('Monitor.accum[%d]' % idx) if ((mode is not None) and hasattr(mode, 'record')): mode.record.handle_line('compiling supervised accum\n') self.accum.append(function(theano_args, givens=g, updates=u, mode=self.theano_function_mode, name=function_name)) for a in self.accum: if ((mode is not None) and hasattr(mode, 'record')): for elem in a.maker.fgraph.outputs: mode.record.handle_line((('accum output ' + var_descriptor(elem)) + '\n')) log.info(('graph size: %d' % len(a.maker.fgraph.toposort()))) final_names = dir(self) self.register_names_to_del([name for name in final_names if (name not in init_names)])
Recompiles Theano functions used by this monitor. This is called any time we need to evaluate the channels and the channel definitions have changed since last we called it, or if the theano functions are unavailable for any other reason (first time they are needed after construction or deserialization, etc.) All channels are compiled as part of the same theano function so that the theano optimizations can eliminate subexpressions that are shared between multiple channels.
pylearn2/monitor.py
redo_theano
fxyu/pylearn2
2045
python
def redo_theano(self): '\n        Recompiles Theano functions used by this monitor.\n\n        This is called any time we need to evaluate the channels and\n        the channel definitions have changed since last we called it,\n        or if the theano functions are unavailable for any other reason\n        (first time they are needed after construction or\n        deserialization, etc.)\n\n        All channels are compiled as part of the same theano function\n        so that the theano optimizations can eliminate subexpressions\n        that are shared between multiple channels.\n        ' self._dirty = False self._build_data_specs() init_names = dir(self) self.prereqs = OrderedDict() for channel in self.channels.values(): if (channel.prereqs is not None): dataset = channel.dataset if (dataset not in self.prereqs): self.prereqs[dataset] = [] prereqs = self.prereqs[dataset] for prereq in channel.prereqs: if (prereq not in prereqs): prereqs.append(prereq) updates = OrderedDict() for channel in self.channels.values(): updates[channel.val_shared] = np.cast[config.floatX](0.0) with log_timing(log, 'compiling begin_record_entry'): self.begin_record_entry = function(inputs=[], updates=updates, mode=self.theano_function_mode, name='Monitor.begin_record_entry') updates = OrderedDict() givens = OrderedDict() batch_names = [('monitoring_%s' % s) for s in self._flat_data_specs[1]] theano_args = self._flat_data_specs[0].make_theano_batch(batch_names) batch_size = self._flat_data_specs[0].batch_size(theano_args) nested_theano_args = self._data_specs_mapping.nest(theano_args) if (not isinstance(nested_theano_args, tuple)): nested_theano_args = (nested_theano_args,) assert (len(nested_theano_args) == (len(self.channels) + 1)) log.info('Monitored channels: ') for key in sorted(self.channels.keys()): mode = self.theano_function_mode if ((mode is not None) and hasattr(mode, 'record')): mode.record.handle_line(((('compiling monitor including ' + 'channel ') + key) + '\n')) log.info(('\t%s' % key)) it = [] for (d, i, n, b) in safe_izip(self._datasets, self._iteration_mode, self._num_batches, self._batch_size): it.append(d.iterator(mode=i, num_batches=n, batch_size=b, data_specs=self._flat_data_specs, return_tuple=True)) self.num_examples = [i.num_examples for i in it] givens = [OrderedDict() for d in self._datasets] updates = [OrderedDict() for d in self._datasets] for (i, channel) in enumerate(self.channels.values()): index = self._datasets.index(channel.dataset) d = self._datasets[index] g = givens[index] cur_num_examples = self.num_examples[index] u = updates[index] c_mapping = DataSpecsMapping(channel.data_specs) channel_inputs = c_mapping.flatten(channel.graph_input, return_tuple=True) inputs = c_mapping.flatten(nested_theano_args[(i + 1)], return_tuple=True) for (channel_X, X) in safe_izip(channel_inputs, inputs): assert ((channel_X not in g) or (g[channel_X] is X)) assert (channel_X.type == X.type), (channel_X.type, X.type) g[channel_X] = X if (batch_size == 0): assert (len(self._flat_data_specs[1]) == 0) val = channel.val else: if (n == 0): raise ValueError(('Iterating over 0 examples results in ' + 'divide by 0')) val = T.cast(((channel.val * T.cast(batch_size, 'float64')) / cur_num_examples), config.floatX) u[channel.val_shared] = (channel.val_shared + val) with log_timing(log, 'Compiling accum'): for up in updates: for key in up: if (key.dtype != up[key].dtype): raise TypeError((((((('Monitoring channel shared variable ' + key.name) + ' has dtype ') + key.dtype) + ' but is driven by an expression ') + 'with type ') + up[key].dtype)) self.accum = [] for (idx, packed) in enumerate(safe_izip(givens, updates)): (g, u) = packed mode = self.theano_function_mode if ((mode is not None) and hasattr(mode, 'record')): for elem in g: mode.record.handle_line((('g key ' + var_descriptor(elem)) + '\n')) mode.record.handle_line((('g val ' + var_descriptor(g[elem])) + '\n')) for elem in u: mode.record.handle_line((('u key ' + var_descriptor(elem)) + '\n')) mode.record.handle_line((('u val ' + var_descriptor(u[elem])) + '\n')) function_name = ('Monitor.accum[%d]' % idx) if ((mode is not None) and hasattr(mode, 'record')): mode.record.handle_line('compiling supervised accum\n') self.accum.append(function(theano_args, givens=g, updates=u, mode=self.theano_function_mode, name=function_name)) for a in self.accum: if ((mode is not None) and hasattr(mode, 'record')): for elem in a.maker.fgraph.outputs: mode.record.handle_line((('accum output ' + var_descriptor(elem)) + '\n')) log.info(('graph size: %d' % len(a.maker.fgraph.toposort()))) final_names = dir(self) self.register_names_to_del([name for name in final_names if (name not in init_names)])
def redo_theano(self): '\n        Recompiles Theano functions used by this monitor.\n\n        This is called any time we need to evaluate the channels and\n        the channel definitions have changed since last we called it,\n        or if the theano functions are unavailable for any other reason\n        (first time they are needed after construction or\n        deserialization, etc.)\n\n        All channels are compiled as part of the same theano function\n        so that the theano optimizations can eliminate subexpressions\n        that are shared between multiple channels.\n        ' self._dirty = False self._build_data_specs() init_names = dir(self) self.prereqs = OrderedDict() for channel in self.channels.values(): if (channel.prereqs is not None): dataset = channel.dataset if (dataset not in self.prereqs): self.prereqs[dataset] = [] prereqs = self.prereqs[dataset] for prereq in channel.prereqs: if (prereq not in prereqs): prereqs.append(prereq) updates = OrderedDict() for channel in self.channels.values(): updates[channel.val_shared] = np.cast[config.floatX](0.0) with log_timing(log, 'compiling begin_record_entry'): self.begin_record_entry = function(inputs=[], updates=updates, mode=self.theano_function_mode, name='Monitor.begin_record_entry') updates = OrderedDict() givens = OrderedDict() batch_names = [('monitoring_%s' % s) for s in self._flat_data_specs[1]] theano_args = self._flat_data_specs[0].make_theano_batch(batch_names) batch_size = self._flat_data_specs[0].batch_size(theano_args) nested_theano_args = self._data_specs_mapping.nest(theano_args) if (not isinstance(nested_theano_args, tuple)): nested_theano_args = (nested_theano_args,) assert (len(nested_theano_args) == (len(self.channels) + 1)) log.info('Monitored channels: ') for key in sorted(self.channels.keys()): mode = self.theano_function_mode if ((mode is not None) and hasattr(mode, 'record')): mode.record.handle_line(((('compiling monitor including ' + 'channel ') + key) + '\n')) log.info(('\t%s' % key)) it = [] for (d, i, n, b) in safe_izip(self._datasets, self._iteration_mode, self._num_batches, self._batch_size): it.append(d.iterator(mode=i, num_batches=n, batch_size=b, data_specs=self._flat_data_specs, return_tuple=True)) self.num_examples = [i.num_examples for i in it] givens = [OrderedDict() for d in self._datasets] updates = [OrderedDict() for d in self._datasets] for (i, channel) in enumerate(self.channels.values()): index = self._datasets.index(channel.dataset) d = self._datasets[index] g = givens[index] cur_num_examples = self.num_examples[index] u = updates[index] c_mapping = DataSpecsMapping(channel.data_specs) channel_inputs = c_mapping.flatten(channel.graph_input, return_tuple=True) inputs = c_mapping.flatten(nested_theano_args[(i + 1)], return_tuple=True) for (channel_X, X) in safe_izip(channel_inputs, inputs): assert ((channel_X not in g) or (g[channel_X] is X)) assert (channel_X.type == X.type), (channel_X.type, X.type) g[channel_X] = X if (batch_size == 0): assert (len(self._flat_data_specs[1]) == 0) val = channel.val else: if (n == 0): raise ValueError(('Iterating over 0 examples results in ' + 'divide by 0')) val = T.cast(((channel.val * T.cast(batch_size, 'float64')) / cur_num_examples), config.floatX) u[channel.val_shared] = (channel.val_shared + val) with log_timing(log, 'Compiling accum'): for up in updates: for key in up: if (key.dtype != up[key].dtype): raise TypeError((((((('Monitoring channel shared variable ' + key.name) + ' has dtype ') + key.dtype) + ' but is driven by an expression ') + 'with type ') + up[key].dtype)) self.accum = [] for (idx, packed) in enumerate(safe_izip(givens, updates)): (g, u) = packed mode = self.theano_function_mode if ((mode is not None) and hasattr(mode, 'record')): for elem in g: mode.record.handle_line((('g key ' + var_descriptor(elem)) + '\n')) mode.record.handle_line((('g val ' + var_descriptor(g[elem])) + '\n')) for elem in u: mode.record.handle_line((('u key ' + var_descriptor(elem)) + '\n')) mode.record.handle_line((('u val ' + var_descriptor(u[elem])) + '\n')) function_name = ('Monitor.accum[%d]' % idx) if ((mode is not None) and hasattr(mode, 'record')): mode.record.handle_line('compiling supervised accum\n') self.accum.append(function(theano_args, givens=g, updates=u, mode=self.theano_function_mode, name=function_name)) for a in self.accum: if ((mode is not None) and hasattr(mode, 'record')): for elem in a.maker.fgraph.outputs: mode.record.handle_line((('accum output ' + var_descriptor(elem)) + '\n')) log.info(('graph size: %d' % len(a.maker.fgraph.toposort()))) final_names = dir(self) self.register_names_to_del([name for name in final_names if (name not in init_names)])<|docstring|>Recompiles Theano functions used by this monitor. This is called any time we need to evaluate the channels and the channel definitions have changed since last we called it, or if the theano functions are unavailable for any other reason (first time they are needed after construction or deserialization, etc.) All channels are compiled as part of the same theano function so that the theano optimizations can eliminate subexpressions that are shared between multiple channels.<|endoftext|>
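The reason all channels share one compiled function is worth seeing in isolation. Below is a minimal sketch (assuming Theano is installed; the variable names are illustrative, not from the source): two monitored quantities share a subexpression, and compiling them together lets Theano's optimizer evaluate that subexpression once per batch.

import theano
import theano.tensor as T

x = T.vector('x')
shared_term = T.exp(x).sum()          # subexpression common to both channels
channel_a = shared_term / x.shape[0]  # e.g. a mean-style channel
channel_b = T.sqrt(shared_term)       # another channel reusing the same term

# One function with multiple outputs, mirroring how Monitor.accum is built;
# Theano's graph optimization deduplicates the shared node during compilation.
f = theano.function([x], [channel_a, channel_b])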
b7dc7209cb1fa893730e5c7ef5f33a8603695bab1497d367b9894da52148f1c1
def register_names_to_del(self, names): '\n Register names of fields that should be deleted before pickling.\n\n Parameters\n ----------\n names : list\n A list of attribute names as strings.\n ' for name in names: if (name not in self.names_to_del): self.names_to_del.append(name)
Register names of fields that should be deleted before pickling. Parameters ---------- names : list A list of attribute names as strings.
pylearn2/monitor.py
register_names_to_del
fxyu/pylearn2
2045
python
def register_names_to_del(self, names): '\n Register names of fields that should be deleted before pickling.\n\n Parameters\n ----------\n names : list\n A list of attribute names as strings.\n ' for name in names: if (name not in self.names_to_del): self.names_to_del.append(name)
def register_names_to_del(self, names): '\n Register names of fields that should be deleted before pickling.\n\n Parameters\n ----------\n names : list\n A list of attribute names as strings.\n ' for name in names: if (name not in self.names_to_del): self.names_to_del.append(name)<|docstring|>Register names of fields that should be deleted before pickling. Parameters ---------- names : list A list of attribute names as strings.<|endoftext|>
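A hedged usage sketch (the monitor instance and field names here are assumptions for illustration, not from the source): this is how redo_theano marks its compiled artifacts as unpicklable, so that __getstate__ below can drop them before serialization.

# After this call, pickling the monitor omits both fields; they are
# rebuilt by the next call to redo_theano.
monitor.register_names_to_del(['begin_record_entry', 'accum'])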
00c91be01a8bb81a212b868b6f964d45f068378bfa8b721374c6abe373fdf1ec
def __getstate__(self): "\n In order to avoid pickling a copy of the dataset whenever a\n monitor is saved, the __getstate__ method replaces the dataset\n field with the dataset's yaml source. This is not a perfect\n solution because it won't work with job resuming, which would\n require saving the state of the dataset's random number\n generator.\n\n Like in the Model class, we also need to avoid saving any\n Theano functions, so we delete everything that can be\n regenerated with `redo_theano` by deleting the fields in\n `self.names_to_del`\n " if (not hasattr(self, '_datasets')): self._datasets = [self._dataset] del self._dataset temp = self._datasets if self._datasets: self._datasets = [] for dataset in temp: if isinstance(dataset, six.string_types): self._datasets.append(dataset) else: try: self._datasets.append(dataset.yaml_src) except AttributeError: warnings.warn(('Trained model saved without ' + 'indicating yaml_src')) d = copy.copy(self.__dict__) self._datasets = temp for name in self.names_to_del: if (name in d): del d[name] return d
In order to avoid pickling a copy of the dataset whenever a monitor is saved, the __getstate__ method replaces the dataset field with the dataset's yaml source. This is not a perfect solution because it won't work with job resuming, which would require saving the state of the dataset's random number generator. Like in the Model class, we also need to avoid saving any Theano functions, so we delete everything that can be regenerated with `redo_theano` by deleting the fields in `self.names_to_del`
pylearn2/monitor.py
__getstate__
fxyu/pylearn2
2045
python
def __getstate__(self): "\n In order to avoid pickling a copy of the dataset whenever a\n monitor is saved, the __getstate__ method replaces the dataset\n field with the dataset's yaml source. This is not a perfect\n solution because it won't work with job resuming, which would\n require saving the state of the dataset's random number\n generator.\n\n Like in the Model class, we also need to avoid saving any\n Theano functions, so we delete everything that can be\n regenerated with `redo_theano` by deleting the fields in\n `self.names_to_del`\n " if (not hasattr(self, '_datasets')): self._datasets = [self._dataset] del self._dataset temp = self._datasets if self._datasets: self._datasets = [] for dataset in temp: if isinstance(dataset, six.string_types): self._datasets.append(dataset) else: try: self._datasets.append(dataset.yaml_src) except AttributeError: warnings.warn(('Trained model saved without ' + 'indicating yaml_src')) d = copy.copy(self.__dict__) self._datasets = temp for name in self.names_to_del: if (name in d): del d[name] return d
def __getstate__(self): "\n In order to avoid pickling a copy of the dataset whenever a\n monitor is saved, the __getstate__ method replaces the dataset\n field with the dataset's yaml source. This is not a perfect\n solution because it won't work with job resuming, which would\n require saving the state of the dataset's random number\n generator.\n\n Like in the Model class, we also need to avoid saving any\n Theano functions, so we delete everything that can be\n regenerated with `redo_theano` by deleting the fields in\n `self.names_to_del`\n " if (not hasattr(self, '_datasets')): self._datasets = [self._dataset] del self._dataset temp = self._datasets if self._datasets: self._datasets = [] for dataset in temp: if isinstance(dataset, six.string_types): self._datasets.append(dataset) else: try: self._datasets.append(dataset.yaml_src) except AttributeError: warnings.warn(('Trained model saved without ' + 'indicating yaml_src')) d = copy.copy(self.__dict__) self._datasets = temp for name in self.names_to_del: if (name in d): del d[name] return d<|docstring|>In order to avoid pickling a copy of the dataset whenever a monitor is saved, the __getstate__ method replaces the dataset field with the dataset's yaml source. This is not a perfect solution because it won't work with job resuming, which would require saving the state of the dataset's random number generator. Like in the Model class, we also need to avoid saving any Theano functions, so we delete everything that can be regenerated with `redo_theano` by deleting the fields in `self.names_to_del`<|endoftext|>
029c5904fcf1a3f3ebb0d00e953c4da3f374c7267ca411042915a314429cd5db
def __setstate__(self, d): '\n Sets the object to have the state described by `d`.\n\n Parameters\n ----------\n d : dict\n A dictionary mapping string names of fields to values for\n these fields.\n ' if ('_dataset' in d): d['_datasets'] = [d['_dataset']] del d['_dataset'] self.__dict__.update(d)
Sets the object to have the state described by `d`. Parameters ---------- d : dict A dictionary mapping string names of fields to values for these fields.
pylearn2/monitor.py
__setstate__
fxyu/pylearn2
2045
python
def __setstate__(self, d): '\n Sets the object to have the state described by `d`.\n\n Parameters\n ----------\n d : dict\n A dictionary mapping string names of fields to values for\n these fields.\n ' if ('_dataset' in d): d['_datasets'] = [d['_dataset']] del d['_dataset'] self.__dict__.update(d)
def __setstate__(self, d): '\n Sets the object to have the state described by `d`.\n\n Parameters\n ----------\n d : dict\n A dictionary mapping string names of fields to values for\n these fields.\n ' if ('_dataset' in d): d['_datasets'] = [d['_dataset']] del d['_dataset'] self.__dict__.update(d)<|docstring|>Sets the object to have the state described by `d`. Parameters ---------- d : dict A dictionary mapping string names of fields to values for these fields.<|endoftext|>
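The two methods above form a standard pickle-protocol pair. A standalone, runnable sketch of the same pattern (class and field names are illustrative, not from the source): unpicklable fields are dropped on serialization and regenerated lazily afterwards.

import copy
import pickle

class LazilyCompiled(object):
    def __init__(self):
        self.names_to_del = ['compiled_fn']
        self.compiled_fn = lambda v: v * 2   # stand-in for a Theano function

    def __getstate__(self):
        # Drop registered fields, cf. Monitor.names_to_del
        d = copy.copy(self.__dict__)
        for name in self.names_to_del:
            d.pop(name, None)
        return d

    def __setstate__(self, d):
        self.__dict__.update(d)

restored = pickle.loads(pickle.dumps(LazilyCompiled()))
assert not hasattr(restored, 'compiled_fn')  # must be recompiled, cf. redo_theano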
2f63dfc76cf601aa15afa52145f8c48b62e0a8a07763293a9e633adb536f9d18
def add_channel(self, name, ipt, val, dataset=None, prereqs=None, data_specs=None): '\n        Asks the monitor to start tracking a new value.  Can be called\n        even after the monitor is already in use.\n\n        Parameters\n        ----------\n        name : str\n            The display name in the monitor.\n        ipt : tensor_like\n            The symbolic tensor which should be clamped to the data.\n            (or a list/tuple containing symbolic tensors, following the\n            data_specs)\n        val : tensor_like\n            The value (function of `ipt`) to be tracked.\n        dataset : pylearn2.datasets.Dataset\n            Which dataset to compute this channel on\n        prereqs : list of callables that take a list of numpy tensors\n            Each prereq must be called exactly once per each new batch\n            of data drawn *from dataset* before the channel value is\n            computed if two channels provide a prereq with exactly the\n            same id, that prereq will only be called once\n        data_specs : (space, source) pair\n            Identifies the order, format and semantics of ipt\n        ' if six.PY3: numeric = (float, int) else: numeric = (float, int, long) if isinstance(val, numeric): val = np.cast[theano.config.floatX](val) val = T.as_tensor_variable(val) if (data_specs is None): warnings.warn(("parameter 'data_specs' should be provided when " + 'calling add_channel. We will build a default one.'), stacklevel=2) if isinstance(ipt, list): ipt = tuple(ipt) if ((ipt is not None) and (not isinstance(ipt, tuple))): ipt = (ipt,) if (ipt is None): data_specs = (NullSpace(), '') elif (len(ipt) == 0): data_specs = (CompositeSpace([]), ()) elif hasattr(dataset, 'get_data_specs'): (dataset_space, dataset_source) = dataset.get_data_specs() if ((len(ipt) == 1) and (dataset_source is not None) and ((not isinstance(dataset_source, tuple)) or (len(dataset_source) == 1)) and ('features' in dataset_source)): data_specs = (dataset_space, dataset_source) elif ((len(ipt) == 2) and (dataset_source == ('features', 'targets'))): data_specs = (dataset_space, dataset_source) else: raise ValueError((('Cannot infer default data_specs for ' + 'the following input points and ') + ('dataset: ipt = %s, dataset = %s' % (ipt, dataset)))) data_specs[0].validate(ipt) mapping = DataSpecsMapping(data_specs) flat_ipt = mapping.flatten(ipt) if (not isinstance(flat_ipt, tuple)): flat_ipt = (flat_ipt,) inputs = theano.gof.graph.inputs([val]) for elem in inputs: if ((not hasattr(elem, 'get_value')) and (not isinstance(elem, theano.gof.graph.Constant))): if (elem not in flat_ipt): raise ValueError((((((('Unspecified input: ' + str(elem)) + '. This may be due to an incorrect ') + "implementation of a cost's ") + 'get_data_specs() method, or of a ') + "model's get_monitoring_data_specs() ") + 'method.')) mode = self.theano_function_mode if ((mode is not None) and hasattr(mode, 'record')): mode.record.handle_line((('Adding monitor channel ' + name) + '\n')) assert isinstance(flat_ipt, tuple) if (len(flat_ipt) != 1): for elem in flat_ipt: mode.record.handle_line((('Includes input var ' + var_descriptor(elem)) + '\n')) else: mode.record.handle_line((((name + ' input var is ') + var_descriptor(flat_ipt[0])) + '\n')) mode.record.handle_line((((('channel ' + name) + ' is ') + var_descriptor(val)) + '\n')) if (dataset is None): if (len(self._datasets) == 1): dataset = self._datasets[0] elif (len(self._datasets) == 0): raise ValueError(_err_no_data) else: raise ValueError(_err_ambig_data) try: self._datasets.index(dataset) except ValueError: reraise_as(ValueError(('The dataset specified is not one of the ' + "monitor's datasets"))) if (self.on_channel_conflict not in ('error', 'copy_history', 'overwrite')): raise ValueError(("on_channel_conflict should be either 'error'" + "'copy_history', or 'overwrite'")) if ((name in self.channels) and (self.on_channel_conflict == 'error')): raise ValueError(('Tried to create the same channel twice (%s)' % name)) elif ((name in self.channels) and (self.on_channel_conflict == 'copy_history')): self.channels[name] = MonitorChannel(ipt, val, name, data_specs, dataset, prereqs, self.channels[name]) elif ((name not in self.channels) or (self.on_channel_conflict == 'overwrite')): self.channels[name] = MonitorChannel(ipt, val, name, data_specs, dataset, prereqs) self._dirty = True
Asks the monitor to start tracking a new value. Can be called even after the monitor is already in use. Parameters ---------- name : str The display name in the monitor. ipt : tensor_like The symbolic tensor which should be clamped to the data. (or a list/tuple containing symbolic tensors, following the data_specs) val : tensor_like The value (function of `ipt`) to be tracked. dataset : pylearn2.datasets.Dataset Which dataset to compute this channel on prereqs : list of callables that take a list of numpy tensors Each prereq must be called exactly once per each new batch of data drawn *from dataset* before the channel value is computed. If two channels provide a prereq with exactly the same id, that prereq will only be called once data_specs : (space, source) pair Identifies the order, format and semantics of ipt
pylearn2/monitor.py
add_channel
fxyu/pylearn2
2045
python
def add_channel(self, name, ipt, val, dataset=None, prereqs=None, data_specs=None): '\n        Asks the monitor to start tracking a new value.  Can be called\n        even after the monitor is already in use.\n\n        Parameters\n        ----------\n        name : str\n            The display name in the monitor.\n        ipt : tensor_like\n            The symbolic tensor which should be clamped to the data.\n            (or a list/tuple containing symbolic tensors, following the\n            data_specs)\n        val : tensor_like\n            The value (function of `ipt`) to be tracked.\n        dataset : pylearn2.datasets.Dataset\n            Which dataset to compute this channel on\n        prereqs : list of callables that take a list of numpy tensors\n            Each prereq must be called exactly once per each new batch\n            of data drawn *from dataset* before the channel value is\n            computed if two channels provide a prereq with exactly the\n            same id, that prereq will only be called once\n        data_specs : (space, source) pair\n            Identifies the order, format and semantics of ipt\n        ' if six.PY3: numeric = (float, int) else: numeric = (float, int, long) if isinstance(val, numeric): val = np.cast[theano.config.floatX](val) val = T.as_tensor_variable(val) if (data_specs is None): warnings.warn(("parameter 'data_specs' should be provided when " + 'calling add_channel. We will build a default one.'), stacklevel=2) if isinstance(ipt, list): ipt = tuple(ipt) if ((ipt is not None) and (not isinstance(ipt, tuple))): ipt = (ipt,) if (ipt is None): data_specs = (NullSpace(), '') elif (len(ipt) == 0): data_specs = (CompositeSpace([]), ()) elif hasattr(dataset, 'get_data_specs'): (dataset_space, dataset_source) = dataset.get_data_specs() if ((len(ipt) == 1) and (dataset_source is not None) and ((not isinstance(dataset_source, tuple)) or (len(dataset_source) == 1)) and ('features' in dataset_source)): data_specs = (dataset_space, dataset_source) elif ((len(ipt) == 2) and (dataset_source == ('features', 'targets'))): data_specs = (dataset_space, dataset_source) else: raise ValueError((('Cannot infer default data_specs for ' + 'the following input points and ') + ('dataset: ipt = %s, dataset = %s' % (ipt, dataset)))) data_specs[0].validate(ipt) mapping = DataSpecsMapping(data_specs) flat_ipt = mapping.flatten(ipt) if (not isinstance(flat_ipt, tuple)): flat_ipt = (flat_ipt,) inputs = theano.gof.graph.inputs([val]) for elem in inputs: if ((not hasattr(elem, 'get_value')) and (not isinstance(elem, theano.gof.graph.Constant))): if (elem not in flat_ipt): raise ValueError((((((('Unspecified input: ' + str(elem)) + '. This may be due to an incorrect ') + "implementation of a cost's ") + 'get_data_specs() method, or of a ') + "model's get_monitoring_data_specs() ") + 'method.')) mode = self.theano_function_mode if ((mode is not None) and hasattr(mode, 'record')): mode.record.handle_line((('Adding monitor channel ' + name) + '\n')) assert isinstance(flat_ipt, tuple) if (len(flat_ipt) != 1): for elem in flat_ipt: mode.record.handle_line((('Includes input var ' + var_descriptor(elem)) + '\n')) else: mode.record.handle_line((((name + ' input var is ') + var_descriptor(flat_ipt[0])) + '\n')) mode.record.handle_line((((('channel ' + name) + ' is ') + var_descriptor(val)) + '\n')) if (dataset is None): if (len(self._datasets) == 1): dataset = self._datasets[0] elif (len(self._datasets) == 0): raise ValueError(_err_no_data) else: raise ValueError(_err_ambig_data) try: self._datasets.index(dataset) except ValueError: reraise_as(ValueError(('The dataset specified is not one of the ' + "monitor's datasets"))) if (self.on_channel_conflict not in ('error', 'copy_history', 'overwrite')): raise ValueError(("on_channel_conflict should be either 'error'" + "'copy_history', or 'overwrite'")) if ((name in self.channels) and (self.on_channel_conflict == 'error')): raise ValueError(('Tried to create the same channel twice (%s)' % name)) elif ((name in self.channels) and (self.on_channel_conflict == 'copy_history')): self.channels[name] = MonitorChannel(ipt, val, name, data_specs, dataset, prereqs, self.channels[name]) elif ((name not in self.channels) or (self.on_channel_conflict == 'overwrite')): self.channels[name] = MonitorChannel(ipt, val, name, data_specs, dataset, prereqs) self._dirty = True
def add_channel(self, name, ipt, val, dataset=None, prereqs=None, data_specs=None): '\n        Asks the monitor to start tracking a new value.  Can be called\n        even after the monitor is already in use.\n\n        Parameters\n        ----------\n        name : str\n            The display name in the monitor.\n        ipt : tensor_like\n            The symbolic tensor which should be clamped to the data.\n            (or a list/tuple containing symbolic tensors, following the\n            data_specs)\n        val : tensor_like\n            The value (function of `ipt`) to be tracked.\n        dataset : pylearn2.datasets.Dataset\n            Which dataset to compute this channel on\n        prereqs : list of callables that take a list of numpy tensors\n            Each prereq must be called exactly once per each new batch\n            of data drawn *from dataset* before the channel value is\n            computed if two channels provide a prereq with exactly the\n            same id, that prereq will only be called once\n        data_specs : (space, source) pair\n            Identifies the order, format and semantics of ipt\n        ' if six.PY3: numeric = (float, int) else: numeric = (float, int, long) if isinstance(val, numeric): val = np.cast[theano.config.floatX](val) val = T.as_tensor_variable(val) if (data_specs is None): warnings.warn(("parameter 'data_specs' should be provided when " + 'calling add_channel. We will build a default one.'), stacklevel=2) if isinstance(ipt, list): ipt = tuple(ipt) if ((ipt is not None) and (not isinstance(ipt, tuple))): ipt = (ipt,) if (ipt is None): data_specs = (NullSpace(), '') elif (len(ipt) == 0): data_specs = (CompositeSpace([]), ()) elif hasattr(dataset, 'get_data_specs'): (dataset_space, dataset_source) = dataset.get_data_specs() if ((len(ipt) == 1) and (dataset_source is not None) and ((not isinstance(dataset_source, tuple)) or (len(dataset_source) == 1)) and ('features' in dataset_source)): data_specs = (dataset_space, dataset_source) elif ((len(ipt) == 2) and (dataset_source == ('features', 'targets'))): data_specs = (dataset_space, dataset_source) else: raise ValueError((('Cannot infer default data_specs for ' + 'the following input points and ') + ('dataset: ipt = %s, dataset = %s' % (ipt, dataset)))) data_specs[0].validate(ipt) mapping = DataSpecsMapping(data_specs) flat_ipt = mapping.flatten(ipt) if (not isinstance(flat_ipt, tuple)): flat_ipt = (flat_ipt,) inputs = theano.gof.graph.inputs([val]) for elem in inputs: if ((not hasattr(elem, 'get_value')) and (not isinstance(elem, theano.gof.graph.Constant))): if (elem not in flat_ipt): raise ValueError((((((('Unspecified input: ' + str(elem)) + '. This may be due to an incorrect ') + "implementation of a cost's ") + 'get_data_specs() method, or of a ') + "model's get_monitoring_data_specs() ") + 'method.')) mode = self.theano_function_mode if ((mode is not None) and hasattr(mode, 'record')): mode.record.handle_line((('Adding monitor channel ' + name) + '\n')) assert isinstance(flat_ipt, tuple) if (len(flat_ipt) != 1): for elem in flat_ipt: mode.record.handle_line((('Includes input var ' + var_descriptor(elem)) + '\n')) else: mode.record.handle_line((((name + ' input var is ') + var_descriptor(flat_ipt[0])) + '\n')) mode.record.handle_line((((('channel ' + name) + ' is ') + var_descriptor(val)) + '\n')) if (dataset is None): if (len(self._datasets) == 1): dataset = self._datasets[0] elif (len(self._datasets) == 0): raise ValueError(_err_no_data) else: raise ValueError(_err_ambig_data) try: self._datasets.index(dataset) except ValueError: reraise_as(ValueError(('The dataset specified is not one of the ' + "monitor's datasets"))) if (self.on_channel_conflict not in ('error', 'copy_history', 'overwrite')): raise ValueError(("on_channel_conflict should be either 'error'" + "'copy_history', or 'overwrite'")) if ((name in self.channels) and (self.on_channel_conflict == 'error')): raise ValueError(('Tried to create the same channel twice (%s)' % name)) elif ((name in self.channels) and (self.on_channel_conflict == 'copy_history')): self.channels[name] = MonitorChannel(ipt, val, name, data_specs, dataset, prereqs, self.channels[name]) elif ((name not in self.channels) or (self.on_channel_conflict == 'overwrite')): self.channels[name] = MonitorChannel(ipt, val, name, data_specs, dataset, prereqs) self._dirty = True<|docstring|>Asks the monitor to start tracking a new value. Can be called even after the monitor is already in use. Parameters ---------- name : str The display name in the monitor. ipt : tensor_like The symbolic tensor which should be clamped to the data. (or a list/tuple containing symbolic tensors, following the data_specs) val : tensor_like The value (function of `ipt`) to be tracked. dataset : pylearn2.datasets.Dataset Which dataset to compute this channel on prereqs : list of callables that take a list of numpy tensors Each prereq must be called exactly once per each new batch of data drawn *from dataset* before the channel value is computed. If two channels provide a prereq with exactly the same id, that prereq will only be called once data_specs : (space, source) pair Identifies the order, format and semantics of ipt<|endoftext|>
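A hedged usage sketch of add_channel (the model and train_set objects are assumptions, not from the source): track the mean of a model's output over a monitoring dataset, passing explicit data_specs to avoid the default-inference warning in the code above.

from pylearn2.monitor import Monitor

monitor = Monitor.get_monitor(model)
monitor.add_dataset(train_set, mode='sequential', batch_size=100)

# Build a symbolic input batch and monitor a function of it.
X = model.get_input_space().make_theano_batch()
monitor.add_channel(name='mean_output',
                    ipt=X,
                    val=model.fprop(X).mean(),   # assumes an MLP-style model
                    dataset=train_set,
                    data_specs=(model.get_input_space(), 'features'))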
cb38977ab1b8b42b6558358c174630fbc679a77afc9c0b0feb0d47b654a29b6d
def _sanity_check(self): "\n Sometimes we serialize models and then load them somewhere else\n but still try to use their Monitor, and the Monitor is in a\n mangled state. I've added some calls to _sanity_check to try to\n catch when that happens. Not sure what to do for a long term\n fix. I think it requires making theano graphs serializable\n first.\n " for name in self.channels: channel = self.channels[name] assert hasattr(channel, 'prereqs')
Sometimes we serialize models and then load them somewhere else but still try to use their Monitor, and the Monitor is in a mangled state. I've added some calls to _sanity_check to try to catch when that happens. Not sure what to do for a long term fix. I think it requires making theano graphs serializable first.
pylearn2/monitor.py
_sanity_check
fxyu/pylearn2
2045
python
def _sanity_check(self): "\n Sometimes we serialize models and then load them somewhere else\n but still try to use their Monitor, and the Monitor is in a\n mangled state. I've added some calls to _sanity_check to try to\n catch when that happens. Not sure what to do for a long term\n fix. I think it requires making theano graphs serializable\n first.\n " for name in self.channels: channel = self.channels[name] assert hasattr(channel, 'prereqs')
def _sanity_check(self): "\n Sometimes we serialize models and then load them somewhere else\n but still try to use their Monitor, and the Monitor is in a\n mangled state. I've added some calls to _sanity_check to try to\n catch when that happens. Not sure what to do for a long term\n fix. I think it requires making theano graphs serializable\n first.\n " for name in self.channels: channel = self.channels[name] assert hasattr(channel, 'prereqs')<|docstring|>Sometimes we serialize models and then load them somewhere else but still try to use their Monitor, and the Monitor is in a mangled state. I've added some calls to _sanity_check to try to catch when that happens. Not sure what to do for a long term fix. I think it requires making theano graphs serializable first.<|endoftext|>
366455c8da314b173b276c45242495837a068addf71d1065cc167da59844afa5
@classmethod def get_monitor(cls, model): "\n Returns a model's monitor. If the model doesn't have a monitor\n yet, installs one and returns that.\n\n Parameters\n ----------\n model : object\n An object that implements the `Model` interface specified\n in `pylearn2.models`.\n " if hasattr(model, 'monitor'): rval = model.monitor rval._sanity_check() else: rval = Monitor(model) model.monitor = rval return rval
Returns a model's monitor. If the model doesn't have a monitor yet, installs one and returns that. Parameters ---------- model : object An object that implements the `Model` interface specified in `pylearn2.models`.
pylearn2/monitor.py
get_monitor
fxyu/pylearn2
2045
python
@classmethod def get_monitor(cls, model): "\n Returns a model's monitor. If the model doesn't have a monitor\n yet, installs one and returns that.\n\n Parameters\n ----------\n model : object\n An object that implements the `Model` interface specified\n in `pylearn2.models`.\n " if hasattr(model, 'monitor'): rval = model.monitor rval._sanity_check() else: rval = Monitor(model) model.monitor = rval return rval
@classmethod def get_monitor(cls, model): "\n Returns a model's monitor. If the model doesn't have a monitor\n yet, installs one and returns that.\n\n Parameters\n ----------\n model : object\n An object that implements the `Model` interface specified\n in `pylearn2.models`.\n " if hasattr(model, 'monitor'): rval = model.monitor rval._sanity_check() else: rval = Monitor(model) model.monitor = rval return rval<|docstring|>Returns a model's monitor. If the model doesn't have a monitor yet, installs one and returns that. Parameters ---------- model : object An object that implements the `Model` interface specified in `pylearn2.models`.<|endoftext|>
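get_monitor is effectively idempotent, which the following sketch (with an assumed model object) makes explicit: the first call installs a monitor on the model, and later calls return the same instance.

monitor = Monitor.get_monitor(model)
assert Monitor.get_monitor(model) is monitor   # reuses model.monitor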
940b096da2fa58243757ee958283c0f9284ddd904a09a006bbcce2d397b040a2
@property def batch_size(self): '\n .. todo::\n\n WRITEME\n\n Returns\n -------\n batch_size : int\n The size of the batches used for monitoring\n ' return self._batch_size
.. todo:: WRITEME Returns ------- batch_size : int The size of the batches used for monitoring
pylearn2/monitor.py
batch_size
fxyu/pylearn2
2045
python
@property def batch_size(self): '\n .. todo::\n\n WRITEME\n\n Returns\n -------\n batch_size : int\n The size of the batches used for monitoring\n ' return self._batch_size
@property def batch_size(self): '\n .. todo::\n\n WRITEME\n\n Returns\n -------\n batch_size : int\n The size of the batches used for monitoring\n ' return self._batch_size<|docstring|>.. todo:: WRITEME Returns ------- batch_size : int The size of the batches used for monitoring<|endoftext|>
690472128f13d949f1b900f7b8f1f1fa9dc2b1da63795658c1e59707716e7b83
@property def num_batches(self): '\n .. todo::\n\n WRITEME\n\n Returns\n -------\n num_batches : int\n The number of batches used for monitoring\n ' return self._num_batches
.. todo:: WRITEME Returns ------- num_batches : int The number of batches used for monitoring
pylearn2/monitor.py
num_batches
fxyu/pylearn2
2045
python
@property def num_batches(self): '\n .. todo::\n\n WRITEME\n\n Returns\n -------\n num_batches : int\n The number of batches used for monitoring\n ' return self._num_batches
@property def num_batches(self): '\n .. todo::\n\n WRITEME\n\n Returns\n -------\n num_batches : int\n The number of batches used for monitoring\n ' return self._num_batches<|docstring|>.. todo:: WRITEME Returns ------- num_batches : int The number of batches used for monitoring<|endoftext|>
1ecadaadb497038af85c81640a79426e0bdae6942c2361e51efc912e3c91e4ca
def setup(self, dataset, cost, batch_size, num_batches=None, extra_costs=None, mode='sequential', obj_prereqs=None, cost_monitoring_args=None): "\n        Sets up the monitor for a cost minimization problem.\n        Adds channels defined by both the model and the cost for\n        the specified dataset(s), as well as a channel called\n        'objective' defined by the costs' __call__ method.\n\n        Parameters\n        ----------\n        dataset : pylearn2.datasets.Dataset\n            Dataset or dictionary mapping string names to Datasets.\n            If string names are used, then for every dataset, each\n            channel defined by the model or cost will be replicated\n            with that dataset's name followed by an underscore as the\n            prefix. For example, if your cost defines a channel called\n            'misclass', and datasets is\n            {'train' : train_dataset, 'valid' : valid_dataset},\n            you will get channels called 'train_misclass' and\n            'valid_misclass'.\n        cost : pylearn2.costs.Cost\n            The cost being optimized by training. The value of the cost\n            will appear as the `objective` channel. Its\n            `get_monitoring_channels` method will also be used to\n            supply other channels.\n        extra_costs : OrderedDict, optional\n            A dictionary mapping channel names to Cost objects.\n            Their value will appear as the specified channel name.\n            They will also provide more monitoring channels via their\n            `get_monitoring_channels` method.\n        obj_prereqs : None, or list of functions\n            Functions to pass as prerequisites to the `objective` channel.\n        cost_monitoring_args : dict\n            Dictionary of kwargs that will be passed to\n            `cost.get_monitoring_channels()`\n            (but not for the extra_costs).\n        " if (dataset is None): return if isinstance(dataset, Dataset): dataset = {'': dataset} else: assert isinstance(dataset, dict) assert all((isinstance(key, str) for key in dataset)) assert all((isinstance(dataset[key], Dataset) for key in dataset)) if (extra_costs is None): costs = {} else: assert isinstance(extra_costs, (OrderedDict, dict)) costs = extra_costs assert ('' not in costs) costs[''] = cost if (cost_monitoring_args is None): cost_monitoring_args = {} model = self.model cost_names = sorted(costs.keys()) spaces = [] sources = [] for c in cost_names: (c_space, c_source) = costs[c].get_data_specs(model) spaces.append(c_space) sources.append(c_source) (m_space, m_source) = model.get_monitoring_data_specs() spaces.append(m_space) sources.append(m_source) nested_space = CompositeSpace(spaces) nested_sources = tuple(sources) mapping = DataSpecsMapping((nested_space, nested_sources)) space_tuple = mapping.flatten(nested_space, return_tuple=True) source_tuple = mapping.flatten(nested_sources, return_tuple=True) ipt = tuple((space.make_theano_batch(name=('monitor_%s' % source), batch_size=None) for (space, source) in safe_zip(space_tuple, source_tuple))) nested_ipt = mapping.nest(ipt) custom_channels = {} for (i, cost_name) in enumerate(cost_names): if (cost_name == ''): prefix = '' else: prefix = (cost_name + '_') cost = costs[cost_name] cost_ipt = nested_ipt[i] raw_channels = cost.get_monitoring_channels(model, cost_ipt) channels = {} for name in raw_channels: channels[(prefix + name)] = (raw_channels[name], cost_ipt, (spaces[i], sources[i])) custom_channels.update(channels) model_channels = model.get_monitoring_channels(nested_ipt[(- 1)]) channels = {} for name in model_channels: channels[name] = (model_channels[name], nested_ipt[(- 1)], (spaces[(- 1)], sources[(- 1)])) custom_channels.update(channels) if is_stochastic(mode): seed = [[2013, 2, 22]] else: seed = None for dataset_name in dataset: cur_dataset = dataset[dataset_name] self.add_dataset(dataset=cur_dataset, mode=mode, batch_size=batch_size, num_batches=num_batches, seed=seed) if (dataset_name == ''): dprefix = '' else: dprefix = (dataset_name + '_') for (i, cost_name) in enumerate(cost_names): cost = costs[cost_name] cost_ipt = nested_ipt[i] cost_value = cost.expr(model, cost_ipt) if (cost_value is not None): if (cost_name == ''): name = (dprefix + 'objective') prereqs = obj_prereqs else: name = (dprefix + cost_name) prereqs = None cost.get_data_specs(model)[0].validate(cost_ipt) self.add_channel(name=name, ipt=cost_ipt, val=cost_value, data_specs=cost.get_data_specs(model), dataset=cur_dataset, prereqs=prereqs) for key in custom_channels: (val, ipt, data_specs) = custom_channels[key] data_specs[0].validate(ipt) self.add_channel(name=(dprefix + key), ipt=ipt, val=val, data_specs=data_specs, dataset=cur_dataset)
Sets up the monitor for a cost minimization problem. Adds channels defined by both the model and the cost for the specified dataset(s), as well as a channel called 'objective' defined by the costs' __call__ method. Parameters ---------- dataset : pylearn2.datasets.Dataset Dataset or dictionary mapping string names to Datasets. If string names are used, then for every dataset, each channel defined by the model or cost will be replicated with that dataset's name followed by an underscore as the prefix. For example, if your cost defines a channel called 'misclass', and datasets is {'train' : train_dataset, 'valid' : valid_dataset}, you will get channels called 'train_misclass' and 'valid_misclass'. cost : pylearn2.costs.Cost The cost being optimized by training. The value of the cost will appear as the `objective` channel. Its `get_monitoring_channels` method will also be used to supply other channels. extra_costs : OrderedDict, optional A dictionary mapping channel names to Cost objects. Their value will appear as the specified channel name. They will also provide more monitoring channels via their `get_monitoring_channels` method. obj_prereqs : None, or list of functions Functions to pass as prerequisites to the `objective` channel. cost_monitoring_args : dict Dictionary of kwargs that will be passed to `cost.get_monitoring_channels()` (but not for the extra_costs).
pylearn2/monitor.py
setup
fxyu/pylearn2
2045
python
def setup(self, dataset, cost, batch_size, num_batches=None, extra_costs=None, mode='sequential', obj_prereqs=None, cost_monitoring_args=None): "\n        Sets up the monitor for a cost minimization problem.\n        Adds channels defined by both the model and the cost for\n        the specified dataset(s), as well as a channel called\n        'objective' defined by the costs' __call__ method.\n\n        Parameters\n        ----------\n        dataset : pylearn2.datasets.Dataset\n            Dataset or dictionary mapping string names to Datasets.\n            If string names are used, then for every dataset, each\n            channel defined by the model or cost will be replicated\n            with that dataset's name followed by an underscore as the\n            prefix. For example, if your cost defines a channel called\n            'misclass', and datasets is\n            {'train' : train_dataset, 'valid' : valid_dataset},\n            you will get channels called 'train_misclass' and\n            'valid_misclass'.\n        cost : pylearn2.costs.Cost\n            The cost being optimized by training. The value of the cost\n            will appear as the `objective` channel. Its\n            `get_monitoring_channels` method will also be used to\n            supply other channels.\n        extra_costs : OrderedDict, optional\n            A dictionary mapping channel names to Cost objects.\n            Their value will appear as the specified channel name.\n            They will also provide more monitoring channels via their\n            `get_monitoring_channels` method.\n        obj_prereqs : None, or list of functions\n            Functions to pass as prerequisites to the `objective` channel.\n        cost_monitoring_args : dict\n            Dictionary of kwargs that will be passed to\n            `cost.get_monitoring_channels()`\n            (but not for the extra_costs).\n        " if (dataset is None): return if isinstance(dataset, Dataset): dataset = {'': dataset} else: assert isinstance(dataset, dict) assert all((isinstance(key, str) for key in dataset)) assert all((isinstance(dataset[key], Dataset) for key in dataset)) if (extra_costs is None): costs = {} else: assert isinstance(extra_costs, (OrderedDict, dict)) costs = extra_costs assert ('' not in costs) costs[''] = cost if (cost_monitoring_args is None): cost_monitoring_args = {} model = self.model cost_names = sorted(costs.keys()) spaces = [] sources = [] for c in cost_names: (c_space, c_source) = costs[c].get_data_specs(model) spaces.append(c_space) sources.append(c_source) (m_space, m_source) = model.get_monitoring_data_specs() spaces.append(m_space) sources.append(m_source) nested_space = CompositeSpace(spaces) nested_sources = tuple(sources) mapping = DataSpecsMapping((nested_space, nested_sources)) space_tuple = mapping.flatten(nested_space, return_tuple=True) source_tuple = mapping.flatten(nested_sources, return_tuple=True) ipt = tuple((space.make_theano_batch(name=('monitor_%s' % source), batch_size=None) for (space, source) in safe_zip(space_tuple, source_tuple))) nested_ipt = mapping.nest(ipt) custom_channels = {} for (i, cost_name) in enumerate(cost_names): if (cost_name == ''): prefix = '' else: prefix = (cost_name + '_') cost = costs[cost_name] cost_ipt = nested_ipt[i] raw_channels = cost.get_monitoring_channels(model, cost_ipt) channels = {} for name in raw_channels: channels[(prefix + name)] = (raw_channels[name], cost_ipt, (spaces[i], sources[i])) custom_channels.update(channels) model_channels = model.get_monitoring_channels(nested_ipt[(- 1)]) channels = {} for name in model_channels: channels[name] = (model_channels[name], nested_ipt[(- 1)], (spaces[(- 1)], sources[(- 1)])) custom_channels.update(channels) if is_stochastic(mode): seed = [[2013, 2, 22]] else: seed = None for dataset_name in dataset: cur_dataset = dataset[dataset_name] self.add_dataset(dataset=cur_dataset, mode=mode, batch_size=batch_size, num_batches=num_batches, seed=seed) if (dataset_name == ''): dprefix = '' else: dprefix = (dataset_name + '_') for (i, cost_name) in enumerate(cost_names): cost = costs[cost_name] cost_ipt = nested_ipt[i] cost_value = cost.expr(model, cost_ipt) if (cost_value is not None): if (cost_name == ''): name = (dprefix + 'objective') prereqs = obj_prereqs else: name = (dprefix + cost_name) prereqs = None cost.get_data_specs(model)[0].validate(cost_ipt) self.add_channel(name=name, ipt=cost_ipt, val=cost_value, data_specs=cost.get_data_specs(model), dataset=cur_dataset, prereqs=prereqs) for key in custom_channels: (val, ipt, data_specs) = custom_channels[key] data_specs[0].validate(ipt) self.add_channel(name=(dprefix + key), ipt=ipt, val=val, data_specs=data_specs, dataset=cur_dataset)
def setup(self, dataset, cost, batch_size, num_batches=None, extra_costs=None, mode='sequential', obj_prereqs=None, cost_monitoring_args=None): "\n        Sets up the monitor for a cost minimization problem.\n        Adds channels defined by both the model and the cost for\n        the specified dataset(s), as well as a channel called\n        'objective' defined by the costs' __call__ method.\n\n        Parameters\n        ----------\n        dataset : pylearn2.datasets.Dataset\n            Dataset or dictionary mapping string names to Datasets.\n            If string names are used, then for every dataset, each\n            channel defined by the model or cost will be replicated\n            with that dataset's name followed by an underscore as the\n            prefix. For example, if your cost defines a channel called\n            'misclass', and datasets is\n            {'train' : train_dataset, 'valid' : valid_dataset},\n            you will get channels called 'train_misclass' and\n            'valid_misclass'.\n        cost : pylearn2.costs.Cost\n            The cost being optimized by training. The value of the cost\n            will appear as the `objective` channel. Its\n            `get_monitoring_channels` method will also be used to\n            supply other channels.\n        extra_costs : OrderedDict, optional\n            A dictionary mapping channel names to Cost objects.\n            Their value will appear as the specified channel name.\n            They will also provide more monitoring channels via their\n            `get_monitoring_channels` method.\n        obj_prereqs : None, or list of functions\n            Functions to pass as prerequisites to the `objective` channel.\n        cost_monitoring_args : dict\n            Dictionary of kwargs that will be passed to\n            `cost.get_monitoring_channels()`\n            (but not for the extra_costs).\n        " if (dataset is None): return if isinstance(dataset, Dataset): dataset = {'': dataset} else: assert isinstance(dataset, dict) assert all((isinstance(key, str) for key in dataset)) assert all((isinstance(dataset[key], Dataset) for key in dataset)) if (extra_costs is None): costs = {} else: assert isinstance(extra_costs, (OrderedDict, dict)) costs = extra_costs assert ('' not in costs) costs[''] = cost if (cost_monitoring_args is None): cost_monitoring_args = {} model = self.model cost_names = sorted(costs.keys()) spaces = [] sources = [] for c in cost_names: (c_space, c_source) = costs[c].get_data_specs(model) spaces.append(c_space) sources.append(c_source) (m_space, m_source) = model.get_monitoring_data_specs() spaces.append(m_space) sources.append(m_source) nested_space = CompositeSpace(spaces) nested_sources = tuple(sources) mapping = DataSpecsMapping((nested_space, nested_sources)) space_tuple = mapping.flatten(nested_space, return_tuple=True) source_tuple = mapping.flatten(nested_sources, return_tuple=True) ipt = tuple((space.make_theano_batch(name=('monitor_%s' % source), batch_size=None) for (space, source) in safe_zip(space_tuple, source_tuple))) nested_ipt = mapping.nest(ipt) custom_channels = {} for (i, cost_name) in enumerate(cost_names): if (cost_name == ''): prefix = '' else: prefix = (cost_name + '_') cost = costs[cost_name] cost_ipt = nested_ipt[i] raw_channels = cost.get_monitoring_channels(model, cost_ipt) channels = {} for name in raw_channels: channels[(prefix + name)] = (raw_channels[name], cost_ipt, (spaces[i], sources[i])) custom_channels.update(channels) model_channels = model.get_monitoring_channels(nested_ipt[(- 1)]) channels = {} for name in model_channels: channels[name] = (model_channels[name], nested_ipt[(- 1)], (spaces[(- 1)], sources[(- 1)])) custom_channels.update(channels) if is_stochastic(mode): seed = [[2013, 2, 22]] else: seed = None for dataset_name in dataset: cur_dataset = dataset[dataset_name] self.add_dataset(dataset=cur_dataset, mode=mode, batch_size=batch_size, num_batches=num_batches, seed=seed) if (dataset_name == ''): dprefix = '' else: dprefix = (dataset_name + '_') for (i, cost_name) in enumerate(cost_names): cost = costs[cost_name] cost_ipt = nested_ipt[i] cost_value = cost.expr(model, cost_ipt) if (cost_value is not None): if (cost_name == ''): name = (dprefix + 'objective') prereqs = obj_prereqs else: name = (dprefix + cost_name) prereqs = None cost.get_data_specs(model)[0].validate(cost_ipt) self.add_channel(name=name, ipt=cost_ipt, val=cost_value, data_specs=cost.get_data_specs(model), dataset=cur_dataset, prereqs=prereqs) for key in custom_channels: (val, ipt, data_specs) = custom_channels[key] data_specs[0].validate(ipt) self.add_channel(name=(dprefix + key), ipt=ipt, val=val, data_specs=data_specs, dataset=cur_dataset)<|docstring|>Sets up the monitor for a cost minimization problem. Adds channels defined by both the model and the cost for the specified dataset(s), as well as a channel called 'objective' defined by the costs' __call__ method. Parameters ---------- dataset : pylearn2.datasets.Dataset Dataset or dictionary mapping string names to Datasets. If string names are used, then for every dataset, each channel defined by the model or cost will be replicated with that dataset's name followed by an underscore as the prefix. For example, if your cost defines a channel called 'misclass', and datasets is {'train' : train_dataset, 'valid' : valid_dataset}, you will get channels called 'train_misclass' and 'valid_misclass'. cost : pylearn2.costs.Cost The cost being optimized by training. The value of the cost will appear as the `objective` channel. Its `get_monitoring_channels` method will also be used to supply other channels. extra_costs : OrderedDict, optional A dictionary mapping channel names to Cost objects. Their value will appear as the specified channel name. They will also provide more monitoring channels via their `get_monitoring_channels` method. obj_prereqs : None, or list of functions Functions to pass as prerequisites to the `objective` channel. cost_monitoring_args : dict Dictionary of kwargs that will be passed to `cost.get_monitoring_channels()` (but not for the extra_costs).<|endoftext|>
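A hedged sketch of the prefixing behaviour documented above (model, cost and the two dataset objects are assumptions, not from the source): with a dataset dictionary, setup yields 'train_objective' and 'valid_objective' channels rather than a single 'objective'.

monitor = Monitor.get_monitor(model)
monitor.setup(dataset={'train': train_set, 'valid': valid_set},
              cost=cost,
              batch_size=100,
              num_batches=10)
# monitor.channels now contains 'train_objective' and 'valid_objective',
# plus any prefixed channels the model and cost define.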
f85464a65c5c915473fbf9327ef1016bd32799829c60f8243d2ece0405bf8811
def __str__(self): '\n .. todo::\n\n WRITEME\n\n Returns\n -------\n s : str\n A reasonably human-readable string representation of the object.\n ' try: graph_input_str = str(self.graph_input) except Exception: graph_input_str = '<bad graph input>' try: val_str = str(self.val) except Exception: val_str = '<bad val>' try: name_str = str(self.name) except Exception: name_str = '<bad name>' try: prereqs_str = str(self.prereqs) except Exception: prereqs_str = '<bad prereqs>' return ('MonitorChannel(%s,%s,%s,%s)' % (graph_input_str, val_str, name_str, prereqs_str))
.. todo:: WRITEME Returns ------- s : str A reasonably human-readable string representation of the object.
pylearn2/monitor.py
__str__
fxyu/pylearn2
2045
python
def __str__(self): '\n .. todo::\n\n WRITEME\n\n Returns\n -------\n s : str\n A reasonably human-readable string representation of the object.\n ' try: graph_input_str = str(self.graph_input) except Exception: graph_input_str = '<bad graph input>' try: val_str = str(self.val) except Exception: val_str = '<bad val>' try: name_str = str(self.name) except Exception: name_str = '<bad name>' try: prereqs_str = str(self.prereqs) except Exception: prereqs_str = '<bad prereqs>' return ('MonitorChannel(%s,%s,%s,%s)' % (graph_input_str, val_str, name_str, prereqs_str))
def __str__(self): '\n .. todo::\n\n WRITEME\n\n Returns\n -------\n s : str\n A reasonably human-readable string representation of the object.\n ' try: graph_input_str = str(self.graph_input) except Exception: graph_input_str = '<bad graph input>' try: val_str = str(self.val) except Exception: val_str = '<bad val>' try: name_str = str(self.name) except Exception: name_str = '<bad name>' try: prereqs_str = str(self.prereqs) except Exception: prereqs_str = '<bad prereqs>' return ('MonitorChannel(%s,%s,%s,%s)' % (graph_input_str, val_str, name_str, prereqs_str))<|docstring|>.. todo:: WRITEME Returns ------- s : str A reasonably human-readable string representation of the object.<|endoftext|>
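The try/except wrapping above is a defensive-__str__ pattern: the method must never raise, because it is often called from error-reporting paths on half-constructed objects. A runnable sketch of the same idea (the class name is illustrative):

class Defensive(object):
    def __str__(self):
        try:
            val_str = str(self.val)      # attribute may not exist yet
        except Exception:
            val_str = '<bad val>'
        return 'Defensive(%s)' % val_str

print(Defensive())   # prints: Defensive(<bad val>)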
81214f28ebd419608ee4b250b304d277448185a487f14cf51aa1f31dd92408a7
def __getstate__(self): '\n .. todo::\n\n WRITEME\n\n Returns\n -------\n d : dict\n A dictionary mapping the string names of the fields of the class\n to values appropriate for pickling.\n ' if hasattr(self, 'val'): doc = get_monitor_doc(self.val) elif hasattr(self, 'doc'): doc = self.doc else: doc = None return {'doc': doc, 'example_record': self.example_record, 'batch_record': self.batch_record, 'time_record': self.time_record, 'epoch_record': self.epoch_record, 'val_record': self.val_record}
.. todo:: WRITEME Returns ------- d : dict A dictionary mapping the string names of the fields of the class to values appropriate for pickling.
pylearn2/monitor.py
__getstate__
fxyu/pylearn2
2045
python
def __getstate__(self): '\n .. todo::\n\n WRITEME\n\n Returns\n -------\n d : dict\n A dictionary mapping the string names of the fields of the class\n to values appropriate for pickling.\n ' if hasattr(self, 'val'): doc = get_monitor_doc(self.val) elif hasattr(self, 'doc'): doc = self.doc else: doc = None return {'doc': doc, 'example_record': self.example_record, 'batch_record': self.batch_record, 'time_record': self.time_record, 'epoch_record': self.epoch_record, 'val_record': self.val_record}
def __getstate__(self): '\n .. todo::\n\n WRITEME\n\n Returns\n -------\n d : dict\n A dictionary mapping the string names of the fields of the class\n to values appropriate for pickling.\n ' if hasattr(self, 'val'): doc = get_monitor_doc(self.val) elif hasattr(self, 'doc'): doc = self.doc else: doc = None return {'doc': doc, 'example_record': self.example_record, 'batch_record': self.batch_record, 'time_record': self.time_record, 'epoch_record': self.epoch_record, 'val_record': self.val_record}<|docstring|>.. todo:: WRITEME Returns ------- d : dict A dictionary mapping the string names of the fields of the class to values appropriate for pickling.<|endoftext|>