QUERY = """NEWLINEquery searchByQuery($query:SearchQueryJson!$testListings:Boolean!$smartHide:Boolean$recentHides:[ListingId!])@debug(testListings:$testListings){rentSearch(query:$query smartHide:$smartHide recentHides:$recentHides){...RentResultsMetaData resolvedQuery{...SearchMetadata ...ResultsHeading ...SeoFooterLinks ...SearchResultsBreadcrumb __typename}marketInsights{...ResultsMarketInsightsData __typename}exclusiveShowcase{...RentExclusiveShowcaseData __typename}results{...ResultsSummary ...ResultsPagination ...RentResultsSet ...SearchResultsTotalCount exact{totalCount items{listing{...on RentResidentialListing{id productDepth __typename}...PropertyCard ...RentDetailsAboveTheFold __typename}__typename}__typename}surrounding{items{listing{...on RentResidentialListing{id productDepth __typename}...PropertyCard ...RentDetailsAboveTheFold __typename}__typename}__typename}trackingData totalResultsCount __typename}consumerContext{loggedInStatus __typename}__typename}}fragment RentResultsMetaData on RentResolvedSearch{resolvedQuery{localities{display __typename}__typename}results{__typename totalResultsCount pagination{moreResultsAvailable __typename}exact{items{listing{__typename ...on RentResidentialListing{inspections{startTime endTime __typename}_links{canonical{href __typename}__typename}__typename}...ResidentialListingAddressMetaData}__typename}__typename}}__typename}fragment ResidentialListingAddressMetaData on ResidentialListing{address{display{shortAddress fullAddress __typename}suburb state postcode __typename}__typename}fragment SearchMetadata on ResolvedQuery{metadata{canonicalSearchId savedSearchQuery __typename}__typename}fragment ResultsHeading on ResolvedQuery{localities{display __typename}__typename}fragment SeoFooterLinks on ResolvedQuery{localities{display atlasId urlValue precision name __typename}__typename}fragment SearchResultsBreadcrumb on ResolvedQuery{localities{atlasId display name urlValue precision state parents{display name urlValue precision __typename}__typename}__typename}fragment ResultsMarketInsightsData on MarketInsights{title suburbProfileUrl{href __typename}__typename}fragment RentExclusiveShowcaseData on ExclusiveShowcase{...CommonExclusiveShowcaseData listings{...on RentResidentialListing{inspections{display{shortLabel __typename}__typename}__typename}__typename}__typename}fragment CommonExclusiveShowcaseData on ExclusiveShowcase{listings{title id listingCompany{id name media{logo{templatedUrl __typename}__typename}branding{primaryColour textColour __typename}__typename}media{mainImage{templatedUrl __typename}images{templatedUrl __typename}__typename}address{suburb display{shortAddress __typename}__typename}listers{name photo{templatedUrl __typename}__typename}_links{trackedCanonical{path __typename}__typename}...PrimaryFeatures __typename}__typename}fragment PrimaryFeatures on ResidentialListing{...GeneralFeatures ...PropertySize __typename}fragment GeneralFeatures on ResidentialListing{generalFeatures{bedrooms{value __typename}bathrooms{value __typename}parkingSpaces{value __typename}__typename}__typename}fragment PropertySize on ResidentialListing{propertySizes{building{displayValue sizeUnit{displayValue __typename}__typename}land{displayValue sizeUnit{displayValue __typename}__typename}preferred{sizeType size{displayValue sizeUnit{displayValue __typename}__typename}__typename}__typename}__typename}fragment ResultsSummary on SearchResults{totalResultsCount pagination{page pageSize __typename}__typename}fragment ResultsPagination on 
SearchResults{pagination{maxPageNumberAvailable __typename}__typename}fragment RentResultsSet on RentSearchResults{exact{items{listing{__typename}__typename}__typename}surrounding{totalCount items{listing{__typename}__typename}__typename}pagination{page __typename}__typename}fragment SearchResultsTotalCount on SearchResults{totalResultsCount __typename}fragment PropertyCard on Listing{__typename ...ResidentialPropertyCard ...ProjectProfile}fragment ResidentialPropertyCard on ResidentialListing{...PropertyCardLayout ...BrandingOnSearchResultsConfig ...BrandingResidential badge{...Badge __typename}...ResidentialListingCardHero ...Price ...ResidentialListingCardAddress ...PropertyCardPropertyType ...PropertyCardDetailsLink ...PropertyCardAgentInfo ...ResidentialLaunchButtons ...ResidentialMediaViewerForResults ...ResidentialListingBookmark ...PrimaryFeatures ...PropertySize ...ResidentialListingCardInspection ...InspectionAuction ...DateSold ...ResidentialListingMoreButton ...ResidentialShareListing __typename}fragment PropertyCardLayout on ResidentialListing{productDepth __typename}fragment BrandingOnSearchResultsConfig on ResidentialListing{viewConfiguration{searchResults{agencyBranding __typename}__typename}productDepth __typename}fragment BrandingResidential on ResidentialListing{listingCompany{...Branding __typename}__typename}fragment Branding on ListingCompany{id name branding{primaryColour __typename}media{logo{templatedUrl __typename}__typename}__typename}fragment Badge on ListingBadge{colour label __typename}fragment ResidentialListingCardHero on ResidentialListing{...PowerProfileSlide productDepth address{display{fullAddress __typename}__typename}media{mainImage{templatedUrl __typename}images{templatedUrl __typename}floorplans{templatedUrl __typename}__typename}__typename}fragment PowerProfileSlide on ResidentialListing{media{mainImage{templatedUrl __typename}__typename}_links{canonical{path __typename}__typename}listingCompany{name media{logo{templatedUrl __typename}__typename}branding{primaryColour __typename}_links{canonical{href __typename}__typename}__typename}listers{id agentId name jobTitle photo{templatedUrl __typename}_links{canonical{href __typename}__typename}showInMediaViewer __typename}__typename}fragment Price on ResidentialListing{price{display __typename}__typename}fragment ResidentialListingCardAddress on ResidentialListing{address{suburb display{shortAddress __typename}__typename}__typename}fragment PropertyCardPropertyType on ResidentialListing{propertyType{display __typename}__typename}fragment PropertyCardDetailsLink on ResidentialListing{_links{canonical{path __typename}__typename}__typename}fragment PropertyCardAgentInfo on ResidentialListing{viewConfiguration{searchResults{agentPhoto agentName __typename}__typename}listers{name photo{templatedUrl __typename}__typename}listingCompany{branding{textColour __typename}__typename}__typename}fragment ResidentialLaunchButtons on ResidentialListing{media{threeDimensionalTours{href __typename}videos{...on YouTubeVideo{id __typename}...on ExternalVideo{href __typename}__typename}__typename}__typename}fragment ResidentialMediaViewerForResults on ResidentialListing{...ResultsAdConfiguration ...ResidentialSlides __typename}fragment ResultsAdConfiguration on ResidentialListing{viewConfiguration{searchResults{adverts{photoGallery __typename}__typename}__typename}__typename}fragment ResidentialSlides on ResidentialListing{...PowerProfileSlide ...MediaViewerEventTracking ...ThreeDimensionalTourSlide ...VideoSlide 
...PhotoOverlayWithGallerySlide media{images{templatedUrl __typename}floorplans{templatedUrl __typename}__typename}__typename}fragment MediaViewerEventTracking on ResidentialListing{listers{id agentId __typename}__typename}fragment ThreeDimensionalTourSlide on ResidentialListing{media{threeDimensionalTours{href __typename}__typename}__typename}fragment VideoSlide on ResidentialListing{media{videos{...on YouTubeVideo{__typename id}__typename}__typename}__typename}fragment PhotoOverlayWithGallerySlide on ResidentialListing{...BuilderProfile ...ParentAndSiblings __typename}fragment BuilderProfile on ResidentialListing{media{mainImage{templatedUrl __typename}__typename}listingCompany{...on Builder{name _links{canonical{templated href __typename}__typename}homeDesigns{totalCount designs{name price houseSizeRange{min{displayValue value __typename}max{displayValue value __typename}__typename}generalFeaturesDisplay{bedrooms bathrooms parkingSpaces __typename}_links{canonical{href templated __typename}__typename}media{mainImage{templatedUrl __typename}__typename}__typename}__typename}__typename}__typename}__typename}fragment ParentAndSiblings on BuyResidentialListing{id media{mainImage{templatedUrl __typename}__typename}parent{name _links{canonical{path __typename}__typename}childListings{totalCount results{id media{mainImage{templatedUrl __typename}__typename}title price{display __typename}propertyType{display __typename}_links{canonical{path __typename}__typename}propertySizes{land{displayValue sizeUnit{id displayValue __typename}__typename}__typename}...PrimaryFeatures __typename}__typename}__typename}__typename}fragment ResidentialListingBookmark on ResidentialListing{id __typename}fragment ResidentialListingCardInspection on ResidentialListing{...on BuyResidentialListing{inspections{display{shortLabel longLabel __typename}__typename}__typename}...on RentResidentialListing{inspections{display{shortLabel longLabel __typename}__typename}__typename}__typename}fragment InspectionAuction on ResidentialListing{...PropertyCardAuctionDate ...ResidentialListingCardInspection __typename}fragment PropertyCardAuctionDate on BuyResidentialListing{auction{dateTime{display{shortDate __typename}__typename}__typename}__typename}fragment DateSold on ResidentialListing{...on SoldResidentialListing{dateSold{display __typename}__typename}__typename}fragment ResidentialListingMoreButton on ResidentialListing{id __typename}fragment ResidentialShareListing on ResidentialListing{_links{canonical{href __typename}__typename}address{display{fullAddress __typename}__typename}__typename}fragment ProjectProfile on ProjectProfile{badge{...Badge __typename}...ProjectProfileCardParentListing ...ProjectProfileCardAddress ...ProjectProfileCardHero ...ProjectProfileAgency ...ProjectProfileBranding ...ProjectProfileBookmark ...PropertyCardChildListings ...ProjectLaunchButtons ...ProjectProfileNextOpenTime __typename}fragment ProjectProfileCardParentListing on ProjectProfile{name title productDepth _links{canonical{path __typename}__typename}__typename}fragment ProjectProfileCardAddress on ProjectProfile{address{suburb display{shortAddress __typename}__typename}__typename}fragment ProjectProfileCardHero on ProjectProfile{productDepth address{display{fullAddress __typename}__typename}media{mainImage{templatedUrl __typename}images{templatedUrl __typename}__typename}__typename}fragment ProjectProfileAgency on ProjectProfile{listingCompany{id name media{logo{templatedUrl 
__typename}__typename}__typename}viewConfiguration{searchResults{agencyBranding __typename}__typename}__typename}fragment ProjectProfileBranding on ProjectProfile{name productDepth media{logo{templatedUrl __typename}__typename}branding{primaryColour __typename}__typename}fragment ProjectProfileBookmark on ProjectProfile{id __typename}fragment PropertyCardChildListings on ProjectProfile{productDepth _links{canonical{path __typename}__typename}childListings{totalCount results{id price{display __typename}media{mainImage{templatedUrl __typename}__typename}address{display{fullAddress __typename}__typename}title _links{canonical{path __typename}__typename}...PrimaryFeatures __typename}__typename}__typename}fragment ProjectLaunchButtons on ProjectProfile{media{videos{...on YouTubeVideo{id __typename}...on ExternalVideo{href __typename}__typename}__typename}__typename}fragment ProjectProfileNextOpenTime on ProjectProfile{displayLocation{nextAvailableOpeningHours{nextAvailable{display{shortLabel longLabel __typename}__typename}__typename}__typename}__typename}fragment RentDetailsAboveTheFold on RentResidentialListing{aboveTheFoldId:id id badge{...Badge __typename}...Hero ...Price ...Address ...ResidentialShareListing ...Breadcrumb_ResidentialListing ...PrimaryFeatures ...PropertyCardPropertyType ...PropertyInfoPosterBoard ...InspectionsSummaryForRent ...Bond ...DateAvailableSummary ...BrandingOnContactAgentPanelConfig ...ResidentialContactAgentBranding ...AgentInfo ...AgencyInfo ...HeaderLeaderboard ...ListingCompanyHeaderBranding ...RentResidentialListingMetaData __typename}fragment Hero on ResidentialListing{...HeroImage ...ResidentialMediaTypeBar __typename}fragment HeroImage on ResidentialListing{address{display{fullAddress __typename}__typename}viewConfiguration{details{posterBoard __typename}__typename}media{mainImage{templatedUrl __typename}images{templatedUrl __typename}floorplans{templatedUrl __typename}threeDimensionalTours{href __typename}videos{...on YouTubeVideo{id __typename}...on ExternalVideo{href __typename}__typename}__typename}__typename}fragment ResidentialMediaTypeBar on ResidentialListing{media{images{templatedUrl __typename}floorplans{templatedUrl __typename}threeDimensionalTours{href __typename}videos{...on YouTubeVideo{id __typename}...on ExternalVideo{href __typename}__typename}__typename}__typename}fragment Address on ResidentialListing{address{suburb postcode state display{shortAddress __typename}__typename}__typename}fragment Breadcrumb_ResidentialListing on ResidentialListing{__typename id address{suburb state postcode display{shortAddress __typename}__typename}propertyType{id display __typename}_links{canonical{path __typename}__typename}}fragment PropertyInfoPosterBoard on ResidentialListing{viewConfiguration{details{posterBoard __typename}__typename}__typename}fragment InspectionsSummaryForRent on RentResidentialListing{inspections{display{longLabel __typename}__typename}__typename}fragment Bond on RentResidentialListing{bond{display __typename}__typename}fragment DateAvailableSummary on RentResidentialListing{availableDate{display __typename}__typename}fragment BrandingOnContactAgentPanelConfig on ResidentialListing{viewConfiguration{details{agencyBrandingOnSidePanel __typename}__typename}__typename}fragment ResidentialContactAgentBranding on ResidentialListing{productDepth listingCompany{name branding{primaryColour __typename}media{logo{templatedUrl __typename}__typename}_links{canonical{href __typename}__typename}__typename}__typename}fragment AgentInfo on 
ResidentialListing{listers{name photo{templatedUrl __typename}preferredPhoneNumber _links{canonical{href __typename}__typename}__typename}listingCompany{id businessPhone __typename}__typename}fragment AgencyInfo on ResidentialListing{viewConfiguration{details{agencyInfo __typename}__typename}listingCompany{...on Agency{name __typename address{display{fullAddress __typename}__typename}_links{canonical{href __typename}__typename}}__typename}__typename}fragment HeaderLeaderboard on ResidentialListing{viewConfiguration{details{adverts{headerLeaderboard __typename}__typename}__typename}__typename}fragment ListingCompanyHeaderBranding on ResidentialListing{viewConfiguration{details{branding{header{size __typename}__typename}__typename}__typename}listingCompany{name branding{primaryColour __typename}_links{canonical{href __typename}__typename}media{logo{templatedUrl __typename}__typename}__typename}__typename}fragment RentResidentialListingMetaData on RentResidentialListing{...ResidentialListingMetaData inspections{startTime endTime __typename}__typename}fragment ResidentialListingMetaData on ResidentialListing{__typename id description media{mainImage{templatedUrl __typename}images{__typename}__typename}_links{canonical{href path __typename}__typename}propertyType{id display __typename}address{display{shortAddress fullAddress __typename}suburb state postcode __typename}price{display __typename}generalFeatures{bedrooms{value __typename}__typename}propertySizes{land{displayValue sizeUnit{displayValue __typename}__typename}__typename}}
"""
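# A minimal sketch of how a query like QUERY might be executed. The endpoint
# URL and the shape of the `query` variable below are assumptions for
# illustration only -- they are not part of the original file.
if __name__ == "__main__":
    import requests

    variables = {
        "query": {"channel": "rent"},  # hypothetical SearchQueryJson payload
        "testListings": False,
        "smartHide": False,
        "recentHides": [],
    }
    response = requests.post(
        "https://example.com/graphql",  # placeholder GraphQL endpoint
        json={"operationName": "searchByQuery", "query": QUERY, "variables": variables},
        timeout=30,
    )
    response.raise_for_status()
    rent_search = response.json()["data"]["rentSearch"]
    # totalResultsCount is selected by the ResultsSummary fragment above.
    print(rent_search["results"]["totalResultsCount"])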
from __future__ import absolute_import

__all__ = ["from_user", "from_member", "DEFAULT"]

import warnings

from django.conf import settings
from django.utils.functional import cached_property

from sentry import roles
from sentry.auth.superuser import is_active_superuser
from sentry.auth.system import is_system_auth
from sentry.models import (
    AuthIdentity,
    AuthProvider,
    OrganizationMember,
    Project,
    ProjectStatus,
    SentryApp,
    UserPermission,
    Team,
)


def _sso_params(member):
    """
    Return a tuple of (requires_sso, sso_is_valid) for a given member.
    """
    # TODO(dcramer): we want to optimize this access pattern as it's several
    # network hops and needed in a lot of places
    try:
        auth_provider = AuthProvider.objects.get(organization=member.organization_id)
    except AuthProvider.DoesNotExist:
        sso_is_valid = True
        requires_sso = False
    else:
        if auth_provider.flags.allow_unlinked:
            requires_sso = False
            sso_is_valid = True
        else:
            requires_sso = True
            try:
                auth_identity = AuthIdentity.objects.get(
                    auth_provider=auth_provider, user=member.user_id
                )
            except AuthIdentity.DoesNotExist:
                sso_is_valid = False
                # If an owner is trying to gain access,
                # allow bypassing SSO if there are no other
                # owners with SSO enabled.
                if member.role == roles.get_top_dog().id:
                    requires_sso = AuthIdentity.objects.filter(
                        auth_provider=auth_provider,
                        user__in=OrganizationMember.objects.filter(
                            organization=member.organization_id,
                            role=roles.get_top_dog().id,
                            user__is_active=True,
                        )
                        .exclude(id=member.id)
                        .values_list("user_id"),
                    ).exists()
            else:
                sso_is_valid = auth_identity.is_valid(member)
    return requires_sso, sso_is_valid


class BaseAccess(object):
    is_active = False
    sso_is_valid = False
    requires_sso = False
    organization_id = None
    # teams with membership
    teams = ()
    # projects with membership
    projects = ()
    # if has_global_access is True, then any project
    # matching organization_id is valid. This is used for
    # both `organization.allow_joinleave` and to indicate
    # that the role is global / a user is an active superuser
    has_global_access = False
    scopes = frozenset()
    permissions = frozenset()
    role = None

    def has_permission(self, permission):
        """
        Return bool representing if the user has the given permission.

        >>> access.has_permission('broadcasts.admin')
        """
        if not self.is_active:
            return False
        return permission in self.permissions

    def has_scope(self, scope):
        """
        Return bool representing if the user has the given scope.

        >>> access.has_scope('org:read')
        """
        if not self.is_active:
            return False
        return scope in self.scopes

    def has_team(self, team):
        warnings.warn("has_team() is deprecated in favor of has_team_access", DeprecationWarning)
        return self.has_team_access(team)

    def has_team_access(self, team):
        """
        Return bool representing if a user should have access to information for the given team.

        >>> access.has_team_access(team)
        """
        if not self.is_active:
            return False
        if self.has_global_access and self.organization_id == team.organization_id:
            return True
        return team in self.teams

    def has_team_scope(self, team, scope):
        """
        Return bool representing if a user should have access with the given scope to information
        for the given team.

        >>> access.has_team_scope(team, 'team:read')
        """
        return self.has_team_access(team) and self.has_scope(scope)

    def has_project_access(self, project):
        """
        Return bool representing if a user should have access to information for the given project.

        >>> access.has_project_access(project)
        """
        if not self.is_active:
            return False
        if self.has_global_access and self.organization_id == project.organization_id:
            return True
        return project in self.projects

    def has_projects_access(self, projects):
        """
        Return bool representing if a user should have access to every requested project.
        """
        return all(self.has_project_access(project) for project in projects)

    def has_project_membership(self, project):
        """
        Return bool representing if a user has explicit membership for the given project.

        >>> access.has_project_membership(project)
        """
        if not self.is_active:
            return False
        return project in self.projects

    def has_project_scope(self, project, scope):
        """
        Return bool representing if a user should have access with the given scope to information
        for the given project.

        >>> access.has_project_scope(project, 'project:read')
        """
        return self.has_project_access(project) and self.has_scope(scope)

    def to_django_context(self):
        return {s.replace(":", "_"): self.has_scope(s) for s in settings.SENTRY_SCOPES}


class Access(BaseAccess):
    # TODO(dcramer): this is still a little gross, and ideally backend access
    # would be based on the same scopes as API access so there's clarity in
    # what things mean
    def __init__(
        self,
        scopes,
        is_active,
        organization_id,
        teams,
        projects,
        has_global_access,
        sso_is_valid,
        requires_sso,
        permissions=None,
        role=None,
    ):
        self.organization_id = organization_id
        self.teams = teams
        self.projects = projects
        self.has_global_access = has_global_access
        self.scopes = scopes
        if permissions is not None:
            self.permissions = permissions
        if role is not None:
            self.role = role

        self.is_active = is_active
        self.sso_is_valid = sso_is_valid
        self.requires_sso = requires_sso


class OrganizationGlobalAccess(BaseAccess):
    requires_sso = False
    sso_is_valid = True
    is_active = True
    has_global_access = True
    teams = ()
    projects = ()
    permissions = frozenset()

    def __init__(self, organization, scopes=None):
        if scopes:
            self.scopes = scopes
        self.organization_id = organization.id

    @cached_property
    def scopes(self):
        return settings.SENTRY_SCOPES

    def has_team_access(self, team):
        return team.organization_id == self.organization_id

    def has_project_access(self, project):
        return project.organization_id == self.organization_id

    def has_scope(self, scope):
        return True


class OrganizationlessAccess(BaseAccess):
    is_active = True

    def __init__(self, permissions=None):
        if permissions is not None:
            self.permissions = permissions


class SystemAccess(BaseAccess):
    is_active = True

    def has_permission(self, permission):
        return True

    def has_scope(self, scope):
        return True

    def has_team_access(self, team):
        return True

    def has_project_access(self, project):
        return True

    def has_project_membership(self, project):
        return True


class NoAccess(BaseAccess):
    requires_sso = False
    sso_is_valid = True
    is_active = False
    organization_id = None
    has_global_access = False
    teams = ()
    projects = ()
    memberships = ()
    scopes = frozenset()
    permissions = frozenset()


def from_request(request, organization=None, scopes=None):
    if not organization:
        return from_user(request.user, organization=organization, scopes=scopes)

    if getattr(request.user, "is_sentry_app", False):
        return from_sentry_app(request.user, organization=organization)

    if is_active_superuser(request):
        role = None
        # we special case superuser so that if they're a member of the org
        # they must still follow SSO checks, but they gain global access
        try:
            member = OrganizationMember.objects.get(user=request.user, organization=organization)
        except OrganizationMember.DoesNotExist:
            requires_sso, sso_is_valid = False, True
        else:
            requires_sso, sso_is_valid = _sso_params(member)
            role = member.role

        team_list = ()

        project_list = ()
        return Access(
            scopes=scopes if scopes is not None else settings.SENTRY_SCOPES,
            is_active=True,
            organization_id=organization.id if organization else None,
            teams=team_list,
            projects=project_list,
            sso_is_valid=sso_is_valid,
            requires_sso=requires_sso,
            has_global_access=True,
            permissions=UserPermission.for_user(request.user.id),
            role=role,
        )

    if hasattr(request, "auth") and not request.user.is_authenticated():
        return from_auth(request.auth, organization)

    return from_user(request.user, organization, scopes=scopes)


def from_sentry_app(user, organization=None):
    if not organization:
        return NoAccess()

    sentry_app = SentryApp.objects.get(proxy_user=user)

    if not sentry_app.is_installed_on(organization):
        return NoAccess()

    team_list = list(Team.objects.filter(organization=organization))
    project_list = list(
        Project.objects.filter(status=ProjectStatus.VISIBLE, teams__in=team_list).distinct()
    )

    return Access(
        scopes=sentry_app.scope_list,
        is_active=True,
        organization_id=organization.id,
        teams=team_list,
        projects=project_list,
        permissions=(),
        has_global_access=False,
        sso_is_valid=True,
        requires_sso=False,
    )


def from_user(user, organization=None, scopes=None):
    if not user or user.is_anonymous() or not user.is_active:
        return DEFAULT

    if not organization:
        return OrganizationlessAccess(permissions=UserPermission.for_user(user.id))

    try:
        om = OrganizationMember.objects.get(user=user, organization=organization)
    except OrganizationMember.DoesNotExist:
        return OrganizationlessAccess(permissions=UserPermission.for_user(user.id))

    # ensure cached relation
    om.organization = organization

    return from_member(om, scopes=scopes)


def from_member(member, scopes=None):
    # TODO(dcramer): we want to optimize this access pattern as it's several
    # network hops and needed in a lot of places
    requires_sso, sso_is_valid = _sso_params(member)

    team_list = member.get_teams()
    project_list = list(
        Project.objects.filter(status=ProjectStatus.VISIBLE, teams__in=team_list).distinct()
    )

    if scopes is not None:
        scopes = set(scopes) & member.get_scopes()
    else:
        scopes = member.get_scopes()

    return Access(
        is_active=True,
        requires_sso=requires_sso,
        sso_is_valid=sso_is_valid,
        scopes=scopes,
        organization_id=member.organization_id,
        teams=team_list,
        projects=project_list,
        has_global_access=bool(member.organization.flags.allow_joinleave)
        or roles.get(member.role).is_global,
        permissions=UserPermission.for_user(member.user_id),
        role=member.role,
    )


def from_auth(auth, organization):
    if is_system_auth(auth):
        return SystemAccess()
    if auth.organization_id == organization.id:
        return OrganizationGlobalAccess(auth.organization)
    else:
        return DEFAULT


DEFAULT = NoAccess()
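# A short, self-contained sketch of how callers typically consume this module.
# `from_request` / `from_user` need live Django models, so the view-style
# snippet is shown only in comments; names like `organization` and `project`
# are assumptions, not part of this module:
#
#     acc = from_request(request, organization)
#     if not acc.has_project_scope(project, "project:read"):
#         raise PermissionDenied
#
if __name__ == "__main__":
    # DEFAULT (NoAccess) is inert: every check short-circuits to False.
    assert not DEFAULT.has_scope("org:read")
    assert not DEFAULT.has_permission("broadcasts.admin")
    # SystemAccess grants everything without consulting the database.
    assert SystemAccess().has_scope("org:read")
    assert SystemAccess().has_permission("broadcasts.admin")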
import openxc_thread
import threading
import bluetooth
import datetime
import pygame
import time
import json
import os


# SETTING VARIABLES
# Colors
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
GREEN = (0, 255, 0)
RED = (255, 0, 0)
BLUE = (0, 0, 255)
YELLOW = (255, 255, 0)

# Window size
WINDOW_WIDTH = 1024
WINDOW_HEIGHT = 600

# Time range of the graph, in seconds
TIME_RANGE = 60
DATA_CAPTURE_FINISHED = False


# THE SIGNALS THAT CAN BE CAPTURED
# The index of each entry is the number the user types to select it.
options = ["accelerator_pedal_position", "steering_wheel_angle", "vehicle_speed",
           "engine_speed", "brake_pedal_status", "fuel_consumed", "fuel_level",
           "high_beam_status", "ignition_status", "latitude", "longitude",
           "odometer", "parking_brake_status", "torque_at_transmission",
           "transmission_gear_position"]

# THE DICT THAT HOLDS THE LATEST VALUE OF EACH SIGNAL
classdict = {name: None for name in options}

column_title = False

# THE FILE THAT STORES ALL THE DATA
csvFile = open('data.csv', 'w')


# MENU FOR THE CONSOLE DATA SELECTION
print("-------------------------------------------------------------------------------------")
print("Choose the variables you want to measure and write the number, for multiple choices use a comma ','")
print("-------------------------------------------------------------------------------------\n\n")

for t in range(len(options)):
    print("> Do you want to measure " + options[t] + " ? [" + str(t) + "] \n")
# Split on commas so that e.g. '1' does not also match '10'-'14'.
answer = set(choice.strip() for choice in input("variables \n").split(","))


# WINDOW INIT
# Center the window on the screen
os.environ['SDL_VIDEO_CENTERED'] = '1'
pygame.init()
WINDOW_SIZE = (WINDOW_WIDTH, WINDOW_HEIGHT)
screen = pygame.display.set_mode(WINDOW_SIZE)

# Title of the window
pygame.display.set_caption("OpenXC Data Grabber & Monitor")


clock = pygame.time.Clock()

data_thread = openxc_thread.OpenXCDataThread()
data_thread.start()

# Font to use, size, bold, italics
font = pygame.font.SysFont('Calibri', 27, False, False)


# DATA CAPTURE WHILE-LOOP
while not DATA_CAPTURE_FINISHED:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            DATA_CAPTURE_FINISHED = True

    screen.fill(BLACK)

    # RESET EMPTY LINE VALUE
    emptyLine = True
    line = ""

    # One pass per selectable signal: on the first frame write the CSV column
    # titles, afterwards append the most recent sample of each selected signal.
    for index, name in enumerate(options):
        if str(index) in answer:
            if column_title:
                samples = getattr(data_thread, name)
                if len(samples) != 0:
                    line = line + str(samples[-1][1]) + ","
                    emptyLine = False
            else:
                csvFile.write(name + ",")

    # ENGINE SPEED
    if len(data_thread.engine_speed) == 0:
        text = font.render(data_thread.message, True, WHITE)
    else:
        text = font.render(
            "RPM (Revolutions Per Minute): " + str(data_thread.engine_speed[-1][1]), True, WHITE)

    screen.blit(text, [10, 10])

    # VEHICLE SPEED
    if len(data_thread.vehicle_speed) == 0:
        text = font.render("", True, RED)
    else:
        text = font.render("Vehicle Speed: {:.1f} kph".format(
            data_thread.vehicle_speed[-1][1]), True, GREEN)

    screen.blit(text, [10, 40])

    # GEAR POSITION
    if len(data_thread.transmission_gear_position) == 0:
        text = font.render("", True, RED)
    else:
        text = font.render(
            "Gear: " + str(data_thread.transmission_gear_position[-1][1]), True, YELLOW)

    screen.blit(text, [10, 70])

    # WRITE THE ROW (OR, ON THE FIRST FRAME, FINISH THE COLUMN NAMES)
    if column_title:
        if not emptyLine:
            csvFile.write(line + "\n")
    else:
        csvFile.write("photo_name, \n")
        column_title = True

    # GRAPH OF THE ENGINE SPEED OVER THE LAST TIME_RANGE SECONDS
    for i in range(1, len(data_thread.engine_speed)):
        speed_data_1 = data_thread.engine_speed[i - 1][1]
        speed_data_2 = data_thread.engine_speed[i][1]
        time_data_1 = data_thread.engine_speed[i - 1][0]
        time_data_2 = data_thread.engine_speed[i][0]

        y1 = WINDOW_HEIGHT - (speed_data_1 / 15)
        y2 = WINDOW_HEIGHT - (speed_data_2 / 15)

        current_time = time.time() * 1000
        x1 = WINDOW_WIDTH - ((current_time - time_data_1) /
                             (TIME_RANGE * 1000.) * WINDOW_WIDTH)
        x2 = WINDOW_WIDTH - ((current_time - time_data_2) /
                             (TIME_RANGE * 1000.) * WINDOW_WIDTH)

        if x2 > 0:
            pygame.draw.line(screen, BLUE, [x1, y1], [x2, y2], 2)

    pygame.display.flip()
    clock.tick(60)

pygame.quit()
csvFile.close()
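# A minimal stand-in for the openxc_thread module imported above, useful for
# exercising the UI without a vehicle. The attribute names and the
# (timestamp_ms, value) sample format are inferred from how the script reads
# them; the real OpenXCDataThread implementation may differ.
import random
import threading
import time


class OpenXCDataThread(threading.Thread):
    def __init__(self):
        super().__init__(daemon=True)
        self.message = "Waiting for vehicle data..."
        # One list of (timestamp_ms, value) samples per signal the UI reads.
        for name in ("accelerator_pedal_position", "steering_wheel_angle",
                     "vehicle_speed", "engine_speed", "brake_pedal_status",
                     "fuel_consumed", "fuel_level", "high_beam_status",
                     "ignition_status", "latitude", "longitude", "odometer",
                     "parking_brake_status", "torque_at_transmission",
                     "transmission_gear_position"):
            setattr(self, name, [])

    def run(self):
        # Append fake samples at 10 Hz, mimicking the shape the display
        # loop expects: sample[0] is a millisecond timestamp, sample[1] the value.
        while True:
            now = time.time() * 1000
            self.engine_speed.append((now, random.uniform(700, 4000)))
            self.vehicle_speed.append((now, random.uniform(0, 120)))
            self.transmission_gear_position.append((now, "third"))
            time.sleep(0.1)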
# *************************
# |docname| - Runestone API
# *************************
# This module implements the API that the Runestone Components use to communicate with a Runestone Server.
#
# Imports
# =======
# These are listed in the order prescribed by `PEP 8
# <http://www.python.org/dev/peps/pep-0008/#imports>`_.
from collections import Counter
import datetime
from io import open
import json
import logging
from lxml import html
import math
import os
import re
import subprocess
from textwrap import dedent
import uuid

# Third-party imports
# -------------------
from bleach import clean
from dateutil.parser import parse

# Local application imports
# -------------------------
from feedback import is_server_feedback, fitb_feedback, lp_feedback
from rs_practice import _get_qualified_questions

logger = logging.getLogger(settings.logger)
logger.setLevel(settings.log_level)


EVENT_TABLE = {
    "mChoice": "mchoice_answers",
    "fillb": "fitb_answers",
    "dragNdrop": "dragndrop_answers",
    "clickableArea": "clickablearea_answers",
    "parsons": "parsons_answers",
    "codelensq": "codelens_answers",
    "shortanswer": "shortanswer_answers",
    "fillintheblank": "fitb_answers",
    "mchoice": "mchoice_answers",
    "dragndrop": "dragndrop_answers",
    "clickablearea": "clickablearea_answers",
    "parsonsprob": "parsons_answers",
}

COMMENT_MAP = {
    "sql": "--",
    "python": "#",
    "java": "//",
    "javascript": "//",
    "c": "//",
    "cpp": "//",
}


def compareAndUpdateCookieData(sid: str):
    if (
        "ipuser" in request.cookies
        and request.cookies["ipuser"].value != sid
        and request.cookies["ipuser"].value.endswith("@" + request.client)
    ):
        db.useinfo.update_or_insert(
            db.useinfo.sid == request.cookies["ipuser"].value, sid=sid
        )


# Endpoints
# =========
#
# .. _hsblog endpoint:
#
# hsblog endpoint
# ---------------
# Given a JSON record of a clickstream event, record the event in the ``useinfo`` table.
# If the event is an answer to a Runestone question, record that answer in the database in
# one of the xxx_answers tables.
#
def hsblog():
    setCookie = False
    if auth.user:
        if request.vars.course != auth.user.course_name:
            return json.dumps(
                dict(
                    log=False,
                    message="You appear to have changed courses in another tab. Please switch to this course",
                )
            )
        sid = auth.user.username
        compareAndUpdateCookieData(sid)
        setCookie = True  # we set our own cookie anyway to eliminate many of the extraneous anonymous
        # log entries that come from auth timing out even though the user hasn't reloaded
        # the page.
    else:
        if request.vars.clientLoginStatus == "true":
            logger.error("Session Expired")
            return json.dumps(dict(log=False, message="Session Expired"))

        if "ipuser" in request.cookies:
            sid = request.cookies["ipuser"].value
            setCookie = True
        else:
            sid = str(uuid.uuid1().int) + "@" + request.client
            setCookie = True
    act = request.vars.get("act", "")
    div_id = request.vars.div_id
    event = request.vars.event
    course = request.vars.course
    # Get the current time, rounded to the nearest second -- this is how the time will be stored in the database.
    ts = datetime.datetime.utcnow()
    ts -= datetime.timedelta(microseconds=ts.microsecond)
    tt = request.vars.time
    if not tt:
        tt = 0

    try:
        db.useinfo.insert(
            sid=sid,
            act=act[0:512],
            div_id=div_id,
            event=event,
            timestamp=ts,
            course_id=course,
        )
    except Exception as e:
        logger.error(
            "failed to insert log record for {} in {} : {} {} {}".format(
                sid, course, div_id, event, act
            )
        )
        logger.error("Details: {}".format(e))

    if event == "timedExam" and (act == "finish" or act == "reset" or act == "start"):
        logger.debug(act)
        if act == "reset":
            r = "T"
        else:
            r = None

        try:
            db.timed_exam.insert(
                sid=sid,
                course_name=course,
                correct=int(request.vars.correct or 0),
                incorrect=int(request.vars.incorrect or 0),
                skipped=int(request.vars.skipped or 0),
                time_taken=int(tt),
                timestamp=ts,
                div_id=div_id,
                reset=r,
            )
        except Exception as e:
            logger.debug(
                "failed to insert a timed exam record for {} in {} : {}".format(
                    sid, course, div_id
                )
            )
            logger.debug(
                "correct {} incorrect {} skipped {} time {}".format(
                    request.vars.correct,
                    request.vars.incorrect,
                    request.vars.skipped,
                    request.vars.time,
                )
            )
            logger.debug("Error: {}".format(e))

    # Produce a default result.
    res = dict(log=True, timestamp=str(ts))
    try:
        pct = float(request.vars.percent)
    except (ValueError, TypeError):
        pct = None

    # Process this event.
    if event == "mChoice" and auth.user:
        answer = request.vars.answer
        correct = request.vars.correct
        db.mchoice_answers.insert(
            sid=sid,
            timestamp=ts,
            div_id=div_id,
            answer=answer,
            correct=correct,
            course_name=course,
            percent=pct,
        )
    elif event == "fillb" and auth.user:
        answer_json = request.vars.answer
        correct = request.vars.correct
        # Grade on the server if needed.
        do_server_feedback, feedback = is_server_feedback(div_id, course)
        if do_server_feedback:
            correct, res_update = fitb_feedback(answer_json, feedback)
            res.update(res_update)
            pct = res["percent"]

        # Save this data.
        db.fitb_answers.insert(
            sid=sid,
            timestamp=ts,
            div_id=div_id,
            answer=answer_json,
            correct=correct,
            course_name=course,
            percent=pct,
        )

    elif event == "dragNdrop" and auth.user:
        answers = request.vars.answer
        minHeight = request.vars.minHeight
        correct = request.vars.correct

        db.dragndrop_answers.insert(
            sid=sid,
            timestamp=ts,
            div_id=div_id,
            answer=answers,
            correct=correct,
            course_name=course,
            min_height=minHeight,
            percent=pct,
        )
    elif event == "clickableArea" and auth.user:
        correct = request.vars.correct
        db.clickablearea_answers.insert(
            sid=sid,
            timestamp=ts,
            div_id=div_id,
            answer=act,
            correct=correct,
            course_name=course,
            percent=pct,
        )

    elif event == "parsons" and auth.user:
        correct = request.vars.correct
        answer = request.vars.answer
        source = request.vars.source
        db.parsons_answers.insert(
            sid=sid,
            timestamp=ts,
            div_id=div_id,
            answer=answer,
            source=source,
            correct=correct,
            course_name=course,
            percent=pct,
        )

    elif event == "codelensq" and auth.user:
        correct = request.vars.correct
        answer = request.vars.answer
        source = request.vars.source
        db.codelens_answers.insert(
            sid=sid,
            timestamp=ts,
            div_id=div_id,
            answer=answer,
            source=source,
            correct=correct,
            course_name=course,
            percent=pct,
        )

    elif event == "shortanswer" and auth.user:
        db.shortanswer_answers.insert(
            sid=sid,
            answer=act,
            div_id=div_id,
            timestamp=ts,
            course_name=course,
        )

    elif event == "unittest" and auth.user:
        statslist = act.split(":")
        if "undefined" not in act:
            pct = float(statslist[1])
            passed = int(statslist[3])
            failed = int(statslist[5])
            if math.isnan(pct):
                pct = 0
        else:
            pct = passed = failed = 0
            logger.error(f"Got undefined unittest results for {div_id} {sid}")
        if pct >= 99.99999:
            correct = "T"
        else:
            correct = "F"
        db.unittest_answers.insert(
            sid=sid,
            timestamp=ts,
            div_id=div_id,
            correct=correct,
            passed=passed,
            failed=failed,
            course_name=course,
            percent=pct,
        )

    elif event == "lp_build" and auth.user:
        ret, new_fields = db.lp_answers._validate_fields(
            dict(sid=sid, timestamp=ts, div_id=div_id, course_name=course)
        )
        if not ret.errors:
            do_server_feedback, feedback = is_server_feedback(div_id, course)
            if do_server_feedback:
                try:
                    code_snippets = json.loads(request.vars.answer)["code_snippets"]
                except Exception:
                    code_snippets = []
                result = lp_feedback(code_snippets, feedback)
                # If an error occurred or we're not testing, pass the answer through.
                res.update(result)

                # Record the results in the database.
                correct = result.get("correct")
                answer = result.get("answer", {})
                answer["code_snippets"] = code_snippets
                ret = db.lp_answers.validate_and_insert(
                    sid=sid,
                    timestamp=ts,
                    div_id=div_id,
                    answer=json.dumps(answer),
                    correct=correct,
                    course_name=course,
                )
                if ret.errors:
                    res.setdefault("errors", []).append(ret.errors.as_dict())
            else:
                res["errors"] = ["No feedback provided."]
        else:
            res.setdefault("errors", []).append(ret.errors.as_dict())

    response.headers["content-type"] = "application/json"
    if setCookie:
        response.cookies["ipuser"] = sid
        response.cookies["ipuser"]["expires"] = 24 * 3600 * 90
        response.cookies["ipuser"]["path"] = "/"
        if auth.user:
            response.cookies["last_course"] = auth.user.course_name
            response.cookies["last_course"]["expires"] = 24 * 3600 * 90
            response.cookies["last_course"]["path"] = "/"

    return json.dumps(res)
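# For illustration, a client POST to this endpoint might carry form fields like
# the following. The URL and values are hypothetical; the field names are the
# request.vars read above:
#
#     import requests
#     requests.post(
#         "https://example.org/runestone/ajax/hsblog",  # placeholder URL
#         data={
#             "event": "mChoice",
#             "act": "answer:0:correct",
#             "div_id": "ch01_question_1",
#             "course": "example-course",
#             "answer": "0",
#             "correct": "T",
#             "percent": "1.0",
#         },
#     )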
                    newcode = (
                        "{} This code was shared by {}\n\n".format(comchar, sid)
                        + code
                    )
                    db.code.insert(
                        sid=request.vars.partner,
                        acid=div_id,
                        code=newcode,
                        emessage=error_info,
                        timestamp=ts,
                        course_id=dbcourse,
                        language=request.vars.lang,
                    )
                else:
                    res = {
                        "message": "You must be enrolled in the same class as your partner"
                    }
                    return json.dumps(res)
            done = True
        except Exception as e:
            num_tries -= 1
            logger.error("INSERT into code FAILED retrying -- {}".format(e))
            if num_tries == 0:
                raise Exception("Runlog Failed to insert into code")

    res = {"log": True}
    if setCookie:
        response.cookies["ipuser"] = sid
        response.cookies["ipuser"]["expires"] = 24 * 3600 * 90
        response.cookies["ipuser"]["path"] = "/"
    return json.dumps(res)


# Ajax Handlers for saving and restoring active code blocks


def gethist():
    """
    return the history of saved code by this user for a particular acid

    :Parameters:
        - `acid`: id of the active code block
        - `user`: optional identifier for the owner of the code

    :Return:
        - json object containing a list/array of source texts
    """
    codetbl = db.code
    acid = request.vars.acid

    # if vars.sid then we know this is being called from the grading interface
    if request.vars.sid:
        sid = request.vars.sid
        if auth.user and verifyInstructorStatus(
            auth.user.course_name, auth.user.id
        ):  # noqa: F405
            course_id = auth.user.course_id
        else:
            course_id = None
    elif auth.user:
        sid = auth.user.username
        course_id = auth.user.course_id
    else:
        sid = None
        course_id = None

    res = {}
    if sid:
        query = (
            (codetbl.sid == sid)
            & (codetbl.acid == acid)
            & (codetbl.course_id == course_id)
            & (codetbl.timestamp != None)  # noqa: E711
        )
        res["acid"] = acid
        res["sid"] = sid
        # get the code they saved in chronological order; id order gets that for us
        r = db(query).select(orderby=codetbl.id)
        res["history"] = [row.code for row in r]
        res["timestamps"] = [
            row.timestamp.replace(tzinfo=datetime.timezone.utc).isoformat() for row in r
        ]

    response.headers["content-type"] = "application/json"
    return json.dumps(res)


# @auth.requires_login()
# This function is deprecated as of June 2019
# We need to keep it in place as long as we continue to serve books
# from runestone/static/  When that period is over we can eliminate
def getuser():
    response.headers["content-type"] = "application/json"

    if auth.user:
        try:
            # return the list of courses that auth.user is registered for to keep them from
            # accidentally wandering into courses they are not registered for.
            cres = db(
                (db.user_courses.user_id == auth.user.id)
                & (db.user_courses.course_id == db.courses.id)
            ).select(db.courses.course_name)
            clist = []
            for row in cres:
                clist.append(row.course_name)
            res = {
                "email": auth.user.email,
                "nick": auth.user.username,
                "donated": auth.user.donated,
                "isInstructor": verifyInstructorStatus(  # noqa: F405
                    auth.user.course_name, auth.user.id
                ),
                "course_list": clist,
            }
            session.timezoneoffset = request.vars.timezoneoffset
            logger.debug(
                "setting timezone offset in session %s hours" % session.timezoneoffset
            )
        except Exception:
            res = dict(redirect=auth.settings.login_url)  # ?_next=....
    else:
        res = dict(redirect=auth.settings.login_url)  # ?_next=....
    if session.readings:
        res["readings"] = session.readings
    logger.debug("returning login info: %s" % res)
    return json.dumps([res])


def set_tz_offset():
    session.timezoneoffset = request.vars.timezoneoffset
    logger.debug("setting timezone offset in session %s hours" % session.timezoneoffset)
    return "done"


#
# Ajax Handlers to update and retrieve the last position of the user in the course
#
def updatelastpage():
    lastPageUrl = request.vars.lastPageUrl
    lastPageScrollLocation = request.vars.lastPageScrollLocation
    if lastPageUrl is None:
        return  # todo: log request.vars, request.args and request.env.path_info
    course = request.vars.course
    completionFlag = request.vars.completionFlag
    lastPageChapter = lastPageUrl.split("/")[-2]
    lastPageSubchapter = ".".join(lastPageUrl.split("/")[-1].split(".")[:-1])
    if auth.user:
        done = False
        num_tries = 3
        while not done and num_tries > 0:
            try:
                db(
                    (db.user_state.user_id == auth.user.id)
                    & (db.user_state.course_id == course)
                ).update(
                    last_page_url=lastPageUrl,
                    last_page_chapter=lastPageChapter,
                    last_page_subchapter=lastPageSubchapter,
                    last_page_scroll_location=lastPageScrollLocation,
                    last_page_accessed_on=datetime.datetime.utcnow(),
                )
                done = True
            except Exception:
                num_tries -= 1
                if num_tries == 0:
                    raise Exception("Failed to save the user state in update_last_page")

        done = False
        num_tries = 3
        while not done and num_tries > 0:
            try:
                db(
                    (db.user_sub_chapter_progress.user_id == auth.user.id)
                    & (db.user_sub_chapter_progress.chapter_id == lastPageChapter)
                    & (
                        db.user_sub_chapter_progress.sub_chapter_id
                        == lastPageSubchapter
                    )
                    & (
                        (db.user_sub_chapter_progress.course_name == course)
                        | (
                            db.user_sub_chapter_progress.course_name == None
                        )  # Back fill for old entries without course
                    )
                ).update(
                    status=completionFlag,
                    end_date=datetime.datetime.utcnow(),
                    course_name=course,
                )
                done = True
            except Exception:
                num_tries -= 1
                if num_tries == 0:
                    raise Exception(
                        "Failed to save sub chapter progress in update_last_page"
                    )

        practice_settings = db(db.course_practice.course_name == auth.user.course_name)
        if (
            practice_settings.count() != 0
            and practice_settings.select().first().flashcard_creation_method == 0
        ):
            # Since each authenticated user has only one active course, we retrieve the course this way.
            course = (
                db(db.courses.id == auth.user.course_id).select(**SELECT_CACHE).first()
            )

            # We only retrieve questions to be used in flashcards if they are marked for practice purpose.
            questions = _get_qualified_questions(
                course.base_course, lastPageChapter, lastPageSubchapter, db
            )
            if len(questions) > 0:
                now = datetime.datetime.utcnow()
                now_local = now - datetime.timedelta(
                    hours=float(session.timezoneoffset)
                    if "timezoneoffset" in session
                    else 0
                )
                existing_flashcards = db(
                    (db.user_topic_practice.user_id == auth.user.id)
                    & (db.user_topic_practice.course_name == auth.user.course_name)
                    & (db.user_topic_practice.chapter_label == lastPageChapter)
                    & (db.user_topic_practice.sub_chapter_label == lastPageSubchapter)
                    & (db.user_topic_practice.question_name == questions[0].name)
                )
                # There is at least one qualified question in this subchapter, so insert a flashcard for the subchapter.
                if completionFlag == "1" and existing_flashcards.isempty():
                    db.user_topic_practice.insert(
                        user_id=auth.user.id,
                        course_name=auth.user.course_name,
                        chapter_label=lastPageChapter,
                        sub_chapter_label=lastPageSubchapter,
                        question_name=questions[0].name,
                        # Treat it as if the first eligible question is the last one asked.
                        i_interval=0,
                        e_factor=2.5,
                        next_eligible_date=now_local.date(),
                        # add as if yesterday, so can practice right away
                        last_presented=now - datetime.timedelta(1),
                        last_completed=now - datetime.timedelta(1),
                        creation_time=now,
                        timezoneoffset=float(session.timezoneoffset)
                        if "timezoneoffset" in session
                        else 0,
                    )
                if completionFlag == "0" and not existing_flashcards.isempty():
                    existing_flashcards.delete()
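

# Illustrative sketch (not part of the original controller): how the chapter and
# subchapter labels above are derived from ``lastPageUrl``.  The second-to-last
# path segment is the chapter label and the file name minus its extension is the
# subchapter label.  The URL below is a made-up example.
def _example_parse_page_url():
    lastPageUrl = "/books/published/somebook/SomeChapter/SomeSubchapter.html"
    lastPageChapter = lastPageUrl.split("/")[-2]
    lastPageSubchapter = ".".join(lastPageUrl.split("/")[-1].split(".")[:-1])
    assert lastPageChapter == "SomeChapter"
    assert lastPageSubchapter == "SomeSubchapter"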


def getCompletionStatus():
    if auth.user:
        lastPageUrl = request.vars.lastPageUrl
        lastPageChapter = lastPageUrl.split("/")[-2]
        lastPageSubchapter = ".".join(lastPageUrl.split("/")[-1].split(".")[:-1])
        result = db(
            (db.user_sub_chapter_progress.user_id == auth.user.id)
            & (db.user_sub_chapter_progress.chapter_id == lastPageChapter)
            & (db.user_sub_chapter_progress.sub_chapter_id == lastPageSubchapter)
            & (
                (db.user_sub_chapter_progress.course_name == auth.user.course_name)
                | (
                    db.user_sub_chapter_progress.course_name == None
                )  # for backward compatibility
            )
        ).select(db.user_sub_chapter_progress.status)
        rowarray_list = []
        if result:
            for row in result:
                res = {"completionStatus": row.status}
                rowarray_list.append(res)
                # question: since the javascript in user-highlights.js is going to
                # look only at the first row, shouldn't we be returning just the
                # *last* status?  Or is there no history of status kept anyway?
            return json.dumps(rowarray_list)
        else:
            # haven't seen this Chapter/Subchapter before
            # make the insertions into the DB as necessary

            # we know the subchapter doesn't exist
            db.user_sub_chapter_progress.insert(
                user_id=auth.user.id,
                chapter_id=lastPageChapter,
                sub_chapter_id=lastPageSubchapter,
                status=-1,
                start_date=datetime.datetime.utcnow(),
                course_name=auth.user.course_name,
            )
            # the chapter might exist without the subchapter
            result = db(
                (db.user_chapter_progress.user_id == auth.user.id)
                & (db.user_chapter_progress.chapter_id == lastPageChapter)
            ).select()
            if not result:
                db.user_chapter_progress.insert(
                    user_id=auth.user.id, chapter_id=lastPageChapter, status=-1
                )
            return json.dumps([{"completionStatus": -1}])


def getAllCompletionStatus():
    if auth.user:
        result = db(
            (db.user_sub_chapter_progress.user_id == auth.user.id)
            & (db.user_sub_chapter_progress.course_name == auth.user.course_name)
        ).select(
            db.user_sub_chapter_progress.chapter_id,
            db.user_sub_chapter_progress.sub_chapter_id,
            db.user_sub_chapter_progress.status,
            db.user_sub_chapter_progress.end_date,
        )
        rowarray_list = []
        if result:
            for row in result:
                if row.end_date is None:
                    endDate = 0
                else:
                    endDate = row.end_date.strftime("%d %b, %Y")
                res = {
                    "chapterName": row.chapter_id,
                    "subChapterName": row.sub_chapter_id,
                    "completionStatus": row.status,
                    "endDate": endDate,
                }
                rowarray_list.append(res)
        return json.dumps(rowarray_list)


@auth.requires_login()
def getlastpage():
    course = request.vars.course
    course = db(db.courses.course_name == course).select(**SELECT_CACHE).first()

    result = db(
        (db.user_state.user_id == auth.user.id)
        & (db.user_state.course_id == course.course_name)
        & (db.chapters.course_id == course.base_course)
        & (db.user_state.last_page_chapter == db.chapters.chapter_label)
        & (db.sub_chapters.chapter_id == db.chapters.id)
        & (db.user_state.last_page_subchapter == db.sub_chapters.sub_chapter_label)
    ).select(
        db.user_state.last_page_url,
        db.user_state.last_page_hash,
        db.chapters.chapter_name,
        db.user_state.last_page_scroll_location,
        db.sub_chapters.sub_chapter_name,
    )
    rowarray_list = []
    if result:
        for row in result:
            res = {
                "lastPageUrl": row.user_state.last_page_url,
                "lastPageHash": row.user_state.last_page_hash,
                "lastPageChapter": row.chapters.chapter_name,
                "lastPageSubchapter": row.sub_chapters.sub_chapter_name,
                "lastPageScrollLocation": row.user_state.last_page_scroll_location,
            }
            rowarray_list.append(res)
        return json.dumps(rowarray_list)
    else:
        db.user_state.insert(user_id=auth.user.id, course_id=course.course_name)


def _getCorrectStats(miscdata, event):
    # TODO: update this to use the xxx_answer table
    # select and count grouping by the correct column
    # this version can suffer from division by zero error
    sid = None
    dbtable = EVENT_TABLE[event]  # translate event to correct table

    if auth.user:
        sid = auth.user.username
    else:
        if "ipuser" in request.cookies:
            sid = request.cookies["ipuser"].value

    if sid:
        course = (
            db(db.courses.course_name == miscdata["course"])
            .select(**SELECT_CACHE)
            .first()
        )
        tbl = db[dbtable]

        count_expr = tbl.correct.count()
        rows = db((tbl.sid == sid) & (tbl.timestamp > course.term_start_date)).select(
            tbl.correct, count_expr, groupby=tbl.correct
        )
        total = 0
        correct = 0
        for row in rows:
            count = row[count_expr]
            total += count
            if row[dbtable].correct:
                correct = count
        if total > 0:
            pctcorr = round(float(correct) / total * 100)
        else:
            pctcorr = "unavailable"
    else:
        pctcorr = "unavailable"

    miscdata["yourpct"] = pctcorr


def _getStudentResults(question: str):
    """
    Internal function to collect student answers
    """
    cc = db(db.courses.id == auth.user.course_id).select().first()
    qst = (
        db(
            (db.questions.name == question)
            & (db.questions.base_course == cc.base_course)
        )
        .select()
        .first()
    )
    tbl_name = EVENT_TABLE[qst.question_type]
    tbl = db[tbl_name]

    res = db(
        (tbl.div_id == question)
        & (tbl.course_name == cc.course_name)
        & (tbl.timestamp >= cc.term_start_date)
    ).select(tbl.sid, tbl.answer, orderby=tbl.sid)

    resultList = []
    if len(res) > 0:
        currentSid = res[0].sid
        currentAnswers = []

        for row in res:
            if row.answer:
                answer = clean(row.answer)
            else:
                answer = None

            if row.sid == currentSid:
                if answer is not None:
                    currentAnswers.append(answer)
            else:
                currentAnswers.sort()
                resultList.append((currentSid, currentAnswers))
                currentAnswers = [answer] if answer is not None else []
                currentSid = row.sid

        currentAnswers.sort()
        resultList.append((currentSid, currentAnswers))

    return resultList
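

# Illustrative sketch (not part of the original controller): the grouping loop in
# ``_getStudentResults`` relies on the rows being ordered by sid and folds them
# into one (sid, answers) pair per student, e.g.
#
#   sid      answer           resultList
#   -----    ------     ->    [("alice", ["a", "b"]), ("bob", ["c"])]
#   alice    a
#   alice    b
#   bob      c
#
# with each student's answer list sorted alphabetically.  The names and answers
# above are made up.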


def getaggregateresults():
    course = request.vars.course
    question = request.vars.div_id
    # select act, count(*) from useinfo where div_id = 'question4_2_1' group by act;
    response.headers["content-type"] = "application/json"

    if not auth.user:
        return json.dumps([dict(answerDict={}, misc={}, emess="You must be logged in")])

    is_instructor = verifyInstructorStatus(course, auth.user.id)  # noqa: F405
    # Yes, these two things could be done as a join, but this **may** be better for performance
    if course in (
        "thinkcspy",
        "pythonds",
        "fopp",
        "csawesome",
        "apcsareview",
        "StudentCSP",
    ):
        start_date = datetime.datetime.utcnow() - datetime.timedelta(days=90)
    else:
        start_date = (
            db(db.courses.course_name == course)
            .select(db.courses.term_start_date)
            .first()
            .term_start_date
        )
    count = db.useinfo.id.count()
    try:
        result = db(
            (db.useinfo.div_id == question)
            & (db.useinfo.course_id == course)
            & (db.useinfo.timestamp >= start_date)
        ).select(db.useinfo.act, count, groupby=db.useinfo.act)
    except Exception:
        return json.dumps(
            [dict(answerDict={}, misc={}, emess="Sorry, the request timed out")]
        )

    tdata = {}
    tot = 0
    for row in result:
        tdata[clean(row.useinfo.act)] = row[count]
        tot += row[count]

    tot = float(tot)
    rdata = {}
    miscdata = {}
    correct = ""
    if tot > 0:
        for key in tdata:
            all_a = key.split(":")
            try:
                answer = all_a[1]
                if "correct" in key:
                    correct = answer
                count = int(tdata[key])
                if answer in rdata:
                    count += rdata[answer] / 100.0 * tot
                pct = round(count / tot * 100.0)

                if answer != "undefined" and answer != "":
                    rdata[answer] = pct
            except Exception as e:
                logger.error("Bad data for %s data is %s -- %s" % (question, key, e))

    miscdata["correct"] = correct
    miscdata["course"] = course

    _getCorrectStats(miscdata, "mChoice")

    returnDict = dict(answerDict=rdata, misc=miscdata)

    if auth.user and is_instructor:
        resultList = _getStudentResults(question)
        returnDict["reslist"] = resultList

    return json.dumps([returnDict])


def getpollresults():
    course = request.vars.course
    div_id = request.vars.div_id

    response.headers["content-type"] = "application/json"

    query = """select act from useinfo
        join (select sid, max(id) mid
            from useinfo where event='poll' and div_id = %s and course_id = %s group by sid) as T
        on id = T.mid"""

    rows = db.executesql(query, (div_id, course))

    result_list = []
    for row in rows:
        val = row[0].split(":")[0]
        result_list.append(int(val))

    # maps option : count
    opt_counts = Counter(result_list)

    if result_list:
        for i in range(max(result_list)):
            if i not in opt_counts:
                opt_counts[i] = 0
    # opt_list holds the option numbers from smallest to largest
    # count_list[i] holds the count of responses that chose option i
    opt_list = sorted(opt_counts.keys())
    count_list = []
    for i in opt_list:
        count_list.append(opt_counts[i])

    user_res = None
    if auth.user:
        user_res = (
            db(
                (db.useinfo.sid == auth.user.username)
                & (db.useinfo.course_id == course)
                & (db.useinfo.div_id == div_id)
            )
            .select(db.useinfo.act, orderby=~db.useinfo.id)
            .first()
        )

    if user_res:
        my_vote = user_res.act
    else:
        my_vote = -1

    return json.dumps([len(result_list), opt_list, count_list, div_id, my_vote])
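

# Illustrative sketch (not part of the original controller): ``getpollresults``
# returns a JSON array shaped like
#
#   [total_votes, opt_list, count_list, div_id, my_vote]
#
# e.g. [3, [0, 1, 2], [2, 0, 1], "poll_1", my_vote] for three recorded votes of
# which two chose option 0 and one chose option 2.  ``my_vote`` is the current
# user's most recent recorded act, or -1 if they have not voted.  The concrete
# numbers and div_id above are made up.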


def gettop10Answers():
    course = request.vars.course
    question = request.vars.div_id
    response.headers["content-type"] = "application/json"
    rows = []

    try:
        dbcourse = db(db.courses.course_name == course).select(**SELECT_CACHE).first()
        count_expr = db.fitb_answers.answer.count()
        rows = db(
            (db.fitb_answers.div_id == question)
            & (db.fitb_answers.course_name == course)
            & (db.fitb_answers.timestamp > dbcourse.term_start_date)
        ).select(
            db.fitb_answers.answer,
            count_expr,
            groupby=db.fitb_answers.answer,
            orderby=~count_expr,
            limitby=(0, 10),
        )
        res = [
            {"answer": clean(row.fitb_answers.answer), "count": row[count_expr]}
            for row in rows
        ]
    except Exception as e:
        logger.debug(e)
        res = "error in query"

    miscdata = {"course": course}
    _getCorrectStats(
        miscdata, "fillb"
    )  # TODO: rewrite _getCorrectStats to use xxx_answers

    if auth.user and verifyInstructorStatus(course, auth.user.id):  # noqa: F405
        resultList = _getStudentResults(question)
        miscdata["reslist"] = resultList

    return json.dumps([res, miscdata])


def getassignmentgrade():
    response.headers["content-type"] = "application/json"
    if not auth.user:
        return json.dumps([dict(message="not logged in")])

    divid = request.vars.div_id

    ret = {
        "grade": "Not graded yet",
        "comment": "No Comments",
        "avg": "None",
        "count": "None",
        "released": False,
    }

    # check that the assignment is released
    #
    a_q = (
        db(
            (db.assignments.course == auth.user.course_id)
            & (db.assignment_questions.assignment_id == db.assignments.id)
            & (db.assignment_questions.question_id == db.questions.id)
            & (db.questions.name == divid)
        )
        .select(
            db.assignments.released, db.assignments.id, db.assignment_questions.points
        )
        .first()
    )

    # if there is no assignment_question
    # try new way that we store scores and comments
    # divid is a question; find question_grades row
    result = (
        db(
            (db.question_grades.sid == auth.user.username)
            & (db.question_grades.course_name == auth.user.course_name)
            & (db.question_grades.div_id == divid)
        )
        .select(db.question_grades.score, db.question_grades.comment)
        .first()
    )
    logger.debug(result)
    if result:
        # say that we're sending back result styles in new version, so they can be
        # processed differently without affecting old way during transition.
        ret["version"] = 2
        ret["released"] = a_q.assignments.released if a_q else False
        if a_q and not a_q.assignments.released:
            ret["grade"] = "Not graded yet"
        elif a_q and a_q.assignments.released:
            ret["grade"] = result.score or "Written Feedback Only"

        if a_q and a_q.assignments.released == True:
            ret["max"] = a_q.assignment_questions.points
        else:
            ret["max"] = ""

        if result.comment:
            ret["comment"] = result.comment

    return json.dumps([ret])


def _canonicalize_tz(tstring):
    x = re.search(r"\((.*)\)", tstring)
    x = x.group(1)
    y = x.split()
    if len(y) == 1:
        return tstring
    else:
        zstring = "".join([i[0] for i in y])
        return re.sub(r"(.*)\((.*)\)", r"\1({})".format(zstring), tstring)
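

# Illustrative sketch (not part of the original controller): ``_canonicalize_tz``
# shortens a spelled-out timezone name in parentheses to its initials so that the
# dateutil parser used below can cope with it, e.g.
#
#   _canonicalize_tz("Mon Jan 01 2024 09:00:00 GMT-0500 (Eastern Standard Time)")
#   -> "Mon Jan 01 2024 09:00:00 GMT-0500 (EST)"
#
# A single-word zone name such as "(EST)" is returned unchanged.  The date string
# above is made up.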
r"\1({})".format(zstring), tstring)NEWLINENEWLINENEWLINE# .. _getAssessResults:NEWLINE#NEWLINE# getAssessResultsNEWLINE# ----------------NEWLINEdef getAssessResults():NEWLINE if not auth.user:NEWLINE # can't query for user's answers if we don't know who the user is, so just load from local storageNEWLINE return ""NEWLINENEWLINE course = request.vars.courseNEWLINE div_id = request.vars.div_idNEWLINE event = request.vars.eventNEWLINE if (NEWLINE verifyInstructorStatus(auth.user.course_name, auth.user) and request.vars.sidNEWLINE ): # retrieving results for graderNEWLINE sid = request.vars.sidNEWLINE else:NEWLINE sid = auth.user.usernameNEWLINENEWLINE # TODO This whole thing is messy - get the deadline from the assignment in the dbNEWLINE if request.vars.deadline:NEWLINE try:NEWLINE deadline = parse(_canonicalize_tz(request.vars.deadline))NEWLINE tzoff = session.timezoneoffset if session.timezoneoffset else 0NEWLINE deadline = deadline + datetime.timedelta(hours=float(tzoff))NEWLINE deadline = deadline.replace(tzinfo=None)NEWLINE except Exception:NEWLINE logger.error("Bad Timezone - {}".format(request.vars.deadline))NEWLINE deadline = datetime.datetime.utcnow()NEWLINE else:NEWLINE deadline = datetime.datetime.utcnow()NEWLINENEWLINE response.headers["content-type"] = "application/json"NEWLINENEWLINE # Identify the correct event and query the database so we can load it from the serverNEWLINE if event == "fillb":NEWLINE rows = (NEWLINE db(NEWLINE (db.fitb_answers.div_id == div_id)NEWLINE & (db.fitb_answers.course_name == course)NEWLINE & (db.fitb_answers.sid == sid)NEWLINE )NEWLINE .select(NEWLINE db.fitb_answers.answer,NEWLINE db.fitb_answers.timestamp,NEWLINE orderby=~db.fitb_answers.id,NEWLINE )NEWLINE .first()NEWLINE )NEWLINE if not rows:NEWLINE return "" # server doesn't have it so we load from local storage insteadNEWLINE #NEWLINE res = {"answer": rows.answer, "timestamp": str(rows.timestamp)}NEWLINE do_server_feedback, feedback = is_server_feedback(div_id, course)NEWLINE if do_server_feedback:NEWLINE correct, res_update = fitb_feedback(rows.answer, feedback)NEWLINE res.update(res_update)NEWLINE return json.dumps(res)NEWLINE elif event == "mChoice":NEWLINE rows = (NEWLINE db(NEWLINE (db.mchoice_answers.div_id == div_id)NEWLINE & (db.mchoice_answers.course_name == course)NEWLINE & (db.mchoice_answers.sid == sid)NEWLINE )NEWLINE .select(NEWLINE db.mchoice_answers.answer,NEWLINE db.mchoice_answers.timestamp,NEWLINE db.mchoice_answers.correct,NEWLINE orderby=~db.mchoice_answers.id,NEWLINE )NEWLINE .first()NEWLINE )NEWLINE if not rows:NEWLINE return ""NEWLINE res = {NEWLINE "answer": rows.answer,NEWLINE "timestamp": str(rows.timestamp),NEWLINE "correct": rows.correct,NEWLINE }NEWLINE return json.dumps(res)NEWLINE elif event == "dragNdrop":NEWLINE rows = (NEWLINE db(NEWLINE (db.dragndrop_answers.div_id == div_id)NEWLINE & (db.dragndrop_answers.course_name == course)NEWLINE & (db.dragndrop_answers.sid == sid)NEWLINE )NEWLINE .select(NEWLINE db.dragndrop_answers.answer,NEWLINE db.dragndrop_answers.timestamp,NEWLINE db.dragndrop_answers.correct,NEWLINE db.dragndrop_answers.min_height,NEWLINE orderby=~db.dragndrop_answers.id,NEWLINE )NEWLINE .first()NEWLINE )NEWLINE if not rows:NEWLINE return ""NEWLINE res = {NEWLINE "answer": rows.answer,NEWLINE "timestamp": str(rows.timestamp),NEWLINE "correct": rows.correct,NEWLINE "minHeight": str(rows.min_height),NEWLINE }NEWLINE return json.dumps(res)NEWLINE elif event == "clickableArea":NEWLINE rows = (NEWLINE db(NEWLINE (db.clickablearea_answers.div_id == 
div_id)NEWLINE & (db.clickablearea_answers.course_name == course)NEWLINE & (db.clickablearea_answers.sid == sid)NEWLINE )NEWLINE .select(NEWLINE db.clickablearea_answers.answer,NEWLINE db.clickablearea_answers.timestamp,NEWLINE db.clickablearea_answers.correct,NEWLINE orderby=~db.clickablearea_answers.id,NEWLINE )NEWLINE .first()NEWLINE )NEWLINE if not rows:NEWLINE return ""NEWLINE res = {NEWLINE "answer": rows.answer,NEWLINE "timestamp": str(rows.timestamp),NEWLINE "correct": rows.correct,NEWLINE }NEWLINE return json.dumps(res)NEWLINE elif event == "timedExam":NEWLINE rows = (NEWLINE db(NEWLINE (db.timed_exam.reset == None) # noqa: E711NEWLINE & (db.timed_exam.div_id == div_id)NEWLINE & (db.timed_exam.course_name == course)NEWLINE & (db.timed_exam.sid == sid)NEWLINE )NEWLINE .select(NEWLINE db.timed_exam.correct,NEWLINE db.timed_exam.incorrect,NEWLINE db.timed_exam.skipped,NEWLINE db.timed_exam.time_taken,NEWLINE db.timed_exam.timestamp,NEWLINE db.timed_exam.reset,NEWLINE orderby=~db.timed_exam.id,NEWLINE )NEWLINE .first()NEWLINE )NEWLINE if not rows:NEWLINE return ""NEWLINE res = {NEWLINE "correct": rows.correct,NEWLINE "incorrect": rows.incorrect,NEWLINE "skipped": str(rows.skipped),NEWLINE "timeTaken": str(rows.time_taken),NEWLINE "timestamp": str(rows.timestamp),NEWLINE "reset": str(rows.reset),NEWLINE }NEWLINE return json.dumps(res)NEWLINE elif event == "parsons":NEWLINE rows = (NEWLINE db(NEWLINE (db.parsons_answers.div_id == div_id)NEWLINE & (db.parsons_answers.course_name == course)NEWLINE & (db.parsons_answers.sid == sid)NEWLINE )NEWLINE .select(NEWLINE db.parsons_answers.answer,NEWLINE db.parsons_answers.source,NEWLINE db.parsons_answers.timestamp,NEWLINE orderby=~db.parsons_answers.id,NEWLINE )NEWLINE .first()NEWLINE )NEWLINE if not rows:NEWLINE return ""NEWLINE res = {NEWLINE "answer": rows.answer,NEWLINE "source": rows.source,NEWLINE "timestamp": str(rows.timestamp),NEWLINE }NEWLINE return json.dumps(res)NEWLINE elif event == "shortanswer":NEWLINE logger.debug(f"Getting shortanswer: deadline is {deadline} ")NEWLINE rows = db(NEWLINE (db.shortanswer_answers.sid == sid)NEWLINE & (db.shortanswer_answers.div_id == div_id)NEWLINE & (db.shortanswer_answers.course_name == course)NEWLINE ).select(orderby=~db.shortanswer_answers.id)NEWLINE if not rows:NEWLINE return ""NEWLINE last_answer = NoneNEWLINE if not request.vars.deadline:NEWLINE row = rows[0]NEWLINE else:NEWLINE last_answer = rows[0]NEWLINE for row in rows:NEWLINE if row.timestamp <= deadline:NEWLINE breakNEWLINE if row.timestamp > deadline:NEWLINE row = NoneNEWLINENEWLINE if row and row == last_answer:NEWLINE res = {"answer": row.answer, "timestamp": row.timestamp.isoformat()}NEWLINE else:NEWLINE if row and row.timestamp <= deadline:NEWLINE res = {"answer": row.answer, "timestamp": row.timestamp.isoformat()}NEWLINE else:NEWLINE res = {NEWLINE "answer": "",NEWLINE "timestamp": None,NEWLINE "last_answer": last_answer.answer,NEWLINE "last_timestamp": last_answer.timestamp.isoformat(),NEWLINE }NEWLINE srow = (NEWLINE db(NEWLINE (db.question_grades.sid == sid)NEWLINE & (db.question_grades.div_id == div_id)NEWLINE & (db.question_grades.course_name == course)NEWLINE )NEWLINE .select()NEWLINE .first()NEWLINE )NEWLINE if srow:NEWLINE res["score"] = srow.scoreNEWLINE res["comment"] = srow.commentNEWLINENEWLINE return json.dumps(res)NEWLINE elif event == "lp_build":NEWLINE rows = (NEWLINE db(NEWLINE (db.lp_answers.div_id == div_id)NEWLINE & (db.lp_answers.course_name == course)NEWLINE & (db.lp_answers.sid == sid)NEWLINE )NEWLINE 
.select(NEWLINE db.lp_answers.answer,NEWLINE db.lp_answers.timestamp,NEWLINE db.lp_answers.correct,NEWLINE orderby=~db.lp_answers.id,NEWLINE )NEWLINE .first()NEWLINE )NEWLINE if not rows:NEWLINE return "" # server doesn't have it so we load from local storage insteadNEWLINE answer = json.loads(rows.answer)NEWLINE correct = rows.correctNEWLINE return json.dumps(NEWLINE {"answer": answer, "timestamp": str(rows.timestamp), "correct": correct}NEWLINE )NEWLINENEWLINENEWLINEdef tookTimedAssessment():NEWLINE if auth.user:NEWLINE sid = auth.user.usernameNEWLINE else:NEWLINE return json.dumps({"tookAssessment": False})NEWLINENEWLINE exam_id = request.vars.div_idNEWLINE course = request.vars.course_nameNEWLINE rows = (NEWLINE db(NEWLINE (db.timed_exam.div_id == exam_id)NEWLINE & (db.timed_exam.sid == sid)NEWLINE & (db.timed_exam.course_name == course)NEWLINE )NEWLINE .select(orderby=~db.timed_exam.id)NEWLINE .first()NEWLINE )NEWLINE logger.debug(f"checking {exam_id} {sid} {course} {rows}")NEWLINE if rows:NEWLINE return json.dumps({"tookAssessment": True})NEWLINE else:NEWLINE return json.dumps({"tookAssessment": False})NEWLINENEWLINENEWLINE# The request variable ``code`` must contain JSON-encoded RST to be rendered by Runestone. Only the HTML containing the actual Runestone component will be returned.NEWLINEdef preview_question():NEWLINENEWLINE begin = """NEWLINE.. raw:: htmlNEWLINENEWLINE <begin_directive>NEWLINENEWLINE"""NEWLINE end = """NEWLINENEWLINE.. raw:: htmlNEWLINENEWLINE <end_directive>NEWLINENEWLINE"""NEWLINENEWLINE try:NEWLINE code = begin + dedent(json.loads(request.vars.code)) + endNEWLINE with open(NEWLINE "applications/{}/build/preview/_sources/index.rst".format(NEWLINE request.applicationNEWLINE ),NEWLINE "w",NEWLINE encoding="utf-8",NEWLINE ) as ixf:NEWLINE ixf.write(code)NEWLINENEWLINE # Note that ``os.environ`` isn't a dict, it's an object whose setter modifies environment variables. So, modifications of a copy/deepcopy still `modify the original environment <https://stackoverflow.com/questions/13142972/using-copy-deepcopy-on-os-environ-in-python-appears-broken>`_. 
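

# Illustrative sketch (not part of the original controller): a client polling
# ``getAssessResults`` with event "mChoice" gets back either "" (nothing stored
# server-side, so the client falls back to local storage) or a JSON object such as
#
#   {"answer": "0", "timestamp": "2024-01-01 12:00:00", "correct": true}
#
# The exact keys vary by event type, as the branches above show; the values here
# are made up.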


# The request variable ``code`` must contain JSON-encoded RST to be rendered by
# Runestone.  Only the HTML containing the actual Runestone component will be returned.
def preview_question():

    begin = """
.. raw:: html

    <begin_directive>

"""
    end = """

.. raw:: html

    <end_directive>

"""

    try:
        code = begin + dedent(json.loads(request.vars.code)) + end
        with open(
            "applications/{}/build/preview/_sources/index.rst".format(
                request.application
            ),
            "w",
            encoding="utf-8",
        ) as ixf:
            ixf.write(code)

        # Note that ``os.environ`` isn't a dict, it's an object whose setter modifies
        # environment variables.  So, modifications of a copy/deepcopy still `modify
        # the original environment
        # <https://stackoverflow.com/questions/13142972/using-copy-deepcopy-on-os-environ-in-python-appears-broken>`_.
        # Therefore, convert it to a dict, where modifications will not affect the
        # environment.
        env = dict(os.environ)
        # Prevent any changes to the database when building a preview question.
        env.pop("DBURL", None)
        # Run a runestone build.
        # We would like to use sys.executable But when we run web2py
        # in uwsgi then sys.executable is uwsgi which doesn't work.
        # Why not just run runestone?
        if "python" not in settings.python_interpreter:
            logger.error(f"Error {settings.python_interpreter} is not a valid python")
            return json.dumps(
                f"Error: settings.python_interpreter must be set to a valid interpreter not {settings.python_interpreter}"
            )
        popen_obj = subprocess.Popen(
            [settings.python_interpreter, "-m", "runestone", "build"],
            # The build must be run from the directory containing a ``conf.py`` and
            # all the needed support files.
            cwd="applications/{}/build/preview".format(request.application),
            # Capture the build output as text in case of an error.
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
            # Pass the modified environment which doesn't contain ``DBURL``.
            env=env,
        )
        stdout, stderr = popen_obj.communicate()
        # If there was an error, return stdout and stderr from the build.
        if popen_obj.returncode != 0:
            return json.dumps(
                "Error: Runestone build failed:\n\n" + stdout + "\n" + stderr
            )

        with open(
            "applications/{}/build/preview/build/preview/index.html".format(
                request.application
            ),
            "r",
            encoding="utf-8",
        ) as ixf:
            src = ixf.read()
            tree = html.fromstring(src)
            if len(tree.cssselect(".runestone")) == 0:
                src = ""
            result = re.search(
                "<begin_directive>(.*)<end_directive>", src, flags=re.DOTALL
            )
            if result:
                ctext = result.group(1)
            else:
                component = tree.cssselect(".system-message")
                if len(component) > 0:
                    ctext = html.tostring(component[0]).decode("utf-8")
                    logger.debug("error - %s", ctext)
                else:
                    ctext = "Error: Runestone content missing."
            return json.dumps(ctext)
    except Exception as ex:
        return json.dumps("Error: {}".format(ex))
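

# Illustrative sketch (not part of the original controller): the ``code`` request
# variable for ``preview_question`` is JSON-encoded RST, e.g. built on the caller's
# side as
#
#   payload = json.dumps(".. activecode:: preview_1\n\n   print('hello')\n")
#
# ``preview_question`` then json.loads() it, dedents it, and wraps it between the
# ``begin``/``end`` markers above before running the build.  The directive name is
# made up.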


def save_donate():
    if auth.user:
        db(db.auth_user.id == auth.user.id).update(donated=True)


def did_donate():
    if auth.user:
        d_status = (
            db(db.auth_user.id == auth.user.id).select(db.auth_user.donated).first()
        )

        return json.dumps(dict(donate=d_status.donated))
    return json.dumps(dict(donate=False))


def get_datafile():
    """
    course_id - string, the name of the course
    acid - the acid of this datafile
    """
    course = request.vars.course_id  # the course name
    the_course = db(db.courses.course_name == course).select(**SELECT_CACHE).first()
    acid = request.vars.acid
    file_contents = (
        db(
            (db.source_code.acid == acid)
            & (
                (db.source_code.course_id == the_course.base_course)
                | (db.source_code.course_id == course)
            )
        )
        .select(db.source_code.main_code)
        .first()
    )

    if file_contents:
        file_contents = file_contents.main_code
    else:
        file_contents = None

    return json.dumps(dict(data=file_contents))


@auth.requires(
    lambda: verifyInstructorStatus(auth.user.course_name, auth.user),
    requires_login=True,
)
def broadcast_code():
    """
    Callable by an instructor to send the code in their scratch activecode
    to all students in the class.
    """
    the_course = (
        db(db.courses.course_name == auth.user.course_name)
        .select(**SELECT_CACHE)
        .first()
    )
    cid = the_course.id
    student_list = db(
        (db.user_courses.course_id == cid)
        & (db.auth_user.id == db.user_courses.user_id)
    ).select()
    shared_code = (
        "{} Instructor shared code on {}\n".format(
            COMMENT_MAP.get(request.vars.lang, "#"), datetime.datetime.utcnow().date()
        )
        + request.vars.code
    )
    counter = 0
    for student in student_list:
        if student.auth_user.id == auth.user.id:
            continue
        sid = student.auth_user.username
        try:
            db.code.insert(
                sid=sid,
                acid=request.vars.divid,
                code=shared_code,
                emessage="",
                timestamp=datetime.datetime.utcnow(),
                course_id=cid,
                language=request.vars.lang,
                comment="Instructor shared code",
            )
        except Exception as e:
            logger.error("Failed to insert instructor code! details: {}".format(e))
            return json.dumps(dict(mess="failed"))

        counter += 1

    return json.dumps(dict(mess="success", share_count=counter))
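

# Illustrative sketch (not part of the original controller): the header prepended
# by ``broadcast_code`` uses the comment character for the submitted language, so
# for lang == "python" each student's copy starts with something like
#
#   # Instructor shared code on 2024-01-01
#
# COMMENT_MAP falls back to "#" for unknown languages; the date shown is made up.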


def _same_class(user1: str, user2: str) -> bool:
    user1_course = (
        db(db.auth_user.username == user1).select(db.auth_user.course_id).first()
    )
    user2_course = (
        db(db.auth_user.username == user2).select(db.auth_user.course_id).first()
    )

    return user1_course == user2_course


def login_status():
    if auth.user:
        return json.dumps(dict(status="loggedin", course_name=auth.user.course_name))
    else:
        # no auth.user when logged out, so there is no course name to report
        return json.dumps(dict(status="loggedout", course_name=None))


auto_gradable_q = [
    "clickablearea",
    "mchoice",
    "parsonsprob",
    "dragndrop",
    "fillintheblank",
]


@auth.requires_login()
def get_question_source():
    """Called from the selectquestion directive
    There are 4 cases:

    1. If there is only 1 question in the question list then return the html source for it.
    2. If there are multiple questions then choose a question at random
    3. If a proficiency is selected then select a random question that tests that proficiency
    4. If the question is an AB question then see if this student is an A or a B or assign them to one randomly.

    In the last two cases, first check to see if there is a question for this student for this
    component that was previously selected.

    Returns:
        json: html source for this question
    """
    prof = False
    points = request.vars.points
    logger.debug(f"POINTS = {points}")
    min_difficulty = request.vars.min_difficulty
    max_difficulty = request.vars.max_difficulty
    not_seen_ever = request.vars.not_seen_ever
    autogradable = request.vars.autogradable
    is_primary = request.vars.primary
    is_ab = request.vars.AB
    selector_id = request.vars["selector_id"]
    assignment_name = request.vars["timedWrapper"]
    toggle = request.vars["toggle"]

    # If the question has a :points: option then those points are the default
    # however sometimes questions are entered in the web ui without the :points:
    # and points are assigned in the UI instead.  If this is part of an
    # assignment or timed exam AND the points are set in the web UI we will
    # use the points from the UI over the :points:  If this is an assignment
    # or exam that is totally written in RST then the points in the UI will match
    # the points from the assignment anyway.
    if assignment_name:
        ui_points = (
            db(
                (db.assignments.name == assignment_name)
                & (db.assignments.id == db.assignment_questions.assignment_id)
                & (db.assignment_questions.question_id == db.questions.id)
                & (db.questions.name == selector_id)
            )
            .select(db.assignment_questions.points)
            .first()
        )
        logger.debug(
            f"Assignment Points for {assignment_name}, {selector_id} = {ui_points}"
        )
        points = ui_points.points

    if request.vars["questions"]:
        questionlist = request.vars["questions"].split(",")
        questionlist = [q.strip() for q in questionlist]
    elif request.vars["proficiency"]:
        prof = request.vars["proficiency"]

        query = (db.competency.competency == prof) & (
            db.competency.question == db.questions.id
        )
        if is_primary:
            query = query & (db.competency.is_primary == True)
        if min_difficulty:
            query = query & (db.questions.difficulty >= float(min_difficulty))
        if max_difficulty:
            query = query & (db.questions.difficulty <= float(max_difficulty))
        if autogradable:
            query = query & (
                (db.questions.autograde == "unittest")
                | db.questions.question_type.contains(auto_gradable_q, all=False)
            )
        res = db(query).select(db.questions.name)
        logger.debug(f"Query was {db._lastsql}")
        if res:
            questionlist = [row.name for row in res]
        else:
            questionlist = []
            logger.error(f"No questions found for proficiency {prof}")
            return json.dumps(f"<p>No Questions found for proficiency: {prof}</p>")

    logger.debug(f"is_ab is {is_ab}")
    if is_ab:

        res = db(
            (db.user_experiment.sid == auth.user.username)
            & (db.user_experiment.experiment_id == is_ab)
        ).select(orderby=db.user_experiment.id)

        if not res:
            exp_group = random.randrange(2)
            db.user_experiment.insert(
                sid=auth.user.username, experiment_id=is_ab, exp_group=exp_group
            )
            logger.debug(f"added {auth.user.username} to {is_ab} group {exp_group}")

        else:
            exp_group = res[0].exp_group

        logger.debug(f"experimental group is {exp_group}")

        prev_selection = (
            db(
                (db.selected_questions.sid == auth.user.username)
                & (db.selected_questions.selector_id == selector_id)
            )
            .select()
            .first()
        )

        if prev_selection:
            questionid = prev_selection.selected_id
        else:
            questionid = questionlist[exp_group]

    if not is_ab:
        poss = set()
        if not_seen_ever:
            seenq = db(
                (db.useinfo.sid == auth.user.username)
                & (db.useinfo.div_id.contains(questionlist, all=False))
            ).select(db.useinfo.div_id)
            seen = set([x.div_id for x in seenq])
            poss = set(questionlist)
            questionlist = list(poss - seen)

        if len(questionlist) == 0 and len(poss) > 0:
            questionlist = list(poss)

        htmlsrc = ""

        prev_selection = (
            db(
                (db.selected_questions.sid == auth.user.username)
                & (db.selected_questions.selector_id == selector_id)
            )
            .select()
            .first()
        )

        if prev_selection:
            questionid = prev_selection.selected_id
        else:
            # Eliminate any previous exam questions for this student
            prev_questions = db(db.selected_questions.sid == auth.user.username).select(
                db.selected_questions.selected_id
            )
            prev_questions = set([row.selected_id for row in prev_questions])
            possible = set(questionlist)
            questionlist = list(possible - prev_questions)
            if questionlist:
                questionid = random.choice(questionlist)
            else:
                # If there are no questions left we should still return a random question.
                questionid = random.choice(list(possible))

    logger.debug(f"toggle is {toggle}")
    if toggle:
        prev_selection = (
            db(
                (db.selected_questions.sid == auth.user.username)
                & (db.selected_questions.selector_id == selector_id)
            )
            .select()
            .first()
        )
        if prev_selection:
            questionid = prev_selection.selected_id
        else:
            questionid = request.vars["questions"].split(",")[0]
    # else:
    #     logger.error(
    #         f"Question ID '{questionid}' not found in select question list of '{selector_id}'."
    #     )
    #     return json.dumps(
    #         f"<p>Question ID '{questionid}' not found in select question list of '{selector_id}'.</p>"
    #     )

    res = db((db.questions.name == questionid)).select(db.questions.htmlsrc).first()

    if res and not prev_selection:
        qid = db.selected_questions.insert(
            selector_id=selector_id,
            sid=auth.user.username,
            selected_id=questionid,
            points=points,
        )
        if not qid:
            logger.error(
                f"Failed to insert a selected question for {selector_id} and {auth.user.username}"
            )
    else:
        logger.debug(
            f"Did not insert a record for {selector_id}, {questionid} Conditions are {res} QL: {questionlist} PREV: {prev_selection}"
        )

    if res and res.htmlsrc:
        htmlsrc = res.htmlsrc
    else:
        logger.error(
            f"HTML Source not found for {questionid} in course {auth.user.course_name} for {auth.user.username}"
        )
        htmlsrc = "<p>No preview available</p>"
    return json.dumps(htmlsrc)


@auth.requires_login()
def update_selected_question():
    """
    This endpoint is used by the selectquestion problems that allow the
    student to select the problem they work on.  For example they may have
    a programming problem that can be solved with writing code, or they
    can switch to a parsons problem if necessary.

    Caller must provide:
    * ``metaid`` -- the id of the selectquestion
    * ``selected`` -- the id of the real question chosen by the student
    """
    sid = auth.user.username
    selector_id = request.vars.metaid
    selected_id = request.vars.selected
    logger.debug(f"USQ - {selector_id} --> {selected_id} for {sid}")
    db.selected_questions.update_or_insert(
        (db.selected_questions.selector_id == selector_id)
        & (db.selected_questions.sid == sid),
        selected_id=selected_id,
        selector_id=selector_id,
        sid=sid,
    )
|
import time
import typing
import inspect
from uuid import uuid4
from broccoli.injector import ASyncInjector
from broccoli.components import ReturnValue
from broccoli.task import create_task
from broccoli.result import AsyncResult
from broccoli.types import App, Broker, Task, Message, Arguments, Fence, TaskLogger
from broccoli.traceback import extract_log_tb
from broccoli.utils import cached_property
from broccoli.graph import Graph
from broccoli.exceptions import Reject
from broccoli.router import ROUTER_COMPONENTS
from broccoli.broker import BROKER_COMPONENTS
from broccoli.logger import LOGGER_COMPONENTS
from broccoli.config import CONFIG_COMPONENTS
from broccoli.arguments import ARGUMENT_COMPONENTS

__all__ = ('Broccoli',)


class nullfence:

    def __enter__(self):
        pass

    def __exit__(self, *excinfo):
        pass


class Broccoli(App):
    _injector: ASyncInjector = None
    _tasks: typing.Dict[str, Task] = None
    result_factory = AsyncResult
    graph_factory = Graph

    def __init__(self, components=None, settings=None) -> None:
        if components:
            msg = 'components must be a list of instances of Component.'
            assert all([(not isinstance(component, type) and hasattr(component, 'resolve'))
                        for component in components]), msg
        if settings is not None:
            msg = 'settings must be a dict.'
            assert isinstance(settings, dict), msg

        self.settings = settings
        self._init_injector(components or [])
        self._tasks = {}
        self._context = {}
        self._graphs = {}

    def set_context(self, **context):
        self._context = dict(self._context, **context)
        self._graphs = {}
        self._injector.clear_cache()

    def get_context(self):
        return self._context

    def set_hooks(self,
                  on_request: typing.Callable = None,
                  on_response: typing.Callable = None):
        if on_request:
            if inspect.iscoroutinefunction(on_request):
                msg = 'Function %r may not be async.'
                raise TypeError(msg % on_request)
            self._on_request_hook = on_request
        elif '_on_request_hook' in self.__dict__:
            del self._on_request_hook
        if on_response:
            if inspect.iscoroutinefunction(on_response):
                msg = 'Function %r may not be async.'
                raise TypeError(msg % on_response)
            self._on_response_hook = on_response
        elif '_on_response_hook' in self.__dict__:
            del self._on_response_hook

    def _init_injector(self, components):
        components = components or []
        components += ROUTER_COMPONENTS
        components += LOGGER_COMPONENTS
        components += BROKER_COMPONENTS
        components += CONFIG_COMPONENTS
        components += ARGUMENT_COMPONENTS

        initial = {
            'app': App,
            'message': Message,
            'args': Arguments,
            'task': Task,
            'exc': Exception,
            'fence': Fence
        }

        self._injector = ASyncInjector(components, initial)

    def inject(self, funcs, args=None, cache=True):
        state = {
            'app': self,
            'message': None,
            'args': args,
            'task': None,
            'exc': None,
            'fence': None
        }
        return self._injector.run(
            funcs,
            state=state,
            cache=cache
        )
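
    # Illustrative sketch (not part of the original module): constructing an app
    # and wiring hooks with the methods above might look like
    #
    #   app = Broccoli(settings={})
    #   app.set_hooks(on_request=my_request_hook)
    #   app.set_context(worker='w1')
    #
    # Only the signatures are taken from this class; the argument values and the
    # hook name are made up.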

    def get_task(self, name: str):
        try:
            return self._tasks[name]
        except KeyError:
            raise Reject('Task %s not found' % name)

    def task(self, *args, **kwargs):
        def create_task_wrapper(func):
            if inspect.iscoroutinefunction(func):
                msg = 'Function %r may not be async.'
                raise TypeError(msg % func)
            task = create_task(self, func, **kwargs)
            if task.name in self._tasks:
                msg = 'Task with name %r is already registered.'
                raise TypeError(msg % task.name)
            self._tasks[task.name] = task
            return task

        if len(args) == 1:
            if callable(args[0]):
                return create_task_wrapper(*args)
            raise TypeError("Argument 1 to @task() must be a callable")

        if args:
            raise TypeError("@task() takes exactly 1 argument")

        return create_task_wrapper

    def send_message(self, message: dict):
        self._broker.send_message(message)

    def result(self, result_key: str):
        return self.result_factory(self, result_key)

    def serve_message(self, message: dict, fence: Fence = None):
        if 'id' not in message:
            raise Reject('no id')

        if 'task' not in message:
            raise Reject('no task')

        if 'reply_id' in message:
            if 'graph_id' not in message:
                raise Reject('no graph_id')
            graph_id = message['graph_id']
            if graph_id not in self._graphs:
                raise Reject('unexpected graph_id')
            graph = self._graphs[graph_id]
            if message['reply_id'] not in graph:
                raise Reject('unexpected reply id')
            graph.run_reply(message)
            return graph.get_pending_messages()
        else:
            coro = self._run_async(message, fence)
            try:
                graph = coro.send(None)
                graph.set_coroutine(coro)
                return graph.get_pending_messages()
            except StopIteration as stop:
                return [stop.value]
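
    # Illustrative sketch (not part of the original module): ``serve_message``
    # accepts plain dicts.  A minimal new-task message and a reply message might
    # look like
    #
    #   {'id': 'abc123', 'task': 'myapp.add', 'args': (1, 2)}
    #   {'id': 'def456', 'task': 'myapp.add', 'reply_id': 'abc123', 'graph_id': 'g1'}
    #
    # The task and id values here are made up; the required keys ('id', 'task',
    # and for replies 'reply_id' plus 'graph_id') are exactly those checked above.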

    async def _run_async(self, message, fence):
        state = {
            'app': self,
            'message': message,
            'fence': fence,
            'args': None,
            'task': None,
            'exc': None,
            'return_value': None
        }

        try:
            __log_tb_start__ = None
            task = self.get_task(message['task'])
            state['task'] = task
            funcs = (
                self._on_request,
                self._on_request_hook,
                self._build_graph,
                task.handler,
                self._on_response_hook,
                self._on_response,
            )
            return await self._injector.run_async(funcs, state=state)
        except Exception as exc:
            try:
                state['exc'] = exc
                step = state.get('$step', 0)
                if 0 < step < 4:
                    funcs = (self._on_response_hook, self._on_response)
                else:
                    funcs = (self._on_response,)
                return self._injector.run(funcs, state=state)
            except Exception as inner_exc:
                state['exc'] = inner_exc
                return self._injector.run((self._on_response,), state)

    @staticmethod
    def _on_request(message: Message):
        expires_at = message.get('expires_at')
        if expires_at is not None and isinstance(expires_at, (int, float)):
            if expires_at < time.time():
                raise Reject('Due to expiration time.')

    @staticmethod
    def _on_request_hook(ret: ReturnValue):
        return ret

    async def _build_graph(self, task: Task, message: Message) -> Arguments:
        args = ()
        if message.get('subtasks'):
            graph = self.graph_factory(message)
            self._graphs[graph.id] = graph
            try:
                args = await graph
            finally:
                graph.close()
                del self._graphs[graph.id]
        return task.get_arguments(
            *((message.get('args') or ()) + tuple(args)),
            **(message.get('kwargs') or {})
        )

    @staticmethod
    def _on_response_hook(ret: ReturnValue):
        return ret

    @staticmethod
    def _on_response(message: Message,
                     exc: Exception,
                     logger: TaskLogger,
                     return_value: ReturnValue,
                     task: Task):
        reply = {'id': str(uuid4()), 'task': message['task'], 'reply_id': message['id']}
        if 'graph_id' in message:
            reply['graph_id'] = message['graph_id']
        if 'reply_to' in message:
            reply['reply_to'] = message['reply_to']
        if 'result_key' in message:
            if message.get('ignore_result', task.ignore_result):
                reply['result_key'] = None
            else:
                reply['result_key'] = message['result_key']
        if '_context' in message:
            reply['_context'] = message['_context']
        if exc is not None:
            reply['exc'] = exc
            if isinstance(exc, task.throws) or isinstance(exc, Reject):
                logger.error("Task {'id': %r, 'task': %r} raised exception %s: %s",
                             message['id'], message['task'], exc.__class__.__name__, exc)
                return reply
            else:
                traceback = extract_log_tb(exc)
                if traceback:
                    reply['traceback'] = traceback
                logger.error("Task {'id': %r, 'task': %r} raised exception %s: %s\n%s",
                             message['id'], message['task'], exc.__class__.__name__,
                             exc, traceback)
                return reply
        reply['value'] = return_value
        return reply

    @cached_property
    def _broker(self) -> Broker:
        def get(obj: Broker):
            return obj

        return self.inject([get], cache=False)
|
import boto3
import sys
from st2actions.runners.pythonrunner import Action


class GetStackBuildStatus(Action):
    def run(self, stack_name_or_id):
        region = self.config['region']

        stack_states = ['CREATE_COMPLETE', 'CREATE_FAILED', 'ROLLBACK_COMPLETE']

        client = boto3.client('cloudformation', region_name=region)

        try:
            stack_status = client.describe_stacks(StackName=stack_name_or_id)['Stacks'][0]['StackStatus']

        except Exception as err:
            sys.stderr.write('ERROR: %s\n' % str(err))
            raise

        if stack_status not in stack_states:
            sys.stderr.write('Current state: %s\n' % stack_status)
            sys.exit(2)

        return True
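

# Illustrative sketch (not part of the original action): run() returns True once
# the stack reaches one of the terminal states listed above and exits with status
# 2 otherwise, so a caller can poll until something like
#
#   GetStackBuildStatus(config={'region': 'us-east-1'}).run('my-stack')
#
# stops failing.  The config value and stack name shown here are made up.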
|
#!/usr/bin/env python
# -*- coding: utf8 -*-

"""Initialize user
"""

import sys
sys.path.insert(0, '../')

from app.models import User
from app.models import db

_admin0 = {'name': 'uadmin', 'hash': '$6$rounds=656000$BGPNku.GTxUFp5/m$z2VoGUbOzZfjEq2TnQjyK4Ho47MYCEHEK5N/TjpgzNuLWOJHwoeIA3AUbbDSMEvQBdqtEv1Vez1OXAYtYc4r80'}

user0 = User(nickname=_admin0['name'], password_hash=_admin0['hash'],
             email='admin@localhost', id=1)

# default admin account
db.session.add(user0)
db.session.commit()
|
from django.apps import AppConfig


class GiftConfig(AppConfig):
    name = 'gift'
|
#
# Copyright (c) 2021 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception

from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util

class rdpclientprofile(base_resource) :
    """ Configuration for RDP clientprofile resource. """
    def __init__(self) :
        self._name = None
        self._rdpurloverride = None
        self._redirectclipboard = None
        self._redirectdrives = None
        self._redirectprinters = None
        self._redirectcomports = None
        self._redirectpnpdevices = None
        self._keyboardhook = None
        self._audiocapturemode = None
        self._videoplaybackmode = None
        self._multimonitorsupport = None
        self._rdpcookievalidity = None
        self._addusernameinrdpfile = None
        self._rdpfilename = None
        self._rdphost = None
        self._rdplistener = None
        self._rdpcustomparams = None
        self._psk = None
        self._randomizerdpfilename = None
        self._rdplinkattribute = None
        self._builtin = None
        self._feature = None
        self.___count = None

    @property
    def name(self) :
        r"""The name of the rdp profile.<br/>Minimum length = 1.
        """
        try :
            return self._name
        except Exception as e:
            raise e

    @name.setter
    def name(self, name) :
        r"""The name of the rdp profile.<br/>Minimum length = 1
        """
        try :
            self._name = name
        except Exception as e:
            raise e

    @property
    def rdpurloverride(self) :
        r"""This setting determines whether the RDP parameters supplied in the vpn url override those specified in the RDP profile.<br/>Default value: ENABLE<br/>Possible values = ENABLE, DISABLE.
        """
        try :
            return self._rdpurloverride
        except Exception as e:
            raise e

    @rdpurloverride.setter
    def rdpurloverride(self, rdpurloverride) :
        r"""This setting determines whether the RDP parameters supplied in the vpn url override those specified in the RDP profile.<br/>Default value: ENABLE<br/>Possible values = ENABLE, DISABLE
        """
        try :
            self._rdpurloverride = rdpurloverride
        except Exception as e:
            raise e

    @property
    def redirectclipboard(self) :
        r"""This setting corresponds to the Clipboard check box on the Local Resources tab under Options in RDC.<br/>Default value: ENABLE<br/>Possible values = ENABLE, DISABLE.
        """
        try :
            return self._redirectclipboard
        except Exception as e:
            raise e

    @redirectclipboard.setter
    def redirectclipboard(self, redirectclipboard) :
        r"""This setting corresponds to the Clipboard check box on the Local Resources tab under Options in RDC.<br/>Default value: ENABLE<br/>Possible values = ENABLE, DISABLE
        """
        try :
            self._redirectclipboard = redirectclipboard
        except Exception as e:
            raise e

    @property
    def redirectdrives(self) :
        r"""This setting corresponds to the selections for Drives under More on the Local Resources tab under Options in RDC.<br/>Default value: DISABLE<br/>Possible values = ENABLE, DISABLE.
        """
        try :
            return self._redirectdrives
        except Exception as e:
            raise e

    @redirectdrives.setter
    def redirectdrives(self, redirectdrives) :
        r"""This setting corresponds to the selections for Drives under More on the Local Resources tab under Options in RDC.<br/>Default value: DISABLE<br/>Possible values = ENABLE, DISABLE
        """
        try :
            self._redirectdrives = redirectdrives
        except Exception as e:
            raise e

    @property
    def redirectprinters(self) :
        r"""This setting corresponds to the selection in the Printers check box on the Local Resources tab under Options in RDC.<br/>Default value: ENABLE<br/>Possible values = ENABLE, DISABLE.
        """
        try :
            return self._redirectprinters
        except Exception as e:
            raise e

    @redirectprinters.setter
    def redirectprinters(self, redirectprinters) :
        r"""This setting corresponds to the selection in the Printers check box on the Local Resources tab under Options in RDC.<br/>Default value: ENABLE<br/>Possible values = ENABLE, DISABLE
        """
        try :
            self._redirectprinters = redirectprinters
        except Exception as e:
            raise e

    @property
    def redirectcomports(self) :
        r"""This setting corresponds to the selections for comports under More on the Local Resources tab under Options in RDC.<br/>Default value: DISABLE<br/>Possible values = ENABLE, DISABLE.
        """
        try :
            return self._redirectcomports
        except Exception as e:
            raise e

    @redirectcomports.setter
    def redirectcomports(self, redirectcomports) :
        r"""This setting corresponds to the selections for comports under More on the Local Resources tab under Options in RDC.<br/>Default value: DISABLE<br/>Possible values = ENABLE, DISABLE
        """
        try :
            self._redirectcomports = redirectcomports
        except Exception as e:
            raise e

    @property
    def redirectpnpdevices(self) :
        r"""This setting corresponds to the selections for pnpdevices under More on the Local Resources tab under Options in RDC.<br/>Default value: DISABLE<br/>Possible values = ENABLE, DISABLE.
        """
        try :
            return self._redirectpnpdevices
        except Exception as e:
            raise e

    @redirectpnpdevices.setter
    def redirectpnpdevices(self, redirectpnpdevices) :
        r"""This setting corresponds to the selections for pnpdevices under More on the Local Resources tab under Options in RDC.<br/>Default value: DISABLE<br/>Possible values = ENABLE, DISABLE
        """
        try :
            self._redirectpnpdevices = redirectpnpdevices
        except Exception as e:
            raise e

    @property
    def keyboardhook(self) :
        r"""This setting corresponds to the selection in the Keyboard drop-down list on the Local Resources tab under Options in RDC.<br/>Default value: InFullScreenMode<br/>Possible values = OnLocal, OnRemote, InFullScreenMode.
        """
        try :
            return self._keyboardhook
        except Exception as e:
            raise e

    @keyboardhook.setter
    def keyboardhook(self, keyboardhook) :
        r"""This setting corresponds to the selection in the Keyboard drop-down list on the Local Resources tab under Options in RDC.<br/>Default value: InFullScreenMode<br/>Possible values = OnLocal, OnRemote, InFullScreenMode
        """
        try :
            self._keyboardhook = keyboardhook
        except Exception as e:
            raise e

    @property
    def audiocapturemode(self) :
        r"""This setting corresponds to the selections in the Remote audio area on the Local Resources tab under Options in RDC.<br/>Default value: DISABLE<br/>Possible values = ENABLE, DISABLE.
        """
        try :
            return self._audiocapturemode
        except Exception as e:
            raise e

    @audiocapturemode.setter
    def audiocapturemode(self, audiocapturemode) :
        r"""This setting corresponds to the selections in the Remote audio area on the Local Resources tab under Options in RDC.<br/>Default value: DISABLE<br/>Possible values = ENABLE, DISABLE
        """
        try :
            self._audiocapturemode = audiocapturemode
        except Exception as e:
            raise e

    @property
    def videoplaybackmode(self) :
        r"""This setting determines if Remote Desktop Connection (RDC) will use RDP efficient multimedia streaming for video playback.<br/>Default value: ENABLE<br/>Possible values = ENABLE, DISABLE.
        """
        try :
            return self._videoplaybackmode
        except Exception as e:
            raise e

    @videoplaybackmode.setter
    def videoplaybackmode(self, videoplaybackmode) :
        r"""This setting determines if Remote Desktop Connection (RDC) will use RDP efficient multimedia streaming for video playback.<br/>Default value: ENABLE<br/>Possible values = ENABLE, DISABLE
        """
        try :
            self._videoplaybackmode = videoplaybackmode
        except Exception as e:
            raise e

    @property
    def multimonitorsupport(self) :
        r"""Enable/Disable Multiple Monitor Support for Remote Desktop Connection (RDC).<br/>Default value: ENABLE<br/>Possible values = ENABLE, DISABLE.
        """
        try :
            return self._multimonitorsupport
        except Exception as e:
            raise e

    @multimonitorsupport.setter
    def multimonitorsupport(self, multimonitorsupport) :
        r"""Enable/Disable Multiple Monitor Support for Remote Desktop Connection (RDC).<br/>Default value: ENABLE<br/>Possible values = ENABLE, DISABLE
        """
        try :
            self._multimonitorsupport = multimonitorsupport
        except Exception as e:
            raise e

    @property
    def rdpcookievalidity(self) :
        r"""RDP cookie validity period. RDP cookie validity time is applicable for new connection and also for any re-connection that might happen, mostly due to network disruption or during fail-over.<br/>Default value: 60<br/>Minimum length = 1<br/>Maximum length = 86400.
        """
        try :
            return self._rdpcookievalidity
        except Exception as e:
            raise e

    @rdpcookievalidity.setter
    def rdpcookievalidity(self, rdpcookievalidity) :
        r"""RDP cookie validity period. RDP cookie validity time is applicable for new connection and also for any re-connection that might happen, mostly due to network disruption or during fail-over.<br/>Default value: 60<br/>Minimum length = 1<br/>Maximum length = 86400
        """
        try :
            self._rdpcookievalidity = rdpcookievalidity
        except Exception as e:
            raise e

    @property
    def addusernameinrdpfile(self) :
        r"""Add username in rdp file.<br/>Default value: NO<br/>Possible values = YES, NO.
        """
        try :
            return self._addusernameinrdpfile
        except Exception as e:
            raise e

    @addusernameinrdpfile.setter
    def addusernameinrdpfile(self, addusernameinrdpfile) :
        r"""Add username in rdp file.<br/>Default value: NO<br/>Possible values = YES, NO
        """
        try :
            self._addusernameinrdpfile = addusernameinrdpfile
        except Exception as e:
            raise e

    @property
    def rdpfilename(self) :
        r"""RDP file name to be sent to End User.<br/>Minimum length = 1.
        """
        try :
            return self._rdpfilename
        except Exception as e:
            raise e

    @rdpfilename.setter
    def rdpfilename(self, rdpfilename) :
        r"""RDP file name to be sent to End User.<br/>Minimum length = 1
        """
        try :
            self._rdpfilename = rdpfilename
        except Exception as e:
            raise e

    @property
    def rdphost(self) :
        r"""Fully-qualified domain name (FQDN) of the RDP Listener.<br/>Maximum length = 252.
        """
        try :
            return self._rdphost
        except Exception as e:
            raise e

    @rdphost.setter
    def rdphost(self, rdphost) :
        r"""Fully-qualified domain name (FQDN) of the RDP Listener.<br/>Maximum length = 252
        """
        try :
            self._rdphost = rdphost
        except Exception as e:
            raise e

    @property
    def rdplistener(self) :
        r"""IP address (or) Fully-qualified domain name(FQDN) of the RDP Listener with the port in the format IP:Port (or) FQDN:Port.<br/>Maximum length = 255.
        """
        try :
            return self._rdplistener
        except Exception as e:
            raise e

    @rdplistener.setter
    def rdplistener(self, rdplistener) :
        r"""IP address (or) Fully-qualified domain name(FQDN) of the RDP Listener with the port in the format IP:Port (or) FQDN:Port.<br/>Maximum length = 255
        """
        try :
            self._rdplistener = rdplistener
        except Exception as e:
            raise e

    @property
    def rdpcustomparams(self) :
        r"""Option for RDP custom parameters settings (if any). Custom params needs to be separated by '&'.<br/>Default value: 0<br/>Minimum length = 1.
        """
        try :
            return self._rdpcustomparams
        except Exception as e:
            raise e

    @rdpcustomparams.setter
    def rdpcustomparams(self, rdpcustomparams) :
        r"""Option for RDP custom parameters settings (if any). Custom params needs to be separated by '&'.<br/>Default value: 0<br/>Minimum length = 1
        """
        try :
            self._rdpcustomparams = rdpcustomparams
        except Exception as e:
            raise e

    @property
    def psk(self) :
        r"""Pre shared key value.<br/>Default value: 0.
        """
        try :
            return self._psk
        except Exception as e:
            raise e

    @psk.setter
    def psk(self, psk) :
        r"""Pre shared key value.<br/>Default value: 0
        """
        try :
            self._psk = psk
        except Exception as e:
            raise e

    @property
    def randomizerdpfilename(self) :
        r"""Will generate unique filename everytime rdp file is downloaded by appending output of time() function in the format <rdpfileName>_<time>.rdp. This tries to avoid the pop-up for replacement of existing rdp file during each rdp connection launch, hence providing better end-user experience.<br/>Default value: NO<br/>Possible values = YES, NO.
        """
        try :
            return self._randomizerdpfilename
        except Exception as e:
            raise e

    @randomizerdpfilename.setter
    def randomizerdpfilename(self, randomizerdpfilename) :
        r"""Will generate unique filename everytime rdp file is downloaded by appending output of time() function in the format <rdpfileName>_<time>.rdp. This tries to avoid the pop-up for replacement of existing rdp file during each rdp connection launch, hence providing better end-user experience.<br/>Default value: NO<br/>Possible values = YES, NO
        """
        try :
            self._randomizerdpfilename = randomizerdpfilename
        except Exception as e:
            raise e

    @property
    def rdplinkattribute(self) :
        r"""Citrix Gateway allows the configuration of rdpLinkAttribute parameter which can be used to fetch a list of RDP servers(IP/FQDN) that a user can access, from an Authentication server attribute(Example: LDAP, SAML). Based on the list received, the RDP links will be generated and displayed to the user.
        Note: The Attribute mentioned in the rdpLinkAttribute should be fetched through corresponding authentication method.
        """
        try :
            return self._rdplinkattribute
        except Exception as e:
            raise e

    @rdplinkattribute.setter
    def rdplinkattribute(self, rdplinkattribute) :
        r"""Citrix Gateway allows the configuration of rdpLinkAttribute parameter which can be used to fetch a list of RDP servers(IP/FQDN) that a user can access, from an Authentication server attribute(Example: LDAP, SAML). Based on the list received, the RDP links will be generated and displayed to the user.
        Note: The Attribute mentioned in the rdpLinkAttribute should be fetched through corresponding authentication method.
        """
        try :
            self._rdplinkattribute = rdplinkattribute
        except Exception as e:
            raise e
        Based on the list received, the RDP links will be generated and displayed to the user.
        Note: The Attribute mentioned in the rdpLinkAttribute should be fetched through corresponding authentication method.
        """
        try:
            self._rdplinkattribute = rdplinkattribute
        except Exception as e:
            raise e

    @property
    def builtin(self):
        r"""Indicates that a variable is a built-in (SYSTEM INTERNAL) type.<br/>Possible values = MODIFIABLE, DELETABLE, IMMUTABLE, PARTITION_ALL.
        """
        try:
            return self._builtin
        except Exception as e:
            raise e

    @property
    def feature(self):
        r"""The feature to be checked while applying this config.
        """
        try:
            return self._feature
        except Exception as e:
            raise e

    def _get_nitro_response(self, service, response):
        r"""Converts nitro response into object and returns the object array in case of get request.
        """
        try:
            result = service.payload_formatter.string_to_resource(rdpclientprofile_response, response, self.__class__.__name__)
            if (result.errorcode != 0):
                if (result.errorcode == 444):
                    service.clear_session(self)
                if result.severity:
                    if (result.severity == "ERROR"):
                        raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
                else:
                    raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
            return result.rdpclientprofile
        except Exception as e:
            raise e

    def _get_object_name(self):
        r"""Returns the value of object identifier argument.
        """
        try:
            if self.name is not None:
                return str(self.name)
            return None
        except Exception as e:
            raise e

    @classmethod
    def filter_add_parameters(cls, resource):
        r"""Use this function to create a resource with only add operation specific parameters.
        """
        addresource = rdpclientprofile()
        addresource.name = resource.name
        addresource.rdpurloverride = resource.rdpurloverride
        addresource.redirectclipboard = resource.redirectclipboard
        addresource.redirectdrives = resource.redirectdrives
        addresource.redirectprinters = resource.redirectprinters
        addresource.redirectcomports = resource.redirectcomports
        addresource.redirectpnpdevices = resource.redirectpnpdevices
        addresource.keyboardhook = resource.keyboardhook
        addresource.audiocapturemode = resource.audiocapturemode
        addresource.videoplaybackmode = resource.videoplaybackmode
        addresource.multimonitorsupport = resource.multimonitorsupport
        addresource.rdpcookievalidity = resource.rdpcookievalidity
        addresource.addusernameinrdpfile = resource.addusernameinrdpfile
        addresource.rdpfilename = resource.rdpfilename
        addresource.rdphost = resource.rdphost
        addresource.rdplistener = resource.rdplistener
        addresource.rdpcustomparams = resource.rdpcustomparams
        addresource.psk = resource.psk
        addresource.randomizerdpfilename = resource.randomizerdpfilename
        addresource.rdplinkattribute = resource.rdplinkattribute
        return addresource

    @classmethod
    def add(cls, client, resource):
        r"""Use this API to add rdpclientprofile.
        """
        try:
            if type(resource) is not list:
                addresource = cls.filter_add_parameters(resource)
                return addresource.add_resource(client)
            else:
                if (resource and len(resource) > 0):
                    addresources = [rdpclientprofile() for _ in range(len(resource))]
                    for i in range(len(resource)):
                        addresources[i] = cls.filter_add_parameters(resource[i])
                result = cls.add_bulk_request(client, addresources)
                return result
        except Exception as e:
            raise e

    @classmethod
    def filter_update_parameters(cls, resource):
        r"""Use this function to create a resource with only update operation specific parameters.
        """
        updateresource = rdpclientprofile()
        updateresource.name = resource.name
        updateresource.rdpurloverride = resource.rdpurloverride
        updateresource.redirectclipboard = resource.redirectclipboard
        updateresource.redirectdrives = resource.redirectdrives
        updateresource.redirectprinters = resource.redirectprinters
        updateresource.redirectcomports = resource.redirectcomports
        updateresource.redirectpnpdevices = resource.redirectpnpdevices
        updateresource.keyboardhook = resource.keyboardhook
        updateresource.audiocapturemode = resource.audiocapturemode
        updateresource.videoplaybackmode = resource.videoplaybackmode
        updateresource.multimonitorsupport = resource.multimonitorsupport
        updateresource.rdpcookievalidity = resource.rdpcookievalidity
        updateresource.addusernameinrdpfile = resource.addusernameinrdpfile
        updateresource.rdpfilename = resource.rdpfilename
        updateresource.rdphost = resource.rdphost
        updateresource.rdplistener = resource.rdplistener
        updateresource.rdpcustomparams = resource.rdpcustomparams
        updateresource.psk = resource.psk
        updateresource.randomizerdpfilename = resource.randomizerdpfilename
        updateresource.rdplinkattribute = resource.rdplinkattribute
        return updateresource

    @classmethod
    def update(cls, client, resource):
        r"""Use this API to update rdpclientprofile.
        """
        try:
            if type(resource) is not list:
                updateresource = cls.filter_update_parameters(resource)
                return updateresource.update_resource(client)
            else:
                if (resource and len(resource) > 0):
                    updateresources = [rdpclientprofile() for _ in range(len(resource))]
                    for i in range(len(resource)):
                        updateresources[i] = cls.filter_update_parameters(resource[i])
                result = cls.update_bulk_request(client, updateresources)
                return result
        except Exception as e:
            raise e

    @classmethod
    def unset(cls, client, resource, args):
        r"""Use this API to unset the properties of rdpclientprofile resource.
        Properties that need to be unset are specified in args array.
        """
        try:
            if type(resource) is not list:
                unsetresource = rdpclientprofile()
                if type(resource) != type(unsetresource):
                    unsetresource.name = resource
                else:
                    unsetresource.name = resource.name
                return unsetresource.unset_resource(client, args)
            else:
                if type(resource[0]) != cls:
                    if (resource and len(resource) > 0):
                        unsetresources = [rdpclientprofile() for _ in range(len(resource))]
                        for i in range(len(resource)):
                            unsetresources[i].name = resource[i]
                else:
                    if (resource and len(resource) > 0):
                        unsetresources = [rdpclientprofile() for _ in range(len(resource))]
                        for i in range(len(resource)):
                            unsetresources[i].name = resource[i].name
                result = cls.unset_bulk_request(client, unsetresources, args)
                return result
        except Exception as e:
            raise e

    @classmethod
    def filter_delete_parameters(cls, resource):
        r"""Use this function to create a resource with only delete operation specific parameters.
        """
        deleteresource = rdpclientprofile()
        deleteresource.name = resource.name
        return deleteresource

    @classmethod
    def delete(cls, client, resource):
        r"""Use this API to delete rdpclientprofile.
        """
        try:
            if type(resource) is not list:
                deleteresource = rdpclientprofile()
                if type(resource) != type(deleteresource):
                    deleteresource.name = resource
                else:
                    deleteresource = cls.filter_delete_parameters(resource)
                return deleteresource.delete_resource(client)
            else:
                if type(resource[0]) != cls:
                    if (resource and len(resource) > 0):
                        deleteresources = [rdpclientprofile() for _ in range(len(resource))]
                        for i in range(len(resource)):
                            deleteresources[i].name = resource[i]
                else:
                    if (resource and len(resource) > 0):
                        deleteresources = [rdpclientprofile() for _ in range(len(resource))]
                        for i in range(len(resource)):
                            # Filter each list element individually.
                            deleteresources[i] = cls.filter_delete_parameters(resource[i])
                result = cls.delete_bulk_request(client, deleteresources)
                return result
        except Exception as e:
            raise e

    @classmethod
    def get(cls, client, name="", option_=""):
        r"""Use this API to fetch all the rdpclientprofile resources that are configured on netscaler.
        """
        try:
            if not name:
                obj = rdpclientprofile()
                response = obj.get_resources(client, option_)
            else:
                if type(name) is not list:
                    if type(name) == cls:
                        raise Exception('Invalid parameter name:{0}'.format(type(name)))
                    obj = rdpclientprofile()
                    obj.name = name
                    response = obj.get_resource(client, option_)
                else:
                    if name and len(name) > 0:
                        if type(name[0]) == cls:
                            raise Exception('Invalid parameter name:{0}'.format(type(name[0])))
                        response = [rdpclientprofile() for _ in range(len(name))]
                        obj = [rdpclientprofile() for _ in range(len(name))]
                        for i in range(len(name)):
                            obj[i] = rdpclientprofile()
                            obj[i].name = name[i]
                            response[i] = obj[i].get_resource(client, option_)
            return response
        except Exception as e:
            raise e

    @classmethod
    def get_filtered(cls, client, filter_):
        r"""Use this API to fetch a filtered set of rdpclientprofile resources.
        Filter string should be in JSON format, e.g.: "port:80,servicetype:HTTP".
        """
        try:
            obj = rdpclientprofile()
            option_ = options()
            option_.filter = filter_
            response = obj.getfiltered(client, option_)
            return response
        except Exception as e:
            raise e

    @classmethod
    def count(cls, client):
        r"""Use this API to count the rdpclientprofile resources configured on NetScaler.
        """
        try:
            obj = rdpclientprofile()
            option_ = options()
            option_.count = True
            response = obj.get_resources(client, option_)
            if response:
                return response[0].__dict__['___count']
            return 0
        except Exception as e:
            raise e

    @classmethod
    def count_filtered(cls, client, filter_):
        r"""Use this API to count the filtered set of rdpclientprofile resources.
        Filter string should be in JSON format, e.g.: "port:80,servicetype:HTTP".
        """
        try:
            obj = rdpclientprofile()
            option_ = options()
            option_.count = True
            option_.filter = filter_
            response = obj.getfiltered(client, option_)
            if response:
                return response[0].__dict__['___count']
            return 0
        except Exception as e:
            raise e

    class Rdpurloverride:
        ENABLE = "ENABLE"
        DISABLE = "DISABLE"

    class Keyboardhook:
        OnLocal = "OnLocal"
        OnRemote = "OnRemote"
        InFullScreenMode = "InFullScreenMode"

    class Feature:
        WL = "WL"
        WebLogging = "WebLogging"
        SP = "SP"
        SurgeProtection = "SurgeProtection"
        LB = "LB"
        LoadBalancing = "LoadBalancing"
        CS = "CS"
        ContentSwitching = "ContentSwitching"
        CR = "CR"
        CacheRedirection = "CacheRedirection"
        SC = "SC"
        SureConnect = "SureConnect"
        CMP = "CMP"
        CMPcntl = "CMPcntl"
        CompressionControl = "CompressionControl"
        PQ = "PQ"
        PriorityQueuing = "PriorityQueuing"
        HDOSP = "HDOSP"
        HttpDoSProtection = "HttpDoSProtection"
        SSLVPN = "SSLVPN"
        AAA = "AAA"
        GSLB = "GSLB"
        GlobalServerLoadBalancing = "GlobalServerLoadBalancing"
        SSL = "SSL"
        SSLOffload = "SSLOffload"
        SSLOffloading = "SSLOffloading"
        CF = "CF"
        ContentFiltering = "ContentFiltering"
        IC = "IC"
        IntegratedCaching = "IntegratedCaching"
        OSPF = "OSPF"
        OSPFRouting = "OSPFRouting"
        RIP = "RIP"
        RIPRouting = "RIPRouting"
        BGP = "BGP"
        BGPRouting = "BGPRouting"
        REWRITE = "REWRITE"
        IPv6PT = "IPv6PT"
        IPv6protocoltranslation = "IPv6protocoltranslation"
        AppFw = "AppFw"
        ApplicationFirewall = "ApplicationFirewall"
        RESPONDER = "RESPONDER"
        HTMLInjection = "HTMLInjection"
        push = "push"
        NSPush = "NSPush"
        NetScalerPush = "NetScalerPush"
        AppFlow = "AppFlow"
        CloudBridge = "CloudBridge"
        ISIS = "ISIS"
        ISISRouting = "ISISRouting"
        CH = "CH"
        CallHome = "CallHome"
        AppQoE = "AppQoE"
        ContentAccelerator = "ContentAccelerator"
        SYSTEM = "SYSTEM"
        RISE = "RISE"
        FEO = "FEO"
        LSN = "LSN"
        LargeScaleNAT = "LargeScaleNAT"
        RDPProxy = "RDPProxy"
        Rep = "Rep"
        Reputation = "Reputation"
        URLFiltering = "URLFiltering"
        VideoOptimization = "VideoOptimization"
        ForwardProxy = "ForwardProxy"
        SSLInterception = "SSLInterception"
        AdaptiveTCP = "AdaptiveTCP"
        CQA = "CQA"
        CI = "CI"
        ContentInspection = "ContentInspection"
        Bot = "Bot"
        APIGateway = "APIGateway"

    class Redirectcomports:
        ENABLE = "ENABLE"
        DISABLE = "DISABLE"

    class Builtin:
        MODIFIABLE = "MODIFIABLE"
        DELETABLE = "DELETABLE"
        IMMUTABLE = "IMMUTABLE"
        PARTITION_ALL = "PARTITION_ALL"

    class Randomizerdpfilename:
        YES = "YES"
        NO = "NO"

    class Multimonitorsupport:
        ENABLE = "ENABLE"
        DISABLE = "DISABLE"

    class Addusernameinrdpfile:
        YES = "YES"
        NO = "NO"

    class Videoplaybackmode:
        ENABLE = "ENABLE"
        DISABLE = "DISABLE"

    class Redirectclipboard:
        ENABLE = "ENABLE"
        DISABLE = "DISABLE"

    class Redirectpnpdevices:
        ENABLE = "ENABLE"
        DISABLE = "DISABLE"

    class Redirectprinters:
        ENABLE = "ENABLE"
        DISABLE = "DISABLE"

    class Audiocapturemode:
        ENABLE = "ENABLE"
        DISABLE = "DISABLE"

    class Redirectdrives:
        ENABLE = "ENABLE"
        DISABLE = "DISABLE"


class rdpclientprofile_response(base_response):
    def __init__(self, length=1):
        self.rdpclientprofile = []
        self.errorcode = 0
        self.message = ""
        self.severity = ""
        self.sessionid = ""
        self.rdpclientprofile = [rdpclientprofile() for _ in range(length)]
 |
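# A hedged usage sketch for the rdpclientprofile class above, not from the
# source: the client import path follows the standard NITRO SDK layout, and
# the appliance address and credentials are placeholders.
from nssrc.com.citrix.netscaler.nitro.service.nitro_service import nitro_service

client = nitro_service("10.0.0.1", "http")   # placeholder NetScaler address
client.login("nsroot", "nsroot")             # placeholder credentials

profile = rdpclientprofile()
profile.name = "rdp_prof_1"
profile.redirectclipboard = rdpclientprofile.Redirectclipboard.ENABLE
profile.rdpcookievalidity = 120
rdpclientprofile.add(client, profile)                  # create the profile
fetched = rdpclientprofile.get(client, "rdp_prof_1")   # fetch it back by name
client.logout()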
from django.db import models
from django.contrib.auth.models import User


# class Admin(models.Model):
#     password = models.CharField(max_length=128)
#     last_login = models.DateTimeField()
#     is_superuser = models.BooleanField()
#     first_name = models.CharField(max_length=30)
#     last_name = models.CharField(max_length=30)
#     email = models.EmailField(max_length=254)
#     is_staff = models.BooleanField()
#     is_active = models.BooleanField()
#     date_joined = models.DateTimeField()
#     username = models.CharField(max_length=30)
#
#     class Meta:
#         db_table = "auth_user"


class Statistics(models.Model):
    new_order = models.IntegerField()
    new_visitors = models.IntegerField()
    new_user = models.IntegerField()
    profit_today = models.IntegerField()

    class Meta:
        db_table = "admin_site_statistics"


class EmailEntity(models.Model):
    # on_delete is mandatory on Django 2.0+; CASCADE is an assumption here.
    user = models.ForeignKey(User, related_name='email_owner', on_delete=models.CASCADE)
    # MIME header
    mime_from = models.EmailField(max_length=128)
    mime_to = models.CharField(max_length=128)
    mime_cc = models.CharField(max_length=128)
    mime_bcc = models.CharField(max_length=128)
    mime_date = models.DateTimeField()
    mime_subject = models.CharField(max_length=128)
    mime_transfer_encoding = models.CharField(max_length=8)
    # MIME content
    content_type = models.CharField(max_length=8)

    # local status
    readed = models.BooleanField()

    class Meta:
        db_table = "admin_email_inbox"
 |
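# Hedged usage sketch for EmailEntity above: fetching a user's unread inbox,
# newest first. `some_user` is an assumed, pre-existing auth user object.
unread = (EmailEntity.objects
          .filter(user=some_user, readed=False)
          .order_by('-mime_date'))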
import abc
from typing import Type, Union
import numpy as np

from uncertainty_framework.report._report import Report as ReportBaseClass
from uncertainty_framework.report.margins import MarginReport
from uncertainty_framework.report.console import ConsoleReport
from uncertainty_framework.report.plot import PlotReport


class Simulator(abc.ABC):
    """Simulator base class"""
    def __init__(self):
        self.result = None

    @abc.abstractmethod
    def run(self, **kwargs) -> np.ndarray:
        """
        The run function has to run the actual simulation
        and needs at least to be implemented.
        It has to return a numpy array with all simulation
        results in it, which can be used by reporting
        and statistics tools.
        """
        pass

    def __call__(self, **kwargs) -> np.ndarray:
        self.result = self.run(**kwargs)
        return self.result

    def render(self, Report: Union[Type[ReportBaseClass], str] = 'console', result: np.ndarray = None, **kwargs):
        """
        Render the results with the given class.
        """
        # TODO: here we could accept some strings like 'html'
        # and so on to load default reporting classes

        # check if a result is calculated
        if result is None:
            result = self.result
        if result is None:
            raise RuntimeError("No simulation result found. Call this instance or pass the result matrix.")

        # load a predefined Report class
        if isinstance(Report, str):
            if Report.lower() == 'margin':
                Report = MarginReport
            elif Report.lower() == 'console':
                Report = ConsoleReport
            elif Report.lower() == 'plot':
                Report = PlotReport
            else:
                raise ValueError("'%s' is not a known Report option." % Report)

        # instantiate a report
        report = Report(result=result)

        # return the report
        return report(**kwargs)
 |
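# Hedged sketch: a hypothetical subclass illustrating the Simulator contract
# above (run() returns the result matrix, __call__ caches it, render() reports
# it). MonteCarloSimulator and its output shape are assumptions for this demo.
import numpy as np

class MonteCarloSimulator(Simulator):
    def __init__(self, num_iter=1000):
        super().__init__()
        self.num_iter = num_iter

    def run(self, **kwargs) -> np.ndarray:
        # one row per realisation, one column per output quantity
        return np.random.normal(size=(self.num_iter, 3))

sim = MonteCarloSimulator(num_iter=500)
result = sim()           # runs the simulation and caches the matrix
sim.render('console')    # or 'margin' / 'plot'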
from rest_framework import generics, permissions
from rest_framework import filters as filters_rf
from django_filters import rest_framework as filters

from oms_cms.backend.comments.models import OmsComment
from .serializers import OmsCommentSerializer, OmsCommentDeleteUpdateCreateSerializer


class OmsCommentListApi(generics.ListAPIView):
    """List all comments."""
    permission_classes = [permissions.AllowAny]
    queryset = OmsComment.objects.all()
    serializer_class = OmsCommentSerializer
    filter_backends = [filters.DjangoFilterBackend,
                       filters_rf.SearchFilter,
                       filters_rf.OrderingFilter]
    filter_fields = ('id', 'user', 'is_public', 'is_removed', 'published', 'tree_id',
                     'level', 'site', 'parent', 'children')
    search_fields = ['user_name', 'user_email', 'comment']
    ordering = ['id']


class OmsCommentApi(generics.RetrieveAPIView):
    """Retrieve a single comment (lookup by ID)."""
    permission_classes = [permissions.AllowAny]
    queryset = OmsComment.objects.all()
    lookup_field = 'id'
    serializer_class = OmsCommentSerializer


class OmsCommentDeleteUpdateWithId(generics.RetrieveUpdateDestroyAPIView):
    """Update or delete a comment (lookup by ID)."""
    permission_classes = [permissions.DjangoModelPermissions]
    queryset = OmsComment.objects.all()
    lookup_field = 'id'
    serializer_class = OmsCommentDeleteUpdateCreateSerializer


class OmsCommentCreate(generics.CreateAPIView):
    """Create a comment."""
    permission_classes = [permissions.DjangoModelPermissions]
    serializer_class = OmsCommentDeleteUpdateCreateSerializer
    queryset = OmsComment.objects.none()  # Required for DjangoModelPermissions
 |
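# Hedged example: one plausible way to route the views above in a urls.py.
# The URL patterns themselves are assumptions; only lookup_field='id' (hence
# the <int:id> converter) comes from the source.
from django.urls import path

urlpatterns = [
    path('comments/', OmsCommentListApi.as_view()),
    path('comments/create/', OmsCommentCreate.as_view()),
    path('comments/<int:id>/', OmsCommentApi.as_view()),
    path('comments/<int:id>/edit/', OmsCommentDeleteUpdateWithId.as_view()),
]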
# Copyright (c) 2015 Jaime van Kessel, Ultimaker B.V.
# The PostProcessingPlugin is released under the terms of the AGPLv3 or higher.
from PyQt5.QtCore import QObject, pyqtProperty, pyqtSignal, pyqtSlot

from UM.PluginRegistry import PluginRegistry
from UM.Resources import Resources
from UM.Application import Application
from UM.Extension import Extension
from UM.Logger import Logger

import configparser  # The script lists are stored in metadata as serialised config files.
import io  # To allow configparser to write to a string.
import os.path
import pkgutil
import sys
import importlib.util

from UM.i18n import i18nCatalog
i18n_catalog = i18nCatalog("cura")


## The post processing plugin is an Extension type plugin that enables pre-written scripts to post process generated
#  g-code files.
class PostProcessingPlugin(QObject, Extension):
    def __init__(self, parent = None):
        super().__init__(parent)
        self.addMenuItem(i18n_catalog.i18n("Modify G-Code"), self.showPopup)
        self._view = None

        # Loaded scripts are all scripts that can be used
        self._loaded_scripts = {}
        self._script_labels = {}

        # Script list contains instances of scripts in loaded_scripts.
        # There can be duplicates, which will be executed in sequence.
        self._script_list = []
        self._selected_script_index = -1

        Application.getInstance().getOutputDeviceManager().writeStarted.connect(self.execute)
        Application.getInstance().globalContainerStackChanged.connect(self._onGlobalContainerStackChanged)  # When the current printer changes, update the list of scripts.
        Application.getInstance().mainWindowChanged.connect(self._createView)  # When the main window is created, create the view so that we can display the post-processing icon if necessary.

    selectedIndexChanged = pyqtSignal()

    @pyqtProperty("QVariant", notify = selectedIndexChanged)
    def selectedScriptDefinitionId(self):
        try:
            return self._script_list[self._selected_script_index].getDefinitionId()
        except:
            return ""

    @pyqtProperty("QVariant", notify = selectedIndexChanged)
    def selectedScriptStackId(self):
        try:
            return self._script_list[self._selected_script_index].getStackId()
        except:
            return ""

    ## Execute all post-processing scripts on the gcode.
    def execute(self, output_device):
        scene = Application.getInstance().getController().getScene()
        # If the scene does not have a gcode, do nothing
        if not hasattr(scene, "gcode_dict"):
            return
        gcode_dict = getattr(scene, "gcode_dict")
        if not gcode_dict:
            return

        # get gcode list for the active build plate
        active_build_plate_id = Application.getInstance().getMultiBuildPlateModel().activeBuildPlate
        gcode_list = gcode_dict[active_build_plate_id]
        if not gcode_list:
            return

        if ";POSTPROCESSED" not in gcode_list[0]:
            for script in self._script_list:
                try:
                    gcode_list = script.execute(gcode_list)
                except Exception:
                    Logger.logException("e", "Exception in post-processing script.")
            if len(self._script_list):  # Add comment to g-code if any changes were made.
                gcode_list[0] += ";POSTPROCESSED\n"
            gcode_dict[active_build_plate_id] = gcode_list
            setattr(scene, "gcode_dict", gcode_dict)
        else:
            Logger.log("e", "Already post processed")

    @pyqtSlot(int)
    def setSelectedScriptIndex(self, index):
        self._selected_script_index = index
        self.selectedIndexChanged.emit()

    @pyqtProperty(int, notify = selectedIndexChanged)
    def selectedScriptIndex(self):
        return self._selected_script_index

    @pyqtSlot(int, int)
    def moveScript(self, index, new_index):
        if new_index < 0 or new_index > len(self._script_list) - 1:
            return  # nothing needs to be done
        else:
            # Magical switch code.
            self._script_list[new_index], self._script_list[index] = self._script_list[index], self._script_list[new_index]
            self.scriptListChanged.emit()
            self.selectedIndexChanged.emit()  # Ensure that settings are updated
            self._propertyChanged()

    ## Remove a script from the active script list by index.
    @pyqtSlot(int)
    def removeScriptByIndex(self, index):
        self._script_list.pop(index)
        if len(self._script_list) - 1 < self._selected_script_index:
            self._selected_script_index = len(self._script_list) - 1
        self.scriptListChanged.emit()
        self.selectedIndexChanged.emit()  # Ensure that settings are updated
        self._propertyChanged()

    ## Load all scripts from all paths where scripts can be found.
    #
    #  This should probably only be done on init.
    def loadAllScripts(self):
        if self._loaded_scripts:  # Already loaded.
            return

        # The PostProcessingPlugin path is for built-in scripts.
        # The Resources path is where the user should store custom scripts.
        # The Preferences path is legacy, where the user may previously have stored scripts.
        for root in [PluginRegistry.getInstance().getPluginPath("PostProcessingPlugin"), Resources.getStoragePath(Resources.Resources), Resources.getStoragePath(Resources.Preferences)]:
            path = os.path.join(root, "scripts")
            if not os.path.isdir(path):
                try:
                    os.makedirs(path)
                except OSError:
                    Logger.log("w", "Unable to create a folder for scripts: " + path)
                    continue

            self.loadScripts(path)

    ## Load all scripts from provided path.
    #  This should probably only be done on init.
    #  \param path Path to check for scripts.
    def loadScripts(self, path):
        ## Load all scripts in the scripts folders
        scripts = pkgutil.iter_modules(path = [path])
        for loader, script_name, ispkg in scripts:
            # Iterate over all scripts.
            if script_name not in sys.modules:
                try:
                    spec = importlib.util.spec_from_file_location(__name__ + "." + script_name, os.path.join(path, script_name + ".py"))
                    loaded_script = importlib.util.module_from_spec(spec)
                    spec.loader.exec_module(loaded_script)
                    sys.modules[script_name] = loaded_script  # TODO: This could be a security risk. Overwrite any module with a user-provided name?

                    loaded_class = getattr(loaded_script, script_name)
                    temp_object = loaded_class()
                    Logger.log("d", "Begin loading of script: %s", script_name)
                    try:
                        setting_data = temp_object.getSettingData()
                        if "name" in setting_data and "key" in setting_data:
                            self._script_labels[setting_data["key"]] = setting_data["name"]
                            self._loaded_scripts[setting_data["key"]] = loaded_class
                        else:
                            Logger.log("w", "Script %s.py has no name or key", script_name)
                            self._script_labels[script_name] = script_name
                            self._loaded_scripts[script_name] = loaded_class
                    except AttributeError:
                        Logger.log("e", "Script %s.py is not a recognised script type. Ensure it inherits Script", script_name)
                    except NotImplementedError:
                        Logger.log("e", "Script %s.py has no implemented settings", script_name)
                except Exception as e:
                    Logger.logException("e", "Exception occurred while loading post processing plugin: {error_msg}".format(error_msg = str(e)))

    loadedScriptListChanged = pyqtSignal()

    @pyqtProperty("QVariantList", notify = loadedScriptListChanged)
    def loadedScriptList(self):
        return sorted(list(self._loaded_scripts.keys()))

    @pyqtSlot(str, result = str)
    def getScriptLabelByKey(self, key):
        return self._script_labels[key]

    scriptListChanged = pyqtSignal()

    @pyqtProperty("QVariantList", notify = scriptListChanged)
    def scriptList(self):
        script_list = [script.getSettingData()["key"] for script in self._script_list]
        return script_list

    @pyqtSlot(str)
    def addScriptToList(self, key):
        Logger.log("d", "Adding script %s to list.", key)
        new_script = self._loaded_scripts[key]()
        self._script_list.append(new_script)
        self.setSelectedScriptIndex(len(self._script_list) - 1)
        self.scriptListChanged.emit()
        self._propertyChanged()

    ## When the global container stack is changed, swap out the list of active
    #  scripts.
    def _onGlobalContainerStackChanged(self):
        self.loadAllScripts()
        new_stack = Application.getInstance().getGlobalContainerStack()
        self._script_list.clear()
        if not new_stack.getMetaDataEntry("post_processing_scripts"):  # Missing or empty.
            self.scriptListChanged.emit()  # Even emit this if it didn't change. We want it to write the empty list to the stack's metadata.
            return

        self._script_list.clear()
        scripts_list_strs = new_stack.getMetaDataEntry("post_processing_scripts")
        for script_str in scripts_list_strs.split("\n"):  # Encoded config files should never contain three newlines in a row. At most 2, just before section headers.
            if not script_str:  # There were no scripts in this one (or a corrupt file caused more than 3 consecutive newlines here).
                continue
            script_str = script_str.replace("\\n", "\n").replace("\\\\", "\\")  # Unescape escape sequences.
            script_parser = configparser.ConfigParser(interpolation = None)
            script_parser.optionxform = str  # Don't transform the setting keys as they are case-sensitive.
            script_parser.read_string(script_str)
            for script_name, settings in script_parser.items():  # There should only be one, really! Otherwise we can't guarantee the order or allow multiple uses of the same script.
                if script_name == "DEFAULT":  # ConfigParser always has a DEFAULT section, but we don't fill it. Ignore this one.
                    continue
                if script_name not in self._loaded_scripts:  # Don't know this post-processing plug-in.
                    Logger.log("e", "Unknown post-processing script {script_name} was encountered in this global stack.".format(script_name = script_name))
                    continue
                new_script = self._loaded_scripts[script_name]()
                for setting_key, setting_value in settings.items():  # Put all setting values into the script.
                    new_script._instance.setProperty(setting_key, "value", setting_value)
                self._script_list.append(new_script)

        self.setSelectedScriptIndex(0)
        self.scriptListChanged.emit()

    @pyqtSlot()
    def writeScriptsToStack(self):
        script_list_strs = []
        for script in self._script_list:
            parser = configparser.ConfigParser(interpolation = None)  # We'll encode the script as a config with one section. The section header is the key and its values are the settings.
            parser.optionxform = str  # Don't transform the setting keys as they are case-sensitive.
            script_name = script.getSettingData()["key"]
            parser.add_section(script_name)
            for key in script.getSettingData()["settings"]:
                value = script.getSettingValueByKey(key)
                parser[script_name][key] = str(value)
            serialized = io.StringIO()  # ConfigParser can only write to streams. Fine.
            parser.write(serialized)
            serialized.seek(0)
            script_str = serialized.read()
            script_str = script_str.replace("\\", "\\\\").replace("\n", "\\n")  # Escape newlines because configparser sees those as section delimiters.
            script_list_strs.append(script_str)

        script_list_strs = "\n".join(script_list_strs)  # ConfigParser should never output three newlines in a row when serialised, so it's a safe delimiter.

        global_stack = Application.getInstance().getGlobalContainerStack()
        if "post_processing_scripts" not in global_stack.getMetaData():
            global_stack.addMetaDataEntry("post_processing_scripts", "")
        Application.getInstance().getGlobalContainerStack().setMetaDataEntry("post_processing_scripts", script_list_strs)

    ## Creates the view used by show popup. The view is saved because of the fairly aggressive garbage collection.
    def _createView(self):
        Logger.log("d", "Creating post processing plugin view.")

        self.loadAllScripts()

        # Create the plugin dialog component
        path = os.path.join(PluginRegistry.getInstance().getPluginPath("PostProcessingPlugin"), "PostProcessingPlugin.qml")
        self._view = Application.getInstance().createQmlComponent(path, {"manager": self})
        Logger.log("d", "Post processing view created.")

        # Create the save button component
        Application.getInstance().addAdditionalComponent("saveButton", self._view.findChild(QObject, "postProcessingSaveAreaButton"))

    ## Show the (GUI) popup of the post processing plugin.
    def showPopup(self):
        if self._view is None:
            self._createView()
        self._view.show()

    ## Property changed: trigger re-slice
    #  To do this we use the global container stack propertyChanged.
    #  Re-slicing is necessary for setting changes in this plugin, because the changes
    #  are applied only once per "fresh" gcode
    def _propertyChanged(self):
        global_container_stack = Application.getInstance().getGlobalContainerStack()
        global_container_stack.propertyChanged.emit("post_processing_plugin", "value")
 |
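# Hedged illustration of the metadata round-trip used above: each script is
# encoded as one configparser section, then escaped so the whole list can be
# newline-joined. The script key "PauseAtHeight" and its setting are
# hypothetical, only the escape/unescape logic mirrors the plugin.
import configparser, io

parser = configparser.ConfigParser(interpolation=None)
parser.add_section("PauseAtHeight")
parser["PauseAtHeight"]["pause_height"] = "5.0"
buf = io.StringIO()
parser.write(buf)
escaped = buf.getvalue().replace("\\", "\\\\").replace("\n", "\\n")    # as writeScriptsToStack()
restored = escaped.replace("\\n", "\n").replace("\\\\", "\\")          # as _onGlobalContainerStackChanged()
assert restored == buf.getvalue()   # the round trip is lossless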
from discord.ext import commands
import discord
import requests
from bs4 import BeautifulSoup
import re


class neorg_cmds(commands.Cog):

    def __init__(self, bot):
        self.bot = bot

    @commands.command()
    async def wiki(self, ctx, *, query):
        query = query.strip().lower().replace(' ', '-')
        neorg_wiki = {}
        wiki_url = "https://github.com/vhyrro/neorg/wiki"

        stuff = BeautifulSoup(requests.get(wiki_url).text, 'lxml')
        lis = stuff.find_all("div", {"class": "Box-body wiki-custom-sidebar markdown-body"})[0]

        for li in lis.find_all('li'):
            part = li.a['href']
            neorg_wiki[part[37:].lower()] = part

        wiki = [neorg_wiki[k] for k in neorg_wiki.keys() if query in k.lower()]

        if len(wiki) == 0:
            await ctx.send(embed=discord.Embed(description="No Results Found!", colour=0x4878BE))
            return

        for i in wiki:
            em = discord.Embed(description=i, colour=0x4878BE)
            await ctx.send(embed=em)

    @commands.command()
    async def spec(self, ctx, *, query):
        query = query.strip().lower().replace(' ', '-')
        url = "https://raw.githubusercontent.com/vhyrro/neorg/main/docs/NFF-0.1-spec.md"
        og_url = "https://github.com/vhyrro/neorg/blob/main/docs/NFF-0.1-spec.md"

        soup = re.findall(r"\[(.+)\]\((.+)\)", requests.get(url).text[:1500])
        neorg_specs = {}

        for k, v in soup:
            neorg_specs[k.lower().replace(' ', '-')] = og_url + v

        spec = [neorg_specs[k] for k in neorg_specs.keys() if query in k.lower()]

        if len(spec) == 0:
            await ctx.send(embed=discord.Embed(description="No Results Found!", colour=0x4878BE))
            return

        for i in spec:
            em = discord.Embed(description=i, colour=0x4878BE)
            await ctx.send(embed=em)

    @commands.command(aliases=["norg"])
    async def neorg(self, ctx):
        """Fetch the Neorg repository"""
        await ctx.send("Neorg - https://github.com/vhyrro/neorg")


def setup(bot):
    bot.add_cog(neorg_cmds(bot))
 |
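# Hedged usage sketch: wiring the cog above into a bot entry point. It assumes
# the file lives at cogs/neorg_cmds.py and a discord.py 1.x style bot; the
# prefix and token are placeholders.
from discord.ext import commands

bot = commands.Bot(command_prefix='!')
bot.load_extension('cogs.neorg_cmds')   # calls setup(bot) in the module above
bot.run('YOUR_BOT_TOKEN')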
#!/usr/bin/python
#
# pybuddyDX
# python e-buddy (ibuddy alike sold on DX) daemon
# http://code.google.com/p/pybuddyDX
#
# protocol reverse engineered and implemented by
# [email protected]
#
# borrows code from http://code.google.com/p/pybuddy
# by [email protected] and [email protected]
# who got most of the code from http://cuntography.com/blog/?p=17
# Which is based on http://scott.weston.id.au/software/pymissile/

import usb
import time
import sys
import socket
import os
import pwd
import logging
from ConfigParser import RawConfigParser

################
# Commands
################
# GLADNESS = 00
# FEAR = 01
# FIZZ = 02
# PLEASANTSURPRISE = 03
# GRIEF = 04
# FURY = 05
# QUELL = 06
# REDHEAD = 07
# GREENHEAD = 08
# BLUEHEAD = 09
# YELLOWHEAD = 10
# BLAME = 11
# BLUEGREENHEAD = 12
# WHITEHEAD = 13
# HEART = 14
# WINGS = 15
# BODY = 16
# NOEFFECT = 17
# ONLINE = 18
# BUSY = 19
# DAZE = 20
# BACKSOON = 21
# AWAY = 22
# PHONE = 23
# LUNCH = 24
# OFFLINE = 25

################
# Configuration
################
tsleep = 0.1


################
# IBUDDY class
################

class BuddyDevice:
    SETUP = (0x21, 0x09, 0x00, 0x02, 0x00, 0x00, 0x04, 0x00)
    MESS = (0x43, 0x4D)

    OFF1 = 0x31
    OFF2 = 0x37

    code1 = OFF1
    code2 = OFF2

    def __init__(self):
        try:
            self.dev = UsbDevice(0x0c45, 0x11)
            self.dev.open()
            self.dev.handle.reset()
            self.resetMessage()
        except NoBuddyException, e:
            raise NoBuddyException()

    def resetMessage(self):
        # Reset both command bytes to the OFF state before sending.
        self.code1 = self.OFF1
        self.code2 = self.OFF2
        self.send()

    def send(self):
        try:
            self.dev.handle.controlMsg(0x21, 0x09, self.SETUP, 0x0200, 0x00)
            self.dev.handle.controlMsg(0x21, 0x09, self.MESS + (self.code1, self.code2), 0x0200, 0x00)
        except usb.USBError:
            log.info("Error sending USB command")
            raise NoBuddyException()

#####################
# USB class
######################

class UsbDevice:
    def __init__(self, vendor_id, product_id):
        busses = usb.busses()
        self.handle = None
        for bus in busses:
            devices = bus.devices
            for dev in devices:
                if dev.idVendor == vendor_id and dev.idProduct == product_id:
                    log.info("DX e-buddy found!")
#                    log.info("vend %s prod %s", dev.idVendor, dev.idProduct)
                    self.dev = dev
                    self.conf = self.dev.configurations[0]
                    self.intf = self.conf.interfaces[0][0]
                    self.endpoints = []
#                    log.info("interface = %x, class = %s, protocol = %s", self.intf.interfaceNumber, self.intf.interfaceClass, self.intf.interfaceProtocol)
                    for endpoint in self.intf.endpoints:
                        self.endpoints.append(endpoint)
#                        log.info("endpoint number = %x, type = %s", endpoint.address, endpoint.type)
                    return
        raise NoBuddyException()

    def open(self):
        if self.handle:
            self.handle = None
        self.handle = self.dev.open()

#        if self.handle:
#            log.info("Handle OK")

        # We need to detach HID interface
        try:
            self.handle.detachKernelDriver(0)
            self.handle.detachKernelDriver(1)
        except:
            pass

        try:
            self.handle.setConfiguration(self.conf)
            self.handle.claimInterface(0)
            self.handle.setAltInterface(0)
        except:
            log.info("Configuration failed")
            raise NoBuddyException()

#        log.info("Device opened OK")

class NoBuddyException(Exception): pass


#########################################
# Decoding macros
##########################################


def decode_buddy(buddy, msg):
#    log.info("Received message: %s", msg)
    buddy.code1 = int(msg)/10 + 0x30
    buddy.code2 = int(msg) - (int(msg)/10)*10 + 0x30
#    log.info("Codes: %x %x", buddy.code1, buddy.code2)

#######################################
# MAIN program
#######################################

log = logging.getLogger('pybuddy')

# Default config
config = RawConfigParser(
    { 'port': 8888,
      'address': '127.0.0.1',
      'user': 'nobody',
      'loglevel': 'info',
      'logfile': 'console',
    }
)

config._sections = {'network': {}, 'system': {}}

config_files = [ "~/.pybuddy.cfg",
                 "/etc/pybuddy/pybuddy.cfg",
                 "/usr/local/etc/pybuddy.cfg"
]

# Parse config
if len(sys.argv) > 1:
    config_files.append(sys.argv[1])

config_read = config.read(config_files)

if config.get("system", "logfile") != "console":
    logging.basicConfig(
        filename=config.get("system", "logfile"),
        format='%(asctime)s %(levelname)-8s %(message)s',
    )
else:
    logging.basicConfig(
        stream=sys.stderr,
        format='%(asctime)s %(levelname)-8s %(message)s',
    )


if config.get("system", "loglevel") == "debug":
    log.setLevel(logging.DEBUG)
elif config.get("system", "loglevel") == "info":
    log.setLevel(logging.INFO)


if config_read:
    log.info("Read config file: %s", config_read[0])

# Initialize device
log.info("Searching e-buddy...")
try:
    buddy = BuddyDevice()
except NoBuddyException, e:
    log.error("Not found or ERROR!")
    sys.exit(1)


# Daemonize
log.info("Starting daemon...")
if os.fork() == 0:
    os.setsid()
else:
    sys.exit(0)

# Create server socket
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind((config.get("network", "address"), int(config.get("network", "port"))))

# Drop privileges
try:
    uid = pwd.getpwnam(config.get("system", "user"))[2]
except KeyError:
    log.error("Username %s not found, exiting...", config.get("system", "user"))
    sys.exit(1)
os.setuid(uid)


# Main message loop
while 1:
    try:
        message, address = s.recvfrom(8192)
#        log.debug("Got data from %s", address)
        decode_buddy(buddy, message)
        buddy.send()
    except (KeyboardInterrupt, SystemExit):
        raise
 |
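# Hedged client sketch: decode_buddy() above parses each UDP datagram as a
# plain decimal command code (see the command table at the top of the daemon),
# so a minimal client is a one-shot UDP send. Host and port are the daemon's
# documented defaults.
import socket

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.sendto(b"14", ("127.0.0.1", 8888))   # 14 = HEART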
import numpy as np
import matplotlib.pyplot as plt

# plt.style.use("dark_background")

from matplotlib.patches import Polygon
from matplotlib.collections import PatchCollection
from matplotlib.collections import PolyCollection

from mpl_toolkits.axes_grid1 import AxesGrid

# import mpl_toolkits
# = mpl_toolkits.legacy_colorbar
# legacy_colorbar.rcParam = False

from math import acos
from math import degrees
from math import fabs

from sklearn.cluster import KMeans
from sklearn.cluster import SpectralClustering

from directional_clustering.geometry import clockwise
from directional_clustering.geometry import laplacian_smoothed
from directional_clustering.geometry import cosine_similarity
from directional_clustering.geometry import contour_polygons

from directional_clustering.clusters import kmeans_fit
from directional_clustering.clusters import init_kmeans_farthest
from directional_clustering.clusters import _kmeans

from directional_clustering.plotters import ClusterPlotter
from directional_clustering.plotters import rgb_colors
from directional_clustering.plotters import plot_kmeans_vectors

from compas.datastructures import Mesh
from compas.datastructures import mesh_unify_cycles

from compas.geometry import dot_vectors
from compas.geometry import scale_vector
from compas.geometry import normalize_vector
from compas.geometry import length_vector
from compas.geometry import angle_vectors
from compas.geometry import length_vector_sqrd
from compas.geometry import subtract_vectors

from compas.utilities import geometric_key

# =============================================================================
# Constants
# =============================================================================

tags = [
    "n_1",
    "n_2",
    "m_1",
    "m_2",
    "ps_1_top",
    "ps_1_bot",
    "ps_1_mid",
    "ps_2_top",
    "ps_2_bot",
    "ps_2_mid",
    "custom_1",
    "custom_2"
    ]


THERE = "/Users/arpj/code/libraries/libigl/tutorial/508_ARP_MIQ/"

# HERE = "../data/json_files/two_point_wall"  # leonhardt
# HERE = "../data/json_files/wall_with_hole"  # schlaich
# HERE = "../data/json_files/cantilever_wall_3_1"  # rozvany?
# HERE = "../data/json_files/square_wall_cantilever"  # michell
# HERE = "../data/json_files/square_wall_down"  # schlaich
# HERE = "../data/json_files/perimeter_supported_slab"

HERE = "../data/json_files/four_point_slab"
# HERE = "../data/json_files/four_point_slab_k_7"
# HERE = "../data/json_files/perimeter_supported_slab_k_5"
# HERE = "../data/json_files/perimeter_supported_slab"
HERE = "../data/json_files/perimeter_supported_vault_z500mm_k_3"  # vault (overrides the slab path above)

tag = "n_1_k"
tag_2 = "n_2_k"

# tag = "m_1_k"
# tag_2 = "m_2_k"

x_lim = -10.0  # faces stay if x coord of their centroid is larger than x_lim
y_lim = -10.0  # faces stay if y coord of their centroid is larger than y_lim

# =============================================================================
# Import mesh
# =============================================================================

name = HERE.split("/").pop()
mesh = Mesh.from_json(HERE + ".json")
mesh_unify_cycles(mesh)

# ==========================================================================
# Store subset attributes
# ==========================================================================

centroids = {}
vectors = {}
vectors_2 = {}

for fkey in mesh.faces():
    centroids[geometric_key(mesh.face_centroid(fkey))] = fkey
    vectors[fkey] = mesh.face_attribute(fkey, tag)
    vectors_2[fkey] = mesh.face_attribute(fkey, tag_2)

# ==========================================================================
# Rebuild mesh - necessary to match ordering of collection.set(array)!
# ==========================================================================

polygons = []
for fkey in mesh.faces():
    x, y, z = mesh.face_centroid(fkey)
    if x >= x_lim and y >= y_lim:
        polygons.append(mesh.face_coordinates(fkey))

mesh = Mesh.from_polygons(polygons)
mesh_unify_cycles(mesh)

for fkey in mesh.faces():
    gkey = geometric_key(mesh.face_centroid(fkey))
    ofkey = centroids[gkey]
    vector = vectors[ofkey]
    vector_2 = vectors_2[ofkey]
    mesh.face_attribute(fkey, tag, vector)
    mesh.face_attribute(fkey, tag_2, vector_2)

# =============================================================================
# Export vertices and faces
# =============================================================================

vertices, faces = mesh.to_vertices_and_faces()

V = np.array(vertices)
print("V shape: ", V.shape)
print("V first row: {}".format(V[0, :]))
print("V last row: {}".format(V[-1, :]))

F = np.array(faces)
print("F shape: ", F.shape)
print("F first row: {}".format(F[0, :]))
print("F last row: {}".format(F[-1, :]))

np.savetxt(THERE + "vertices.txt", V, fmt="%1.6f", delimiter=" ", encoding=None)
np.savetxt(THERE + "faces.txt", F, fmt="%d", delimiter=" ", encoding=None)

# # =============================================================================
# # Export edges on boundary
# # =============================================================================

# E = np.array(mesh.edges_on_boundary())
# print("E shape: ", E.shape)
# print("E first row: {}".format(E[0,:]))
# print("E last row: {}".format(E[-1,:]))

# np.savetxt(THERE + "edges_boundary.txt", E, fmt="%d", delimiter=" ", encoding=None)

# # =============================================================================
# # Export vertices on boundary
# # =============================================================================

# B = np.array(mesh.vertices_on_boundary())
# print("B shape: ", B.shape)
# print("B first row: {}".format(B[0]))
# print("B last row: {}".format(B[-1]))

# np.savetxt(THERE + "vertices_boundary.txt", E, fmt="%d", delimiter=" ", encoding=None)

# =============================================================================
# Principal stress directions
# =============================================================================

ps1 = mesh.faces_attribute(name=tag, keys=mesh.faces())
ps1 = [normalize_vector(vector) for vector in ps1]

PS1 = np.array(ps1)
print("PS1 shape: ", PS1.shape)
print("PS1 first row: {}".format(PS1[0, :]))
print("PS1 last row: {}".format(PS1[-1, :]))

ps2 = mesh.faces_attribute(name=tag_2, keys=mesh.faces())
ps2 = [normalize_vector(vector) for vector in ps2]

PS2 = np.array(ps2)
print("PS2 shape: ", PS2.shape)
print("PS2 first row: {}".format(PS2[0, :]))
print("PS2 last row: {}".format(PS2[-1, :]))

np.savetxt(THERE + "ps1.txt", PS1, fmt="%1.6f", delimiter=" ", encoding=None)
np.savetxt(THERE + "ps2.txt", PS2, fmt="%1.6f", delimiter=" ", encoding=None)

print("Dot product first row PS1 - PS2: {}".format(np.dot(PS1[0, :], PS2[0, :].T)))
 |
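# Hedged sketch: loading the exported arrays back (e.g. on the libigl side of
# the pipeline), mirroring the np.savetxt calls above. The consistency checks
# are illustrative assumptions about the exported data.
V = np.loadtxt(THERE + "vertices.txt", dtype=float)
F = np.loadtxt(THERE + "faces.txt", dtype=int)
PS1 = np.loadtxt(THERE + "ps1.txt", dtype=float)
PS2 = np.loadtxt(THERE + "ps2.txt", dtype=float)
assert F.max() < V.shape[0]        # faces index into the vertex list
assert PS1.shape[0] == F.shape[0]  # one direction vector per face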
from checkov.common.models.enums import CheckResult, CheckCategories
from checkov.cloudformation.checks.resource.base_resource_value_check import BaseResourceValueCheck


class ElasticacheReplicationGroupEncryptionAtRest(BaseResourceValueCheck):
    def __init__(self):
        name = "Ensure all data stored in the Elasticache Replication Group is securely encrypted at rest"
        id = "CKV_AWS_29"
        supported_resources = ['AWS::ElastiCache::ReplicationGroup']
        categories = [CheckCategories.ENCRYPTION]
        super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)

    def get_inspected_key(self):
        return 'Properties/AtRestEncryptionEnabled'


check = ElasticacheReplicationGroupEncryptionAtRest()
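
# Illustrative only (not from the Checkov sources): a CloudFormation template
# fragment this check would evaluate. The logical id is hypothetical; the check
# passes when the key returned by get_inspected_key() is true and fails when it
# is false or missing.
#
#   Resources:
#     ExampleReplicationGroup:
#       Type: AWS::ElastiCache::ReplicationGroup
#       Properties:
#         ReplicationGroupDescription: "example group"
#         AtRestEncryptionEnabled: true   # <- Properties/AtRestEncryptionEnabled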
"""NEWLINETurner, Mann, Clandinin:NEWLINENEWLINEhttps://github.com/mhturner/[email protected]"""NEWLINEfrom scfc import bridge, anatomical_connectivityNEWLINEimport osNEWLINENEWLINEdata_dir = bridge.getUserConfiguration()['data_dir']NEWLINENEWLINEinclude_inds_branson, name_list_branson = bridge.getBransonNames()NEWLINEBranson_JRC2018 = anatomical_connectivity.getAtlasConnectivity(include_inds_branson, name_list_branson, 'branson')NEWLINENEWLINE# Shortest path distance:NEWLINEshortest_path_dist = bridge.getShortestPathStats(Branson_JRC2018)NEWLINENEWLINE# saveNEWLINEshortest_path_dist.to_pickle(os.path.join(data_dir, 'Branson_ShortestPathDistance.pkl'))NEWLINE |
from hp_model import operation
import matplotlib.pyplot as plt


init = 0
state = 0
i = 0

cp1 = []
cp2 = []
erp = []
pu = []
efflist = []
last_state = 0
temperature = []
state_list = []
Q_list = []
while 1:
    if i >= 30:
        state = 0
    state_list.append(state)
    last_q = init
    Q, P_total, COP, P, eff, T, current_q = operation(state, last_q, 'Heater')
    cp1.append(P[0])
    cp2.append(P[1])
    erp.append(P[2])
    pu.append(P[3])
    efflist.append(eff)
    temperature.append(T)
    Q_list.append(current_q)
    i += 1
    init = Q
    last_state = state
    state = 100
    if i == 50:
        break

plt.figure()
plt.plot(cp1, label='cp1')
plt.plot(cp2, label='cp2')
plt.plot(erp, label='erp')
plt.plot(pu, label='pu')
plt.legend()
plt.xlabel('Minutes')
plt.ylabel('Power')
plt.show()

plt.figure()
plt.plot(efflist)
plt.xlabel('Minutes')
plt.ylabel('Efficiency')
plt.show()

plt.figure()
plt.plot(temperature)
plt.xlabel('Minutes')
plt.ylabel('Temperature')
plt.show()

plt.figure()
plt.plot(state_list)
plt.xlabel('Minutes')
plt.ylabel('State')
plt.show()

plt.figure()
plt.plot(Q_list)
plt.xlabel('Minutes')
plt.ylabel('Q')
plt.show()

print("Thermal input to the Room: %10.3f" % Q + "\n",
      "Total power consumption: %10.3f" % P_total + "\n",
      "Energy efficiency of heat pump: %10.3f" % COP)
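
# The five figure blocks above repeat one pattern; a small helper (sketch, not
# in the original) would collapse the repetition:
def plot_series(series, ylabel, labels=None):
    """Plot one minute-indexed series, or several labelled ones, then show."""
    plt.figure()
    if labels:
        for data, label in zip(series, labels):
            plt.plot(data, label=label)
        plt.legend()
    else:
        plt.plot(series)
    plt.xlabel('Minutes')
    plt.ylabel(ylabel)
    plt.show()

# e.g.:
# plot_series([cp1, cp2, erp, pu], 'Power', labels=['cp1', 'cp2', 'erp', 'pu'])
# plot_series(efflist, 'Efficiency')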
from .companies import CompanyView, CompaniesView
from .employees import (
    EmployeeView, EmployeesView, EmployeeProductsView,
    EmployeeProductDeleteView
)
from .products import ProductView, ProductsView


HANDLERS = (
    CompanyView, CompaniesView,
    EmployeeView, EmployeesView, EmployeeProductsView,
    EmployeeProductDeleteView,
    ProductView, ProductsView
)
# Project:      hardInfo
# Author:       George Keith Watson
# Date Started: March 18, 2022
# Copyright:    (c) Copyright 2022 George Keith Watson
# Module:       model/Installation.py
# Date Started: March 20, 2022
# Purpose:      Store installation location and other local details.
# Development:
#   To run this program locally having decompressed the source archive without errors,
#   certain constants in this file must be changed.
#   INSTALLATION_FOLDER must be the folder that hardInfo.py is located in. This will be the root
#   of the source tree.
#

DATA_FOLDER = "/home/keithcollins/PycharmProjects/CommonData/"
INSTALLATION_FOLDER = '/home/keithcollins/PycharmProjects/hardInfo/'
LSHW_JSON_FILE = 'lshw.json'
from __future__ import print_function
import unittest
import SimPEG.dask
from SimPEG import (
    directives,
    maps,
    inverse_problem,
    optimization,
    data_misfit,
    inversion,
    utils,
    regularization,
)

from discretize.utils import meshutils

import shutil

# import SimPEG.PF as PF
from SimPEG.potential_fields import magnetics as mag
import numpy as np


class MagInvLinProblemTest(unittest.TestCase):
    def setUp(self):

        np.random.seed(0)

        # First we need to define the direction of the inducing field.
        # As a simple case, we pick a vertical inducing field of magnitude
        # 50,000 nT.
        # From old convention, field orientation is given as an
        # azimuth from North (positive clockwise)
        # and dip from the horizontal (positive downward).
        H0 = (50000.0, 90.0, 0.0)

        # Create a mesh
        h = [5, 5, 5]
        padDist = np.ones((3, 2)) * 100
        nCpad = [2, 4, 2]

        # Create grid of points for topography.
        # Lets create a simple Gaussian topo and set the active cells
        [xx, yy] = np.meshgrid(
            np.linspace(-200.0, 200.0, 50), np.linspace(-200.0, 200.0, 50)
        )

        b = 100
        A = 50
        zz = A * np.exp(-0.5 * ((xx / b) ** 2.0 + (yy / b) ** 2.0))

        # We would usually load a topofile
        topo = np.c_[utils.mkvc(xx), utils.mkvc(yy), utils.mkvc(zz)]

        # Create an array of observation points
        xr = np.linspace(-100.0, 100.0, 20)
        yr = np.linspace(-100.0, 100.0, 20)
        X, Y = np.meshgrid(xr, yr)
        Z = A * np.exp(-0.5 * ((X / b) ** 2.0 + (Y / b) ** 2.0)) + 5

        # Create a MAG survey
        xyzLoc = np.c_[utils.mkvc(X.T), utils.mkvc(Y.T), utils.mkvc(Z.T)]
        rxLoc = mag.Point(xyzLoc)
        srcField = mag.SourceField([rxLoc], parameters=H0)
        survey = mag.Survey(srcField)

        # self.mesh.finalize()
        self.mesh = meshutils.mesh_builder_xyz(
            xyzLoc,
            h,
            padding_distance=padDist,
            mesh_type="TREE",
        )

        self.mesh = meshutils.refine_tree_xyz(
            self.mesh,
            topo,
            method="surface",
            octree_levels=nCpad,
            octree_levels_padding=nCpad,
            finalize=True,
        )

        # Define active cells from topo
        actv = utils.surface2ind_topo(self.mesh, topo)
        nC = int(actv.sum())

        # We can now create a susceptibility model and generate data.
        # Lets start with a simple block in half-space
        self.model = utils.model_builder.addBlock(
            self.mesh.gridCC,
            np.zeros(self.mesh.nC),
            np.r_[-20, -20, -15],
            np.r_[20, 20, 20],
            0.05,
        )[actv]

        # Create active map to go from reduced set to full
        self.actvMap = maps.InjectActiveCells(self.mesh, actv, np.nan)

        # Create reduced identity map
        idenMap = maps.IdentityMap(nP=nC)

        # Create the forward model operator
        sim = mag.Simulation3DIntegral(
            self.mesh,
            survey=survey,
            chiMap=idenMap,
            actInd=actv,
            store_sensitivities="ram",
        )
        self.sim = sim
        data = sim.make_synthetic_data(
            self.model, relative_error=0.0, noise_floor=1.0, add_noise=True
        )

        # Create a regularization
        reg = regularization.Sparse(self.mesh, indActive=actv, mapping=idenMap)
        reg.norms = np.c_[0, 0, 0, 0]

        reg.mref = np.zeros(nC)

        # Data misfit function
        dmis = data_misfit.L2DataMisfit(simulation=sim, data=data)

        # Add directives to the inversion
        opt = optimization.ProjectedGNCG(
            maxIter=10,
            lower=0.0,
            upper=10.0,
            maxIterLS=5,
            maxIterCG=5,
            tolCG=1e-4,
            stepOffBoundsFact=1e-4,
        )

        invProb = inverse_problem.BaseInvProblem(dmis, reg, opt, beta=1e6)

        # Here is where the norms are applied.
        # Pick a threshold parameter empirically based on the distribution of
        # model parameters
        IRLS = directives.Update_IRLS(
            f_min_change=1e-3, max_irls_iterations=20, beta_tol=1e-1, beta_search=False
        )
        update_Jacobi = directives.UpdatePreconditioner()
        sensitivity_weights = directives.UpdateSensitivityWeights()
        self.inv = inversion.BaseInversion(
            invProb, directiveList=[IRLS, sensitivity_weights, update_Jacobi]
        )

    def test_mag_inverse(self):

        # Run the inversion
        mrec = self.inv.run(self.model * 1e-4)

        residual = np.linalg.norm(mrec - self.model) / np.linalg.norm(self.model)
        # print(residual)
        # import matplotlib.pyplot as plt
        # plt.figure()
        # ax = plt.subplot(1, 2, 1)
        # midx = 65
        # self.mesh.plotSlice(self.actvMap*mrec, ax=ax, normal='Y', ind=midx,
        #                     grid=True, clim=(0, 0.02))
        # ax.set_xlim(self.mesh.gridCC[:, 0].min(), self.mesh.gridCC[:, 0].max())
        # ax.set_ylim(self.mesh.gridCC[:, 2].min(), self.mesh.gridCC[:, 2].max())

        # ax = plt.subplot(1, 2, 2)
        # self.mesh.plotSlice(self.actvMap*self.model, ax=ax, normal='Y', ind=midx,
        #                     grid=True, clim=(0, 0.02))
        # ax.set_xlim(self.mesh.gridCC[:, 0].min(), self.mesh.gridCC[:, 0].max())
        # ax.set_ylim(self.mesh.gridCC[:, 2].min(), self.mesh.gridCC[:, 2].max())
        # plt.show()

        self.assertLess(residual, 1)
        # self.assertTrue(residual < 0.05)

    def tearDown(self):
        # Clean up the working directory
        if self.sim.store_sensitivities == "disk":
            shutil.rmtree(self.sim.sensitivity_path)


if __name__ == "__main__":
    unittest.main()
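
# Usage sketch (not in the original file): assuming it is saved as
# test_mag_inverse.py (hypothetical name), the single test can also be run
# through the standard unittest loader:
#
#   import unittest
#   suite = unittest.defaultTestLoader.loadTestsFromName(
#       "test_mag_inverse.MagInvLinProblemTest.test_mag_inverse"
#   )
#   unittest.TextTestRunner(verbosity=2).run(suite)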
import taichi as ti

ti.init()

n = 512
x = ti.field(dtype=ti.f32, shape=(n, n))


@ti.kernel
def paint():
    for i, j in ti.ndrange(n * 4, n * 4):
        # 4x4 super sampling:
        ret = ti.taichi_logo(ti.Vector([i, j]) / (n * 4))
        x[i // 4, j // 4] += ret / 16


def main():
    paint()

    gui = ti.GUI('Logo', (n, n))
    while gui.running:
        gui.set_image(x)
        gui.show()


if __name__ == '__main__':
    main()
def map_list(list, key, default=None):
    return filter(None, (item.get(key, default) for item in list))


class FilterModule(object):
    def filters(self):
        return {
            'select': map_list
        }
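
# Illustrative usage (not part of the plugin): placed in a role's
# filter_plugins/ directory, this registers 'select' (shadowing Jinja2's
# builtin of the same name) to map a list of dicts to the values of one key,
# dropping missing entries. Variable names below are hypothetical.
#
#   - debug:
#       msg: "{{ users | select('name') | list }}"
#
# with users = [{'name': 'a'}, {'name': 'b'}, {'id': 3}] this prints ['a', 'b']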
'''
Notice:
    ConstantOP only supports CPU.
    For supporting cross-device, please use ConstantOP2
'''
import sys
sys.path.append('../')  # Add MobulaOP path
import mobula
import numpy as np


# ConstantOP only supports CPU.
@mobula.op.register(need_top_grad=False)
class ConstantOP:
    def __init__(self, constant):
        self.constant = self.F.array(constant)

    def forward(self):
        return self.constant

    def backward(self, dy):
        return []

    def infer_shape(self, in_shape):
        return [], [self.constant.shape]


@mobula.op.register(need_top_grad=False)
class ConstantOP2:
    def __init__(self, constant):
        self.constant = self.F.array(constant)
        self.constant_buffer = dict()

    def forward(self, x):
        ctx = x.context
        return self.constant_buffer.get(ctx, self.constant.as_in_context(ctx))

    def backward(self, dy):
        return [0]

    def infer_shape(self, in_shape):
        return in_shape, [self.constant.shape]


if __name__ == '__main__':
    import mxnet as mx
    import numpy as np

    # ConstantOP only supports CPU.
    if mx.current_context() == mx.cpu():
        # NDArray
        a = mx.nd.array([1, 2, 3])
        b = mx.nd.array([4, 5, 6])
        c = a + ConstantOP[mx.nd.NDArray](b)
        print(c)  # [5,7,9]

        # Symbol
        a_sym = mx.sym.Variable('a')
        output_sym = a_sym + ConstantOP[mx.sym.Symbol](b)
        exe = output_sym.simple_bind(
            ctx=mx.context.current_context(), a=a.shape)
        exe.forward(a=np.array([1, 2, 3]))

        print(exe.outputs[0].asnumpy())  # [5,7,9]

    '''
    ConstantOP2: accept a variable for getting the context information
    '''

    # NDArray
    a = mx.nd.array([1, 2, 3])
    b = mx.nd.array([4, 5, 6])
    c = a + ConstantOP2(a, constant=b)
    print(c)  # [5,7,9]

    # Symbol
    a_sym = mx.sym.Variable('a')
    # declare input_type explicitly because the inputs include mx.sym.Symbol and mx.nd.NDArray
    output_sym = a_sym + ConstantOP2[mx.sym.Symbol](a_sym, constant=b)
    exe = output_sym.simple_bind(ctx=mx.context.current_context(), a=a.shape)
    exe.forward(a=np.array([1, 2, 3]))

    print(exe.outputs[0].asnumpy())  # [5,7,9]
import numpy as np
import sys
sys.path.append('..')
from chap11.dubins_parameters import dubins_parameters
from message_types.msg_path import msg_path


class path_manager:
    def __init__(self):
        # message sent to path follower
        self.path = msg_path()
        # pointers to previous, current, and next waypoints
        self.ptr_previous = 0
        self.ptr_current = 1
        self.ptr_next = 2
        # flag that requests new waypoints from path planner
        self.flag_need_new_waypoints = True
        self.num_waypoints = 0
        self.halfspace_n = np.inf * np.ones((3, 1))
        self.halfspace_r = np.inf * np.ones((3, 1))
        # state of the manager state machine
        self.manager_state = 1
        # dubins path parameters
        self.dubins_path = dubins_parameters()

    def update(self, waypoints, radius, state):
        # this flag is set for one time step to signal a redraw in the viewer
        if self.path.flag_path_changed == True:
            self.path.flag_path_changed = False
        if waypoints.num_waypoints == 0:
            waypoints.flag_manager_requests_waypoints = True
        else:
            if waypoints.type == 'straight_line':
                self.line_manager(waypoints, state)
            elif waypoints.type == 'fillet':
                self.fillet_manager(waypoints, radius, state)
            elif waypoints.type == 'dubins':
                self.dubins_manager(waypoints, radius, state)
            else:
                print('Error in Path Manager: Undefined waypoint type.')
        return self.path

    def line_manager(self, waypoints, state):
        pass  # not implemented in the original source

    def fillet_manager(self, waypoints, radius, state):
        pass  # not implemented in the original source

    def dubins_manager(self, waypoints, radius, state):
        pass  # not implemented in the original source

    def initialize_pointers(self):
        pass  # not implemented in the original source

    def increment_pointers(self):
        pass  # not implemented in the original source

    def inHalfSpace(self, pos):
        if (pos - self.halfspace_r).T @ self.halfspace_n >= 0:
            return True
        else:
            return False
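
# Small usage sketch (not in the original): inHalfSpace tests whether a 3x1
# position lies in the half space {p : (p - r) . n >= 0}. The numbers below
# are made up for illustration.
if __name__ == '__main__':
    pm = path_manager()
    pm.halfspace_r = np.array([[0.0], [0.0], [0.0]])  # point on the boundary plane
    pm.halfspace_n = np.array([[1.0], [0.0], [0.0]])  # normal pointing into the half space

    print(pm.inHalfSpace(np.array([[2.0], [5.0], [-1.0]])))   # True: in front of the plane
    print(pm.inHalfSpace(np.array([[-2.0], [5.0], [-1.0]])))  # False: behind the plane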
print('This is for test #1')
# *************************
# |docname| - Runestone API
# *************************
# This module implements the API that the Runestone Components use to communicate with a Runestone Server.
#
# Imports
# =======
# These are listed in the order prescribed by `PEP 8
# <http://www.python.org/dev/peps/pep-0008/#imports>`_.
from collections import Counter
import datetime
from io import open
import json
import logging
from lxml import html
import math
import os
import re
import subprocess
from textwrap import dedent
import uuid

# Third-party imports
# -------------------
from bleach import clean
from dateutil.parser import parse

# Local application imports
# -------------------------
from feedback import is_server_feedback, fitb_feedback, lp_feedback
from rs_practice import _get_qualified_questions

logger = logging.getLogger(settings.logger)
logger.setLevel(settings.log_level)


EVENT_TABLE = {
    "mChoice": "mchoice_answers",
    "fillb": "fitb_answers",
    "dragNdrop": "dragndrop_answers",
    "clickableArea": "clickablearea_answers",
    "parsons": "parsons_answers",
    "codelensq": "codelens_answers",
    "shortanswer": "shortanswer_answers",
    "fillintheblank": "fitb_answers",
    "mchoice": "mchoice_answers",
    "dragndrop": "dragndrop_answers",
    "clickablearea": "clickablearea_answers",
    "parsonsprob": "parsons_answers",
}

COMMENT_MAP = {
    "sql": "--",
    "python": "#",
    "java": "//",
    "javascript": "//",
    "c": "//",
    "cpp": "//",
}


def compareAndUpdateCookieData(sid: str):
    if (
        "ipuser" in request.cookies
        and request.cookies["ipuser"].value != sid
        and request.cookies["ipuser"].value.endswith("@" + request.client)
    ):
        db.useinfo.update_or_insert(
            db.useinfo.sid == request.cookies["ipuser"].value, sid=sid
        )


# Endpoints
# =========
#
# .. _hsblog endpoint:
#
# hsblog endpoint
# ---------------
# Given a JSON record of a clickstream event, record the event in the ``useinfo`` table.
# If the event is an answer to a runestone question, record that answer in the database in
# one of the xxx_answers tables.
#
def hsblog():
    setCookie = False
    if auth.user:
        if request.vars.course != auth.user.course_name:
            return json.dumps(
                dict(
                    log=False,
                    message="You appear to have changed courses in another tab. Please switch to this course",
                )
            )
        sid = auth.user.username
        compareAndUpdateCookieData(sid)
        setCookie = True  # we set our own cookie anyway to eliminate many of the extraneous anonymous
        # log entries that come from auth timing out even though the user hasn't reloaded
        # the page.
    else:
        if request.vars.clientLoginStatus == "true":
            logger.error("Session Expired")
            return json.dumps(dict(log=False, message="Session Expired"))

        if "ipuser" in request.cookies:
            sid = request.cookies["ipuser"].value
            setCookie = True
        else:
            sid = str(uuid.uuid1().int) + "@" + request.client
            setCookie = True
    act = request.vars.get("act", "")
    div_id = request.vars.div_id
    event = request.vars.event
    course = request.vars.course
    # Get the current time, rounded to the nearest second -- this is how the time will be stored in the database.
    ts = datetime.datetime.utcnow()
    ts -= datetime.timedelta(microseconds=ts.microsecond)
    tt = request.vars.time
    if not tt:
        tt = 0

    try:
        db.useinfo.insert(
            sid=sid,
            act=act[0:512],
            div_id=div_id,
            event=event,
            timestamp=ts,
            course_id=course,
        )
    except Exception as e:
        logger.error(
            "failed to insert log record for {} in {} : {} {} {}".format(
                sid, course, div_id, event, act
            )
        )
        logger.error("Details: {}".format(e))

    if event == "timedExam" and (act == "finish" or act == "reset" or act == "start"):
        logger.debug(act)
        if act == "reset":
            r = "T"
        else:
            r = None

        try:
            db.timed_exam.insert(
                sid=sid,
                course_name=course,
                correct=int(request.vars.correct or 0),
                incorrect=int(request.vars.incorrect or 0),
                skipped=int(request.vars.skipped or 0),
                time_taken=int(tt),
                timestamp=ts,
                div_id=div_id,
                reset=r,
            )
        except Exception as e:
            logger.debug(
                "failed to insert a timed exam record for {} in {} : {}".format(
                    sid, course, div_id
                )
            )
            logger.debug(
                "correct {} incorrect {} skipped {} time {}".format(
                    request.vars.correct,
                    request.vars.incorrect,
                    request.vars.skipped,
                    request.vars.time,
                )
            )
            logger.debug("Error: {}".format(e))

    # Produce a default result.
    res = dict(log=True, timestamp=str(ts))
    try:
        pct = float(request.vars.percent)
    except ValueError:
        pct = None
    except TypeError:
        pct = None

    # Process this event.
    if event == "mChoice" and auth.user:
        answer = request.vars.answer
        correct = request.vars.correct
        db.mchoice_answers.insert(
            sid=sid,
            timestamp=ts,
            div_id=div_id,
            answer=answer,
            correct=correct,
            course_name=course,
            percent=pct,
        )
    elif event == "fillb" and auth.user:
        answer_json = request.vars.answer
        correct = request.vars.correct
        # Grade on the server if needed.
        do_server_feedback, feedback = is_server_feedback(div_id, course)
        if do_server_feedback:
            correct, res_update = fitb_feedback(answer_json, feedback)
            res.update(res_update)
            pct = res["percent"]

        # Save this data.
        db.fitb_answers.insert(
            sid=sid,
            timestamp=ts,
            div_id=div_id,
            answer=answer_json,
            correct=correct,
            course_name=course,
            percent=pct,
        )

    elif event == "dragNdrop" and auth.user:
        answers = request.vars.answer
        minHeight = request.vars.minHeight
        correct = request.vars.correct

        db.dragndrop_answers.insert(
            sid=sid,
            timestamp=ts,
            div_id=div_id,
            answer=answers,
            correct=correct,
            course_name=course,
            min_height=minHeight,
            percent=pct,
        )
    elif event == "clickableArea" and auth.user:
        correct = request.vars.correct
        db.clickablearea_answers.insert(
            sid=sid,
            timestamp=ts,
            div_id=div_id,
            answer=act,
            correct=correct,
            course_name=course,
            percent=pct,
        )

    elif event == "parsons" and auth.user:
        correct = request.vars.correct
        answer = request.vars.answer
        source = request.vars.source
        db.parsons_answers.insert(
            sid=sid,
            timestamp=ts,
            div_id=div_id,
            answer=answer,
            source=source,
            correct=correct,
            course_name=course,
            percent=pct,
        )

    elif event == "codelensq" and auth.user:
        correct = request.vars.correct
        answer = request.vars.answer
        source = request.vars.source
        db.codelens_answers.insert(
            sid=sid,
            timestamp=ts,
            div_id=div_id,
            answer=answer,
            source=source,
            correct=correct,
            course_name=course,
            percent=pct,
        )

    elif event == "shortanswer" and auth.user:
        db.shortanswer_answers.insert(
            sid=sid,
            answer=act,
            div_id=div_id,
            timestamp=ts,
            course_name=course,
        )

    elif event == "unittest" and auth.user:
        statslist = act.split(":")
        if "undefined" not in act:
            pct = float(statslist[1])
            passed = int(statslist[3])
            failed = int(statslist[5])
            if math.isnan(pct):
                pct = 0
        else:
            pct = passed = failed = 0
            logger.error(f"Got undefined unittest results for {div_id} {sid}")
        if pct >= 99.99999:
            correct = "T"
        else:
            correct = "F"
        db.unittest_answers.insert(
            sid=sid,
            timestamp=ts,
            div_id=div_id,
            correct=correct,
            passed=passed,
            failed=failed,
            course_name=course,
            percent=pct,
        )

    elif event == "lp_build" and auth.user:
        ret, new_fields = db.lp_answers._validate_fields(
            dict(sid=sid, timestamp=ts, div_id=div_id, course_name=course)
        )
        if not ret.errors:
            do_server_feedback, feedback = is_server_feedback(div_id, course)
            if do_server_feedback:
                try:
                    code_snippets = json.loads(request.vars.answer)["code_snippets"]
                except Exception:
                    code_snippets = []
                result = lp_feedback(code_snippets, feedback)
                # If an error occurred or we're not testing, pass the answer through.
                res.update(result)

                # Record the results in the database.
                correct = result.get("correct")
                answer = result.get("answer", {})
                answer["code_snippets"] = code_snippets
                ret = db.lp_answers.validate_and_insert(
                    sid=sid,
                    timestamp=ts,
                    div_id=div_id,
                    answer=json.dumps(answer),
                    correct=correct,
                    course_name=course,
                )
                if ret.errors:
                    res.setdefault("errors", []).append(ret.errors.as_dict())
            else:
                res["errors"] = ["No feedback provided."]
        else:
            res.setdefault("errors", []).append(ret.errors.as_dict())

    response.headers["content-type"] = "application/json"
    if setCookie:
        response.cookies["ipuser"] = sid
        response.cookies["ipuser"]["expires"] = 24 * 3600 * 90
        response.cookies["ipuser"]["path"] = "/"
        if auth.user:
            response.cookies["last_course"] = auth.user.course_name
            response.cookies["last_course"]["expires"] = 24 * 3600 * 90
            response.cookies["last_course"]["path"] = "/"

    return json.dumps(res)


# .. _runlog endpoint:
#
# runlog endpoint
# ---------------
# The `logRunEvent` client-side function calls this endpoint to record TODO...
def runlog():  # Log errors and runs with code
    # response.headers['content-type'] = 'application/json'
    setCookie = False
    if auth.user:
        if request.vars.course != auth.user.course_name:
            return json.dumps(
                dict(
                    log=False,
                    message="You appear to have changed courses in another tab. Please switch to this course",
                )
            )
        sid = auth.user.username
        setCookie = True
    else:
        if request.vars.clientLoginStatus == "true":
            logger.error("Session Expired")
            return json.dumps(dict(log=False, message="Session Expired"))
        if "ipuser" in request.cookies:
            sid = request.cookies["ipuser"].value
            setCookie = True
        else:
            sid = str(uuid.uuid1().int) + "@" + request.client
            setCookie = True
    div_id = request.vars.div_id
    course = request.vars.course
    code = request.vars.code if request.vars.code else ""
    ts = datetime.datetime.utcnow()
    error_info = request.vars.errinfo
    pre = request.vars.prefix if request.vars.prefix else ""
    post = request.vars.suffix if request.vars.suffix else ""
    if error_info != "success":
        event = "ac_error"
        act = str(error_info)[:512]
    else:
        act = "run"
        if request.vars.event:
            event = request.vars.event
        else:
            event = "activecode"
    num_tries = 3
    done = False
    while num_tries > 0 and not done:
        try:
            db.useinfo.insert(
                sid=sid,
                act=act,
                div_id=div_id,
                event=event,
                timestamp=ts,
                course_id=course,
            )
            done = True
        except Exception as e:
            logger.error(
                "probable Too Long problem trying to insert sid={} act={} div_id={} event={} timestamp={} course_id={} exception={}".format(
                    sid, act, div_id, event, ts, course, e
                )
            )
            num_tries -= 1
    if num_tries == 0:
        raise Exception("Runlog Failed to insert into useinfo")

    if auth.user:
        if "to_save" in request.vars and (
            request.vars.to_save == "True" or request.vars.to_save == "true"
        ):
            num_tries = 3
            done = False
            dbcourse = (
                db(db.courses.course_name == course).select(**SELECT_CACHE).first()
            )
            while num_tries > 0 and not done:
                try:
                    db.code.insert(
                        sid=sid,
                        acid=div_id,
                        code=code,
                        emessage=error_info,
                        timestamp=ts,
                        course_id=dbcourse,
                        language=request.vars.lang,
                    )
                    if request.vars.partner:
                        if _same_class(sid, request.vars.partner):
                            comchar = COMMENT_MAP.get(request.vars.lang, "#")
                            newcode = (
                                "{} This code was shared by {}\n\n".format(comchar, sid)
                                + code
                            )
                            db.code.insert(
                                sid=request.vars.partner,
                                acid=div_id,
                                code=newcode,
                                emessage=error_info,
                                timestamp=ts,
                                course_id=dbcourse,
                                language=request.vars.lang,
                            )
                        else:
                            res = {
                                "message": "You must be enrolled in the same class as your partner"
                            }
                            return json.dumps(res)
                    done = True
                except Exception as e:
                    num_tries -= 1
                    logger.error("INSERT into code FAILED retrying -- {}".format(e))
            if num_tries == 0:
                raise Exception("Runlog Failed to insert into code")

    res = {"log": True}
    if setCookie:
        response.cookies["ipuser"] = sid
        response.cookies["ipuser"]["expires"] = 24 * 3600 * 90
        response.cookies["ipuser"]["path"] = "/"
    return json.dumps(res)


# Ajax Handlers for saving and restoring active code blocks


def gethist():
    """
    return the history of saved code by this user for a particular acid
    :Parameters:
        - `acid`: id of the active code block
        - `user`: optional identifier for the owner of the code
    :Return:
        - json object containing a list/array of source texts
    """
    codetbl = db.code
    acid = request.vars.acid

    # if vars.sid then we know this is being called from the grading interface
    if request.vars.sid:
        sid = request.vars.sid
        if auth.user and verifyInstructorStatus(
            auth.user.course_name, auth.user.id
        ):  # noqa: F405
            course_id = auth.user.course_id
        else:
            course_id = None
    elif auth.user:
        sid = auth.user.username
        course_id = auth.user.course_id
    else:
        sid = None
        course_id = None

    res = {}
    if sid:
        query = (
            (codetbl.sid == sid)
            & (codetbl.acid == acid)
            & (codetbl.course_id == course_id)
            & (codetbl.timestamp != None)  # noqa: E711
        )
        res["acid"] = acid
        res["sid"] = sid
        # get the code they saved in chronological order; id order gets that for us
        r = db(query).select(orderby=codetbl.id)
        res["history"] = [row.code for row in r]
        res["timestamps"] = [
            row.timestamp.replace(tzinfo=datetime.timezone.utc).isoformat() for row in r
        ]

    response.headers["content-type"] = "application/json"
    return json.dumps(res)


# @auth.requires_login()
# This function is deprecated as of June 2019.
# We need to keep it in place as long as we continue to serve books
# from runestone/static/ When that period is over we can eliminate it.
def getuser():
    response.headers["content-type"] = "application/json"

    if auth.user:
        try:
            # return the list of courses that auth.user is registered for to keep them from
            # accidentally wandering into courses they are not registered for.
            cres = db(
                (db.user_courses.user_id == auth.user.id)
                & (db.user_courses.course_id == db.courses.id)
            ).select(db.courses.course_name)
            clist = []
            for row in cres:
                clist.append(row.course_name)
            res = {
                "email": auth.user.email,
                "nick": auth.user.username,
                "donated": auth.user.donated,
                "isInstructor": verifyInstructorStatus(  # noqa: F405
                    auth.user.course_name, auth.user.id
                ),
                "course_list": clist,
            }
            session.timezoneoffset = request.vars.timezoneoffset
            logger.debug(
                "setting timezone offset in session %s hours" % session.timezoneoffset
            )
        except Exception:
            res = dict(redirect=auth.settings.login_url)  # ?_next=....
    else:
        res = dict(redirect=auth.settings.login_url)  # ?_next=....
    if session.readings:
        res["readings"] = session.readings
    logger.debug("returning login info: %s" % res)
    return json.dumps([res])


def set_tz_offset():
    session.timezoneoffset = request.vars.timezoneoffset
    logger.debug("setting timezone offset in session %s hours" % session.timezoneoffset)
    return "done"


#
# Ajax Handlers to update and retrieve the last position of the user in the course
#
def updatelastpage():
    lastPageUrl = request.vars.lastPageUrl
    lastPageScrollLocation = request.vars.lastPageScrollLocation
    if lastPageUrl is None:
        return  # todo: log request.vars, request.args and request.env.path_info
    course = request.vars.course
    completionFlag = request.vars.completionFlag
    lastPageChapter = lastPageUrl.split("/")[-2]
    lastPageSubchapter = ".".join(lastPageUrl.split("/")[-1].split(".")[:-1])
    if auth.user:
        done = False
        num_tries = 3
        while not done and num_tries > 0:
            try:
                db(
                    (db.user_state.user_id == auth.user.id)
                    & (db.user_state.course_id == course)
                ).update(
                    last_page_url=lastPageUrl,
                    last_page_chapter=lastPageChapter,
                    last_page_subchapter=lastPageSubchapter,
                    last_page_scroll_location=lastPageScrollLocation,
                    last_page_accessed_on=datetime.datetime.utcnow(),
                )
                done = True
            except Exception:
                num_tries -= 1
        if num_tries == 0:
            raise Exception("Failed to save the user state in update_last_page")

        done = False
        num_tries = 3
        while not done and num_tries > 0:
            try:
                db(
                    (db.user_sub_chapter_progress.user_id == auth.user.id)
                    & (db.user_sub_chapter_progress.chapter_id == lastPageChapter)
                    & (
                        db.user_sub_chapter_progress.sub_chapter_id
                        == lastPageSubchapter
                    )
                    & (
                        (db.user_sub_chapter_progress.course_name == course)
                        | (
                            db.user_sub_chapter_progress.course_name == None
                        )  # Back fill for old entries without course
                    )
                ).update(
                    status=completionFlag,
                    end_date=datetime.datetime.utcnow(),
                    course_name=course,
                )
                done = True
            except Exception:
                num_tries -= 1
        if num_tries == 0:
            raise Exception("Failed to save sub chapter progress in update_last_page")

        practice_settings = db(db.course_practice.course_name == auth.user.course_name)
        if (
            practice_settings.count() != 0
            and practice_settings.select().first().flashcard_creation_method == 0
        ):
            # Since each authenticated user has only one active course, we retrieve the course this way.
            course = (
                db(db.courses.id == auth.user.course_id).select(**SELECT_CACHE).first()
            )

            # We only retrieve questions to be used in flashcards if they are marked for practice purpose.
            questions = _get_qualified_questions(
                course.base_course, lastPageChapter, lastPageSubchapter, db
            )
            if len(questions) > 0:
                now = datetime.datetime.utcnow()
                now_local = now - datetime.timedelta(
                    hours=float(session.timezoneoffset)
                    if "timezoneoffset" in session
                    else 0
                )
                existing_flashcards = db(
                    (db.user_topic_practice.user_id == auth.user.id)
                    & (db.user_topic_practice.course_name == auth.user.course_name)
                    & (db.user_topic_practice.chapter_label == lastPageChapter)
                    & (db.user_topic_practice.sub_chapter_label == lastPageSubchapter)
                    & (db.user_topic_practice.question_name == questions[0].name)
                )
                # There is at least one qualified question in this subchapter, so insert a flashcard for the subchapter.
                if completionFlag == "1" and existing_flashcards.isempty():
                    db.user_topic_practice.insert(
                        user_id=auth.user.id,
                        course_name=auth.user.course_name,
                        chapter_label=lastPageChapter,
                        sub_chapter_label=lastPageSubchapter,
                        question_name=questions[0].name,
                        # Treat it as if the first eligible question is the last one asked.
                        i_interval=0,
                        e_factor=2.5,
                        next_eligible_date=now_local.date(),
                        # add as if yesterday, so can practice right away
                        last_presented=now - datetime.timedelta(1),
                        last_completed=now - datetime.timedelta(1),
                        creation_time=now,
                        timezoneoffset=float(session.timezoneoffset)
                        if "timezoneoffset" in session
                        else 0,
                    )
                if completionFlag == "0" and not existing_flashcards.isempty():
                    existing_flashcards.delete()


def getCompletionStatus():
    if auth.user:
        lastPageUrl = request.vars.lastPageUrl
        lastPageChapter = lastPageUrl.split("/")[-2]
        lastPageSubchapter = ".".join(lastPageUrl.split("/")[-1].split(".")[:-1])
        result = db(
            (db.user_sub_chapter_progress.user_id == auth.user.id)
            & (db.user_sub_chapter_progress.chapter_id == lastPageChapter)
            & (db.user_sub_chapter_progress.sub_chapter_id == lastPageSubchapter)
            & (
                (db.user_sub_chapter_progress.course_name == auth.user.course_name)
                | (
                    db.user_sub_chapter_progress.course_name == None
                )  # for backward compatibility
            )
        ).select(db.user_sub_chapter_progress.status)
        rowarray_list = []
        if result:
            for row in result:
                res = {"completionStatus": row.status}
                rowarray_list.append(res)
            # question: since the javascript in user-highlights.js is going to look only at the first row, shouldn't
            # we be returning just the *last* status? Or is there no history of status kept anyway?
            return json.dumps(rowarray_list)
        else:
            # haven't seen this Chapter/Subchapter before
            # make the insertions into the DB as necessary

            # we know the subchapter doesn't exist
            db.user_sub_chapter_progress.insert(
                user_id=auth.user.id,
                chapter_id=lastPageChapter,
                sub_chapter_id=lastPageSubchapter,
                status=-1,
                start_date=datetime.datetime.utcnow(),
                course_name=auth.user.course_name,
            )
            # the chapter might exist without the subchapter
            result = db(
                (db.user_chapter_progress.user_id == auth.user.id)
                & (db.user_chapter_progress.chapter_id == lastPageChapter)
            ).select()
            if not result:
                db.user_chapter_progress.insert(
                    user_id=auth.user.id, chapter_id=lastPageChapter, status=-1
                )
            return json.dumps([{"completionStatus": -1}])


def getAllCompletionStatus():
    if auth.user:
        result = db(
            (db.user_sub_chapter_progress.user_id == auth.user.id)
            & (db.user_sub_chapter_progress.course_name == auth.user.course_name)
        ).select(
            db.user_sub_chapter_progress.chapter_id,
            db.user_sub_chapter_progress.sub_chapter_id,
            db.user_sub_chapter_progress.status,
            db.user_sub_chapter_progress.end_date,
        )
        rowarray_list = []
        if result:
            for row in result:
                if row.end_date is None:
                    endDate = 0
                else:
                    endDate = row.end_date.strftime("%d %b, %Y")
                res = {
                    "chapterName": row.chapter_id,
                    "subChapterName": row.sub_chapter_id,
                    "completionStatus": row.status,
                    "endDate": endDate,
                }
                rowarray_list.append(res)
            return json.dumps(rowarray_list)


@auth.requires_login()
def getlastpage():
    course = request.vars.course
    course = db(db.courses.course_name == course).select(**SELECT_CACHE).first()

    result = db(
        (db.user_state.user_id == auth.user.id)
        & (db.user_state.course_id == course.course_name)
        & (db.chapters.course_id == course.base_course)
        & (db.user_state.last_page_chapter == db.chapters.chapter_label)
        & (db.sub_chapters.chapter_id == db.chapters.id)
        & (db.user_state.last_page_subchapter == db.sub_chapters.sub_chapter_label)
    ).select(
        db.user_state.last_page_url,
        db.user_state.last_page_hash,
        db.chapters.chapter_name,
        db.user_state.last_page_scroll_location,
        db.sub_chapters.sub_chapter_name,
    )
    rowarray_list = []
    if result:
        for row in result:
            res = {
                "lastPageUrl": row.user_state.last_page_url,
                "lastPageHash": row.user_state.last_page_hash,
                "lastPageChapter": row.chapters.chapter_name,
                "lastPageSubchapter": row.sub_chapters.sub_chapter_name,
                "lastPageScrollLocation": row.user_state.last_page_scroll_location,
            }
            rowarray_list.append(res)
        return json.dumps(rowarray_list)
    else:
        db.user_state.insert(user_id=auth.user.id, course_id=course.course_name)


def _getCorrectStats(miscdata, event):
    # TODO: update this to use the xxx_answer table
    # select and count grouping by the correct column
    # this version can suffer from division by zero error
    sid = None
    dbtable = EVENT_TABLE[event]  # translate event to correct table

    if auth.user:
        sid = auth.user.username
    else:
        if "ipuser" in request.cookies:
            sid = request.cookies["ipuser"].value

    if sid:
        course = (
            db(db.courses.course_name == miscdata["course"])
            .select(**SELECT_CACHE)
            .first()
        )
        tbl = db[dbtable]

        count_expr = tbl.correct.count()
        rows = db((tbl.sid == sid) & (tbl.timestamp > course.term_start_date)).select(
            tbl.correct, count_expr, groupby=tbl.correct
        )
        total = 0
        correct = 0
        for row in rows:
            count = row[count_expr]
            total += count
            if row[dbtable].correct:
                correct = count
        if total > 0:
            pctcorr = round(float(correct) / total * 100)
        else:
            pctcorr = "unavailable"
    else:
        pctcorr = "unavailable"

    miscdata["yourpct"] = pctcorr


def _getStudentResults(question: str):
    """
    Internal function to collect student answers
    """
    cc = db(db.courses.id == auth.user.course_id).select().first()
    qst = (
        db(
            (db.questions.name == question)
            & (db.questions.base_course == cc.base_course)
        )
        .select()
        .first()
    )
    tbl_name = EVENT_TABLE[qst.question_type]
    tbl = db[tbl_name]

    res = db(
        (tbl.div_id == question)
        & (tbl.course_name == cc.course_name)
        & (tbl.timestamp >= cc.term_start_date)
    ).select(tbl.sid, tbl.answer, orderby=tbl.sid)

    resultList = []
    if len(res) > 0:
        currentSid = res[0].sid
        currentAnswers = []

        for row in res:
            if row.answer:
                answer = clean(row.answer)
            else:
                answer = None

            if row.sid == currentSid:
                if answer is not None:
                    currentAnswers.append(answer)
            else:
                currentAnswers.sort()
                resultList.append((currentSid, currentAnswers))
                currentAnswers = [answer] if answer is not None else []
                currentSid = row.sid

        currentAnswers.sort()
        resultList.append((currentSid, currentAnswers))

    return resultList


def getaggregateresults():
    course = request.vars.course
    question = request.vars.div_id
    # select act, count(*) from useinfo where div_id = 'question4_2_1' group by act;
    response.headers["content-type"] = "application/json"

    if not auth.user:
        return json.dumps([dict(answerDict={}, misc={}, emess="You must be logged in")])

    is_instructor = verifyInstructorStatus(course, auth.user.id)  # noqa: F405
    # Yes, these two things could be done as a join. But this **may** be better for performance
    if course in (
        "thinkcspy",
        "pythonds",
        "fopp",
        "csawesome",
        "apcsareview",
        "StudentCSP",
    ):
        start_date = datetime.datetime.utcnow() - datetime.timedelta(days=90)
    else:
        start_date = (
            db(db.courses.course_name == course)
            .select(db.courses.term_start_date)
            .first()
            .term_start_date
        )
    count = db.useinfo.id.count()
    try:
        result = db(
            (db.useinfo.div_id == question)
            & (db.useinfo.course_id == course)
            & (db.useinfo.timestamp >= start_date)
        ).select(db.useinfo.act, count, groupby=db.useinfo.act)
    except Exception:
        return json.dumps(
            [dict(answerDict={}, misc={}, emess="Sorry, the request timed out")]
        )

    tdata = {}
    tot = 0
    for row in result:
        tdata[clean(row.useinfo.act)] = row[count]
        tot += row[count]

    tot = float(tot)
    rdata = {}
    miscdata = {}
    correct = ""
    if tot > 0:
        for key in tdata:
            all_a = key.split(":")
            try:
                answer = all_a[1]
                if "correct" in key:
                    correct = answer
                count = int(tdata[key])
                if answer in rdata:
                    count += rdata[answer] / 100.0 * tot
                pct = round(count / tot * 100.0)

                if answer != "undefined" and answer != "":
                    rdata[answer] = pct
            except Exception as e:
                logger.error("Bad data for %s data is %s -- %s" % (question, key, e))

    miscdata["correct"] = correct
    miscdata["course"] = course

    _getCorrectStats(miscdata, "mChoice")

    returnDict = dict(answerDict=rdata, misc=miscdata)

    if auth.user and is_instructor:
        resultList = _getStudentResults(question)
        returnDict["reslist"] = resultList

    return json.dumps([returnDict])


def getpollresults():
    course = request.vars.course
    div_id = request.vars.div_id

    response.headers["content-type"] = "application/json"

    query = """select act from useinfo
        join (select sid, max(id) mid
            from useinfo where event='poll' and div_id = %s and course_id = %s group by sid) as T
        on id = T.mid"""

    rows = db.executesql(query, (div_id, course))

    result_list = []
    for row in rows:
        val = row[0].split(":")[0]
        result_list.append(int(val))

    # maps option : count
    opt_counts = Counter(result_list)

    if result_list:
        for i in range(max(result_list)):
            if i not in opt_counts:
                opt_counts[i] = 0
    # opt_list holds the option numbers from smallest to largest
    # count_list[i] holds the count of responses that chose option i
    opt_list = sorted(opt_counts.keys())
    count_list = []
    for i in opt_list:
        count_list.append(opt_counts[i])

    user_res = None
    if auth.user:
        user_res = (
            db(
                (db.useinfo.sid == auth.user.username)
                & (db.useinfo.course_id == course)
                & (db.useinfo.div_id == div_id)
            )
            .select(db.useinfo.act, orderby=~db.useinfo.id)
            .first()
        )

    if user_res:
        my_vote = user_res.act
    else:
        my_vote = -1

    return json.dumps([len(result_list), opt_list, count_list, div_id, my_vote])


def gettop10Answers():
    course = request.vars.course
    question = request.vars.div_id
    response.headers["content-type"] = "application/json"
    rows = []

    try:
        dbcourse = db(db.courses.course_name == course).select(**SELECT_CACHE).first()
        count_expr = db.fitb_answers.answer.count()
        rows = db(
            (db.fitb_answers.div_id == question)
            & (db.fitb_answers.course_name == course)
            & (db.fitb_answers.timestamp > dbcourse.term_start_date)
        ).select(
            db.fitb_answers.answer,
            count_expr,
            groupby=db.fitb_answers.answer,
            orderby=~count_expr,
            limitby=(0, 10),
        )
        res = [
            {"answer": clean(row.fitb_answers.answer), "count": row[count_expr]}
            for row in rows
        ]
    except Exception as e:
        logger.debug(e)
        res = "error in query"

    miscdata = {"course": course}
    _getCorrectStats(
        miscdata, "fillb"
    )  # TODO: rewrite _getCorrectStats to use xxx_answers

    if auth.user and verifyInstructorStatus(course, auth.user.id):  # noqa: F405
        resultList = _getStudentResults(question)
        miscdata["reslist"] = resultList

    return json.dumps([res, miscdata])


def getassignmentgrade():
    response.headers["content-type"] = "application/json"
    if not auth.user:
        return json.dumps([dict(message="not logged in")])

    divid = request.vars.div_id

    ret = {
        "grade": "Not graded yet",
        "comment": "No Comments",
        "avg": "None",
        "count": "None",
        "released": False,
    }

    # check that the assignment is released
    #
    a_q = (
        db(
            (db.assignments.course == auth.user.course_id)
            & (db.assignment_questions.assignment_id == db.assignments.id)
            & (db.assignment_questions.question_id == db.questions.id)
            & (db.questions.name == divid)
        )
        .select(
            db.assignments.released, db.assignments.id, db.assignment_questions.points
        )
        .first()
    )

    # if there is no assignment_question
    # try new way that we store scores and comments
    # divid is a question; find question_grades row
    result = (
        db(
            (db.question_grades.sid == auth.user.username)
            & (db.question_grades.course_name == auth.user.course_name)
            & (db.question_grades.div_id == divid)
        )
        .select(db.question_grades.score, db.question_grades.comment)
        .first()
    )
    logger.debug(result)
    if result:
        # say that we're sending back result styles in new version, so they can be processed differently without affecting old way during transition.
        ret["version"] = 2
        ret["released"] = a_q.assignments.released if a_q else False
        if a_q and not a_q.assignments.released:
            ret["grade"] = "Not graded yet"
        elif a_q and a_q.assignments.released:
            ret["grade"] = result.score or "Written Feedback Only"

        if a_q and a_q.assignments.released == True:
            ret["max"] = a_q.assignment_questions.points
        else:
            ret["max"] = ""

        if result.comment:
            ret["comment"] = result.comment

    return json.dumps([ret])


def _canonicalize_tz(tstring):
    x = re.search(r"\((.*)\)", tstring)
    x = x.group(1)
    y = x.split()
    if len(y) == 1:
        return tstring
    else:
        zstring = "".join([i[0] for i in y])
        return re.sub(r"(.*)\((.*)\)", r"\1({})".format(zstring), tstring)


# .. _getAssessResults:
#
# getAssessResults
# ----------------
def getAssessResults():
    if not auth.user:
        # can't query for user's answers if we don't know who the user is, so just load from local storage
        return ""

    course = request.vars.course
    div_id = request.vars.div_id
    event = request.vars.event
    if (
        verifyInstructorStatus(auth.user.course_name, auth.user) and request.vars.sid
    ):  # retrieving results for grader
        sid = request.vars.sid
    else:
        sid = auth.user.username

    # TODO This whole thing is messy - get the deadline from the assignment in the db
    if request.vars.deadline:
        try:
            deadline = parse(_canonicalize_tz(request.vars.deadline))
            tzoff = session.timezoneoffset if session.timezoneoffset else 0
            deadline = deadline + datetime.timedelta(hours=float(tzoff))
            deadline = deadline.replace(tzinfo=None)
        except Exception:
            logger.error("Bad Timezone - {}".format(request.vars.deadline))
            deadline = datetime.datetime.utcnow()
    else:
        deadline = datetime.datetime.utcnow()

    response.headers["content-type"] = "application/json"

    # Identify the correct event and query the database so we can load it from the server
    if event == "fillb":
        rows = (
            db(
                (db.fitb_answers.div_id == div_id)
                & (db.fitb_answers.course_name == course)
                & (db.fitb_answers.sid == sid)
            )
            .select(
                db.fitb_answers.answer,
                db.fitb_answers.timestamp,
                orderby=~db.fitb_answers.id,
            )
            .first()
        )
        if not rows:
            return ""  # server doesn't have it so we load from local storage instead
        #
        res = {"answer": rows.answer, "timestamp": str(rows.timestamp)}
        do_server_feedback, feedback = is_server_feedback(div_id, course)
        if do_server_feedback:
            correct, res_update = fitb_feedback(rows.answer, feedback)
            res.update(res_update)
        return json.dumps(res)
    elif event == "mChoice":
        rows = (
            db(
                (db.mchoice_answers.div_id == div_id)
                & (db.mchoice_answers.course_name == course)
                & (db.mchoice_answers.sid == sid)
            )
            .select(
                db.mchoice_answers.answer,
                db.mchoice_answers.timestamp,
                db.mchoice_answers.correct,
                orderby=~db.mchoice_answers.id,
            )
            .first()
        )
        if not rows:
            return ""
        res = {
            "answer": rows.answer,
            "timestamp": str(rows.timestamp),
            "correct": rows.correct,
        }
        return json.dumps(res)
    elif event == "dragNdrop":
        rows = (
            db(
                (db.dragndrop_answers.div_id == div_id)
                & (db.dragndrop_answers.course_name == course)
                & (db.dragndrop_answers.sid == sid)
            )
            .select(
                db.dragndrop_answers.answer,
                db.dragndrop_answers.timestamp,
                db.dragndrop_answers.correct,
                db.dragndrop_answers.min_height,
                orderby=~db.dragndrop_answers.id,
            )
            .first()
        )
        if not rows:
            return ""
        res = {
            "answer": rows.answer,
            "timestamp": str(rows.timestamp),
            "correct": rows.correct,
            "minHeight": str(rows.min_height),
        }
        return json.dumps(res)
    elif event == "clickableArea":
        rows = (
            db(
                (db.clickablearea_answers.div_id == div_id)
                & (db.clickablearea_answers.course_name == course)
                & (db.clickablearea_answers.sid == sid)
            )
            .select(
                db.clickablearea_answers.answer,
                db.clickablearea_answers.timestamp,
                db.clickablearea_answers.correct,
                orderby=~db.clickablearea_answers.id,
            )
            .first()
        )
        if not rows:
            return ""
        res = {
            "answer": rows.answer,
            "timestamp": str(rows.timestamp),
            "correct": rows.correct,
        }
        return json.dumps(res)
    elif event == "timedExam":
        rows = (
            db(
                (db.timed_exam.reset == None)  # noqa: E711
                & (db.timed_exam.div_id == div_id)
                & (db.timed_exam.course_name == course)
                & (db.timed_exam.sid == sid)
            )
            .select(
                db.timed_exam.correct,
                db.timed_exam.incorrect,
                db.timed_exam.skipped,
                db.timed_exam.time_taken,
                db.timed_exam.timestamp,
                db.timed_exam.reset,
                orderby=~db.timed_exam.id,
            )
            .first()
        )
        if not rows:
            return ""
        res = {
            "correct": rows.correct,
            "incorrect": rows.incorrect,
            "skipped": str(rows.skipped),
            "timeTaken": str(rows.time_taken),
            "timestamp": str(rows.timestamp),
            "reset": str(rows.reset),
        }
        return json.dumps(res)
    elif event == "parsons":
        rows = (
            db(
                (db.parsons_answers.div_id == div_id)
                & (db.parsons_answers.course_name == course)
                & (db.parsons_answers.sid == sid)
            )
            .select(
                db.parsons_answers.answer,
                db.parsons_answers.source,
                db.parsons_answers.timestamp,
                orderby=~db.parsons_answers.id,
            )
            .first()
        )
        if not rows:
            return ""
        res = {
            "answer": rows.answer,
            "source": rows.source,
            "timestamp": str(rows.timestamp),
        }
        return json.dumps(res)
    elif event == "shortanswer":
        logger.debug(f"Getting shortanswer: deadline is {deadline} ")
        rows = db(
            (db.shortanswer_answers.sid == sid)
            & (db.shortanswer_answers.div_id == div_id)
            & (db.shortanswer_answers.course_name == course)
        ).select(orderby=~db.shortanswer_answers.id)
        if not rows:
            return ""
        last_answer = None
        if not request.vars.deadline:
            row = rows[0]
        else:
            last_answer = rows[0]
            for row in rows:
                if row.timestamp <= deadline:
                    break
            if row.timestamp > deadline:
                row = None

        if row and row == last_answer:
            res = {"answer": row.answer, "timestamp": row.timestamp.isoformat()}
        else:
            if row and row.timestamp <= deadline:
                res = {"answer": row.answer, "timestamp": row.timestamp.isoformat()}
            else:
                res = {
                    "answer": "",
                    "timestamp": None,
                    "last_answer": last_answer.answer,
                    "last_timestamp": last_answer.timestamp.isoformat(),
                }
        srow = (
            db(
                (db.question_grades.sid == sid)
                & (db.question_grades.div_id == div_id)
                & (db.question_grades.course_name == course)
            )
            .select()
            .first()
        )
        if srow:
            res["score"] = srow.score
            res["comment"] = srow.comment

        return json.dumps(res)
    elif event == "lp_build":
        rows = (
            db(
                (db.lp_answers.div_id == div_id)
                & (db.lp_answers.course_name == course)
                & (db.lp_answers.sid == sid)
            )
            .select(
                db.lp_answers.answer,
                db.lp_answers.timestamp,
                db.lp_answers.correct,
                orderby=~db.lp_answers.id,
            )
            .first()
        )
        if not rows:
            return ""  # server doesn't have it so we load from local storage instead
        answer = json.loads(rows.answer)
        correct = rows.correct
        return json.dumps(
            {"answer": answer, "timestamp": str(rows.timestamp), "correct": correct}
        )


def tookTimedAssessment():
    if auth.user:
        sid = auth.user.username
    else:
        return json.dumps({"tookAssessment": False})

    exam_id = request.vars.div_id
    course = request.vars.course_name
    rows = (
        db(
            (db.timed_exam.div_id == exam_id)
            & (db.timed_exam.sid == sid)
            & (db.timed_exam.course_name == course)
        )
        .select(orderby=~db.timed_exam.id)
        .first()
    )
    logger.debug(f"checking {exam_id} {sid} {course} {rows}")
    if rows:
        return json.dumps({"tookAssessment": True})
    else:
        return json.dumps({"tookAssessment": False})


# The request variable ``code`` must contain JSON-encoded RST to be rendered by Runestone.
# Only the HTML containing the actual Runestone component will be returned.
def preview_question():

    begin = """
.. raw:: html

    <begin_directive>

"""
    end = """

.. raw:: html

    <end_directive>

"""

    try:
        code = begin + dedent(json.loads(request.vars.code)) + end
        with open(
            "applications/{}/build/preview/_sources/index.rst".format(
                request.application
            ),
            "w",
            encoding="utf-8",
        ) as ixf:
            ixf.write(code)

        # Note that ``os.environ`` isn't a dict, it's an object whose setter modifies
        # environment variables. So, modifications of a copy/deepcopy still `modify the
        # original environment
        # <https://stackoverflow.com/questions/13142972/using-copy-deepcopy-on-os-environ-in-python-appears-broken>`_.
        # Therefore, convert it to a dict, where modifications will not affect the environment.
        env = dict(os.environ)
        # Prevent any changes to the database when building a preview question.
        env.pop("DBURL", None)
        # Run a runestone build.
        # We would like to use sys.executable, but when we run web2py
        # in uwsgi then sys.executable is uwsgi, which doesn't work.
        # Why not just run runestone?
        if "python" not in settings.python_interpreter:
            logger.error(f"Error {settings.python_interpreter} is not a valid python")
            return json.dumps(
                f"Error: settings.python_interpreter must be set to a valid interpreter not {settings.python_interpreter}"
            )
        popen_obj = subprocess.Popen(
            [settings.python_interpreter, "-m", "runestone", "build"],
            # The build must be run from the directory containing a ``conf.py`` and all the needed support files.
            cwd="applications/{}/build/preview".format(request.application),
            # Capture the build output as text in case of an error.
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
            # Pass the modified environment which doesn't contain ``DBURL``.
            env=env,
        )
        stdout, stderr = popen_obj.communicate()
        # If there was an error, return stdout and stderr from the build.
        if popen_obj.returncode != 0:
            return json.dumps(
                "Error: Runestone build failed:\n\n" + stdout + "\n" + stderr
            )

        with open(
            "applications/{}/build/preview/build/preview/index.html".format(
                request.application
            ),
            "r",
            encoding="utf-8",
        ) as ixf:
            src = ixf.read()
        tree = html.fromstring(src)
        if len(tree.cssselect(".runestone")) == 0:
            src = ""
        result = re.search(
            "<begin_directive>(.*)<end_directive>", src, flags=re.DOTALL
        )
        if result:
            ctext = result.group(1)
        else:
            component = tree.cssselect(".system-message")
            if len(component) > 0:
                ctext = html.tostring(component[0]).decode("utf-8")
                logger.debug("error - %s", ctext)
            else:
                ctext = "Error: Runestone content missing."
        return json.dumps(ctext)
    except Exception as ex:
        return json.dumps("Error: {}".format(ex))


def save_donate():
    if auth.user:
        db(db.auth_user.id == auth.user.id).update(donated=True)


def did_donate():
    if auth.user:
        d_status = (
            db(db.auth_user.id == auth.user.id).select(db.auth_user.donated).first()
        )

        return json.dumps(dict(donate=d_status.donated))
    return json.dumps(dict(donate=False))


def get_datafile():
    """
    course_id - string, the name of the course
    acid - the acid of this datafile
    """
    course = request.vars.course_id  # the course name
    the_course = db(db.courses.course_name == course).select(**SELECT_CACHE).first()
    acid = request.vars.acid
    file_contents = (
        db(
            (db.source_code.acid == acid)
            & (
                (db.source_code.course_id == the_course.base_course)
                | (db.source_code.course_id == course)
            )
        )
        .select(db.source_code.main_code)
        .first()
    )

    if file_contents:
        file_contents = file_contents.main_code
    else:
        file_contents = None

    return json.dumps(dict(data=file_contents))
protected](NEWLINE lambda: verifyInstructorStatus(auth.user.course_name, auth.user),NEWLINE requires_login=True,NEWLINE)NEWLINEdef broadcast_code():NEWLINE """NEWLINE Callable by an instructor to send the code in their scratch activecodeNEWLINE to all students in the class.NEWLINE """NEWLINE the_course = (NEWLINE db(db.courses.course_name == auth.user.course_name)NEWLINE .select(**SELECT_CACHE)NEWLINE .first()NEWLINE )NEWLINE cid = the_course.idNEWLINE student_list = db(NEWLINE (db.user_courses.course_id == cid)NEWLINE & (db.auth_user.id == db.user_courses.user_id)NEWLINE ).select()NEWLINE shared_code = (NEWLINE "{} Instructor shared code on {}\n".format(NEWLINE COMMENT_MAP.get(request.vars.lang, "#"), datetime.datetime.utcnow().date()NEWLINE )NEWLINE + request.vars.codeNEWLINE )NEWLINE counter = 0NEWLINE for student in student_list:NEWLINE if student.auth_user.id == auth.user.id:NEWLINE continueNEWLINE sid = student.auth_user.usernameNEWLINE try:NEWLINE db.code.insert(NEWLINE sid=sid,NEWLINE acid=request.vars.divid,NEWLINE code=shared_code,NEWLINE emessage="",NEWLINE timestamp=datetime.datetime.utcnow(),NEWLINE course_id=cid,NEWLINE language=request.vars.lang,NEWLINE comment="Instructor shared code",NEWLINE )NEWLINE except Exception as e:NEWLINE logger.error("Failed to insert instructor code! details: {}".format(e))NEWLINE return json.dumps(dict(mess="failed"))NEWLINENEWLINE counter += 1NEWLINENEWLINE return json.dumps(dict(mess="success", share_count=counter))NEWLINENEWLINENEWLINEdef _same_class(user1: str, user2: str) -> bool:NEWLINE user1_course = (NEWLINE db(db.auth_user.username == user1).select(db.auth_user.course_id).first()NEWLINE )NEWLINE user2_course = (NEWLINE db(db.auth_user.username == user2).select(db.auth_user.course_id).first()NEWLINE )NEWLINENEWLINE return user1_course == user2_courseNEWLINENEWLINENEWLINEdef login_status():NEWLINE if auth.user:NEWLINE return json.dumps(dict(status="loggedin", course_name=auth.user.course_name))NEWLINE else:NEWLINE return json.dumps(dict(status="loggedout", course_name=auth.user.course_name))NEWLINENEWLINENEWLINEauto_gradable_q = [NEWLINE "clickablearea",NEWLINE "mchoice",NEWLINE "parsonsprob",NEWLINE "dragndrop",NEWLINE "fillintheblank",NEWLINE][email protected]_login()NEWLINEdef get_question_source():NEWLINE """Called from the selectquestion directiveNEWLINE There are 4 cases:NEWLINENEWLINE 1. If there is only 1 question in the question list then return the html source for it.NEWLINE 2. If there are multiple questions then choose a question at randomNEWLINE 3. If a proficiency is selected then select a random question that tests that proficiencyNEWLINE 4. 
If the question is an AB question then see if this student is an A or a B or assign them to one randomly.NEWLINENEWLINE In the last two cases, first check to see if there is a question for this student for thisNEWLINE component that was previously selected.NEWLINENEWLINE Returns:NEWLINE json: html source for this questionNEWLINE """NEWLINE prof = FalseNEWLINE points = request.vars.pointsNEWLINE logger.debug(f"POINTS = {points}")NEWLINE min_difficulty = request.vars.min_difficultyNEWLINE max_difficulty = request.vars.max_difficultyNEWLINE not_seen_ever = request.vars.not_seen_everNEWLINE autogradable = request.vars.autogradableNEWLINE is_primary = request.vars.primaryNEWLINE is_ab = request.vars.ABNEWLINE selector_id = request.vars["selector_id"]NEWLINE assignment_name = request.vars["timedWrapper"]NEWLINE toggle = request.vars["toggle"]NEWLINENEWLINE # If the question has a :points: option then those points are the defaultNEWLINE # however sometimes questions are entered in the web ui without the :points:NEWLINE # and points are assigned in the UI instead. If this is part of anNEWLINE # assignment or timed exam AND the points are set in the web UI we willNEWLINE # use the points from the UI over the :points: If this is an assignmentNEWLINE # or exam that is totally written in RST then the points in the UI will matchNEWLINE # the points from the assignment anyway.NEWLINE if assignment_name:NEWLINE ui_points = (NEWLINE db(NEWLINE (db.assignments.name == assignment_name)NEWLINE & (db.assignments.id == db.assignment_questions.assignment_id)NEWLINE & (db.assignment_questions.question_id == db.questions.id)NEWLINE & (db.questions.name == selector_id)NEWLINE )NEWLINE .select(db.assignment_questions.points)NEWLINE .first()NEWLINE )NEWLINE logger.debug(NEWLINE f"Assignment Points for {assignment_name}, {selector_id} = {ui_points}"NEWLINE )NEWLINE points = ui_points.pointsNEWLINENEWLINE if request.vars["questions"]:NEWLINE questionlist = request.vars["questions"].split(",")NEWLINE questionlist = [q.strip() for q in questionlist]NEWLINE elif request.vars["proficiency"]:NEWLINE prof = request.vars["proficiency"]NEWLINENEWLINE query = (db.competency.competency == prof) & (NEWLINE db.competency.question == db.questions.idNEWLINE )NEWLINE if is_primary:NEWLINE query = query & (db.competency.is_primary == True)NEWLINE if min_difficulty:NEWLINE query = query & (db.questions.difficulty >= float(min_difficulty))NEWLINE if max_difficulty:NEWLINE query = query & (db.questions.difficulty <= float(max_difficulty))NEWLINE if autogradable:NEWLINE query = query & (NEWLINE (db.questions.autograde == "unittest")NEWLINE | db.questions.question_type.contains(auto_gradable_q, all=False)NEWLINE )NEWLINE res = db(query).select(db.questions.name)NEWLINE logger.debug(f"Query was {db._lastsql}")NEWLINE if res:NEWLINE questionlist = [row.name for row in res]NEWLINE else:NEWLINE questionlist = []NEWLINE logger.error(f"No questions found for proficiency {prof}")NEWLINE return json.dumps(f"<p>No Questions found for proficiency: {prof}</p>")NEWLINENEWLINE logger.debug(f"is_ab is {is_ab}")NEWLINE if is_ab:NEWLINENEWLINE res = db(NEWLINE (db.user_experiment.sid == auth.user.username)NEWLINE & (db.user_experiment.experiment_id == is_ab)NEWLINE ).select(orderby=db.user_experiment.id)NEWLINENEWLINE if not res:NEWLINE exp_group = random.randrange(2)NEWLINE db.user_experiment.insert(NEWLINE sid=auth.user.username, experiment_id=is_ab, exp_group=exp_groupNEWLINE )NEWLINE logger.debug(f"added {auth.user.username} to {is_ab} group 
{exp_group}")NEWLINENEWLINE else:NEWLINE exp_group = res[0].exp_groupNEWLINENEWLINE logger.debug(f"experimental group is {exp_group}")NEWLINENEWLINE prev_selection = (NEWLINE db(NEWLINE (db.selected_questions.sid == auth.user.username)NEWLINE & (db.selected_questions.selector_id == selector_id)NEWLINE )NEWLINE .select()NEWLINE .first()NEWLINE )NEWLINENEWLINE if prev_selection:NEWLINE questionid = prev_selection.selected_idNEWLINE else:NEWLINE questionid = questionlist[exp_group]NEWLINENEWLINE if not is_ab:NEWLINE poss = set()NEWLINE if not_seen_ever:NEWLINE seenq = db(NEWLINE (db.useinfo.sid == auth.user.username)NEWLINE & (db.useinfo.div_id.contains(questionlist, all=False))NEWLINE ).select(db.useinfo.div_id)NEWLINE seen = set([x.div_id for x in seenq])NEWLINE poss = set(questionlist)NEWLINE questionlist = list(poss - seen)NEWLINENEWLINE if len(questionlist) == 0 and len(poss) > 0:NEWLINE questionlist = list(poss)NEWLINENEWLINE htmlsrc = ""NEWLINENEWLINE prev_selection = (NEWLINE db(NEWLINE (db.selected_questions.sid == auth.user.username)NEWLINE & (db.selected_questions.selector_id == selector_id)NEWLINE )NEWLINE .select()NEWLINE .first()NEWLINE )NEWLINENEWLINE if prev_selection:NEWLINE questionid = prev_selection.selected_idNEWLINE else:NEWLINE # Eliminate any previous exam questions for this studentNEWLINE prev_questions = db(db.selected_questions.sid == auth.user.username).select(NEWLINE db.selected_questions.selected_idNEWLINE )NEWLINE prev_questions = set([row.selected_id for row in prev_questions])NEWLINE possible = set(questionlist)NEWLINE questionlist = list(possible - prev_questions)NEWLINE if questionlist:NEWLINE questionid = random.choice(questionlist)NEWLINE else:NEWLINE # If there are no questions left we should still return a random question.NEWLINE questionid = random.choice(list(possible))NEWLINENEWLINE logger.debug(f"toggle is {toggle}")NEWLINE if toggle:NEWLINE prev_selection = (NEWLINE db(NEWLINE (db.selected_questions.sid == auth.user.username)NEWLINE & (db.selected_questions.selector_id == selector_id)NEWLINE )NEWLINE .select()NEWLINE .first()NEWLINE )NEWLINE if prev_selection:NEWLINE questionid = prev_selection.selected_idNEWLINE else:NEWLINE questionid = request.vars["questions"].split(",")[0]NEWLINE # else:NEWLINE # logger.error(NEWLINE # f"Question ID '{questionid}' not found in select question list of '{selector_id}'."NEWLINE # )NEWLINE # return json.dumps(NEWLINE # f"<p>Question ID '{questionid}' not found in select question list of '{selector_id}'.</p>"NEWLINE # )NEWLINENEWLINE res = db((db.questions.name == questionid)).select(db.questions.htmlsrc).first()NEWLINENEWLINE if res and not prev_selection:NEWLINE qid = db.selected_questions.insert(NEWLINE selector_id=selector_id,NEWLINE sid=auth.user.username,NEWLINE selected_id=questionid,NEWLINE points=points,NEWLINE )NEWLINE if not qid:NEWLINE logger.error(NEWLINE f"Failed to insert a selected question for {selector_id} and {auth.user.username}"NEWLINE )NEWLINE else:NEWLINE logger.debug(NEWLINE f"Did not insert a record for {selector_id}, {questionid} Conditions are {res} QL: {questionlist} PREV: {prev_selection}"NEWLINE )NEWLINENEWLINE if res and res.htmlsrc:NEWLINE htmlsrc = res.htmlsrcNEWLINE else:NEWLINE logger.error(NEWLINE f"HTML Source not found for {questionid} in course {auth.user.course_name} for {auth.user.username}"NEWLINE )NEWLINE htmlsrc = "<p>No preview available</p>"NEWLINE return json.dumps(htmlsrc)[email protected]_login()NEWLINEdef update_selected_question():NEWLINE """NEWLINE This endpoint is 
used by the selectquestion problems that allow theNEWLINE student to select the problem they work on. For example they may haveNEWLINE a programming problem that can be solved with writing code, or theyNEWLINE can switch to a parsons problem if necessary.NEWLINENEWLINE Caller must provide:NEWLINE * ``metaid`` -- the id of the selectquestionNEWLINE * ``selected`` -- the id of the real question chosen by the studentNEWLINE """NEWLINE sid = auth.user.usernameNEWLINE selector_id = request.vars.metaidNEWLINE selected_id = request.vars.selectedNEWLINE logger.debug(f"USQ - {selector_id} --> {selected_id} for {sid}")NEWLINE db.selected_questions.update_or_insert(NEWLINE (db.selected_questions.selector_id == selector_id)NEWLINE & (db.selected_questions.sid == sid),NEWLINE selected_id=selected_id,NEWLINE selector_id=selector_id,NEWLINE sid=sid,NEWLINE )NEWLINE |
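# Aside: the ``update_or_insert`` call above is what keeps exactly one
# selected_questions row per (sid, selector_id) pair.  Below is a minimal
# standalone sketch of that semantics using pyDAL (web2py's DAL) against an
# in-memory SQLite database; the ``demo_`` names are illustrative and not part
# of the server.
from pydal import DAL, Field

demo_db = DAL("sqlite:memory")
demo_db.define_table(
    "selected_questions",
    Field("sid"),
    Field("selector_id"),
    Field("selected_id"),
)


def demo_select_question(sid, selector_id, selected_id):
    # Match on (selector_id, sid); update the row if found, insert otherwise.
    demo_db.selected_questions.update_or_insert(
        (demo_db.selected_questions.selector_id == selector_id)
        & (demo_db.selected_questions.sid == sid),
        selected_id=selected_id,
        selector_id=selector_id,
        sid=sid,
    )


demo_select_question("alice", "meta_1", "question_a")
demo_select_question("alice", "meta_1", "question_b")  # updates, no duplicate row
assert demo_db(demo_db.selected_questions.sid == "alice").count() == 1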
# SETTING
import os

encoding_ = 'utf_8_sig'
time_zone = 'Asia/Shanghai'
pool_max_workers = 8
default_options_ = {
    'encoding': encoding_,
}
base_dir = os.path.dirname(os.path.abspath(__file__)).replace("\\", "/")
static_dir = os.path.join(base_dir, 'static').replace("\\", "/")
default_wkhtmltopdf_path = f'{base_dir}/bin/wkhtmltopdf.exe'
default_wkhtmltoimage_path = f'{base_dir}/bin/wkhtmltoimage.exe'

# default_wkhtmltopdf_path = r'D:/wkhtmltopdf/bin/wkhtmltopdf.exe'
# default_wkhtmltoimage_path = r'D:/wkhtmltopdf/bin/wkhtmltoimage.exe'

echo_info = '{}{} → {} exported successfully'
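# A hypothetical illustration of the echo_info template above: the three
# positional slots presumably take a path prefix, a file name, and the export
# target, but the module itself doesn't pin that down.
print(echo_info.format('static/', 'report.html', 'PDF'))
# -> static/report.html → PDF exported successfully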
"""This module contains miscellaneous utilities."""NEWLINENEWLINE__author__ = "Damián Silvani"NEWLINE__copyright__ = "Dymaxion Labs"NEWLINE__license__ = "MIT"NEWLINENEWLINENEWLINEdef flatten(list):NEWLINE return [item for sublist in list for item in sublist]NEWLINE |
#!/usr/bin/python
# -*- coding: utf-8 -*-
##############################################################################
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.    #
#                                                                            #
# Licensed under the Amazon Software License (the "License"). You may not    #
# use this file except in compliance with the License. A copy of the         #
# License is located at                                                      #
#                                                                            #
#     http://aws.amazon.com/asl/                                             #
#                                                                            #
# or in the "license" file accompanying this file. This file is distributed  #
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,         #
# express or implied. See the License for the specific language governing    #
# permissions and limitations under the License.                             #
##############################################################################
from botocore.exceptions import ClientError
import boto3
import os
import logging

COLLECTION_NAME = os.environ['RekognitionCollectionName']
DYNAMODB_TABLE_NAME = os.environ['DynamoDBTableName']
LOG_LEVEL = os.environ['LogLevel']
SEND_ANONYMOUS_DATA = os.environ['SendAnonymousData']

dynamodb = boto3.client('dynamodb')
s3 = boto3.client('s3')
rekognition = boto3.client('rekognition')

logger = logging.getLogger()
logger.setLevel(LOG_LEVEL)


def lambda_handler(event, context):
    logger.info('Invoked the IndexFace Lambda function.')
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = event['Records'][0]['s3']['object']['key']

    name = os.path.splitext(os.path.basename(key))[0]

    # Register a face image with Rekognition
    logger.info('Register a face image to Rekognition.')
    response = rekognition.index_faces(
        Image={
            "S3Object": {
                "Bucket": bucket,
                "Name": key
            }
        },
        CollectionId=COLLECTION_NAME
    )

    if response['ResponseMetadata']['HTTPStatusCode'] != 200 or len(response['FaceRecords']) == 0:
        raise RuntimeError('Failed to register a face with Rekognition.')

    faceId = response['FaceRecords'][0]['Face']['FaceId']

    # Insert the face data into DynamoDB
    logger.info('Insert the face ID to the DynamoDB table.')
    try:
        response = dynamodb.put_item(
            TableName=DYNAMODB_TABLE_NAME,
            Item={
                'RekognitionId': {'S': faceId},
                'Name': {'S': name},
            }
        )
    except ClientError as err:
        # Roll back the Rekognition index if the DynamoDB write fails.
        # Note: delete_faces expects a list of IDs via ``FaceIds``; there is
        # no singular ``FaceId`` argument in the Rekognition API.
        rekognition.delete_faces(
            CollectionId=COLLECTION_NAME,
            FaceIds=[faceId]
        )
        raise err

    # If the face image was registered successfully, delete the image from S3.
    s3.delete_object(Bucket=bucket, Key=key)
    logger.info('Registered a face image successfully.')
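# For a quick local exercise of the handler: only the bucket name and object
# key of the first S3 record are read, so a trimmed-down event like the one
# below is enough.  The bucket and key are placeholders; actually invoking
# the handler performs real Rekognition, DynamoDB, and S3 calls.
fake_event = {
    "Records": [
        {
            "s3": {
                "bucket": {"name": "my-face-images-bucket"},  # placeholder
                "object": {"key": "uploads/jane_doe.jpg"},    # placeholder
            }
        }
    ]
}
# lambda_handler(fake_event, None)  # would index the face under "jane_doe"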
# Copyright (C) 2013 Jaedyn K. Draper
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

"""
**Provides info about the terminal the makefile's being run in**
"""

import platform
import sys

if platform.system( ) == "Windows":
    import ctypes
    import struct
else:
    import curses


class TermColor( object ):
    """
    Abstracts color in a cross-platform way. Values and types will differ based on platform.
    """
    if platform.system( ) == "Windows":
        DGREY = 0 | 8
        RED = 4 | 8
        GREEN = 2 | 8
        YELLOW = 2 | 4 | 8
        BLUE = 1 | 8
        MAGENTA = 1 | 4 | 8
        CYAN = 1 | 2 | 8
        WHITE = 1 | 2 | 4 | 8
        BLACK = 0
        DRED = 4
        DGREEN = 2
        DYELLOW = 2 | 4
        DBLUE = 1
        DMAGENTA = 1 | 4
        DCYAN = 1 | 2
        LGREY = 1 | 2 | 4
    else:
        DGREY = "1;30"
        RED = "1;31"
        GREEN = "1;32"
        YELLOW = "1;33"
        BLUE = "1;34"
        MAGENTA = "1;35"
        CYAN = "1;36"
        WHITE = "1;37"
        BLACK = "22;30"
        DRED = "22;31"
        DGREEN = "22;32"
        DYELLOW = "22;33"
        DBLUE = "22;34"
        DMAGENTA = "22;35"
        DCYAN = "22;36"
        LGREY = "22;37"


class TermInfo( object ):
    """
    Provides access to cross-platform methods of getting terminal info and interacting with
    colored output.
    """
    @staticmethod
    def ResetColor( ):
        """
        Reset the color of the terminal to its default value
        """
        if platform.system( ) == "Windows":
            ctypes.windll.kernel32.SetConsoleTextAttribute( ctypes.windll.kernel32.GetStdHandle( -11 ), TermInfo._reset )
        else:
            sys.stdout.write( "\033[0m" )


    @staticmethod
    def SetColor( color ):
        """
        Set the color of the terminal

        :param color: The desired color
        :type color: TermColor value
        """
        if platform.system( ) == "Windows":
            ctypes.windll.kernel32.SetConsoleTextAttribute( ctypes.windll.kernel32.GetStdHandle( -11 ), color )
        else:
            sys.stdout.write( "\033[{}m".format( color ) )


    @staticmethod
    def GetNumColumns( ):
        """
        Retrieve the current column count for this terminal

        :return: Number of columns
        :rtype: int
        """
        if platform.system( ) == "Windows":
            csbi = ctypes.create_string_buffer( 22 )
            res = ctypes.windll.kernel32.GetConsoleScreenBufferInfo( ctypes.windll.kernel32.GetStdHandle( -11 ), csbi )
            if res:
                (bufx, bufy, curx, cury, wattr, left, top, right, bottom, maxx, maxy) = struct.unpack( "hhhhHhhhhhh", csbi.raw )
                return right - left
            else:
                return 0

        else:
            if TermInfo.cursesValid:
                return curses.tigetnum( 'cols' )
            else:
                return 0


    @staticmethod
    def SupportsColor( ):
        """
        Check whether the active terminal supports colors.

        :return: Whether or not color is supported
        :rtype: bool
        """
        if platform.system( ) == "Windows":
            return TermInfo._color_supported
        else:
            if TermInfo.cursesValid:
                return (curses.tigetnum( "colors" ) >= 8)
            else:
                return False


    @staticmethod
    def GetDefaultColor( ):
        """
        Get the default color for this terminal

        :return: The default color
        :rtype: TermColor value
        """
        if platform.system( ) == "Windows":
            # Based on IPython's winconsole.py, written by Alexander Belchenko
            import struct

            csbi = ctypes.create_string_buffer( 22 )
            res = ctypes.windll.kernel32.GetConsoleScreenBufferInfo( ctypes.windll.kernel32.GetStdHandle( -11 ), csbi )
            assert res

            (bufx, bufy, curx, cury, wattr,
             left, top, right, bottom, maxx, maxy) = struct.unpack( "hhhhHhhhhhh", csbi.raw )
            return wattr
        else:
            return "0"


if platform.system( ) == "Windows":
    # -11 = STD_OUTPUT_HANDLE
    try:
        #TermInfo._handle = ctypes.windll.kernel32.GetStdHandle( -11 )
        TermInfo._reset = TermInfo.GetDefaultColor( )
    except:
        TermInfo._color_supported = False
    else:
        TermInfo._color_supported = True
else:
    try:
        curses.setupterm( )
    except:
        TermInfo.cursesValid = False
    else:
        TermInfo.cursesValid = True
# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
from typing import Dict, Optional
import logging

from syne_tune.optimizer.schedulers.searchers.gp_searcher_factory import (
    gp_multifidelity_searcher_factory,
    gp_multifidelity_searcher_defaults,
)
from syne_tune.optimizer.schedulers.searchers.utils.default_arguments import (
    check_and_merge_defaults,
)
from syne_tune.optimizer.schedulers.searchers.gp_fifo_searcher import (
    GPFIFOSearcher,
    decode_state,
)
from syne_tune.optimizer.schedulers.searchers.gp_searcher_utils import (
    ResourceForAcquisitionMap,
)
from syne_tune.optimizer.schedulers.searchers.bayesopt.datatypes.common import (
    PendingEvaluation,
    MetricValues,
)
from syne_tune.optimizer.schedulers.searchers.bayesopt.models.gpiss_model import (
    GaussProcAdditiveModelFactory,
)

logger = logging.getLogger(__name__)

__all__ = ["GPMultiFidelitySearcher"]


class GPMultiFidelitySearcher(GPFIFOSearcher):
    """Gaussian process Bayesian optimization for Hyperband scheduler

    This searcher must be used with `HyperbandScheduler`. It provides a novel
    combination of Bayesian optimization, based on a Gaussian process surrogate
    model, with Hyperband scheduling. In particular, observations across
    resource levels are modelled jointly. It is created along with the
    scheduler, using `searcher='bayesopt'`.

    Most of the `GPFIFOSearcher` comments apply here as well.
    In multi-fidelity HPO, we optimize a function f(x, r), x the configuration,
    r the resource (or time) attribute. The latter must be a positive integer.
    In most applications, `resource_attr` == 'epoch', and the resource is the
    number of epochs already trained.

    We model the function f(x, r) jointly over all resource levels r at which
    it is observed (but see `searcher_data` in `HyperbandScheduler`). The kernel
    and mean function of our surrogate model are over (x, r). The surrogate
    model is selected by `gp_resource_kernel`. More details about the supported
    kernels are given in:

        Tiao, Klein, Lienart, Archambeau, Seeger (2020)
        Model-based Asynchronous Hyperparameter and Neural Architecture Search
        https://arxiv.org/abs/2003.10865

    The acquisition function (EI), which is optimized in `get_config`, is
    obtained by fixing the resource level r to a value which is determined
    depending on the current state. If `resource_acq` == 'bohb', r is the
    largest value <= max_t where we have seen >= dimension(x) metric values. If
    `resource_acq` == 'first', r is the first milestone which config x would
    reach when started.

    Parameters
    ----------
    config_space : Dict
        Configuration space. Constant parameters are filtered out
    metric : str
        Name of reward attribute reported by evaluation function
    points_to_evaluate : List[Dict] or None
        List of configurations to be evaluated initially (in that order).
        Each config in the list can be partially specified, or even be an
        empty dict. For each hyperparameter not specified, the default value
        is determined using a midpoint heuristic.
        If None (default), this is mapped to [dict()], a single default config
        determined by the midpoint heuristic. If [] (empty list), no initial
        configurations are specified.
    random_seed_generator : RandomSeedGenerator (optional)
        If given, the random_seed for `random_state` is obtained from there,
        otherwise `random_seed` is used
    random_seed : int (optional)
        This is used if `random_seed_generator` is not given.
    resource_attr : str
        Name of resource attribute in reports, equal to `resource_attr` of
        scheduler
    debug_log : bool (default: False)
        If True, both searcher and scheduler output an informative log, from
        which the configs chosen and decisions being made can be traced.
    cost_attr : str (optional)
        Name of cost attribute in data obtained from reporter (e.g., elapsed
        training time). Needed only by cost-aware searchers.
    model : str
        Selects surrogate model (learning curve model) to be used. Choices
        are 'gp_multitask' (default), 'gp_issm', 'gp_expdecay'
    num_init_random : int
        See :class:`GPFIFOSearcher`
    num_init_candidates : int
        See :class:`GPFIFOSearcher`
    num_fantasy_samples : int
        See :class:`GPFIFOSearcher`
    no_fantasizing : bool
        See :class:`GPFIFOSearcher`
    initial_scoring : str
        See :class:`GPFIFOSearcher`
    skip_local_optimization : str
        See :class:`GPFIFOSearcher`
    opt_nstarts : int
        See :class:`GPFIFOSearcher`
    opt_maxiter : int
        See :class:`GPFIFOSearcher`
    opt_warmstart : bool
        See :class:`GPFIFOSearcher`
    opt_verbose : bool
        See :class:`GPFIFOSearcher`
    opt_skip_init_length : int
        See :class:`GPFIFOSearcher`
    opt_skip_period : int
        See :class:`GPFIFOSearcher`
    map_reward : str or MapReward
        See :class:`GPFIFOSearcher`
    gp_resource_kernel : str
        Only relevant for `model == 'gp_multitask'`.
        Surrogate model over criterion function f(x, r), x the config, r the
        resource. Note that x is encoded to be a vector with entries in [0, 1],
        and r is linearly mapped to [0, 1], while the criterion data is
        normalized to mean 0, variance 1. The reference above provides details
        on the models supported here. For the exponential decay kernel, the
        base kernel over x is Matern 5/2 ARD.
        Values are 'matern52' (Matern 5/2 ARD kernel over [x, r]),
        'matern52-res-warp' (Matern 5/2 ARD kernel over [x, r], with additional
        warping on r),
        'exp-decay-sum' (exponential decay kernel, with delta=0. This is the
        additive kernel from Freeze-Thaw Bayesian Optimization),
        'exp-decay-delta1' (exponential decay kernel, with delta=1),
        'exp-decay-combined' (exponential decay kernel, with delta in [0, 1]
        a hyperparameter).
    resource_acq : str
        Only relevant for `model == 'gp_multitask'`.
        Determines how the EI acquisition function is used (see above).
        Values: 'bohb', 'first'
    opt_skip_num_max_resource : bool
        Parameter for hyperparameter fitting, skip predicate. If True, and
        number of observations above `opt_skip_init_length`, fitting is done
        only when there is a new datapoint at r = max_t, and skipped otherwise.
    issm_gamma_one : bool
        Only relevant for `model == 'gp_issm'`.
        If True, the gamma parameter of the ISSM is fixed to 1, otherwise it
        is optimized over.
    expdecay_normalize_inputs : bool
        Only relevant for `model == 'gp_expdecay'`.
        If True, resource values r are normalized to [0, 1] as input to the
        exponential decay surrogate model.

    See Also
    --------
    GPFIFOSearcher
    """

    def _create_kwargs_int(self, kwargs):
        _kwargs = check_and_merge_defaults(
            kwargs, *gp_multifidelity_searcher_defaults(), dict_name="search_options"
        )
        kwargs_int = gp_multifidelity_searcher_factory(**_kwargs)
        self._copy_kwargs_to_kwargs_int(kwargs_int, kwargs)
        return kwargs_int

    def _call_create_internal(self, kwargs_int):
        """
        Part of constructor which can be different in subclasses
        """
        k = "resource_for_acquisition"
        self.resource_for_acquisition = kwargs_int.get(k)
        if self.resource_for_acquisition is not None:
            kwargs_int.pop(k)
            assert isinstance(self.resource_for_acquisition, ResourceForAcquisitionMap)
        self.config_space_ext = kwargs_int.pop("config_space_ext")
        self._create_internal(**kwargs_int)

    def configure_scheduler(self, scheduler):
        from syne_tune.optimizer.schedulers.hyperband import HyperbandScheduler

        super().configure_scheduler(scheduler)
        assert isinstance(
            scheduler, HyperbandScheduler
        ), "This searcher requires HyperbandScheduler scheduler"
        self._resource_attr = scheduler._resource_attr
        model_factory = self.state_transformer.model_factory
        if isinstance(model_factory, GaussProcAdditiveModelFactory):
            assert scheduler.searcher_data == "all", (
                "For an additive Gaussian learning curve model (model="
                + "'gp_issm' or model='gp_expdecay' in search_options), you "
                + "need to set searcher_data='all' when creating the "
                + "HyperbandScheduler"
            )

    def _hp_ranges_in_state(self):
        return self.config_space_ext.hp_ranges_ext

    def _config_ext_update(self, config, result):
        resource = int(result[self._resource_attr])
        return self.config_space_ext.get(config, resource)

    def _metric_val_update(self, crit_val: float, result: Dict) -> MetricValues:
        resource = result[self._resource_attr]
        return {str(resource): crit_val}

    def _trial_id_string(self, trial_id: str, result: Dict):
        """
        For multi-fidelity, we also want to output the resource level
        """
        return f"{trial_id}:{result[self._resource_attr]}"

    def register_pending(
        self, trial_id: str, config: Optional[Dict] = None, milestone=None
    ):
        """
        Registers trial as pending for resource level `milestone`. This means
        the corresponding evaluation task is running and should reach that
        level later, when update is called for it.

        """
        assert (
            milestone is not None
        ), "This searcher works with a multi-fidelity scheduler only"
        # It is OK for the candidate already to be registered as pending, in
        # which case we do nothing
        state = self.state_transformer.state
        if not state.is_pending(trial_id, resource=milestone):
            assert not state.is_labeled(trial_id, resource=milestone), (
                f"Trial trial_id = {trial_id} already has observation at "
                + f"resource = {milestone}, so cannot be pending there"
            )
            self.state_transformer.append_trial(
                trial_id, config=config, resource=milestone
            )

    def _fix_resource_attribute(self, **kwargs):
        """
        Determines target resource level r at which the current call of
        `get_config` operates. This is done based on
        `resource_for_acquisition`. This resource level is then set in
        `config_space_ext.hp_ranges_ext.value_for_last_pos`. This does the
        job for GP surrogate models. But if in subclasses, other surrogate
        models are involved, they need to get informed separately (see
        :class:`CostAwareGPMultiFidelitySearcher` for an example).

        :param kwargs:
        :return:
        """
        if self.resource_for_acquisition is not None:
            # Only have to do this for 'gp_multitask' model
            state = self.state_transformer.state
            # BO should only search over configs at resource level
            # target_resource
            if state.trials_evaluations:
                target_resource = self.resource_for_acquisition(state, **kwargs)
            else:
                # Any valid value works here:
                target_resource = self.config_space_ext.resource_attr_range[0]
            self.config_space_ext.hp_ranges_ext.value_for_last_pos = target_resource
            if self.debug_log is not None:
                self.debug_log.append_extra(
                    f"Score values computed at target_resource = {target_resource}"
                )

    def _postprocess_config(self, config: dict) -> dict:
        # If `config` is normal (not extended), nothing is removed
        return self.config_space_ext.remove_resource(config)

    def evaluation_failed(self, trial_id: str):
        # Remove all pending evaluations for trial
        self.cleanup_pending(trial_id)
        # Mark config as failed (which means it will not be suggested again)
        self.state_transformer.mark_trial_failed(trial_id)

    def cleanup_pending(self, trial_id: str):
        """
        Removes all pending evaluations for a trial.
        This should be called after an evaluation terminates. For various
        reasons (e.g., termination due to convergence), pending candidates
        for this evaluation may still be present.
        It is also called for a failed evaluation.

        """

        def filter_pred(x: PendingEvaluation) -> bool:
            return x.trial_id == trial_id

        self.state_transformer.filter_pending_evaluations(filter_pred)

    def remove_case(self, trial_id: str, **kwargs):
        resource = kwargs[self._resource_attr]
        self.state_transformer.remove_observed_case(trial_id, key=str(resource))

    def clone_from_state(self, state):
        # Create clone with mutable state taken from 'state'
        init_state = decode_state(state["state"], self._hp_ranges_in_state())
        skip_optimization = state["skip_optimization"]
        model_factory = self.state_transformer.model_factory
        # Call internal constructor
        new_searcher = GPMultiFidelitySearcher(
            **self._new_searcher_kwargs_for_clone(),
            model_factory=model_factory,
            init_state=init_state,
            skip_optimization=skip_optimization,
            config_space_ext=self.config_space_ext,
            resource_for_acquisition=self.resource_for_acquisition,
        )
        new_searcher._restore_from_state(state)
        # Invalidate self (must not be used afterwards)
        self.state_transformer = None
        return new_searcher
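# A minimal construction sketch: the searcher is not instantiated directly but
# created by HyperbandScheduler via searcher='bayesopt', as the docstring
# describes.  The hyperparameter names, ranges, metric, and max_t below are
# placeholder assumptions for illustration only.
from syne_tune.config_space import loguniform, randint, uniform
from syne_tune.optimizer.schedulers import HyperbandScheduler

example_config_space = {
    "learning_rate": loguniform(1e-4, 1e-1),
    "momentum": uniform(0.1, 0.99),
    "batch_size": randint(16, 256),
}

example_scheduler = HyperbandScheduler(
    example_config_space,
    searcher="bayesopt",  # internally builds a GPMultiFidelitySearcher
    search_options={
        "model": "gp_multitask",
        "gp_resource_kernel": "exp-decay-sum",
        "resource_acq": "bohb",
    },
    metric="accuracy",
    mode="max",
    resource_attr="epoch",
    max_t=27,
)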
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from collections import OrderedDict

import numpy as np
from landlab import RasterModelGrid
from landlab.bmi.bmi_bridge import TimeStepper

from compaction.landlab import Compact

from .bathymetry import BathymetryReader
from .fluvial import Fluvial

# from .raster_model import RasterModel
from .input_reader import load_config
from .output_writer import OutputWriter
from .sea_level import SeaLevelTimeSeries, SinusoidalSeaLevel
from .sediment_flexure import SedimentFlexure
from .shoreline import ShorelineFinder
from .submarine import SubmarineDiffuser
from .subsidence import SubsidenceTimeSeries


class SequenceModel:

    DEFAULT_PARAMS = {
        "grid": {
            "shape": [3, 100],
            "xy_spacing": 100.0,
            "xy_of_lower_left": [0.0, 0.0],
            "bc": {"top": "closed", "bottom": "closed"},
        },
        "clock": {"start": 0.0, "stop": 20000.0, "step": 100.0},
        "output": {
            "interval": 10,
            "filepath": "sequence.nc",
            "clobber": True,
            "rows": [1],
            "fields": ["sediment_deposit__thickness"],
        },
        "submarine_diffusion": {
            "plain_slope": 0.0008,
            "wave_base": 60.0,
            "shoreface_height": 15.0,
            "alpha": 0.0005,
            "shelf_slope": 0.001,
            "sediment_load": 3.0,
            "load_sealevel": 0.0,
            "basin_width": 500000.0,
        },
        "sea_level": {
            "amplitude": 10.0,
            "wave_length": 1000.0,
            "phase": 0.0,
            "linear": 0.0,
        },
        "subsidence": {"filepath": "subsidence.csv"},
        "flexure": {"method": "flexure", "rho_mantle": 3300.0, "isostasytime": 0},
        "sediments": {
            "layers": 2,
            "sand": 1.0,
            "mud": 0.006,
            "sand_density": 2650.0,
            "mud_density": 2720.0,
            "sand_frac": 0.5,
            "hemipelagic": 0.0,
        },
        "bathymetry": {"filepath": "bathymetry.csv", "kind": "linear"},
        "compaction": {
            "c": 5.0e-08,
            "porosity_max": 0.5,
            "porosity_min": 0.01,
            "rho_grain": 2650.0,
            "rho_void": 1000.0,
        },
    }

    LONG_NAME = {"z": "topographic__elevation", "z0": "bedrock_surface__elevation"}

    def __init__(
        self,
        grid=None,
        clock=None,
        output=None,
        submarine_diffusion=None,
        sea_level=None,
        subsidence=None,
        flexure=None,
        sediments=None,
        bathymetry=None,
        compaction=None,
    ):
        config = {
            "grid": grid,
            "clock": clock,
            "output": output,
            "submarine_diffusion": submarine_diffusion,
            "sea_level": sea_level,
            "subsidence": subsidence,
            "flexure": flexure,
            "sediments": sediments,
            "bathymetry": bathymetry,
            "compaction": compaction,
        }
        missing_kwds = [kwd for kwd, value in config.items() if value is None]
        if missing_kwds:
            raise ValueError(
                "missing required config parameters for SequenceModel ({0})".format(
                    ", ".join(missing_kwds)
                )
            )

        self._clock = TimeStepper(**clock)
        self._grid = RasterModelGrid.from_dict(grid)

        self._components = OrderedDict()
        if output:
            self._output = OutputWriter(self._grid, **output)
            self._components["output"] = self._output

        BathymetryReader(self.grid, **bathymetry).run_one_step()

        z = self.grid.at_node["topographic__elevation"]
        z0 = self.grid.add_empty("bedrock_surface__elevation", at="node")
        z0[:] = z - 100.0

        self.grid.at_grid["x_of_shore"] = np.nan
        self.grid.at_grid["x_of_shelf_edge"] = np.nan

        self.grid.event_layers.add(
            100.0,
            age=self.clock.start,
            water_depth=-z0[self.grid.core_nodes],
            t0=10.0,
            percent_sand=0.5,
            porosity=0.5,
        )

        if "filepath" in sea_level:
            self._sea_level = SeaLevelTimeSeries(
                self.grid, sea_level.pop("filepath"), start=clock["start"], **sea_level
            )
        else:
            self._sea_level = SinusoidalSeaLevel(
                self.grid, start=clock["start"], **sea_level
            )

        self._subsidence = SubsidenceTimeSeries(self.grid, **subsidence)

        self._submarine_diffusion = SubmarineDiffuser(self.grid, **submarine_diffusion)
        self._fluvial = Fluvial(
            self.grid,
            0.5,
            start=0,
            sediment_load=submarine_diffusion["sediment_load"],
            plain_slope=submarine_diffusion["plain_slope"],
            hemipelagic=sediments["hemipelagic"],
        )
        self._flexure = SedimentFlexure(self.grid, **flexure)
        self._shoreline = ShorelineFinder(self.grid, alpha=submarine_diffusion["alpha"])
        self._compaction = Compact(self.grid, **compaction)

        self._components.update(
            sea_level=self._sea_level,
            subsidence=self._subsidence,
            compaction=self._compaction,
            submarine_diffusion=self._submarine_diffusion,
            fluvial=self._fluvial,
            flexure=self._flexure,
            shoreline=self._shoreline,
        )

    @property
    def grid(self):
        return self._grid

    @property
    def clock(self):
        return self._clock

    @classmethod
    def from_path(cls, filepath, fmt=None):
        return cls(**load_config(filepath, fmt=fmt))

    def set_params(self, params):
        for component, values in params.items():
            c = self._components[component]
            for param, value in values.items():
                setattr(c, param, value)

    def run_one_step(self, dt=None, output=None):
        """Run each component for one time step."""
        dt = dt or self.clock.step
        self.clock.dt = dt
        self.clock.advance()

        self.advance_components(dt)

    def run(self, output=None):
        """Run the model until complete."""
        try:
            while 1:
                self.run_one_step()
        except StopIteration:
            pass

    def advance_components(self, dt):
        for component in self._components.values():
            component.run_one_step(dt)

        dz = self.grid.at_node["sediment_deposit__thickness"]
        percent_sand = self.grid.at_node["delta_sediment_sand__volume_fraction"]
        water_depth = (
            self.grid.at_grid["sea_level__elevation"]
            - self.grid.at_node["topographic__elevation"]
        )

        self.grid.event_layers.add(
            dz[self.grid.node_at_cell],
            age=self.clock.time,
            water_depth=water_depth[self.grid.node_at_cell],
            t0=dz[self.grid.node_at_cell].clip(0.0),
            percent_sand=percent_sand[self.grid.node_at_cell],
            porosity=self._compaction.porosity_max,
        )

        try:
            self._n_archived_layers
        except AttributeError:
            self._n_archived_layers = 0

        if (
            self.grid.event_layers.number_of_layers - self._n_archived_layers
        ) % 20 == 0:
            self.grid.event_layers.reduce(
                self._n_archived_layers,
                self._n_archived_layers + 10,
                age=np.max,
                percent_sand=np.mean,
                porosity=np.mean,
                t0=np.sum,
                water_depth=np.mean,
            )
            self._n_archived_layers += 1
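# A sketch of driving the model, assuming the default parameter set above and
# that the bathymetry.csv / subsidence.csv files it references exist on disk.
if __name__ == "__main__":
    model = SequenceModel(**SequenceModel.DEFAULT_PARAMS)
    model.run_one_step()  # advance a single clock step...
    model.run()  # ...or run until the clock raises StopIteration at `stop`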
import operator
import re
from functools import reduce

from aocd import get_data


def rotate_once(data):
    return [''.join(r) for r in zip(*data[::-1])]


def rotate(data, rotation):
    if rotation >= 4:
        data = [r[::-1] for r in data]
        rotation -= 4
    while rotation > 0:
        rotation -= 1
        data = rotate_once(data)
    return data


class Tile:
    def __init__(self, id, data):
        self.id = id
        self.data = data

    def __repr__(self):
        return "<Tile: {}>".format(self.id)

    @staticmethod
    def parse_input(s):
        s = s.splitlines()
        id = int(re.findall(r"\d+", s[0])[0])
        data = s[1:]
        return Tile(id, data)

    def get_edges(self):
        yield self.data[0]
        yield self.data[-1]
        yield ''.join(r[0] for r in self.data)
        yield ''.join(r[-1] for r in self.data)
        yield self.data[0][::-1]
        yield self.data[-1][::-1]
        yield ''.join(r[0] for r in self.data)[::-1]
        yield ''.join(r[-1] for r in self.data)[::-1]

    def get_data(self, rotation):
        data = self.data.copy()
        return rotate(data, rotation)

    def get_edge(self, rotation, edge):
        data = self.get_data(rotation)
        if edge == 't':
            return data[0]
        elif edge == 'b':
            return data[-1]
        elif edge == 'l':
            return ''.join(r[0] for r in data)
        elif edge == 'r':
            return ''.join(r[-1] for r in data)


def part1(a):
    return reduce(operator.mul, (t.id for t in a if sum(
        any(e == e2 for t2 in a for e2 in t2.get_edges() if t2.id != t.id) for e in t.get_edges()) == 4))


def part2(a):
    tiles = {t.id: t for t in a}

    # Create the grid with the ids
    neighbors = {t.id: {t2.id for t2 in a for e2 in t2.get_edges() for e in t.get_edges() if t2.id != t.id and e == e2}
                 for t in a}
    grid = [[None for _ in range(12)] for _ in range(12)]
    # first corner
    grid[0][0] = next(k for k, v in neighbors.items() if len(v) == 2)
    # first edge piece
    grid[0][1] = neighbors[grid[0][0]].pop()
    neighbors[grid[0][1]].remove(grid[0][0])
    # first row
    for i in range(2, 12):
        grid[0][i] = next(n for n in neighbors[grid[0][i - 1]] if len(neighbors[n]) <= 3)
        neighbors[grid[0][i - 1]].remove(grid[0][i])
        neighbors[grid[0][i]].remove(grid[0][i - 1])
    # rest of the grid
    for r in range(1, 12):
        for c in range(12):
            grid[r][c] = neighbors[grid[r - 1][c]].pop()
            neighbors[grid[r][c]].remove(grid[r - 1][c])
            if c != 0:
                neighbors[grid[r][c - 1]].remove(grid[r][c])
                neighbors[grid[r][c]].remove(grid[r][c - 1])

    # Rotate and flip the tiles
    # first corner
    rotations = {grid[0][0]: next(r for r in range(8) if
                                  tiles[grid[0][0]].get_edge(r, 'r') in tiles[grid[0][1]].get_edges()
                                  and tiles[grid[0][0]].get_edge(r, 'b') in tiles[grid[1][0]].get_edges())}
    # first row
    for i in range(1, 12):
        rotations[grid[0][i]] = next(r for r in range(8)
                                     if tiles[grid[0][i]].get_edge(r, 'l')
                                     == tiles[grid[0][i - 1]].get_edge(rotations[grid[0][i - 1]], 'r'))
    # rest of the grid
    for x in range(1, 12):
        for y in range(12):
            rotations[grid[x][y]] = next(r for r in range(8)
                                         if tiles[grid[x][y]].get_edge(r, 't')
                                         == tiles[grid[x - 1][y]].get_edge(rotations[grid[x - 1][y]], 'b'))
    # assemble picture
    tile_data = [[tiles[t].get_data(rotations[t]) for t in row] for row in grid]
    picture = [''.join(tile_data[row // 8][column // 8][row % 8 + 1][column % 8 + 1]
                       for column in range(12 * 8)) for row in range(12 * 8)]
    # count sea monsters
    sea_monster = ["                  # ",
                   "#    ##    ##    ###",
                   " #  #  #  #  #  #   "]

    def count_sea_monsters(picture):
        count = 0
        for r in range(len(picture) - len(sea_monster) + 1):
            for c in range(len(picture[0]) - len(sea_monster[0]) + 1):
                if all(sea_monster[i][j] == ' ' or picture[r + i][c + j] == '#'
                       for i in range(len(sea_monster)) for j in range(len(sea_monster[0]))):
                    count += 1
        return count

    sea_monsters = max(count_sea_monsters(rotate(picture, r)) for r in range(8))
    return sum(r.count('#') for r in picture) - sea_monsters * sum(r.count('#') for r in sea_monster)


if __name__ == '__main__':
    data = get_data(day=20, year=2020)
    inp = [Tile.parse_input(s) for s in data.split('\n\n')]
    print(part1(inp))
    print(part2(inp))
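# Orientation convention check: rotate(data, r) with r in range(8) enumerates
# the dihedral group of the square -- r = 0..3 are quarter-turns, r = 4..7 are
# the same turns applied after a horizontal flip.  For an asymmetric tile all
# eight orientations are distinct (uses the rotate() defined above):
example_tile = ["ab",
                "cd"]
assert len({tuple(rotate(example_tile, r)) for r in range(8)}) == 8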
# coding: utf-8

import copy
import json
import logging
import os
import uuid
from io import BytesIO
from unittest.mock import patch

from django.contrib.auth import get_user_model
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.test import override_settings
from django.urls import reverse
from requests import codes
from rest_framework.test import APITestCase

from edd.rest.tests import EddApiTestCaseMixin
from edd.utilities import JSONDecoder
from main import models as edd_models
from main.tests import factory as main_factory

from . import factory
from .test_utils import CONTEXT_PATH

logger = logging.getLogger(__name__)

_TEST_FILES_DIR = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), "files", "generic_import"
)

_FBA_UPLOAD_PAYLOAD = {
    "category": 4,  # OD600
    "file_format": 5,  # generic
    "protocol": 3,  # OD600
    "x_units": 2,  # hours
    "y_units": 1,  # n/a
    "compartment": edd_models.Measurement.Compartment.UNKNOWN,
    "mime_type": "",
    "uuid": "69827386-d5f9-41b9-81d2-8469d735ed56",
}


def load_permissions(model, *codenames):
    ct = ContentType.objects.get_for_model(model)
    return list(Permission.objects.filter(content_type=ct, codename__in=codenames))


# use example files as the basis for DB records created by the fixture
@override_settings(MEDIA_ROOT=_TEST_FILES_DIR)
class ImportPatchTests(EddApiTestCaseMixin, APITestCase):
    fixtures = ["edd_file_importer/import_models"]

    @classmethod
    def setUpTestData(cls):
        super().setUpTestData()

        # create a test user and give it permission to write to the study
        User = get_user_model()
        cls.write_user = User.objects.create(username="study.writer.user")
        cls.unprivileged_user = User.objects.create(username="unprivileged_user")

        cls.user_write_study = main_factory.StudyFactory(name="User-writeable study")
        permissions = cls.user_write_study.userpermission_set
        permissions.update_or_create(
            permission_type=edd_models.UserPermission.WRITE, user=cls.write_user
        )
        cls.url = reverse(
            "edd.rest:study-imports-detail",
            kwargs={"study_pk": cls.user_write_study.pk, "pk": 15},
        )

    def test_modify_privileges(self):
        # TODO: eventually add more detail to permissions checks here.  Requires a lot more
        # complexity in the fixture, and we should be covered by more rigorous checks on
        # uploads

        # send the submit request to actually perform the import
        self.client.force_login(ImportPatchTests.unprivileged_user)
        response = self.client.patch(
            ImportPatchTests.url, data={"status": "Submitted"}, format="json"
        )
        self.assertEqual(response.status_code, codes.not_found)

    def test_final_submit(self):
        """
        Does a simple test that submits a "Ready" import defined in the fixture
        """
        # load expected Redis context data from file
        with factory.load_test_file(CONTEXT_PATH) as file:
            context_str = file.read()

        # mock the notification broker
        with patch("edd_file_importer.tasks.RedisBroker") as MockNotify:
            notify = MockNotify.return_value

            # mock the import broker
            with patch("edd_file_importer.rest.views.ImportBroker") as MockBroker:
                broker = MockBroker.return_value
                broker.load_context.return_value = context_str

                # mock the method that determines whether Celery code is called synchronously or
                # asynchronously.  TODO: this is a stopgap for replacing the legacy import task,
                # after which we can just mock the backend task...ATM we're chaining together
                # other tasks that complicate the mocking

                # mock the method that executes the final celery chain to perform the import
                with patch("celery.chain.delay") as submit_import:

                    # send the request to actually submit the import
                    self.client.force_login(self.write_user)
                    response = self.client.patch(
                        ImportPatchTests.url,
                        data={"status": "Submitted"},
                        format="json",
                    )

                    self.assertEqual(response.status_code, codes.accepted)

                    # test that the task was called
                    import_uuid = uuid.UUID("f464cca6-7370-4526-9718-be3ea55fea42")
                    submit_import.assert_called_once()

                    notify_msg = (
                        'Your import for file "FBA-OD-generic.xlsx" is submitted'
                    )
                    notify.notify.assert_called_once_with(
                        notify_msg,
                        tags=["import-status-update"],
                        payload={"status": "Submitted", "pk": 15, "uuid": import_uuid},
                    )
                    broker.add_page.assert_not_called()


class ImportUploadTests(EddApiTestCaseMixin, APITestCase):
    """
    Sets of tests to exercise the import upload step
    """

    fixtures = ["edd/rest/study_permissions"]

    @classmethod
    def setUpTestData(cls):
        super().setUpTestData()

        # get models from the fixture for studies with varying permission levels
        User = get_user_model()
        cls.superuser = User.objects.get(username="superuser")
        cls.staffuser = User.objects.get(username="staff.user")
        # not doing this in fixture because it requires knowing the IDs, which can vary per deploy
        cls.staffuser.user_permissions.add(
            *load_permissions(
                edd_models.Study, "add_study", "change_study", "delete_study"
            )
        )
        cls.unprivileged_user = User.objects.get(username="unprivileged_user")
        cls.readonly_user = User.objects.get(username="study.reader.user")
        cls.write_user = User.objects.get(username="study.writer.user")
        cls.write_group_user = User.objects.get(username="study.writer.group.user")

        # create another study with write permissions by only a single user
        cls.user_write_study = main_factory.StudyFactory(name="User-writeable study")
        permissions = cls.user_write_study.userpermission_set
        permissions.update_or_create(
            permission_type=edd_models.UserPermission.WRITE, user=cls.write_user
        )

    def setUp(self):
        super().setUp()

    def _upload_import_file(
        self,
        study_pk,
        file_path,
        form_data,
        user,
        exp_status=codes.accepted,
        initial_upload=True,
    ):
        upload = self._build_file_upload(file_path)

        if user:
            self.client.force_login(user)
        else:
            self.client.logout()

        # mock the celery task so we're testing just the view
        with patch("edd_file_importer.tasks.process_import_file.delay") as mock_task:

            # mock the cache so we can test writes to it
            with patch("edd_file_importer.tasks.RedisBroker") as MockNotify:
                notify = MockNotify.return_value
                url = reverse(
                    "edd.rest:study-imports-list", kwargs={"study_pk": study_pk}
                )
                response = self.client.post(
                    url, data={"file": upload, **form_data}, format="multipart"
                )

                # test the results of the synchronous upload request
                self.assertEqual(response.status_code, exp_status)
                response_json = json.loads(response.content, cls=JSONDecoder)

                # if upload was accepted, test that the file processing task was called as
                # expected
                if response.status_code == codes.accepted:
                    self.assertEqual(response_json["uuid"], form_data["uuid"])
                    import_pk = response_json["pk"]
                    requested_status = form_data.get("status", None)
                    mock_task.assert_called_with(
                        import_pk,
                        user.pk,
                        requested_status,
                        initial_upload=initial_upload,
                    )
                else:
                    mock_task.assert_not_called()
                    notify.notify.assert_not_called()
        return response_json

    def _build_file_upload(self, file_path):
        with open(file_path, "rb") as fp:
            upload = BytesIO(fp.read())
        upload.name = os.path.basename(file_path)  # get file name from path
        upload.content_type = (
            "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
        )
        return upload

    def test_upload_failure(self):
        """
        Tests that disallowed users aren't able to create an import on others' studies
        """
        file_path = factory.test_file_path("generic_import", "FBA-OD-generic.xlsx")
        study_pk = self.user_write_study.pk

        # use an unprivileged account to upload a file (should fail)
        disallowed_users = {
            None: study_pk,
            ImportUploadTests.unprivileged_user: study_pk,
            ImportUploadTests.readonly_user: study_pk,
            ImportUploadTests.staffuser: study_pk,
        }
        for user, study_pk in disallowed_users.items():
            exp_status = codes.not_found if user else codes.forbidden
            self._upload_import_file(
                study_pk, file_path, _FBA_UPLOAD_PAYLOAD, user, exp_status
            )

    def test_upload_success(self):
        """
        Tests that allowed users are able to create an import on studies they have access to
        """
        file_path = factory.test_file_path("generic_import", "FBA-OD-generic.xlsx")
        allowed_users = {
            ImportUploadTests.write_group_user: 22,  # group write study
            ImportUploadTests.write_user: ImportUploadTests.user_write_study.pk,
            ImportUploadTests.superuser: ImportUploadTests.user_write_study.pk,
            ImportUploadTests.unprivileged_user: 21,  # everyone write study
        }

        for user, study_pk in allowed_users.items():
            # create a new UUID for each import so they don't conflict
            payload = copy.copy(_FBA_UPLOAD_PAYLOAD)
            payload["uuid"] = str(uuid.uuid4())

            self._upload_import_file(study_pk, file_path, payload, user, codes.accepted)

    def test_categories(self):
        """
        Tests the categories returned by the rest back end
        """
        url = reverse("edd.rest:import_categories-list")
        self.client.force_login(ImportUploadTests.unprivileged_user)
        response = self.client.get(url, data={"ordering": "display_order"})
        self.assertEqual(response.status_code, codes.ok)
        actual = json.loads(response.content)
        with factory.load_test_file("import_categories.json") as file:
            expected = json.loads(file.read())
        self.assertEqual(expected, actual)
#!/usr/bin/env python2

#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#

from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import *
from test_framework.comptool import TestManager, TestInstance, RejectResult
from test_framework.blocktools import *
import time
from test_framework.key import CECKey
from test_framework.script import CScript, SignatureHash, SIGHASH_ALL, OP_TRUE, OP_FALSE

class PreviousSpendableOutput(object):
    def __init__(self, tx = CTransaction(), n = -1):
        self.tx = tx
        self.n = n  # the output we're spending

'''
This reimplements tests from the zumyj/FullBlockTestGenerator used
by the pull-tester.

We use the testing framework in which we expect a particular answer from
each test.
'''

class FullBlockTest(ComparisonTestFramework):

    ''' Can either run this test as 1 node with expected answers, or two and compare them.
        Change the "outcome" variable from each TestInstance object to only do the comparison. '''
    def __init__(self):
        self.num_nodes = 1
        self.block_heights = {}
        self.coinbase_key = CECKey()
        self.coinbase_key.set_secretbytes(b"horsebattery")
        self.coinbase_pubkey = self.coinbase_key.get_pubkey()
        self.block_time = int(time.time()) + 1
        self.tip = None
        self.blocks = {}

    def run_test(self):
        test = TestManager(self, self.options.tmpdir)
        test.add_all_connections(self.nodes)
        NetworkThread().start()  # Start up network handling in another thread
        sync_masternodes(self.nodes)
        test.run()

    def add_transactions_to_block(self, block, tx_list):
        [tx.rehash() for tx in tx_list]
        block.vtx.extend(tx_list)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        return block

    # Create a block on top of self.tip, and advance self.tip to point to the new block.
    # If spend is specified, then 1 satoshi will be spent from that to an
    # anyone-can-spend output, and the rest will go to fees.
    def next_block(self, number, spend=None, additional_coinbase_value=0, script=None):
        if self.tip is None:
            base_block_hash = self.genesis_hash
        else:
            base_block_hash = self.tip.sha256
        # First create the coinbase
        height = self.block_heights[base_block_hash] + 1
        coinbase = create_coinbase(height, self.coinbase_pubkey)
        coinbase.vout[0].nValue += additional_coinbase_value
        if spend is not None:
            coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1  # all but one satoshi to fees
        coinbase.rehash()
        block = create_block(base_block_hash, coinbase, self.block_time)
        if spend is not None:
            tx = CTransaction()
            tx.vin.append(CTxIn(COutPoint(spend.tx.sha256, spend.n), b"", 0xffffffff))  # no signature yet
            # This copies the java comparison tool testing behavior: the first
            # txout has a garbage scriptPubKey, "to make sure we're not
            # pre-verifying too much" (?)
            tx.vout.append(CTxOut(0, CScript([random.randint(0, 255), height & 255])))
            if script is None:
                tx.vout.append(CTxOut(1, CScript([OP_TRUE])))
            else:
                tx.vout.append(CTxOut(1, script))
            # Now sign it if necessary
            scriptSig = b""
            scriptPubKey = bytearray(spend.tx.vout[spend.n].scriptPubKey)
            if scriptPubKey[0] == OP_TRUE:  # looks like an anyone-can-spend
                scriptSig = CScript([OP_TRUE])
            else:
                # We have to actually sign it
                (sighash, err) = SignatureHash(spend.tx.vout[spend.n].scriptPubKey, tx, 0, SIGHASH_ALL)
                scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))])
            tx.vin[0].scriptSig = scriptSig
            # Now add the transaction to the block
            block = self.add_transactions_to_block(block, [tx])
        block.solve()
        self.tip = block
        self.block_heights[block.sha256] = height
        self.block_time += 1
        assert number not in self.blocks
        self.blocks[number] = block
        return block

    def get_tests(self):
        self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
        self.block_heights[self.genesis_hash] = 0
        spendable_outputs = []

        # save the current tip so it can be spent by a later block
        def save_spendable_output():
            spendable_outputs.append(self.tip)

        # get an output that we previously marked as spendable
        def get_spendable_output():
            return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)

        # returns a test case that asserts that the current tip was accepted
        def accepted():
            return TestInstance([[self.tip, True]])

        # returns a test case that asserts that the current tip was rejected
        def rejected(reject = None):
            if reject is None:
                return TestInstance([[self.tip, False]])
            else:
                return TestInstance([[self.tip, reject]])

        # move the tip back to a previous block
        def tip(number):
            self.tip = self.blocks[number]

        # add transactions to a block produced by next_block
        def update_block(block_number, new_transactions):
            block = self.blocks[block_number]
            old_hash = block.sha256
            self.add_transactions_to_block(block, new_transactions)
            block.solve()
            # Update the internal state just like in next_block
            self.tip = block
            self.block_heights[block.sha256] = self.block_heights[old_hash]
            del self.block_heights[old_hash]
            self.blocks[block_number] = block
            return block

        # creates a new block and advances the tip to that block
        block = self.next_block


        # Create a new block
        block(0)
        save_spendable_output()
        yield accepted()


        # Now we need that block to mature so we can spend the coinbase.
        test = TestInstance(sync_every_block=False)
        for i in range(99):
            block(1000 + i)
            test.blocks_and_transactions.append([self.tip, True])
            save_spendable_output()
        yield test


        # Start by building a couple of blocks on top (which output is spent is
        # in parentheses):
        #     genesis -> b1 (0) -> b2 (1)
        out0 = get_spendable_output()
        block(1, spend=out0)
        save_spendable_output()
        yield accepted()

        out1 = get_spendable_output()
        b2 = block(2, spend=out1)
        yield accepted()


        # so fork like this:
        #
        #     genesis -> b1 (0) -> b2 (1)
        #                      \-> b3 (1)
        #
        # Nothing should happen at this point. We saw b2 first so it takes priority.
        tip(1)
        b3 = block(3, spend=out1)
        txout_b3 = PreviousSpendableOutput(b3.vtx[1], 1)
        yield rejected()


        # Now we add another block to make the alternative chain longer.
        #
        #     genesis -> b1 (0) -> b2 (1)
        #                      \-> b3 (1) -> b4 (2)
        out2 = get_spendable_output()
        block(4, spend=out2)
        yield accepted()


        # ... and back to the first chain.
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                      \-> b3 (1) -> b4 (2)
        tip(2)
        block(5, spend=out2)
        save_spendable_output()
        yield rejected()

        out3 = get_spendable_output()
        block(6, spend=out3)
        yield accepted()


        # Try to create a fork that double-spends
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                                          \-> b7 (2) -> b8 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        block(7, spend=out2)
        yield rejected()

        out4 = get_spendable_output()
        block(8, spend=out4)
        yield rejected()


        # Try to create a block that has too much fee
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
        #                                                  \-> b9 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(6)
        block(9, spend=out4, additional_coinbase_value=1)
        yield rejected(RejectResult(16, b'bad-cb-amount'))


        # Create a fork that ends in a block with too much fee (the one that causes the reorg)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b10 (3) -> b11 (4)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        block(10, spend=out3)
        yield rejected()

        block(11, spend=out4, additional_coinbase_value=1)
        yield rejected(RejectResult(16, b'bad-cb-amount'))


        # Try again, but with a valid fork first
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b14 (5)
        #                                              (b12 added last)
        #                      \-> b3 (1) -> b4 (2)
        tip(5)
        b12 = block(12, spend=out3)
        save_spendable_output()
        #yield TestInstance([[b12, False]])
        b13 = block(13, spend=out4)
        # Deliver the block header for b12, and the block b13.
        # b13 should be accepted but the tip won't advance until b12 is delivered.
        yield TestInstance([[CBlockHeader(b12), None], [b13, False]])

        save_spendable_output()
        out5 = get_spendable_output()
        # b14 is invalid, but the node won't know that until it tries to connect.
        # Tip still can't advance because b12 is missing.
        block(14, spend=out5, additional_coinbase_value=1)
        yield rejected()

        yield TestInstance([[b12, True, b13.sha256]])  # New tip should be b13.

        # Add a block with MAX_BLOCK_SIGOPS and one with one more sigop
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b16 (6)
        #                      \-> b3 (1) -> b4 (2)

        # Test that a block with a lot of checksigs is okay
        lots_of_checksigs = CScript([OP_CHECKSIG] * (1000000 // 50 - 1))
        tip(13)
        block(15, spend=out5, script=lots_of_checksigs)
        yield accepted()


        # Test that a block with too many checksigs is rejected
        out6 = get_spendable_output()
        too_many_checksigs = CScript([OP_CHECKSIG] * (1000000 // 50))
        block(16, spend=out6, script=too_many_checksigs)
        yield rejected(RejectResult(16, b'bad-blk-sigops'))


        # Attempt to spend a transaction created on a different fork
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b17 (b3.vtx[1])
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        block(17, spend=txout_b3)
        yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))

        # Attempt to spend a transaction created on a different fork (on a fork this time)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5)
        #                                                                \-> b18 (b3.vtx[1]) -> b19 (6)
        #                      \-> b3 (1) -> b4 (2)
        tip(13)
        block(18, spend=txout_b3)
        yield rejected()

        block(19, spend=out6)
        yield rejected()

        # Attempt to spend a coinbase at depth too low
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b20 (7)
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        out7 = get_spendable_output()
        block(20, spend=out7)
        yield rejected(RejectResult(16, b'bad-txns-premature-spend-of-coinbase'))

        # Attempt to spend a coinbase at depth too low (on a fork this time)
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5)
        #                                                                \-> b21 (6) -> b22 (5)
        #                      \-> b3 (1) -> b4 (2)
        tip(13)
        block(21, spend=out6)
        yield rejected()

        block(22, spend=out5)
        yield rejected()

        # Create a block on either side of MAX_BLOCK_SIZE and make sure it's accepted/rejected
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6)
        #                                                                           \-> b24 (6) -> b25 (7)
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        b23 = block(23, spend=out6)
        old_hash = b23.sha256
        tx = CTransaction()
        script_length = MAX_BLOCK_SIZE - len(b23.serialize()) - 69
        script_output = CScript([b'\x00' * script_length])
        tx.vout.append(CTxOut(0, script_output))
        tx.vin.append(CTxIn(COutPoint(b23.vtx[1].sha256, 1)))
        b23 = update_block(23, [tx])
        # Make sure the math above worked out to produce a max-sized block
        assert_equal(len(b23.serialize()), MAX_BLOCK_SIZE)
        yield accepted()

        # Make the next block one byte bigger and check that it fails
        tip(15)
        b24 = block(24, spend=out6)
        script_length = MAX_BLOCK_SIZE - len(b24.serialize()) - 69
        script_output = CScript([b'\x00' * (script_length + 1)])
        tx.vout = [CTxOut(0, script_output)]
        b24 = update_block(24, [tx])
        assert_equal(len(b24.serialize()), MAX_BLOCK_SIZE + 1)
        yield rejected(RejectResult(16, b'bad-blk-length'))

        b25 = block(25, spend=out7)
        yield rejected()

        # Create blocks with a coinbase input script size out of range
        #     genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6  (3)
        #                                          \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7)
        #                                                                           \-> ... (6) -> ... (7)
        #                      \-> b3 (1) -> b4 (2)
        tip(15)
        b26 = block(26, spend=out6)
        b26.vtx[0].vin[0].scriptSig = b'\x00'
        b26.vtx[0].rehash()
        # update_block causes the merkle root to get updated, even with no new
        # transactions, and updates the required state.
        b26 = update_block(26, [])
        yield rejected(RejectResult(16, b'bad-cb-length'))

        # Extend the b26 chain to make sure zumyd isn't accepting b26
        b27 = block(27, spend=out7)
        yield rejected()

        # Now try a too-large coinbase script
        tip(15)
        b28 = block(28, spend=out6)
        b28.vtx[0].vin[0].scriptSig = b'\x00' * 101
        b28.vtx[0].rehash()
        b28 = update_block(28, [])
        yield rejected(RejectResult(16, b'bad-cb-length'))

        # Extend the b28 chain to make sure zumyd isn't accepting b28
        b29 = block(29, spend=out7)
        # TODO: Should get a reject message back with "bad-prevblk", except
        # there's a bug that prevents this from being detected.  Just note
        # failure for now, and add the reject result later.
        yield rejected()

        # b30 has a max-sized coinbase scriptSig.
        tip(23)
        b30 = block(30)
        b30.vtx[0].vin[0].scriptSig = b'\x00' * 100
        b30.vtx[0].rehash()
        b30 = update_block(30, [])
        yield accepted()


if __name__ == '__main__':
    FullBlockTest().main()
# Copyright 1996-2021 Cyberbotics Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Webots DistanceSensor device wrapper for ROS2."""

from rclpy.qos import qos_profile_sensor_data
from sensor_msgs.msg import Range
from webots_ros2_core.math.interpolation import interpolate_lookup_table
from .sensor_device import SensorDevice


class DistanceSensorDevice(SensorDevice):
    """
    ROS2 wrapper for Webots DistanceSensor node.

    Creates a suitable ROS2 interface based on a Webots
    [DistanceSensor](https://cyberbotics.com/doc/reference/distancesensor) node.

    It allows the following functionalities:
    - Publishes range measurements of type `sensor_msgs/Range`

    Args:
        node (WebotsNode): The ROS2 node.
        device_key (str): Unique identifier of the device used for configuration.
        wb_device (DistanceSensor): Webots node of type DistanceSensor.

    Kwargs:
        params (dict): Inherited from `SensorDevice`

    """

    def __init__(self, node, device_key, wb_device, params=None):
        super().__init__(node, device_key, wb_device, params)
        self._publisher = None
        self._min_range = self.__get_min_value() + self.__get_lower_std()
        self._max_range = self.__get_max_value() - self.__get_upper_std()

        # Create topics
        if not self._disable:
            self._publisher = self._node.create_publisher(
                Range, self._topic_name, qos_profile_sensor_data)

    def __get_max_value(self):
        table = self._wb_device.getLookupTable()
        return max(table[0], table[-3])

    def __get_min_value(self):
        table = self._wb_device.getLookupTable()
        return min(table[0], table[-3])

    def __get_lower_std(self):
        table = self._wb_device.getLookupTable()
        if table[0] < table[-3]:
            return table[2] * table[0]
        return table[-1] * table[-3]

    def __get_upper_std(self):
        table = self._wb_device.getLookupTable()
        if table[0] > table[-3]:
            return table[2] * table[0]
        return table[-1] * table[-3]

    def step(self):
        stamp = super().step()
        if not stamp:
            return

        # Publish distance sensor data
        if self._publisher.get_subscription_count() > 0 or self._always_publish:
            self._wb_device.enable(self._timestep)
            msg = Range()
            msg.header.stamp = stamp
            msg.header.frame_id = self._frame_id
            msg.field_of_view = self._wb_device.getAperture()
            msg.min_range = self._min_range
            msg.max_range = self._max_range
            msg.range = interpolate_lookup_table(
                self._wb_device.getValue(), self._wb_device.getLookupTable())
            msg.radiation_type = Range.INFRARED
            self._publisher.publish(msg)
        else:
            self._wb_device.disable()
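The `__get_*` helpers above rely on the flattened layout of a Webots lookup table, where each row is (distance, response, relative noise) and `getLookupTable()` returns the rows concatenated, so `table[0]`/`table[-3]` are the first and last distances. A minimal sketch with a made-up table, showing how the same indexing yields the published range bounds:

# Made-up lookup table: rows of (distance, response, relative noise),
# flattened the way DistanceSensor.getLookupTable() returns them.
table = [
    0.02, 1000.0, 0.01,  # nearest measurable point: 2 cm, 1% noise
    0.10,  800.0, 0.02,
    0.40,  120.0, 0.03,  # farthest measurable point: 40 cm, 3% noise
]

min_value = min(table[0], table[-3])  # 0.02 (the table may be in either order)
max_value = max(table[0], table[-3])  # 0.40
lower_std = table[2] * table[0]       # noise at the near end, in meters
upper_std = table[-1] * table[-3]     # noise at the far end, in meters

# The wrapper publishes the bounds shrunk by one standard deviation:
print('min_range:', min_value + lower_std)  # 0.0202
print('max_range:', max_value - upper_std)  # 0.388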
# _*_ coding: utf-8 _*_
"""
    Swagger configuration
"""
import os
from collections import namedtuple

VERSION = "0.1.0"  # project version

# is_dev_mode = os.path.exists('app/config/dev_setting.py')  # 'development' & 'product' (development or production environment)

is_dev_mode = True

EXTERNAL_URL = '182.92.242.32:8020'  # external (cloud server) address
INTERNAL_URL = '0.0.0.0:8020'  # internal (local) address
SERVER_URL = INTERNAL_URL if is_dev_mode else EXTERNAL_URL

EXTERNAL_SCHEMES = ["https", "http"]  # the external (cloud) server supports both https and http
INTERNAL_SCHEMES = ["http"]  # internal only supports http
SERVER_SCHEMES = INTERNAL_SCHEMES if is_dev_mode else EXTERNAL_SCHEMES

SWAGGER_TAGS = []  # set by create_blueprint_list in '/app/api/__init__.py'
SWAGGER = {
    "swagger_version": "2.0",
    "info": {
        "title": "Jinfeng project: API documentation",
        "version": VERSION,
        "description": "No description yet",
        "contact": {
            "responsibleOrganization": "TaleCeres",
            "responsibleDeveloper": "董冬伟",
            "email": "[email protected]",
            "url": "http://51anquan.com"
        },
        "termsOfService": "http://51anquan.com"
    },
    "host": SERVER_URL,  # "api.ivinetrue.com"
    "basePath": "/",  # base path for blueprint registration
    "tags": SWAGGER_TAGS,  # defined in '/app/api/v1/__init__.py'
    "schemes": SERVER_SCHEMES,
    "operationId": "getmyData",
    "securityDefinitions": {
        'basicAuth': {
            'type': 'basic'
        }
    }
}

# Swagger security access scheme
specs_security = [
    {
        "basicAuth": []
    }
]

# all APIs by module (version);
# this controls the display order of the Swagger API docs
ALL_RP_API_LIST = \
    ['cms.admin', 'cms.group', 'cms.auth',
     'cms.user', 'cms.cdkey', 'cms.agent', 'cms.company',
     'cms.project', 'cms.device_category', 'cms.device'] + \
    ['v1.token', 'v1.user', 'v1.cdkey', 'v1.device', 'v1.project',
     'v1.alarm', 'v1.job', 'v1.statement']

# meta information for all endpoints
EP_META = {}
EP_INFO_LIST = []
EP_INFOS = {}

# permission groups (must exist in the database; imported automatically at project startup)
Group = namedtuple('group', ['name', 'info', 'id'])
AUTH_GROUPS = {
    # System (Jinfeng)
    # 'SYS_SUPER': Group('系统超级管理员', '', ''),  # system super administrator
    'SYS_ADMIN': Group('系统管理员', '', ''),  # system administrator
    # Company
    'CO_SUPER': Group('企业超级管理员', '', ''),  # company super administrator
    'CO_ADMIN': Group('企业管理员', '', ''),  # company administrator
    'CO_PROJECT': Group('项目管理员', '', ''),  # project administrator
    'CO_OPERATE': Group('运维管理员', '', ''),  # operations administrator
    'CO_USER': Group('普通员工', '', ''),  # regular employee
    # Agent
    'AGENT': Group('代理商', '', ''),  # agent
    # Guest
    'GUEST': Group('访客', '', '')  # guest
}

# token
tmp_token = 'eyJhbGciOiJIUzUxMiIsImlhdCI6MTU4Mzk3NjE5NCwiZXhwIjoxNTg2NTY4MTk0fQ.eyJ1aWQiOiI1ZTY4NDQ4YTQ1YjY5YzdiNzc5MGIyYzYiLCJ0eXBlIjoxMDEsInNjb3BlIjoiU3lzU3VwZXJTY29wZSJ9.BM487QjEFINNKxrTgcd0YDoVvLuFJpVBjTlc3smzQ1wm1amSGYU1EaiLearM5SKtQEiugdWil03Wnj9H5Rkclw'

from app.libs.schedule_task import per_hour_statistics, per_day_statistics

JOBS = [
    {
        "id": "per_hour_statistics",
        "func": per_hour_statistics,
        "trigger": {
            "type": "cron",
            "hour": "*"
        },
        "replace_existing": True
    },
    {
        "id": "per_day_statistics",
        "func": per_day_statistics,
        "trigger": {
            "type": "cron",
            "day": "*"
        },
        "replace_existing": True
    }
]
# Copyright 2015 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.

"""OS abstraction over OS-specific utility functions."""

# pylint: disable=unnecessary-lambda

import logging
import sys

import six

if sys.platform == 'cygwin':
  from api.platforms import gce
  from api.platforms import posix
  from api.platforms import win
  is_gce = lambda: gce.is_gce()  # to reuse gce.is_gce mock, if any

if sys.platform == 'darwin':
  from api.platforms import osx
  from api.platforms import posix
  is_gce = lambda: False


if sys.platform == 'win32':
  from api.platforms import gce
  from api.platforms import win
  is_gce = lambda: gce.is_gce()  # to reuse gce.is_gce mock, if any


if sys.platform.startswith('linux'):
  try:
    from api.platforms import android
  except OSError:
    logging.warning('failed to import android', exc_info=True)
    android = None
  from api.platforms import gce
  from api.platforms import linux
  from api.platforms import posix
  is_gce = lambda: gce.is_gce()  # to reuse gce.is_gce mock, if any
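Binding `is_gce` to a lambda, rather than to `gce.is_gce` directly, defers the attribute lookup to call time, which is what lets a test mock take effect (the point of the inline comments above). A sketch of that behavior; the `os_utilities` module name is an assumption, since the file's own name is not shown, and this only applies on platforms where a `gce` branch ran:

from unittest import mock

from api import os_utilities  # hypothetical name for the module above
from api.platforms import gce

# Patching gce.is_gce is visible through the lambda because the lookup
# happens on every call, not once at import time.
with mock.patch.object(gce, 'is_gce', return_value=True):
    assert os_utilities.is_gce()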
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from .task_step_properties import TaskStepProperties


class DockerBuildStep(TaskStepProperties):
    """The Docker build step.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar base_image_dependencies: List of base image dependencies for a step.
    :vartype base_image_dependencies:
     list[~azure.mgmt.containerregistry.v2019_04_01.models.BaseImageDependency]
    :param context_path: The URL (absolute or relative) of the source context
     for the task step.
    :type context_path: str
    :param context_access_token: The token (git PAT or SAS token of storage
     account blob) associated with the context for a step.
    :type context_access_token: str
    :param type: Required. Constant filled by server.
    :type type: str
    :param image_names: The fully qualified image names including the
     repository and tag.
    :type image_names: list[str]
    :param is_push_enabled: The value of this property indicates whether the
     image built should be pushed to the registry or not. Default value: True .
    :type is_push_enabled: bool
    :param no_cache: The value of this property indicates whether the image
     cache is enabled or not. Default value: False .
    :type no_cache: bool
    :param docker_file_path: Required. The Docker file path relative to the
     source context.
    :type docker_file_path: str
    :param target: The name of the target build stage for the docker build.
    :type target: str
    :param arguments: The collection of override arguments to be used when
     executing this build step.
    :type arguments:
     list[~azure.mgmt.containerregistry.v2019_04_01.models.Argument]
    """

    _validation = {
        'base_image_dependencies': {'readonly': True},
        'type': {'required': True},
        'docker_file_path': {'required': True},
    }

    _attribute_map = {
        'base_image_dependencies': {'key': 'baseImageDependencies', 'type': '[BaseImageDependency]'},
        'context_path': {'key': 'contextPath', 'type': 'str'},
        'context_access_token': {'key': 'contextAccessToken', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'image_names': {'key': 'imageNames', 'type': '[str]'},
        'is_push_enabled': {'key': 'isPushEnabled', 'type': 'bool'},
        'no_cache': {'key': 'noCache', 'type': 'bool'},
        'docker_file_path': {'key': 'dockerFilePath', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'arguments': {'key': 'arguments', 'type': '[Argument]'},
    }

    def __init__(self, **kwargs):
        super(DockerBuildStep, self).__init__(**kwargs)
        self.image_names = kwargs.get('image_names', None)
        self.is_push_enabled = kwargs.get('is_push_enabled', True)
        self.no_cache = kwargs.get('no_cache', False)
        self.docker_file_path = kwargs.get('docker_file_path', None)
        self.target = kwargs.get('target', None)
        self.arguments = kwargs.get('arguments', None)
        self.type = 'Docker'
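Since the generated model takes everything through `**kwargs`, constructing a step is just keyword arguments; anything not supplied falls back to the defaults visible in `__init__`. An illustrative sketch (all values made up):

# Hypothetical values; docker_file_path is the only required build property.
step = DockerBuildStep(
    docker_file_path='Dockerfile',
    context_path='https://github.com/example/repo.git',
    image_names=['example.azurecr.io/sample:v1'],
    is_push_enabled=True,
    no_cache=False,
)
print(step.type)             # 'Docker' -- fixed by the constructor
print(step.is_push_enabled)  # True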
import os
from typing import Optional

import numpy as np
import pytorch_lightning as pl

import torch
import torchvision.transforms as T
from torch.utils.data import DataLoader, TensorDataset
from torchvision import datasets
from torchvision.datasets.utils import download_and_extract_archive

from . import utils


class TransformedTensorDataset(TensorDataset):
    def __init__(self, *tensors: torch.Tensor, transform=None) -> None:
        super().__init__(*tensors)
        self.transform = transform

    def __getitem__(self, index):
        if self.transform is not None:
            image, label = super().__getitem__(index)
            return self.transform(image), label
        else:
            return super().__getitem__(index)


@utils.register_dataset(name='cifar10')
class CIFAR10DataModule(pl.LightningDataModule):
    def __init__(
        self, data_dir: str = './', train_batch_size=128,
        test_batch_size=256, num_workers=4, pin_memory=True,
    ):
        super().__init__()
        self.data_dir = data_dir
        self.mean = [0.4914, 0.4822, 0.4465]
        self.std = [0.2471, 0.2435, 0.2616]
        self.num_classes = 10
        self.train_transform = T.Compose(
            [
                T.ToTensor(),
                T.RandomCrop(32, padding=4),
                T.RandomHorizontalFlip(),
                T.Normalize(self.mean, self.std),
            ]
        )
        self.test_transform = T.Compose([
            T.ToTensor(), T.Normalize(self.mean, self.std)])
        self.train_batch_size = train_batch_size
        self.test_batch_size = test_batch_size
        self.dims = (3, 32, 32)
        self.num_workers = num_workers
        self.pin_memory = pin_memory

    def prepare_data(self):
        # download
        datasets.CIFAR10(self.data_dir, train=True, download=True)
        datasets.CIFAR10(self.data_dir, train=False, download=True)

    def setup(self, stage: Optional[str] = None):
        self.train_ds = datasets.CIFAR10(
            self.data_dir, train=True, transform=self.train_transform
        )
        self.test_ds = datasets.CIFAR10(
            self.data_dir, train=False, transform=self.test_transform
        )

    def train_dataloader(self):
        return DataLoader(
            self.train_ds,
            batch_size=self.train_batch_size,
            num_workers=self.num_workers,
            pin_memory=self.pin_memory,
            shuffle=True,
        )

    def val_dataloader(self):
        return DataLoader(
            self.test_ds,
            batch_size=self.test_batch_size,
            num_workers=self.num_workers,
            pin_memory=self.pin_memory,
        )

    def test_dataloader(self):
        return self.val_dataloader()


@utils.register_cc_dataset(name='cifar10')
class CIFAR10CDataModule(pl.LightningDataModule):
    def __init__(
        self, data_dir: str = './', batch_size=256, num_workers=4,
        pin_memory=True, normalized=True
    ):
        super().__init__()
        self.data_dir = data_dir
        self.transform = None
        self.batch_size = batch_size
        self.dims = (3, 32, 32)
        self.num_workers = num_workers
        self.pin_memory = pin_memory
        self.url = "https://zenodo.org/record/2535967/files/CIFAR-10-C.tar"  # URL FROM ZENODO
        self.dirname = "CIFAR-10-C"  # archive folder name; referenced in prepare_data

        self.normalized = normalized
        self.corruptions = [
            "gaussian_noise",
            "shot_noise",
            "impulse_noise",
            "defocus_blur",
            "glass_blur",
            "motion_blur",
            "zoom_blur",
            "snow",
            "frost",
            "fog",
            "brightness",
            "contrast",
            "elastic_transform",
            "pixelate",
            "jpeg_compression",
        ]

    @property
    def dataset_path(self):
        return self.data_dir

    def prepare_data(self):
        if not os.path.exists(self.dataset_path):
            download_and_extract_archive(
                self.url,
                download_root=self.data_dir,
                filename=self.dirname + ".tar",
                remove_finished=True,
            )
            self.data_dir = os.path.join(self.data_dir, self.dirname)
        else:
            print("Files already downloaded and verified")

    def setup(self, stage: Optional[str] = None):
        self.cc = {}
        labels = torch.tensor(
            np.load(os.path.join(self.dataset_path, "labels.npy")), dtype=torch.int64)

        for corruption in self.corruptions:
            raw_imgs = np.load(os.path.join(self.dataset_path, corruption + ".npy"))
            images = raw_imgs.transpose([0, 3, 1, 2])
            if self.normalized:
                images = images.astype(np.float32) / 255.0

            self.cc[corruption] = TransformedTensorDataset(
                torch.tensor(images), labels, transform=self.transform
            )

    def test_dataloader(self):
        return {
            corruption: DataLoader(
                self.cc[corruption],
                batch_size=self.batch_size,
                num_workers=self.num_workers,
                pin_memory=self.pin_memory,
            )
            for corruption in self.corruptions
        }


@utils.register_dataset(name='cifar100')
class CIFAR100DataModule(CIFAR10DataModule):
    def __init__(
        self, data_dir: str = './', train_batch_size=128,
        test_batch_size=256, num_workers=4, pin_memory=True,
    ):
        super().__init__(
            data_dir, train_batch_size, test_batch_size,
            num_workers, pin_memory)
        self.mean = [0.5071, 0.4865, 0.4409]
        self.std = [0.2673, 0.2564, 0.2762]
        self.num_classes = 100
        self.train_transform = T.Compose(
            [
                T.ToTensor(),
                T.RandomCrop(32, padding=4),
                T.RandomHorizontalFlip(),
                T.Normalize(self.mean, self.std),
            ]
        )
        self.test_transform = T.Compose([
            T.ToTensor(), T.Normalize(self.mean, self.std)])

    def prepare_data(self):
        # download
        datasets.CIFAR100(self.data_dir, train=True, download=True)
        datasets.CIFAR100(self.data_dir, train=False, download=True)

    def setup(self, stage: Optional[str] = None):
        self.train_ds = datasets.CIFAR100(
            self.data_dir, train=True, transform=self.train_transform
        )
        self.test_ds = datasets.CIFAR100(
            self.data_dir, train=False, transform=self.test_transform
        )


@utils.register_cc_dataset(name='cifar100')
class CIFAR100CDataModule(CIFAR10CDataModule):
    def __init__(
        self, data_dir: str = './', batch_size=256, num_workers=4,
        pin_memory=True, normalized=True
    ):
        super().__init__(
            data_dir, batch_size, num_workers, pin_memory, normalized)
        self.dirname = "CIFAR-100-C"
        self.transform = None
        self.url = "https://zenodo.org/record/3555552/files/CIFAR-100-C.tar"  # URL FROM ZENODO
from django_asservio_core.models import (
    CodeDictionary, NameDictionary,
    Dictionary, DescriptionDictionary
)


class Code(CodeDictionary):
    """Code dictionary."""
    pass


class Name(NameDictionary):
    """Name dictionary."""
    pass


class Description(DescriptionDictionary):
    """Description dictionary."""
    pass


class Info(Dictionary):
    """Regular dictionary."""
    pass
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
#

ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}


DOCUMENTATION = '''
---
module: ovirt_external_provider_facts
short_description: Retrieve facts about one or more oVirt/RHV external providers
author: "Ondra Machacek (@machacekondra)"
version_added: "2.3"
description:
    - "Retrieve facts about one or more oVirt/RHV external providers."
notes:
    - "This module creates a new top-level C(ovirt_external_providers) fact, which
       contains a list of external_providers."
options:
    type:
        description:
            - "Type of the external provider."
        choices: ['os_image', 'os_network', 'os_volume', 'foreman']
        required: true
    name:
        description:
            - "Name of the external provider, can be used as glob expression."
extends_documentation_fragment: ovirt_facts
'''

EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:

# Gather facts about all image external providers named C(glance):
- ovirt_external_provider_facts:
    type: os_image
    name: glance
- debug:
    var: ovirt_external_providers
'''

RETURN = '''
external_host_providers:
    description: "List of dictionaries of all the external_host_provider attributes. External provider attributes can be found on your oVirt/RHV instance
                  at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/external_host_provider."
    returned: "On success and if parameter 'type: foreman' is used."
    type: list
openstack_image_providers:
    description: "List of dictionaries of all the openstack_image_provider attributes. External provider attributes can be found on your oVirt/RHV instance
                  at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/openstack_image_provider."
    returned: "On success and if parameter 'type: os_image' is used."
    type: list
openstack_volume_providers:
    description: "List of dictionaries of all the openstack_volume_provider attributes. External provider attributes can be found on your oVirt/RHV instance
                  at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/openstack_volume_provider."
    returned: "On success and if parameter 'type: os_volume' is used."
    type: list
openstack_network_providers:
    description: "List of dictionaries of all the openstack_network_provider attributes. External provider attributes can be found on your oVirt/RHV instance
                  at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/openstack_network_provider."
    returned: "On success and if parameter 'type: os_network' is used."
    type: list
'''

import fnmatch
import traceback

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
    check_sdk,
    create_connection,
    get_dict_of_struct,
    ovirt_facts_full_argument_spec,
)


def _external_provider_service(provider_type, system_service):
    if provider_type == 'os_image':
        return system_service.openstack_image_providers_service()
    elif provider_type == 'os_network':
        return system_service.openstack_network_providers_service()
    elif provider_type == 'os_volume':
        return system_service.openstack_volume_providers_service()
    elif provider_type == 'foreman':
        return system_service.external_host_providers_service()


def main():
    argument_spec = ovirt_facts_full_argument_spec(
        name=dict(default=None, required=False),
        type=dict(
            default=None,
            required=True,
            choices=[
                'os_image', 'os_network', 'os_volume', 'foreman',
            ],
            aliases=['provider'],
        ),
    )
    module = AnsibleModule(argument_spec)

    if module._name == 'ovirt_external_providers_facts':
        module.deprecate(
            "The 'ovirt_external_providers_facts' module is being renamed 'ovirt_external_provider_facts'",
            version=2.8,
        )

    check_sdk(module)

    try:
        auth = module.params.pop('auth')
        connection = create_connection(auth)
        external_providers_service = _external_provider_service(
            provider_type=module.params.pop('type'),
            system_service=connection.system_service(),
        )
        if module.params['name']:
            external_providers = [
                e for e in external_providers_service.list()
                if fnmatch.fnmatch(e.name, module.params['name'])
            ]
        else:
            external_providers = external_providers_service.list()

        module.exit_json(
            changed=False,
            ansible_facts=dict(
                ovirt_external_providers=[
                    get_dict_of_struct(
                        struct=c,
                        connection=connection,
                        fetch_nested=module.params.get('fetch_nested'),
                        attributes=module.params.get('nested_attributes'),
                    ) for c in external_providers
                ],
            ),
        )
    except Exception as e:
        module.fail_json(msg=str(e), exception=traceback.format_exc())
    finally:
        connection.close(logout=auth.get('token') is None)


if __name__ == '__main__':
    main()
# -*- coding: utf-8 -*-

from __future__ import absolute_import

import copy
import contextlib
import threading
import uuid

ctx = threading.local()


class TrackerBase(object):
    def __init__(self, client=None, server=None):
        self.client = client
        self.server = server

    def handle(self, header):
        ctx.header = header
        ctx.counter = 0

    def gen_header(self, header):
        header.request_id = self.get_request_id()

        if not hasattr(ctx, "counter"):
            ctx.counter = 0

        ctx.counter += 1

        if hasattr(ctx, "header"):
            header.seq = "{prev_seq}.{cur_counter}".format(
                prev_seq=ctx.header.seq, cur_counter=ctx.counter)
            header.meta = ctx.header.meta
        else:
            header.meta = {}
            header.seq = str(ctx.counter)

        if hasattr(ctx, "meta"):
            header.meta.update(ctx.meta)

    def record(self, header, exception):
        pass

    @classmethod
    @contextlib.contextmanager
    def counter(cls, init=0):
        """Context for manually setting the counter of the seq number.

        :init: init value
        """
        if not hasattr(ctx, "counter"):
            ctx.counter = 0

        old = ctx.counter
        ctx.counter = init

        try:
            yield
        finally:
            ctx.counter = old

    @classmethod
    @contextlib.contextmanager
    def annotate(cls, **kwargs):
        ctx.annotation = kwargs
        try:
            yield ctx.annotation
        finally:
            del ctx.annotation

    @classmethod
    @contextlib.contextmanager
    def add_meta(cls, **kwds):
        if hasattr(ctx, 'meta'):
            old_dict = copy.copy(ctx.meta)
            ctx.meta.update(kwds)
            try:
                yield ctx.meta
            finally:
                ctx.meta = old_dict
        else:
            ctx.meta = kwds
            try:
                yield ctx.meta
            finally:
                del ctx.meta

    @property
    def meta(self):
        meta = ctx.header.meta if hasattr(ctx, "header") else {}
        if hasattr(ctx, "meta"):
            meta.update(ctx.meta)
        return meta

    @property
    def annotation(self):
        return ctx.annotation if hasattr(ctx, "annotation") else {}

    def get_request_id(self):
        if hasattr(ctx, "header"):
            return ctx.header.request_id
        return str(uuid.uuid4())

    def init_handshake_info(self, handshake_obj):
        pass

    def handle_handshake_info(self, handshake_obj):
        pass


class ConsoleTracker(TrackerBase):
    def record(self, header, exception):
        print(header)
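How the thread-local pieces compose: `counter` scopes the sequence numbering and `add_meta` stacks extra metadata for the duration of the block. A small sketch; `Header` is a stand-in for whatever header object the RPC layer hands to `gen_header`:

class Header(object):  # stand-in with the attributes gen_header sets
    request_id = None
    seq = None
    meta = None

tracker = TrackerBase()

with TrackerBase.add_meta(user_id="42"):
    with TrackerBase.counter(init=0):
        header = Header()
        tracker.gen_header(header)
        print(header.seq)   # "1": first call inside this counter scope
        print(header.meta)  # {"user_id": "42"}: merged from ctx.meta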
# -*- coding: utf-8 -*-

import os
import time
import wave
from Queue import Queue
from threading import Thread

from bottle import route, run, request, static_file, view

from recognition import recognize
from textlogger import add_log, get_logs

upload_dir = 'upload_dir/'


@route('/', method='GET')
@view('logs')
def logs():
    return dict(logs=get_logs())


@route('/wave', method='POST')
def do_upload():
    wav_file = request.files.get('file')
    name, ext = os.path.splitext(wav_file.filename)
    # Listnr uploads audio data as “sample.r16”
    if ext not in ('.r16',):
        return 'File extension not allowed.'

    if not os.path.exists(upload_dir):
        os.mkdir(upload_dir)

    file_name = str(int(time.time())) + '.wav'
    file_path = os.path.join(upload_dir, file_name)
    write_wave(file_path, wav_file.file.read())
    q.put({
        "file_path": file_path,
        "file_name": file_name
    })
    return 'OK'


@route('/files/<filename:re:.+\.wav>')
def wav_files(filename):
    return static_file(filename, root=upload_dir)


@route('/img/<filename:re:.+\.png>')
def img_files(filename):
    return static_file(filename, root='img/')


@route('/css/<filename:re:.+\.css>')
def css_files(filename):
    return static_file(filename, root='css/')


@route('/js/<filename:re:.+\.js>')
def js_files(filename):
    return static_file(filename, root='js/')


def write_wave(file_path, wave_bin):
    wave_file = wave.open(file_path, 'wb')
    # Mono, 16bit, 16kHz
    wave_file.setparams((1, 2, 16000, 0, 'NONE', 'not compressed'))
    wave_file.writeframes(wave_bin)
    wave_file.close()


def worker():
    while True:
        item = q.get()
        text = recognize(item["file_path"], language="ja-JP")
        add_log(item["file_name"], text)
        q.task_done()


q = Queue()
t = Thread(target=worker)
t.daemon = True
t.start()

run(host='0.0.0.0', port=8080, debug=True, reloader=True)
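For reference, a hypothetical client for the `/wave` endpoint above (the server itself does not use `requests`): it POSTs raw 16 kHz, 16-bit mono PCM as a multipart field named `file` with an `.r16` filename, mirroring what Listnr sends:

import requests

with open('sample.r16', 'rb') as f:
    resp = requests.post(
        'http://localhost:8080/wave',
        files={'file': ('sample.r16', f)},  # field name and extension must match
    )
print(resp.text)  # 'OK' on success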
import purerpc
import greeter_pb2


class GreeterServicer(purerpc.Servicer):
    async def SayHello(self, input_message):
        raise NotImplementedError()

    async def SayHelloGoodbye(self, input_message):
        raise NotImplementedError()

    async def SayHelloToMany(self, input_messages):
        raise NotImplementedError()

    async def SayHelloToManyAtOnce(self, input_messages):
        raise NotImplementedError()

    @property
    def service(self) -> purerpc.Service:
        service_obj = purerpc.Service(
            "Greeter"
        )
        service_obj.add_method(
            "SayHello",
            self.SayHello,
            purerpc.RPCSignature(
                purerpc.Cardinality.UNARY_UNARY,
                greeter_pb2.HelloRequest,
                greeter_pb2.HelloReply,
            )
        )
        service_obj.add_method(
            "SayHelloGoodbye",
            self.SayHelloGoodbye,
            purerpc.RPCSignature(
                purerpc.Cardinality.UNARY_STREAM,
                greeter_pb2.HelloRequest,
                greeter_pb2.HelloReply,
            )
        )
        service_obj.add_method(
            "SayHelloToMany",
            self.SayHelloToMany,
            purerpc.RPCSignature(
                purerpc.Cardinality.STREAM_STREAM,
                greeter_pb2.HelloRequest,
                greeter_pb2.HelloReply,
            )
        )
        service_obj.add_method(
            "SayHelloToManyAtOnce",
            self.SayHelloToManyAtOnce,
            purerpc.RPCSignature(
                purerpc.Cardinality.STREAM_UNARY,
                greeter_pb2.HelloRequest,
                greeter_pb2.HelloReply,
            )
        )
        return service_obj


class GreeterStub:
    def __init__(self, channel):
        self._client = purerpc.Client(
            "Greeter",
            channel
        )
        self.SayHello = self._client.get_method_stub(
            "SayHello",
            purerpc.RPCSignature(
                purerpc.Cardinality.UNARY_UNARY,
                greeter_pb2.HelloRequest,
                greeter_pb2.HelloReply,
            )
        )
        self.SayHelloGoodbye = self._client.get_method_stub(
            "SayHelloGoodbye",
            purerpc.RPCSignature(
                purerpc.Cardinality.UNARY_STREAM,
                greeter_pb2.HelloRequest,
                greeter_pb2.HelloReply,
            )
        )
        self.SayHelloToMany = self._client.get_method_stub(
            "SayHelloToMany",
            purerpc.RPCSignature(
                purerpc.Cardinality.STREAM_STREAM,
                greeter_pb2.HelloRequest,
                greeter_pb2.HelloReply,
            )
        )
        self.SayHelloToManyAtOnce = self._client.get_method_stub(
            "SayHelloToManyAtOnce",
            purerpc.RPCSignature(
                purerpc.Cardinality.STREAM_UNARY,
                greeter_pb2.HelloRequest,
                greeter_pb2.HelloReply,
            )
        )
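Filling in the generated servicer means overriding the async methods; for streaming cardinalities the handler is an async generator. A minimal sketch, assuming the usual greeter proto where `HelloRequest` has a `name` field and `HelloReply` a `message` field (neither is shown in this file):

class Greeter(GreeterServicer):
    async def SayHello(self, input_message):
        # UNARY_UNARY: one request in, one reply out
        return greeter_pb2.HelloReply(message="Hello, " + input_message.name)

    async def SayHelloGoodbye(self, input_message):
        # UNARY_STREAM: one request in, several replies out
        yield greeter_pb2.HelloReply(message="Hello, " + input_message.name)
        yield greeter_pb2.HelloReply(message="Goodbye, " + input_message.name)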
NEWLINE# coding: utf-8NEWLINENEWLINE# # Plotting Functions in Azure Machine Learning Package for Forecasting (AMLPF)NEWLINE# In this notebook, we go over the plotting functions in AMLPF for visualizing and analyzing forecasting results. The following functions are covered.NEWLINE# 1. Plot aggregated error distribution. NEWLINE# 2. Plot error density function.NEWLINE# 3. Plot error histogram.NEWLINE# 4. Plot the total forecast vs. actual of the target column.NEWLINE# 5. Plot forecast vs. actual by grain.NEWLINENEWLINE# ## Import packagesNEWLINENEWLINE# In[1]:NEWLINENEWLINEimport warningsNEWLINE# Suppress warningsNEWLINEwarnings.filterwarnings("ignore")NEWLINE NEWLINEimport matplotlibNEWLINEmatplotlib.use('agg')NEWLINEget_ipython().magic('matplotlib inline')NEWLINEimport matplotlib.pyplot as pltNEWLINENEWLINEimport numpy as npNEWLINEimport mathNEWLINEimport pandas as pdNEWLINENEWLINEfrom ftk import TimeSeriesDataFrame, ForecastDataFrameNEWLINEfrom ftk.models import SeasonalNaiveNEWLINEfrom ftk.data import load_dow_jones_datasetNEWLINENEWLINEprint('Imports done')NEWLINENEWLINENEWLINE# ## Data PreparationNEWLINE# We first fit a seasonal naive model on the Dow Jones dataset. The forecast results are used to demonstrate the plotting functions in the following sections.NEWLINENEWLINE# In[2]:NEWLINENEWLINE# Load the dataNEWLINEtrain_tsdf, test_tsdf = load_dow_jones_dataset(test_size=8)NEWLINENEWLINE# Set model parametersNEWLINEfrequency = 'QS'NEWLINEseasonality = 4NEWLINENEWLINE# Fit the a seasonal naive model on the training dataNEWLINEsnaive_model = SeasonalNaive(freq=frequency, seasonality=seasonality)NEWLINEsnaive_model_fitted = snaive_model.fit(train_tsdf)NEWLINENEWLINE# Forecast on test dataNEWLINEforecast = snaive_model_fitted.predict(test_tsdf)NEWLINEforecast.head(10)NEWLINENEWLINENEWLINE# ## Plot Aggregated Error DistributionNEWLINENEWLINE# ### Basic usage of `show_error` NEWLINE# By default, the `show_error` method of `ForecastDataFrame` plots the Mean Absolute Percentage Error(MAPE) distribution by grain. Optionally, you can use the `sorted` argument to sort grains by MAPE in `ascending` or `descending` order. NEWLINE# The figure and axes objects are returned to the user for further customization.NEWLINENEWLINE# In[3]:NEWLINENEWLINEfig1, ax1 = forecast.show_error(sorted='ascending', figure_size=(7,7))NEWLINENEWLINENEWLINE# ### Plot error by arbitrary column or transformation of a columnNEWLINE# Sometimes it's useful to visualize the error distribution by a different column or a transformation of a column. For example, we can visualize error distribution by different revenue ranges, 0 - 10k, 10k - 20k, ...above 130k. 
NEWLINENEWLINE# In[5]:NEWLINENEWLINE# Define the mapping/transformationNEWLINEdef mapping(x):NEWLINE if np.isnan(x):NEWLINE return 0NEWLINE return math.floor(x/10000) * 10000NEWLINENEWLINE# Use the by_col argument to specify an alternative column to grain_colnames.NEWLINE# Use the col_mapping argument to specify the transformation to apply on by_colNEWLINEfig2, ax2 = forecast.show_error(by_col='revenue', col_mapping=mapping, figure_size=(5,5))NEWLINENEWLINE# Format the x ticks, x a label, and title properly using the returned axes objectNEWLINElabels = [item.get_text() for item in ax2.get_xticklabels()]NEWLINEfor i in range(len(labels) - 1):NEWLINE labels[i] = labels[i] + ' - ' + labels[i+1]NEWLINEax2.set_xticklabels(labels)NEWLINEax2.set(xlabel='revenue range', title='MAPE by revenue range')NEWLINEplt.show()NEWLINENEWLINENEWLINE# ### Plot error of grains with top/bottom performance.NEWLINE# Sometimes, you may only be interested in the grains/groups with the best or worst performance, especially when you have a large number of time series. The `performance_percent` argument of `show_error` allows you to specify a performance interval and plot the error of a subset of grains/groups. For example, setting `performance_percent=(0.9, 1)` plots the grains with the top 10% error (i.e. bottom 10% performance). NEWLINENEWLINE# In[7]:NEWLINENEWLINEfig3, ax3 = forecast.show_error(performance_percent=(0.9, 1), figure_size=(5,5))NEWLINENEWLINENEWLINE# ### Custom error functionsNEWLINE# In addtion to the default metric, MAPE, you can also plot other built-in metrics like 'MAE', 'RMSE', 'SMAPE', and 'MASE'. You can even define a custom function to calculate any other performance metrics. For example, here we define a custom function to calculate the Median Absolute Percentage Error (Median APE). NEWLINENEWLINE# In[8]:NEWLINENEWLINEdef calc_median_ape(y_true, y_pred):NEWLINE y_true = np.array(y_true).astype(float)NEWLINE y_pred = np.array(y_pred).astype(float)NEWLINE y_true_rm_na = y_true[~(np.isnan(y_true) | np.isnan(y_pred))]NEWLINE y_pred_rm_na = y_pred[~(np.isnan(y_true) | np.isnan(y_pred))]NEWLINE y_true = y_true_rm_naNEWLINE y_pred = y_pred_rm_naNEWLINE if len(y_true) == 0:NEWLINE # if there is no entries left after removing na data, return np.nanNEWLINE return(np.nan)NEWLINE y_true_rm_zero = y_true[y_true != 0]NEWLINE y_pred_rm_zero = y_pred[y_true != 0]NEWLINE if len(y_true_rm_zero) == 0:NEWLINE # if all values are zero, np.nan will be returned.NEWLINE return(np.nan)NEWLINE ape = np.abs((y_true_rm_zero - y_pred_rm_zero) / y_true_rm_zero) * 100NEWLINE median_ape = np.median(ape)NEWLINE return median_apeNEWLINENEWLINENEWLINEfig4, ax4 = forecast.show_error(err_name='MedianAPE', err_fun=calc_median_ape, sorted='ascending', figure_size=(7,7))NEWLINENEWLINENEWLINE# ## Plot Error Density FunctionNEWLINENEWLINE# The `plot_error_density` method of `ForecastDataFrame` plots the error probability density function estimated by the [Univariate Kernel Density Estimator of statsmodels](http://www.statsmodels.org/dev/generated/statsmodels.nonparametric.kde.KDEUnivariate.html).NEWLINENEWLINE# In[18]:NEWLINENEWLINEfig5, ax5 = forecast.plot_error_density(figure_size=(5,5))NEWLINENEWLINENEWLINE# ### Weighted Error Density FunctionNEWLINE# Optionally, you can weight the error density on a specified column. For example you may want errors of companies with higher revenues to weight more than errors of companies with lower revenues. 
# In[19]:

fig6, ax6 = forecast.plot_error_density(weighted_by='revenue', figure_size=(5,5))


# ## Plot Error Histogram

# In[17]:

fig7, ax7 = forecast.plot_error_histogram(figure_size=(5,5), grid=False)


# ## Plot Total Forecast vs. Total Actual
# The `plot_forecast_sum` method of `ForecastDataFrame` plots the sum of `pred_point` against the sum of `actual` across all grains for each time point.
# Unlike the earlier functions, this method returns a list of (fig, ax) tuples, because it can generate multiple plots when used with the `by_horizon` option discussed later.

# In[16]:

fig_ax_list_1 = forecast.plot_forecast_sum(figure_size=(7, 7))


# ## Plot Forecast By Grain

# Once you have an idea of the overall performance, you may want to explore individual grains, especially those that performed poorly. The `plot_forecast_by_grain` method generates a plot for each grain specified by the `grains` argument. If the `grains` argument is not set, only the first grain is plotted, which prevents generating a large number of plots when you have many grains.
# Here, we take a look at the two grains with the worst performance discovered by the `show_error` method.

# In[20]:

fig_ax_list_2 = forecast.plot_forecast_by_grain(grains=['CVX', 'XOM'], prediction_interval=True, figure_size=(7, 7))


# ## Plot Forecast By Horizon
# When you have multiple forecasts for the same date generated from different origin time points, i.e. multi-horizon forecasts, plotting all the horizons in the same plot makes the plot hard to interpret. Both `plot_forecast_sum` and `plot_forecast_by_grain` provide an option, `by_horizon`, to generate a separate plot for each horizon. By default, all horizons are plotted. The `horizon` option allows you to specify a subset of horizons to plot.
# To demonstrate this functionality, we handcraft a dataset with two-horizon forecasts for the same date.
# In[15]:

data_origin = {'date': pd.to_datetime(['2017-01-01', '2017-01-02',
                                       '2017-01-02', '2017-01-03',
                                       '2017-01-03', '2017-01-04',
                                       '2017-01-04', '2017-01-05',
                                       '2017-01-01', '2017-01-02',
                                       '2017-01-02', '2017-01-03',
                                       '2017-01-03', '2017-01-04',
                                       '2017-01-04', '2017-01-05']),
               'grain': ['a'] * 8 + ['b'] * 8,
               'origin_time': pd.to_datetime(
                   ['2016-12-31'] * 2 + ['2017-01-01'] * 2 +
                   ['2017-01-02'] * 2 + ['2017-01-03'] * 2 +
                   ['2016-12-31'] * 2 + ['2017-01-01'] * 2 +
                   ['2017-01-02'] * 2 + ['2017-01-03'] * 2),
               'pred_point': [0.779, 2.039, 3.747, 4.106, -0.378,
                              2.826, 1.504, 4.851, 5.775, 6.399,
                              6.014, 7.998, 4.308, 5.801, 7.920,
                              8.015],
               'actual': [1., 2., 2., 3., 3., 4., 4., 5., 6., 7., 7., 8., 8.,
                          9., 9., 10.]
               }
fdf = ForecastDataFrame(data_origin, grain_colnames='grain',
                        time_colname='date', actual='actual',
                        pred_point='pred_point',
                        origin_time_colname='origin_time')
fdf.head()


# ### Plot total forecast by horizon

# In[21]:

fig_ax_list_3 = fdf.plot_forecast_sum(by_horizon=True, figure_size=(7, 7))


# ### Plot forecast of each grain by horizon

# In[22]:

fig_ax_list_4 = fdf.plot_forecast_by_grain(grains=['a', 'b'],
                                           by_horizon=True,
                                           figure_size=(7, 7),
                                           horizon=pd.Timedelta('1 days'))
"""PyMC3-specific conversion code."""NEWLINEimport loggingNEWLINEfrom typing import Dict, List, Any, Optional, TYPE_CHECKINGNEWLINEfrom types import ModuleTypeNEWLINENEWLINEimport numpy as npNEWLINEimport xarray as xrNEWLINEfrom .. import utilsNEWLINEfrom .inference_data import InferenceData, concatNEWLINEfrom .base import requires, dict_to_dataset, generate_dims_coords, make_attrsNEWLINENEWLINEif TYPE_CHECKING:NEWLINE import pymc3 as pmNEWLINE from pymc3 import MultiTrace, Model # pylint: disable=invalid-nameNEWLINE import theanoNEWLINE from typing import Set # pylint: disable=ungrouped-importsNEWLINEelse:NEWLINE MultiTrace = Any # pylint: disable=invalid-nameNEWLINE Model = Any # pylint: disable=invalid-nameNEWLINENEWLINE___all__ = [""]NEWLINENEWLINE_log = logging.getLogger(__name__)NEWLINENEWLINECoords = Dict[str, List[Any]]NEWLINEDims = Dict[str, List[str]]NEWLINE# random variable object ...NEWLINEVar = Any # pylint: disable=invalid-nameNEWLINENEWLINENEWLINEdef _monkey_patch_pymc3(pm: ModuleType) -> None: # pylint: disable=invalid-nameNEWLINE assert pm.__name__ == "pymc3"NEWLINENEWLINE def fixed_eq(self, other):NEWLINE """Use object identity for MultiObservedRV equality."""NEWLINE return self is otherNEWLINENEWLINE if tuple([int(x) for x in pm.__version__.split(".")]) < (3, 9): # type: ignoreNEWLINE pm.model.MultiObservedRV.__eq__ = fixed_eq # type: ignoreNEWLINENEWLINENEWLINEclass PyMC3Converter: # pylint: disable=too-many-instance-attributesNEWLINE """Encapsulate PyMC3 specific logic."""NEWLINENEWLINE model = None # type: Optional[pm.Model]NEWLINE nchains = None # type: intNEWLINE ndraws = None # type: intNEWLINE posterior_predictive = None # Type: Optional[Dict[str, np.ndarray]]NEWLINE predictions = None # Type: Optional[Dict[str, np.ndarray]]NEWLINE prior = None # Type: Optional[Dict[str, np.ndarray]]NEWLINENEWLINE def __init__(NEWLINE self,NEWLINE *,NEWLINE trace=None,NEWLINE prior=None,NEWLINE posterior_predictive=None,NEWLINE predictions=None,NEWLINE coords: Optional[Coords] = None,NEWLINE dims: Optional[Dims] = None,NEWLINE model=NoneNEWLINE ):NEWLINE import pymc3NEWLINE import theanoNEWLINENEWLINE _monkey_patch_pymc3(pymc3)NEWLINENEWLINE self.pymc3 = pymc3NEWLINE self.theano = theanoNEWLINENEWLINE self.trace = traceNEWLINENEWLINE # this permits us to get the model from command-line argument or from with model:NEWLINE try:NEWLINE self.model = self.pymc3.modelcontext(model or self.model)NEWLINE except TypeError:NEWLINE self.model = NoneNEWLINENEWLINE # This next line is brittle and may not work forever, but is a secretNEWLINE # way to access the model from the trace.NEWLINE if trace is not None:NEWLINE if self.model is None:NEWLINE self.model = self.trace._straces[0].model # pylint: disable=protected-accessNEWLINE self.nchains = trace.nchains if hasattr(trace, "nchains") else 1NEWLINE self.ndraws = len(trace)NEWLINE else:NEWLINE self.nchains = self.ndraws = 0NEWLINENEWLINE self.prior = priorNEWLINE self.posterior_predictive = posterior_predictiveNEWLINE self.predictions = predictionsNEWLINENEWLINE def arbitrary_element(dct: Dict[Any, np.ndarray]) -> np.ndarray:NEWLINE return next(iter(dct.values()))NEWLINENEWLINE if trace is None:NEWLINE # if you have a posterior_predictive built with keep_dims,NEWLINE # you'll lose here, but there's nothing I can do about that.NEWLINE self.nchains = 1NEWLINE get_from = NoneNEWLINE if predictions is not None:NEWLINE get_from = predictionsNEWLINE elif posterior_predictive is not None:NEWLINE get_from = posterior_predictiveNEWLINE elif prior is 
                get_from = prior
            if get_from is None:
                # pylint: disable=line-too-long
                raise ValueError(
                    """When constructing InferenceData must have at least
                    one of trace, prior, posterior_predictive or predictions."""
                )

            aelem = arbitrary_element(get_from)
            self.ndraws = aelem.shape[0]

        self.coords = coords
        self.dims = dims
        self.observations = self.find_observations()

    def find_observations(self) -> Optional[Dict[str, Var]]:
        """If there are observations available, return them as a dictionary."""
        has_observations = False
        if self.trace is not None:
            assert self.model is not None, "Cannot identify observations without PyMC3 model"
            if any((hasattr(obs, "observations") for obs in self.model.observed_RVs)):
                has_observations = True
        if has_observations:
            assert self.model is not None
            return {obs.name: obs.observations for obs in self.model.observed_RVs}
        return None

    def log_likelihood_vals_point(self, point, var, log_like_fun):
        """Compute log likelihood for each observed point."""
        log_like_val = utils.one_de(log_like_fun(point))
        if var.missing_values:
            log_like_val = np.where(var.observations.mask, np.nan, log_like_val)
        return log_like_val

    @requires("trace")
    @requires("model")
    def _extract_log_likelihood(self):
        """Compute log likelihood of each observation."""
        # If we have predictions, then we have a thinned trace which does not
        # support extracting a log likelihood.
        cached = [(var, var.logp_elemwise) for var in self.model.observed_RVs]
        log_likelihood_dict = {}
        for var, log_like_fun in cached:
            chain_likelihoods = []
            for chain in self.trace.chains:
                log_like_chain = [
                    self.log_likelihood_vals_point(point, var, log_like_fun)
                    for point in self.trace.points([chain])
                ]
                chain_likelihoods.append(np.stack(log_like_chain))
            log_likelihood_dict[var.name] = np.stack(chain_likelihoods)
        return log_likelihood_dict

    @requires("trace")
    def posterior_to_xarray(self):
        """Convert the posterior to an xarray dataset."""
        var_names = self.pymc3.util.get_default_varnames(  # pylint: disable=no-member
            self.trace.varnames, include_transformed=False
        )
        data = {}
        for var_name in var_names:
            data[var_name] = np.array(self.trace.get_values(var_name, combine=False, squeeze=False))
        return dict_to_dataset(data, library=self.pymc3, coords=self.coords, dims=self.dims)

    @requires("trace")
    def sample_stats_to_xarray(self):
        """Extract sample_stats from PyMC3 trace."""
        rename_key = {"model_logp": "lp"}
        data = {}
        for stat in self.trace.stat_names:
            name = rename_key.get(stat, stat)
            data[name] = np.array(self.trace.get_sampler_stats(stat, combine=False))

        return dict_to_dataset(data, library=self.pymc3, dims=None, coords=self.coords)

    @requires("trace")
    @requires("model")
    def log_likelihood_to_xarray(self):
        """Extract log likelihood and log_p data from PyMC3 trace."""
        if self.predictions:
            return None
        data = self._extract_log_likelihood()
        return dict_to_dataset(data, library=self.pymc3, dims=self.dims, coords=self.coords)

    def translate_posterior_predictive_dict_to_xarray(self, dct) -> xr.Dataset:
        """Take Dict of variables to numpy ndarrays (samples) and translate into dataset."""
        data = {}
        for k, ary in dct.items():
            shape = ary.shape
            if shape[0] == self.nchains and shape[1] == self.ndraws:
                data[k] = ary
            elif shape[0] == self.nchains * self.ndraws:
                data[k] = ary.reshape((self.nchains, self.ndraws, *shape[1:]))
            else:
                data[k] = utils.expand_dims(ary)
                # pylint: disable=line-too-long
                _log.warning(
                    "posterior predictive variable %s's shape not compatible with number of chains and draws. "
                    "This can mean that some draws or even whole chains are not represented.",
                    k,
                )
        return dict_to_dataset(data, library=self.pymc3, coords=self.coords, dims=self.dims)

    @requires(["posterior_predictive"])
    def posterior_predictive_to_xarray(self):
        """Convert posterior_predictive samples to xarray."""
        return self.translate_posterior_predictive_dict_to_xarray(self.posterior_predictive)

    @requires(["predictions"])
    def predictions_to_xarray(self):
        """Convert predictions (out of sample predictions) to xarray."""
        return self.translate_posterior_predictive_dict_to_xarray(self.predictions)

    def priors_to_xarray(self):
        """Convert prior samples (and if possible prior predictive too) to xarray."""
        if self.prior is None:
            return {"prior": None, "prior_predictive": None}
        if self.trace is not None:
            prior_vars = self.pymc3.util.get_default_varnames(  # pylint: disable=no-member
                self.trace.varnames, include_transformed=False
            )
            prior_predictive_vars = [key for key in self.prior.keys() if key not in prior_vars]
        else:
            prior_vars = list(self.prior.keys())
            prior_predictive_vars = None

        priors_dict = {}
        for group, var_names in zip(
            ("prior", "prior_predictive"), (prior_vars, prior_predictive_vars)
        ):
            priors_dict[group] = (
                None
                if var_names is None
                else dict_to_dataset(
                    {k: utils.expand_dims(self.prior[k]) for k in var_names},
                    library=self.pymc3,
                    coords=self.coords,
                    dims=self.dims,
                )
            )
        return priors_dict

    @requires("observations")
    @requires("model")
    def observed_data_to_xarray(self):
        """Convert observed data to xarray."""
        if self.dims is None:
            dims = {}
        else:
            dims = self.dims
        observed_data = {}
        for name, vals in self.observations.items():
            if hasattr(vals, "get_value"):
                vals = vals.get_value()
            vals = utils.one_de(vals)
            val_dims = dims.get(name)
            val_dims, coords = generate_dims_coords(
                vals.shape, name, dims=val_dims, coords=self.coords
            )
            # filter coords based on the dims
            coords = {key: xr.IndexVariable((key,), data=coords[key]) for key in val_dims}
            observed_data[name] = xr.DataArray(vals, dims=val_dims, coords=coords)
        return xr.Dataset(data_vars=observed_data, attrs=make_attrs(library=self.pymc3))

    @requires(["trace", "predictions"])
    @requires("model")
    def constant_data_to_xarray(self):
        """Convert constant data to xarray."""
        # For constant data, we are concerned only with deterministics and data.
        # The constant data vars must be either pm.Data (TensorSharedVariable) or pm.Deterministic
        constant_data_vars = {}  # type: Dict[str, Var]
        for var in self.model.deterministics:
            ancestors = self.theano.tensor.gof.graph.ancestors(var.owner.inputs)
            # no dependency on a random variable
            if not any((isinstance(a, self.pymc3.model.PyMC3Variable) for a in ancestors)):
                constant_data_vars[var.name] = var

        def is_data(name, var) -> bool:
            assert self.model is not None
            return (
                var not in self.model.deterministics
                and var not in self.model.observed_RVs
                and var not in self.model.free_RVs
                and (self.observations is None or name not in self.observations)
            )

        # I don't know how to find pm.Data, except that they are named variables that aren't
        # observed or free RVs, nor are they deterministics, and then we eliminate observations.
        for name, var in self.model.named_vars.items():
            if is_data(name, var):
                constant_data_vars[name] = var

        if not constant_data_vars:
            return None
        if self.dims is None:
            dims = {}
        else:
            dims = self.dims
        constant_data = {}
        for name, vals in constant_data_vars.items():
            if hasattr(vals, "get_value"):
                vals = vals.get_value()
            # this might be a Deterministic, and must be evaluated
            elif hasattr(self.model[name], "eval"):
                vals = self.model[name].eval()
            vals = np.atleast_1d(vals)
            val_dims = dims.get(name)
            val_dims, coords = generate_dims_coords(
                vals.shape, name, dims=val_dims, coords=self.coords
            )
            # filter coords based on the dims
            coords = {key: xr.IndexVariable((key,), data=coords[key]) for key in val_dims}
            try:
                constant_data[name] = xr.DataArray(vals, dims=val_dims, coords=coords)
            except ValueError as e:  # pylint: disable=invalid-name
                raise ValueError("Error translating constant_data variable %s: %s" % (name, e))
        return xr.Dataset(data_vars=constant_data, attrs=make_attrs(library=self.pymc3))

    def to_inference_data(self):
        """Convert all available data to an InferenceData object.

        Note that if groups can not be created (e.g., there is no `trace`, so
        the `posterior` and `sample_stats` can not be extracted), then the InferenceData
        will not have those groups.
        """
        id_dict = {
            "posterior": self.posterior_to_xarray(),
            "sample_stats": self.sample_stats_to_xarray(),
            "log_likelihood": self.log_likelihood_to_xarray(),
            "posterior_predictive": self.posterior_predictive_to_xarray(),
            "predictions": self.predictions_to_xarray(),
            **self.priors_to_xarray(),
            "observed_data": self.observed_data_to_xarray(),
        }
        if self.predictions:
            id_dict["predictions_constant_data"] = self.constant_data_to_xarray()
        else:
            id_dict["constant_data"] = self.constant_data_to_xarray()
        return InferenceData(**id_dict)


def from_pymc3(
    trace=None, *, prior=None, posterior_predictive=None, coords=None, dims=None, model=None
):
    """Convert pymc3 data into an InferenceData object."""
    return PyMC3Converter(
        trace=trace,
        prior=prior,
        posterior_predictive=posterior_predictive,
        coords=coords,
        dims=dims,
        model=model,
    ).to_inference_data()


### Later I could have this return ``None`` if the ``idata_orig`` argument is supplied. But
### perhaps we should have an inplace argument?
def from_pymc3_predictions(
    predictions,
    posterior_trace: Optional[MultiTrace] = None,
    model: Optional[Model] = None,
    coords=None,
    dims=None,
    idata_orig: Optional[InferenceData] = None,
    inplace: bool = False,
) -> InferenceData:
    """Translate out-of-sample predictions into ``InferenceData``.

    Parameters
    ----------
    predictions: Dict[str, np.ndarray]
        The predictions are the return value of ``pymc3.sample_posterior_predictive``,
        a dictionary of strings (variable names) to numpy ndarrays (draws).
    posterior_trace: pm.MultiTrace
        This should be a trace that has been thinned appropriately for
        ``pymc3.sample_posterior_predictive``. Specifically, any variable whose shape is
        a deterministic function of the shape of any predictor (explanatory, independent, etc.)
        variables must be *removed* from this trace.
    model: pymc3.Model
        This argument is *not* optional, unlike in conventional uses of ``from_pymc3``.
        The reason is that the posterior_trace argument is likely to supply an incorrect
        value of model.
    coords: Dict[str, array-like[Any]]
        Coordinates for the variables. Map from coordinate names to coordinate values.
    dims: Dict[str, array-like[str]]
        Map from variable name to ordered set of coordinate names.
    idata_orig: InferenceData, optional
        If supplied, then modify this inference data in place, adding ``predictions`` and
        (if available) ``predictions_constant_data`` groups. If this is not supplied, make a
        fresh InferenceData.
    inplace: boolean, optional
        If idata_orig is supplied and inplace is True, merge the predictions into idata_orig,
        rather than returning a fresh InferenceData object.

    Returns
    -------
    InferenceData:
        May be modified ``idata_orig``.
    """
    if inplace and not idata_orig:
        raise ValueError(
            "Do not pass True for inplace unless passing "
            "an existing InferenceData as idata_orig"
        )
    new_idata = PyMC3Converter(
        trace=posterior_trace, predictions=predictions, model=model, coords=coords, dims=dims
    ).to_inference_data()
    if idata_orig is None:
        return new_idata
    elif inplace:
        concat([idata_orig, new_idata], dim=None, inplace=True)
        return idata_orig
    else:
        # if we are not returning in place, then merge the old groups into the new inference
        # data and return that.
        concat([new_idata, idata_orig], dim=None, copy=True, inplace=True)
        return new_idata
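# A minimal usage sketch of feeding PyMC3 results through from_pymc3. The toy
# model, variable names, and sample sizes below are illustrative assumptions;
# only the from_pymc3 signature comes from the code above. It also assumes a
# pymc3 release that accepts the ``sigma`` keyword (older versions use ``sd``).
#
#     import numpy as np
#     import pymc3 as pm
#
#     observed = np.random.normal(0.5, 1.0, size=100)
#     with pm.Model():
#         mu = pm.Normal("mu", mu=0.0, sigma=1.0)
#         pm.Normal("obs", mu=mu, sigma=1.0, observed=observed)
#         trace = pm.sample(500, chains=2)
#         prior = pm.sample_prior_predictive()
#         post_pred = pm.sample_posterior_predictive(trace)
#
#     idata = from_pymc3(trace, prior=prior, posterior_predictive=post_pred)
#
# The posterior, sample_stats, log_likelihood, prior, and posterior_predictive
# groups are then all populated on the returned InferenceData.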
from typing import Dict, Type, Any

import torch
import torch.nn as nn
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter

from sklearn.model_selection import ParameterGrid


class Learner(object):

    def __init__(self,
                 model_cls: Type,
                 train_dl: DataLoader,
                 valid_dl: DataLoader,
                 test_dl: DataLoader,
                 config: Dict[str, Any],
                 cuda: int):
        self.train_dl = train_dl
        self.valid_dl = valid_dl
        self.test_dl = test_dl
        self.config = config
        self.model_cls = model_cls
        self.model = model_cls(**config["model"])
        self.epochs = config["epochs"]
        self.batch_size = config["batch_size"]
        # Resolve the loss class by name on torch.nn (e.g. "MSELoss").
        if hasattr(nn, config['loss']):
            self.loss_func = getattr(nn, config['loss'])(reduction=config["reduction"])
        else:
            raise ValueError("Unknown loss: %s" % config['loss'])

        self.optimizer = config['optimizer']
        self.learning_rate = config['learning_rate']
        self.weight_decay = config['weight_decay']
        self.early_stop_rounds = config["early_stop"]
        self.tensor_board = config['tensor_board']
        if self.tensor_board:
            self.writer = SummaryWriter(config['tensor_board_folder'])
        self.save_folder = config['save_folder']
        self.evaluate_path = config["evaluate_path"]
        self.predict_path = config["predict_path"]
        self.use_cuda = cuda
        self.param_space = config['grid_search']

    def train(self):
        raise NotImplementedError()

    def test(self):
        raise NotImplementedError()

    def get_preds(self):
        raise NotImplementedError()

    def save(self, file):
        torch.save(self.model.state_dict(), file)

    def load(self, file):
        self.model.load_state_dict(torch.load(file))

    def cuda(self, x):
        # -1 means CPU; any other value is used as the CUDA device index.
        return x.cuda(self.use_cuda) if self.use_cuda != -1 else x

    def grid_search(self):
        model_grid = self.param_space['model']
        # Train one model per parameter combination in the grid.
        for idx, model_params in enumerate(ParameterGrid(model_grid)):
            if self.tensor_board:
                self.writer = SummaryWriter(f"{self.config['tensor_board_folder']}/model_params_{idx}")
            self.model = self.model_cls(**model_params)
            self.train()
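# A sketch of the config dict Learner.__init__ expects. Every key below is
# read in __init__ above; the concrete values, the model kwargs, and the
# MyModel/train_dl names are hypothetical placeholders, not from the source.
#
#     config = {
#         "model": {"input_dim": 64, "hidden_dim": 128},
#         "epochs": 20,
#         "batch_size": 32,
#         "loss": "MSELoss",            # resolved via getattr(nn, ...)
#         "reduction": "mean",
#         "optimizer": "Adam",
#         "learning_rate": 1e-3,
#         "weight_decay": 0.0,
#         "early_stop": 5,
#         "tensor_board": False,
#         "tensor_board_folder": "./runs",
#         "save_folder": "./checkpoints",
#         "evaluate_path": "./eval.csv",
#         "predict_path": "./preds.csv",
#         "grid_search": {"model": {"hidden_dim": [64, 128]}},
#     }
#     learner = Learner(MyModel, train_dl, valid_dl, test_dl, config, cuda=-1)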
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: raw.py
# Author: Yuxin Wu <[email protected]>

import numpy as np
import copy
import six
from six.moves import range
from .base import DataFlow, RNGDataFlow

__all__ = ['FakeData', 'DataFromQueue', 'DataFromList']


class FakeData(RNGDataFlow):
    """ Generate fake data of given shapes"""

    def __init__(self, shapes, size=1000, random=True, dtype='float32', domain=(0, 1)):
        """
        Args:
            shapes (list): a list of lists/tuples. Shapes of each component.
            size (int): size of this DataFlow.
            random (bool): whether to randomly generate data every iteration.
                Note that merely generating the data could sometimes be time-consuming!
            dtype (str or list): data type as string, or a list of data types.
            domain (tuple or list): (min, max) tuple, or a list of such tuples
        """
        super(FakeData, self).__init__()
        self.shapes = shapes
        self._size = int(size)
        self.random = random
        self.dtype = [dtype] * len(shapes) if isinstance(dtype, six.string_types) else dtype
        self.domain = [domain] * len(shapes) if isinstance(domain, tuple) else domain
        assert len(self.dtype) == len(self.shapes)
        assert len(self.domain) == len(self.shapes)

    def size(self):
        return self._size

    def get_data(self):
        if self.random:
            for _ in range(self._size):
                val = []
                for k in range(len(self.shapes)):
                    v = self.rng.rand(*self.shapes[k]) * (self.domain[k][1] - self.domain[k][0]) + self.domain[k][0]
                    val.append(v.astype(self.dtype[k]))
                yield val
        else:
            # generate one datapoint up front and replay it on every iteration
            val = []
            for k in range(len(self.shapes)):
                v = self.rng.rand(*self.shapes[k]) * (self.domain[k][1] - self.domain[k][0]) + self.domain[k][0]
                val.append(v.astype(self.dtype[k]))
            for _ in range(self._size):
                yield copy.copy(val)


class DataFromQueue(DataFlow):
    """ Produce data from a queue """
    def __init__(self, queue):
        """
        Args:
            queue (queue): a queue with ``get()`` method.
        """
        self.queue = queue

    def get_data(self):
        while True:
            yield self.queue.get()


class DataFromList(RNGDataFlow):
    """ Produce data from a list"""

    def __init__(self, lst, shuffle=True):
        """
        Args:
            lst (list): input list.
            shuffle (bool): shuffle data.
        """
        super(DataFromList, self).__init__()
        self.lst = lst
        self.shuffle = shuffle

    def size(self):
        return len(self.lst)

    def get_data(self):
        if not self.shuffle:
            for k in self.lst:
                yield k
        else:
            idxs = np.arange(len(self.lst))
            self.rng.shuffle(idxs)
            for k in idxs:
                yield self.lst[k]


class DataFromGenerator(DataFlow):
    """
    Wrap a generator to a DataFlow
    """
    def __init__(self, gen, size=None):
        self._gen = gen
        self._size = size

    def size(self):
        if self._size:
            return self._size
        return super(DataFromGenerator, self).size()

    def get_data(self):
        # yield from
        for dp in self._gen:
            yield dp
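# A quick sketch of driving FakeData, assuming the tensorpack-style DataFlow
# protocol in which reset_state() seeds the RNG before iteration; the shapes
# and domains below are arbitrary examples, not from the source.
#
#     df = FakeData([[32, 32, 3], [1]], size=8,
#                   dtype=['float32', 'int32'], domain=[(0, 255), (0, 10)])
#     df.reset_state()
#     for img, label in df.get_data():
#         pass  # each datapoint is a list of arrays matching `shapes`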
import spacy
from spacy import symbols


class SentenceAnalyzer:
    '''
    Class for analyzing sentence structure and extracting key information
    '''

    def __init__(self):
        self.nlp = spacy.load('en_core_web_sm')

    #TODO method for checking the form of a sentence
    #TODO return all strings in lower case

    def subject_object_analysis(self, clause):
        '''
        Processes a clause and locates the subject, object, and verb
        '''
        # check to make sure we have only one clause
        if self.is_compound(clause):
            raise Exception('''Sentence has multiple clauses;
            compound sentences must be split before processing''')

        #TODO use chunking or make into arrays
        subj = None
        obj = None
        verb = None

        out = self.nlp(clause)

        for word in out:
            if word.dep == symbols.nsubj:
                subj = word
            elif word.dep == symbols.dobj or word.dep == symbols.pobj:
                obj = word

        # walk up the dependency tree from the subject and the object until
        # the two paths meet; the shared ancestor is taken as the main verb
        parent_1 = subj
        parent_2 = obj

        while not verb:
            try:
                parent_1 = parent_1.head
                parent_2 = parent_2.head
            except AttributeError:
                print("Incorrect sentence structure...")
                break

            if parent_1 == parent_2:
                verb = parent_1

        return (str(subj), str(obj), str(verb))

    def parse_named_entities(self, sentence):
        '''
        Searches the sentence for proper nouns
        '''
        out = self.nlp(sentence)
        entities = [(ent.text, ent.label_) for ent in out.ents]

        return entities

    def is_imperative(self, sentence):
        '''
        Check if a given sentence is imperative
        '''
        out = self.nlp(sentence)

        # check if the sentence begins with a present tense verb
        if out[0].tag_ != 'VB':
            return False

        # check if there is a subject
        for word in out:
            if word.dep == symbols.nsubj:
                return False

        return True

    def is_compound(self, sentence):
        '''
        Determine if sentence is compound or not
        '''
        out = self.nlp(sentence)

        # generators have no inherent method to find length
        num_sentences = sum(1 for sent in out.sents)

        return num_sentences > 1

    def split_compound_sentence(self, sentence):
        '''
        Separate a compound sentence into its clauses
        '''
        #TODO: make this work
        out = self.nlp(sentence)

        return [str(sent) for sent in out.sents]
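# Example calls. The outputs assume the en_core_web_sm parse and are indicative
# only; exact results can vary across spaCy model versions.
#
#     analyzer = SentenceAnalyzer()
#     analyzer.subject_object_analysis('the dog chased the cat')
#     # -> ('dog', 'cat', 'chased')
#     analyzer.is_imperative('Close the door')    # -> True
#     analyzer.parse_named_entities('Google was founded in California')
#     # -> [('Google', 'ORG'), ('California', 'GPE')]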
# coding=utf-8
from __future__ import unicode_literals, absolute_import

from django.core import urlresolvers
from django.db import models
from django.utils.translation import ugettext_lazy as _

from filer.models import File


class OrderedFile(File):
    order = models.PositiveIntegerField(_('order'))

    class Meta(File.Meta):
        ordering = ['order']

    def save(self, *args, **kwargs):
        """
        Overrides the `filer.File` `save` method to set the `order` field
        value on newly uploaded files.
        """
        if not self.pk:
            max_order = OrderedFile.objects.\
                filter(folder_id=self.folder_id).\
                aggregate(models.Max('order'))
            try:
                next_order = max_order['order__max'] + 1
            except TypeError:
                # no files in the folder yet: order__max is None
                next_order = 1
            self.order = next_order
        super(OrderedFile, self).save(*args, **kwargs)

    def get_admin_change_url(self):
        """
        Gets the change view url for the original model.

        We don't want to change the default change view, so this method is
        overridden to pass the original url to the template.

        :return: <str> -- change view url
        """
        model_name = File._meta.model_name
        filer_app_name = File._meta.app_label
        return urlresolvers.reverse(
            'admin:{0}_{1}_change'.format(filer_app_name, model_name),
            args=(self.pk,))

    def get_admin_delete_url(self):
        """
        Gets the delete view url for the original model.

        We don't want to change the default delete view, so this method is
        overridden to pass the original url to the template.

        :return: <str> -- delete view url
        """
        model_name = File._meta.model_name
        filer_app_name = File._meta.app_label
        return urlresolvers.reverse(
            'admin:{0}_{1}_delete'.format(filer_app_name, model_name),
            args=(self.pk,))
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tfx.orchestration.portable.cache_utils."""
import os
import tensorflow as tf

from tfx.dsl.io import fileio
from tfx.orchestration import metadata
from tfx.orchestration.portable import cache_utils
from tfx.orchestration.portable import execution_publish_utils
from tfx.orchestration.portable.mlmd import context_lib
from tfx.proto.orchestration import pipeline_pb2
from tfx.types import standard_artifacts
from tfx.utils import test_case_utils
from google.protobuf import text_format
from ml_metadata.proto import metadata_store_pb2


class CacheUtilsTest(test_case_utils.TfxTest):

  def setUp(self):
    super().setUp()
    self._connection_config = metadata_store_pb2.ConnectionConfig()
    self._connection_config.sqlite.SetInParent()
    self._module_file_path = os.path.join(self.tmp_dir, 'module_file')
    self._input_artifacts = {'input_examples': [standard_artifacts.Examples()]}
    self._output_artifacts = {'output_models': [standard_artifacts.Model()]}
    self._parameters = {'module_file': self._module_file_path}
    self._module_file_content = 'module content'
    self._pipeline_node = text_format.Parse(
        """
        executor {
          python_class_executor_spec {class_path: 'a.b.c'}
        }
        """, pipeline_pb2.PipelineNode())
    self._executor_class_path = 'a.b.c'
    self._pipeline_info = pipeline_pb2.PipelineInfo(id='pipeline_id')

  def _get_cache_context(self,
                         metadata_handler,
                         custom_pipeline_node=None,
                         custom_pipeline_info=None,
                         custom_input_artifacts=None,
                         custom_output_artifacts=None,
                         custom_parameters=None,
                         custom_module_content=None):
    with fileio.open(self._module_file_path, 'w+') as f:
      f.write(custom_module_content or self._module_file_content)
    return cache_utils.get_cache_context(
        metadata_handler,
        custom_pipeline_node or self._pipeline_node,
        custom_pipeline_info or self._pipeline_info,
        input_artifacts=(custom_input_artifacts or self._input_artifacts),
        output_artifacts=(custom_output_artifacts or self._output_artifacts),
        parameters=(custom_parameters or self._parameters))

  def testGetCacheContext(self):
    with metadata.Metadata(connection_config=self._connection_config) as m:
      cache_context = self._get_cache_context(m)
      [context_from_mlmd] = m.store.get_contexts()
      self.assertProtoPartiallyEquals(
          cache_context,
          context_from_mlmd,
          ignored_fields=[
              'create_time_since_epoch', 'last_update_time_since_epoch'
          ])

  def testGetCacheContextTwiceSameArgs(self):
    with metadata.Metadata(connection_config=self._connection_config) as m:
      self._get_cache_context(m)
      self._get_cache_context(m)
      # Same args should not create a new cache context.
      self.assertLen(m.store.get_contexts(), 1)

  def testGetCacheContextTwiceDifferentOutputUri(self):
    with metadata.Metadata(connection_config=self._connection_config) as m:
      self._get_cache_context(m)
      output_model_different_uri = standard_artifacts.Model()
      output_model_different_uri.uri = 'diff_uri'
      self._get_cache_context(
          m,
          custom_output_artifacts={
              'output_models': [output_model_different_uri]
          })
      # A different output uri alone should not create a new cache context.
      self.assertLen(m.store.get_contexts(), 1)

  def testGetCacheContextTwiceDifferentOutputs(self):
    with metadata.Metadata(connection_config=self._connection_config) as m:
      self._get_cache_context(m)
      self._get_cache_context(
          m, custom_output_artifacts={'k': [standard_artifacts.Model()]})
      # A different output skeleton will result in a new cache context.
      self.assertLen(m.store.get_contexts(), 2)

  def testGetCacheContextTwiceDifferentInputs(self):
    with metadata.Metadata(connection_config=self._connection_config) as m:
      self._get_cache_context(m)
      self._get_cache_context(
          m, custom_input_artifacts={'k': [standard_artifacts.Examples(),]})
      # Different input artifacts will result in a new cache context.
      self.assertLen(m.store.get_contexts(), 2)

  def testGetCacheContextTwiceDifferentParameters(self):
    with metadata.Metadata(connection_config=self._connection_config) as m:
      self._get_cache_context(m)
      self._get_cache_context(m, custom_parameters={'new_prop': 'value'})
      # Different parameters will result in a new cache context.
      self.assertLen(m.store.get_contexts(), 2)

  def testGetCacheContextTwiceDifferentModuleContent(self):
    with metadata.Metadata(connection_config=self._connection_config) as m:
      self._get_cache_context(m)
      self._get_cache_context(m, custom_module_content='new module content')
      # Different module file content will result in a new cache context.
      self.assertLen(m.store.get_contexts(), 2)

  def testGetCacheContextTwiceDifferentPipelineInfo(self):
    with metadata.Metadata(connection_config=self._connection_config) as m:
      self._get_cache_context(m)
      self._get_cache_context(
          m, custom_pipeline_info=pipeline_pb2.PipelineInfo(id='new_id'))
      # Different pipeline info will result in a new cache context.
      self.assertLen(m.store.get_contexts(), 2)

  def testGetCacheContextTwiceDifferentExecutorSpec(self):
    with metadata.Metadata(connection_config=self._connection_config) as m:
      self._get_cache_context(m)
      self._get_cache_context(
          m,
          custom_pipeline_node=text_format.Parse(
              """
              executor {
                python_class_executor_spec {class_path: 'n.e.w'}
              }
              """, pipeline_pb2.PipelineNode()))
      # A different executor spec will result in a new cache context.
      self.assertLen(m.store.get_contexts(), 2)

  def testGetCachedOutputArtifacts(self):
    # Output artifacts that will be used by the first execution with the same
    # cache key.
    output_model_one = standard_artifacts.Model()
    output_model_one.uri = 'model_one'
    output_model_two = standard_artifacts.Model()
    output_model_two.uri = 'model_two'
    output_example_one = standard_artifacts.Examples()
    output_example_one.uri = 'example_one'
    # Output artifacts that will be used by the second execution with the same
    # cache key.
    output_model_three = standard_artifacts.Model()
    output_model_three.uri = 'model_three'
    output_model_four = standard_artifacts.Model()
    output_model_four.uri = 'model_four'
    output_example_two = standard_artifacts.Examples()
    output_example_two.uri = 'example_two'
    output_models_key = 'output_models'
    output_examples_key = 'output_examples'
    with metadata.Metadata(connection_config=self._connection_config) as m:
      cache_context = context_lib.register_context_if_not_exists(
          m, context_lib.CONTEXT_TYPE_EXECUTION_CACHE, 'cache_key')
      execution_one = execution_publish_utils.register_execution(
          m, metadata_store_pb2.ExecutionType(name='my_type'), [cache_context])
      execution_publish_utils.publish_succeeded_execution(
          m,
          execution_one.id, [cache_context],
          output_artifacts={
              output_models_key: [output_model_one, output_model_two],
              output_examples_key: [output_example_one]
          })
      execution_two = execution_publish_utils.register_execution(
          m, metadata_store_pb2.ExecutionType(name='my_type'), [cache_context])
      output_artifacts = execution_publish_utils.publish_succeeded_execution(
          m,
          execution_two.id, [cache_context],
          output_artifacts={
              output_models_key: [output_model_three, output_model_four],
              output_examples_key: [output_example_two]
          })
      # The cached outputs retrieved should be the artifacts produced by the
      # most recent execution under the given cache context.
      cached_output = cache_utils.get_cached_outputs(m, cache_context)
      self.assertLen(cached_output, 2)
      self.assertLen(cached_output[output_models_key], 2)
      self.assertLen(cached_output[output_examples_key], 1)
      self.assertProtoPartiallyEquals(
          cached_output[output_models_key][0].mlmd_artifact,
          output_artifacts[output_models_key][0].mlmd_artifact,
          ignored_fields=[
              'create_time_since_epoch', 'last_update_time_since_epoch'
          ])
      self.assertProtoPartiallyEquals(
          cached_output[output_models_key][1].mlmd_artifact,
          output_artifacts[output_models_key][1].mlmd_artifact,
          ignored_fields=[
              'create_time_since_epoch', 'last_update_time_since_epoch'
          ])
      self.assertProtoPartiallyEquals(
          cached_output[output_examples_key][0].mlmd_artifact,
          output_artifacts[output_examples_key][0].mlmd_artifact,
          ignored_fields=[
              'create_time_since_epoch', 'last_update_time_since_epoch'
          ])


if __name__ == '__main__':
  tf.test.main()
# Copied from botocore, licensed under the Apache licence 2.0.
# Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.

import re

_xform_cache = {
    ('CreateCachediSCSIVolume', '_'): 'create_cached_iscsi_volume',
    ('CreateCachediSCSIVolume', '-'): 'create-cached-iscsi-volume',
    ('DescribeCachediSCSIVolumes', '_'): 'describe_cached_iscsi_volumes',
    ('DescribeCachediSCSIVolumes', '-'): 'describe-cached-iscsi-volumes',
    ('DescribeStorediSCSIVolumes', '_'): 'describe_stored_iscsi_volumes',
    ('DescribeStorediSCSIVolumes', '-'): 'describe-stored-iscsi-volumes',
    ('CreateStorediSCSIVolume', '_'): 'create_stored_iscsi_volume',
    ('CreateStorediSCSIVolume', '-'): 'create-stored-iscsi-volume',
    ('ListHITsForQualificationType', '_'): 'list_hits_for_qualification_type',
    ('ListHITsForQualificationType', '-'): 'list-hits-for-qualification-type',
}

_partial_renames = {
    'ipv-6': 'ipv6',
    'ipv_6': 'ipv6',
    's_3_resources': 's3_resources',
    's-3-resources': 's3-resources',
}

_special_case_transform = re.compile('[A-Z]{3,}s$')
_first_cap_regex = re.compile('(.)([A-Z][a-z]+)')
_number_cap_regex = re.compile('([a-z])([0-9]+)')
_end_cap_regex = re.compile('([a-z0-9])([A-Z])')


def xform_name(name, sep='_', _xform_cache=_xform_cache, partial_renames=_partial_renames):
    """Convert camel case to a "pythonic" name.

    If the name contains the ``sep`` character, then it is
    returned unchanged.

    """
    if sep in name:
        # If the sep is in the name, assume that it's already
        # transformed and return the string unchanged.
        return name
    key = (name, sep)
    if key not in _xform_cache:
        if _special_case_transform.search(name) is not None:
            is_special = _special_case_transform.search(name)
            matched = is_special.group()
            # Replace something like ARNs, ACLs with _arns, _acls.
            name = name[: -len(matched)] + sep + matched.lower()
        s1 = _first_cap_regex.sub(r'\1' + sep + r'\2', name)
        s2 = _number_cap_regex.sub(r'\1' + sep + r'\2', s1)
        transformed = _end_cap_regex.sub(r'\1' + sep + r'\2', s2).lower()

        # Do partial renames
        for old, new in partial_renames.items():
            if old in transformed:
                transformed = transformed.replace(old, new)
        _xform_cache[key] = transformed
    return _xform_cache[key]
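# A few calls traced through the rules above. The first three inputs are
# illustrative (not taken from botocore); the fourth is read straight out of
# _xform_cache, and the last shows the early return when `sep` is present.
#
#     xform_name('CreateTable')                   # 'create_table'
#     xform_name('ListARNs')                      # 'list_arns' (3+ capitals special case)
#     xform_name('DescribeIpv6Pools')             # 'describe_ipv6_pools' (partial rename of 'ipv_6')
#     xform_name('ListHITsForQualificationType')  # 'list_hits_for_qualification_type' (cache hit)
#     xform_name('already_transformed')           # returned unchanged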
# Copyright 2017 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os

import numpy as np
import tensorflow as tf

import test_utils
import camera_utils
import rasterize_triangles


class RenderTest(tf.test.TestCase):

  def setUp(self):
    self.test_data_directory = 'test_data/'

    tf.reset_default_graph()
    self.cube_vertex_positions = tf.constant(
        [[-1, -1, 1], [-1, -1, -1], [-1, 1, -1], [-1, 1, 1], [1, -1, 1],
         [1, -1, -1], [1, 1, -1], [1, 1, 1]],
        dtype=tf.float32)
    self.cube_triangles = tf.constant(
        [[0, 1, 2], [2, 3, 0], [3, 2, 6], [6, 7, 3], [7, 6, 5], [5, 4, 7],
         [4, 5, 1], [1, 0, 4], [5, 6, 2], [2, 1, 5], [7, 4, 0], [0, 3, 7]],
        dtype=tf.int32)

  def testRendersSimpleTriangle(self):
    """Directly renders a rasterized triangle's barycentric coordinates.

    Tests only the kernel (rasterize_triangles_module).
    """
    ndc_init = np.array(
        [[-0.5, -0.5, 0.8], [0.0, 0.5, 0.3], [0.5, -0.5, 0.3]],
        dtype=np.float32)

    image_height = 480
    image_width = 640

    normalized_device_coordinates = tf.constant(ndc_init)
    triangles = tf.constant([[0, 1, 2]], dtype=tf.int32)

    rendered_coordinates, _, _ = (
        rasterize_triangles.rasterize_triangles_module.rasterize_triangles(
            normalized_device_coordinates, triangles, image_width,
            image_height))
    rendered_coordinates = tf.concat(
        [rendered_coordinates,
         tf.ones([image_height, image_width, 1])], axis=2)
    with self.test_session() as sess:
      image = rendered_coordinates.eval()
      target_image_name = 'Simple_Triangle.png'
      baseline_image_path = os.path.join(self.test_data_directory,
                                         target_image_name)
      test_utils.expect_image_file_and_render_are_near(
          self, sess, baseline_image_path, image)

  def testRendersSimpleCube(self):
    """Renders a simple cube to test the kernel and python wrapper."""

    tf_float = lambda x: tf.constant(x, dtype=tf.float32)
    # camera position:
    eye = tf_float([[2.0, 3.0, 6.0]])
    center = tf_float([[0.0, 0.0, 0.0]])
    world_up = tf_float([[0.0, 1.0, 0.0]])
    image_width = 640
    image_height = 480

    look_at = camera_utils.look_at(eye, center, world_up)
    perspective = camera_utils.perspective(image_width / image_height,
                                           tf_float([40.0]), tf_float([0.01]),
                                           tf_float([10.0]))

    vertex_rgb = (self.cube_vertex_positions * 0.5 + 0.5)
    vertex_rgba = tf.concat([vertex_rgb, tf.ones([8, 1])], axis=1)

    projection = tf.matmul(perspective, look_at)
    background_value = [0.0, 0.0, 0.0, 0.0]

    rendered = rasterize_triangles.rasterize_triangles(
        tf.expand_dims(self.cube_vertex_positions, axis=0),
        tf.expand_dims(vertex_rgba, axis=0), self.cube_triangles, projection,
        image_width, image_height, background_value)

    with self.test_session() as sess:
      image = sess.run(rendered, feed_dict={})[0, ...]
      target_image_name = 'Unlit_Cube_0.png'
      baseline_image_path = os.path.join(self.test_data_directory,
                                         target_image_name)
      test_utils.expect_image_file_and_render_are_near(
          self, sess, baseline_image_path, image)

  def testSimpleTriangleGradientComputation(self):
    """Verifies the Jacobian matrix for a single pixel.

    The pixel is in the center of a triangle facing the camera. This makes it
    easy to check which entries of the Jacobian might not make sense without
    worrying about corner cases.
    """
    image_height = 480
    image_width = 640
    test_pixel_x = 325
    test_pixel_y = 245

    normalized_device_coordinates = tf.placeholder(tf.float32, shape=[3, 3])

    triangles = tf.constant([[0, 1, 2]], dtype=tf.int32)

    barycentric_coordinates, _, _ = (
        rasterize_triangles.rasterize_triangles_module.rasterize_triangles(
            normalized_device_coordinates, triangles, image_width,
            image_height))

    pixels_to_compare = barycentric_coordinates[
        test_pixel_y:test_pixel_y + 1, test_pixel_x:test_pixel_x + 1, :]

    with self.test_session():
      ndc_init = np.array(
          [[-0.5, -0.5, 0.8], [0.0, 0.5, 0.3], [0.5, -0.5, 0.3]],
          dtype=np.float32)
      theoretical, numerical = tf.test.compute_gradient(
          normalized_device_coordinates, (3, 3),
          pixels_to_compare, (1, 1, 3),
          x_init_value=ndc_init,
          delta=4e-2)
      jacobians_match, message = (
          test_utils.check_jacobians_are_nearly_equal(
              theoretical, numerical, 0.01, 0.0, True))
      self.assertTrue(jacobians_match, message)

  def testInternalRenderGradientComputation(self):
    """Isolates and verifies the Jacobian matrix for the custom kernel."""
    image_height = 21
    image_width = 28

    normalized_device_coordinates = tf.placeholder(tf.float32, shape=[8, 3])

    barycentric_coordinates, _, _ = (
        rasterize_triangles.rasterize_triangles_module.rasterize_triangles(
            normalized_device_coordinates, self.cube_triangles, image_width,
            image_height))

    with self.test_session():
      # Precomputed transformation of the simple cube to normalized device
      # coordinates, in order to isolate the rasterization gradient.
      # pyformat: disable
      ndc_init = np.array(
          [[-0.43889722, -0.53184521, 0.85293502],
           [-0.37635487, 0.22206162, 0.90555805],
           [-0.22849123, 0.76811147, 0.80993629],
           [-0.2805393, -0.14092168, 0.71602166],
           [0.18631913, -0.62634289, 0.88603103],
           [0.16183566, 0.08129397, 0.93020856],
           [0.44147962, 0.53497446, 0.85076219],
           [0.53008741, -0.31276882, 0.77620775]],
          dtype=np.float32)
      # pyformat: enable
      theoretical, numerical = tf.test.compute_gradient(
          normalized_device_coordinates, (8, 3),
          barycentric_coordinates, (image_height, image_width, 3),
          x_init_value=ndc_init,
          delta=4e-2)
      jacobians_match, message = (
          test_utils.check_jacobians_are_nearly_equal(
              theoretical, numerical, 0.01, 0.01))
      self.assertTrue(jacobians_match, message)


if __name__ == '__main__':
  tf.test.main()
import sqlite3

from .base import Database

from logzero import logger


class SqliteDatabase(Database):

    def initialise(self, settings):
        logger.info("sqlite_database: initialise()")
        con = None

        create = False

        self.db_name = settings["db_name"]

        try:
            con = sqlite3.connect(self.db_name)
            cur = con.cursor()
            cur.execute("SELECT * FROM active")
            _ = cur.fetchone()
        except sqlite3.Error:
            # no active table
            create = True
        finally:
            if con:
                con.close()

        if create:
            self.create_schema()
        else:
            logger.info("sqlite_database: schema ready")

    def create_schema(self):
        logger.debug("sqlite_database: create_schema()")
        con = None

        try:
            con = sqlite3.connect(self.db_name)
            cur = con.cursor()
            cur.execute("CREATE TABLE active (environment_group TEXT, environment TEXT, endpoint_group TEXT, endpoint TEXT, timestamp INTEGER, message TEXT, url TEXT)")
            con.commit()
        except sqlite3.Error as e:
            logger.error("sqlite_database: problem during create_schema() - %s" % str(e))
        finally:
            if con:
                con.close()

    def get_active(self, incident):
        logger.debug("sqlite_database: get_active()")
        con = None
        try:
            con = sqlite3.connect(self.db_name)
            con.row_factory = sqlite3.Row
            cur = con.cursor()
            cur.execute('''SELECT * FROM active WHERE environment_group = ? AND environment = ? AND endpoint_group = ? AND endpoint = ?''',
                        (incident.endpoint.environment_group, incident.endpoint.environment, incident.endpoint.endpoint_group, incident.endpoint.endpoint))
            data = cur.fetchone()
            return data
        except sqlite3.Error as e:
            logger.error("sqlite_database: problem during get_active() - %s" % str(e))
        finally:
            if con:
                con.close()

    def get_all_actives(self):
        logger.debug("sqlite_database: get_all_actives()")
        con = None
        try:
            con = sqlite3.connect(self.db_name)
            con.row_factory = sqlite3.Row
            cur = con.cursor()
            cur.execute("SELECT * FROM active")
            data = cur.fetchall()
            return data
        except sqlite3.Error as e:
            logger.error("sqlite_database: problem during get_all_actives() - %s" % str(e))
            return None
        finally:
            if con:
                con.close()

    def active_exists(self, incident):
        logger.debug("sqlite_database: active_exists()")
        con = None
        try:
            con = sqlite3.connect(self.db_name)
            cur = con.cursor()
            cur.execute('''SELECT COUNT(*) FROM active WHERE environment_group = ? AND environment = ? AND endpoint_group = ? AND endpoint = ?''',
                        (incident.endpoint.environment_group, incident.endpoint.environment, incident.endpoint.endpoint_group, incident.endpoint.endpoint))
            data = cur.fetchone()
            return int(data[0]) > 0
        except sqlite3.Error as e:
            logger.error("sqlite_database: problem during active_exists() - %s" % str(e))
        finally:
            if con:
                con.close()

    def save_active(self, incident):
        logger.debug("sqlite_database: save_active()")
        con = None
        try:
            con = sqlite3.connect(self.db_name)
            cur = con.cursor()
            cur.execute("INSERT INTO active VALUES (?,?,?,?,?,?,?)",
                        (incident.endpoint.environment_group, incident.endpoint.environment, incident.endpoint.endpoint_group, incident.endpoint.endpoint, incident.timestamp, incident.message, incident.endpoint.url))
            con.commit()
        except sqlite3.Error as e:
            logger.error("sqlite_database: problem during save_active() - %s" % str(e))
        finally:
            if con:
                con.close()

    def remove_active(self, incident):
        logger.debug("sqlite_database: remove_active()")
        con = None
        try:
            con = sqlite3.connect(self.db_name)
            cur = con.cursor()
            cur.execute("DELETE FROM active WHERE environment_group = ? AND environment = ? AND endpoint_group = ? AND endpoint = ?",
                        (incident.endpoint.environment_group, incident.endpoint.environment, incident.endpoint.endpoint_group, incident.endpoint.endpoint))
            con.commit()
        except sqlite3.Error as e:
            logger.error("sqlite_database: problem during remove_active() - %s" % str(e))
        finally:
            if con:
                con.close()
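# A rough bootstrap sketch. Only the settings key is grounded in initialise()
# above; the zero-argument construction and the incident object's attributes
# are assumptions about the surrounding package.
#
#     db = SqliteDatabase()
#     db.initialise({"db_name": "active_incidents.db"})  # creates the schema on first run
#     for row in db.get_all_actives() or []:
#         print(dict(row))  # sqlite3.Row rows convert cleanly to dicts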
# Generated by Django 2.2.2 on 2019-06-10 15:58

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [("core", "0007_auto_20190603_1459")]

    operations = [
        migrations.AddField(
            model_name="profile",
            name="created_at",
            field=models.DateTimeField(blank=True, null=True),
        )
    ]
#! /usr/bin/python3

"""Create and parse 'send'-type messages."""

import struct
import json
import logging
logger = logging.getLogger(__name__)

from ... import (config, exceptions, util, message_type)

FORMAT = '>QQ'
LENGTH = 8 + 8
ID = 0


def unpack(db, message, block_index):
    # Only used for `unpack` API call at the moment.
    try:
        asset_id, quantity = struct.unpack(FORMAT, message)
        asset = util.get_asset_name(db, asset_id, block_index)

    except struct.error:
        raise exceptions.UnpackError('could not unpack')

    except exceptions.AssetNameError:
        raise exceptions.UnpackError('asset id invalid')

    unpacked = {
        'asset': asset,
        'quantity': quantity
    }
    return unpacked


def validate(db, source, destination, asset, quantity, block_index):
    problems = []

    if asset == config.BTC:
        problems.append('cannot send bitcoins')  # Only for parsing.

    if not isinstance(quantity, int):
        problems.append('quantity must be in satoshis')
        return problems

    if quantity < 0:
        problems.append('negative quantity')

    # For SQLite3
    if quantity > config.MAX_INT:
        problems.append('integer overflow')

    if util.enabled('send_destination_required'):  # Protocol change.
        if not destination:
            problems.append('destination is required')

    if util.enabled('options_require_memo'):
        # Check destination address options
        cursor = db.cursor()
        results = cursor.execute('SELECT options FROM addresses WHERE address=?', (destination,))
        if results:
            result = results.fetchone()
            if result and util.active_options(result['options'], config.ADDRESS_OPTION_REQUIRE_MEMO):
                problems.append('destination requires memo')
        cursor.close()

    return problems


def compose(db, source, destination, asset, quantity):
    cursor = db.cursor()

    # Just send BTC?
    if asset == config.BTC:
        return (source, [(destination, quantity)], None)

    # resolve subassets
    asset = util.resolve_subasset_longname(db, asset)

    # quantity must be in int satoshi (not float, string, etc)
    if not isinstance(quantity, int):
        raise exceptions.ComposeError('quantity must be an int (in satoshi)')

    # Only for outgoing (incoming will overburn).
    balances = list(cursor.execute('''SELECT * FROM balances WHERE (address = ? AND asset = ?)''', (source, asset)))
AND asset = ?)''', (source, asset)))NEWLINE if not balances or balances[0]['quantity'] < quantity:NEWLINE raise exceptions.ComposeError('insufficient funds')NEWLINENEWLINE block_index = util.CURRENT_BLOCK_INDEXNEWLINENEWLINE problems = validate(db, source, destination, asset, quantity, block_index)NEWLINE if problems: raise exceptions.ComposeError(problems)NEWLINENEWLINE asset_id = util.get_asset_id(db, asset, block_index)NEWLINE data = message_type.pack(ID)NEWLINE data += struct.pack(FORMAT, asset_id, quantity)NEWLINENEWLINE cursor.close()NEWLINE return (source, [(destination, None)], data)NEWLINENEWLINEdef parse (db, tx, message):NEWLINE cursor = db.cursor()NEWLINENEWLINE # Unpack message.NEWLINE try:NEWLINE if len(message) != LENGTH:NEWLINE raise exceptions.UnpackErrorNEWLINE asset_id, quantity = struct.unpack(FORMAT, message)NEWLINE asset = util.get_asset_name(db, asset_id, tx['block_index'])NEWLINE status = 'valid'NEWLINE except (exceptions.UnpackError, exceptions.AssetNameError, struct.error) as e:NEWLINE asset, quantity = None, NoneNEWLINE status = 'invalid: could not unpack'NEWLINENEWLINE if status == 'valid':NEWLINE # OversendNEWLINE cursor.execute('''SELECT * FROM balances \NEWLINE WHERE (address = ? AND asset = ?)''', (tx['source'], asset))NEWLINE balances = cursor.fetchall()NEWLINE if not balances:NEWLINE status = 'invalid: insufficient funds'NEWLINE elif balances[0]['quantity'] < quantity:NEWLINE quantity = min(balances[0]['quantity'], quantity)NEWLINENEWLINE # For SQLite3NEWLINE if quantity:NEWLINE quantity = min(quantity, config.MAX_INT)NEWLINENEWLINE if status == 'valid':NEWLINE problems = validate(db, tx['source'], tx['destination'], asset, quantity, tx['block_index'])NEWLINE if problems: status = 'invalid: ' + '; '.join(problems)NEWLINENEWLINE if status == 'valid':NEWLINE util.debit(db, tx['source'], asset, quantity, action='send', event=tx['tx_hash'])NEWLINE util.credit(db, tx['destination'], asset, quantity, action='send', event=tx['tx_hash'])NEWLINENEWLINE # Add parsed transaction to message-type–specific table.NEWLINE bindings = {NEWLINE 'tx_index': tx['tx_index'],NEWLINE 'tx_hash': tx['tx_hash'],NEWLINE 'block_index': tx['block_index'],NEWLINE 'source': tx['source'],NEWLINE 'destination': tx['destination'],NEWLINE 'asset': asset,NEWLINE 'quantity': quantity,NEWLINE 'status': status,NEWLINE }NEWLINE if "integer overflow" not in status and "quantity must be in satoshis" not in status:NEWLINE sql = 'insert into sends (tx_index, tx_hash, block_index, source, destination, asset, quantity, status, memo) values(:tx_index, :tx_hash, :block_index, :source, :destination, :asset, :quantity, :status, NULL)'NEWLINE cursor.execute(sql, bindings)NEWLINE else:NEWLINE logger.warning("Not storing [send] tx [%s]: %s" % (tx['tx_hash'], status))NEWLINE logger.debug("Bindings: %s" % (json.dumps(bindings), ))NEWLINENEWLINENEWLINE cursor.close()NEWLINENEWLINE# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4NEWLINE |
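The wire format above is just two big-endian unsigned 64-bit integers appended after the message-type prefix. A self-contained sketch of the round trip using only the standard library (asset-id/name resolution via util is out of scope here; the id and quantity are made-up values):

import struct

FORMAT = '>QQ'   # big-endian: asset_id (uint64), quantity (uint64)
LENGTH = 8 + 8

payload = struct.pack(FORMAT, 1, 50000000)   # hypothetical asset id and quantity
assert len(payload) == LENGTH
asset_id, quantity = struct.unpack(FORMAT, payload)
print(asset_id, quantity)   # -> 1 50000000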
import osNEWLINEimport sysNEWLINEimport timeNEWLINEimport torchNEWLINEimport utilsNEWLINEimport loggingNEWLINEimport argparseNEWLINEimport torch.nn as nnNEWLINEimport torch.utilsNEWLINENEWLINEfrom adaptive_augmentor import AdaAugNEWLINEfrom networks import get_modelNEWLINEfrom networks.projection import ProjectionNEWLINEfrom dataset import get_num_class, get_dataloaders, get_label_name, get_dataset_dimensionNEWLINEfrom config import get_warmup_configNEWLINEfrom warmup_scheduler import GradualWarmupSchedulerNEWLINENEWLINEparser = argparse.ArgumentParser("ada_aug")NEWLINEparser.add_argument('--dataroot', type=str, default='./', help='location of the data corpus')NEWLINEparser.add_argument('--dataset', type=str, default='cifar10', help='name of dataset')NEWLINEparser.add_argument('--train_portion', type=float, default=0.5, help='portion of training data')NEWLINEparser.add_argument('--batch_size', type=int, default=96, help='batch size')NEWLINEparser.add_argument('--num_workers', type=int, default=0, help="num_workers")NEWLINEparser.add_argument('--learning_rate', type=float, default=0.025, help='init learning rate')NEWLINEparser.add_argument('--learning_rate_min', type=float, default=0.0001, help='min learning rate')NEWLINEparser.add_argument('--momentum', type=float, default=0.9, help='momentum')NEWLINEparser.add_argument('--weight_decay', type=float, default=3e-4, help='weight decay')NEWLINEparser.add_argument('--grad_clip', type=float, default=5, help='gradient clipping')NEWLINEparser.add_argument('--use_cuda', type=bool, default=True, help="use cuda default True")NEWLINEparser.add_argument('--gpu', type=int, default=0, help='gpu device id')NEWLINEparser.add_argument('--use_parallel', action='store_true', default=False, help="use data parallel default False")NEWLINEparser.add_argument('--model_name', type=str, default='wresnet40_2', help="model name")NEWLINEparser.add_argument('--model_path', type=str, default='saved_models', help='path to save the model')NEWLINEparser.add_argument('--cutout', action='store_true', default=False, help='use cutout')NEWLINEparser.add_argument('--cutout_length', type=int, default=16, help='cutout length')NEWLINEparser.add_argument('--drop_path_prob', type=float, default=0.2, help='drop path probability')NEWLINEparser.add_argument('--epochs', type=int, default=600, help='number of training epochs')NEWLINEparser.add_argument('--report_freq', type=float, default=50, help='report frequency')NEWLINEparser.add_argument('--save', type=str, default='EXP', help='experiment name')NEWLINEparser.add_argument('--seed', type=int, default=0, help='seed')NEWLINEparser.add_argument('--search_dataset', type=str, default='./', help='search dataset name')NEWLINEparser.add_argument('--gf_model_name', type=str, default='./', help='gf_model name')NEWLINEparser.add_argument('--gf_model_path', type=str, default='./', help='gf_model path')NEWLINEparser.add_argument('--h_model_path', type=str, default='./', help='h_model path')NEWLINEparser.add_argument('--k_ops', type=int, default=1, help="number of augmentation applied during training")NEWLINEparser.add_argument('--delta', type=float, default=0.3, help="degree of perturbation in magnitude")NEWLINEparser.add_argument('--temperature', type=float, default=1.0, help="temperature")NEWLINEparser.add_argument('--n_proj_layer', type=int, default=0, help="number of additional hidden layer in augmentation policy projection")NEWLINEparser.add_argument('--n_proj_hidden', type=int, default=128, help="number of hidden units in augmentation policy 
projection layers")NEWLINEparser.add_argument('--restore_path', type=str, default='./', help='restore model path')NEWLINEparser.add_argument('--restore', action='store_true', default=False, help='restore model default False')NEWLINENEWLINEargs = parser.parse_args()NEWLINEdebug = True if args.save == "debug" else FalseNEWLINEargs.save = '{}-{}'.format(time.strftime("%Y%m%d-%H%M%S"), args.save)NEWLINEif debug:NEWLINE args.save = os.path.join('debug', args.save)NEWLINEelse:NEWLINE args.save = os.path.join('eval', args.dataset, args.save)NEWLINEutils.create_exp_dir(args.save)NEWLINElog_format = '%(asctime)s %(message)s'NEWLINElogging.basicConfig(stream=sys.stdout, level=logging.INFO,NEWLINE format=log_format, datefmt='%m/%d %I:%M:%S %p')NEWLINEfh = logging.FileHandler(os.path.join(args.save, 'log.txt'))NEWLINEfh.setFormatter(logging.Formatter(log_format))NEWLINElogging.getLogger().addHandler(fh)NEWLINENEWLINENEWLINEdef main():NEWLINE if not torch.cuda.is_available():NEWLINE logging.info('no gpu device available')NEWLINE sys.exit(1)NEWLINENEWLINE torch.cuda.set_device(args.gpu)NEWLINE utils.reproducibility(args.seed)NEWLINE logging.info('gpu device = %d' % args.gpu)NEWLINE logging.info("args = %s", args)NEWLINENEWLINE # dataset settingsNEWLINE n_class = get_num_class(args.dataset)NEWLINE class2label = get_label_name(args.dataset, args.dataroot)NEWLINE train_queue, valid_queue, _, test_queue = get_dataloaders(NEWLINE args.dataset, args.batch_size, args.num_workers,NEWLINE args.dataroot, args.cutout, args.cutout_length,NEWLINE split=args.train_portion, split_idx=0, target_lb=-1,NEWLINE search=True)NEWLINENEWLINE logging.info(f'Dataset: {args.dataset}')NEWLINE logging.info(f' |total: {len(train_queue.dataset)}')NEWLINE logging.info(f' |train: {len(train_queue)*args.batch_size}')NEWLINE logging.info(f' |valid: {len(valid_queue)*args.batch_size}')NEWLINENEWLINE # task model settingsNEWLINE task_model = get_model(model_name=args.model_name,NEWLINE num_class=n_class,NEWLINE use_cuda=True, data_parallel=False)NEWLINE logging.info("param size = %fMB", utils.count_parameters_in_MB(task_model))NEWLINENEWLINE # task optimization settingsNEWLINE optimizer = torch.optim.SGD(NEWLINE task_model.parameters(),NEWLINE args.learning_rate,NEWLINE momentum=args.momentum,NEWLINE weight_decay=args.weight_decay,NEWLINE nesterov=TrueNEWLINE )NEWLINENEWLINE scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(NEWLINE optimizer, float(args.epochs), eta_min=args.learning_rate_min)NEWLINENEWLINE m, e = get_warmup_config(args.dataset)NEWLINE scheduler = GradualWarmupScheduler(NEWLINE optimizer,NEWLINE multiplier=m,NEWLINE total_epoch=e,NEWLINE after_scheduler=scheduler)NEWLINE logging.info(f'Optimizer: SGD, scheduler: CosineAnnealing, warmup: {m}/{e}')NEWLINE criterion = nn.CrossEntropyLoss()NEWLINE criterion = criterion.cuda()NEWLINENEWLINE # restore settingNEWLINE if args.restore:NEWLINE trained_epoch = utils.restore_ckpt(task_model, optimizer, scheduler, args.restore_path, location=args.gpu) + 1NEWLINE n_epoch = args.epochs - trained_epochNEWLINE logging.info(f'Restoring model from {args.restore_path}, starting from epoch {trained_epoch}')NEWLINE else:NEWLINE trained_epoch = 0NEWLINE n_epoch = args.epochsNEWLINENEWLINE # load trained adaaug sub modelsNEWLINE search_n_class = get_num_class(args.search_dataset)NEWLINE gf_model = get_model(model_name=args.gf_model_name,NEWLINE num_class=search_n_class,NEWLINE use_cuda=True, data_parallel=False)NEWLINENEWLINE h_model = Projection(in_features=gf_model.fc.in_features,NEWLINE 
n_layers=args.n_proj_layer,NEWLINE n_hidden=args.n_proj_hidden).cuda()NEWLINENEWLINE utils.load_model(gf_model, f'{args.gf_model_path}/gf_weights.pt', location=args.gpu)NEWLINE utils.load_model(h_model, f'{args.h_model_path}/h_weights.pt', location=args.gpu)NEWLINENEWLINE for param in gf_model.parameters():NEWLINE param.requires_grad = FalseNEWLINENEWLINE for param in h_model.parameters():NEWLINE param.requires_grad = FalseNEWLINENEWLINE after_transforms = train_queue.dataset.after_transformsNEWLINE adaaug_config = {'sampling': 'prob',NEWLINE 'k_ops': args.k_ops,NEWLINE 'delta': args.delta,NEWLINE 'temp': args.temperature,NEWLINE 'search_d': get_dataset_dimension(args.search_dataset),NEWLINE 'target_d': get_dataset_dimension(args.dataset)}NEWLINENEWLINE adaaug = AdaAug(after_transforms=after_transforms,NEWLINE n_class=search_n_class,NEWLINE gf_model=gf_model,NEWLINE h_model=h_model,NEWLINE save_dir=args.save,NEWLINE config=adaaug_config)NEWLINENEWLINE # start trainingNEWLINE for i_epoch in range(n_epoch):NEWLINE epoch = trained_epoch + i_epochNEWLINE lr = scheduler.get_last_lr()[0]NEWLINE logging.info('epoch %d lr %e', epoch, lr)NEWLINENEWLINE train_acc, train_obj = train(NEWLINE train_queue, task_model, criterion, optimizer, epoch, args.grad_clip, adaaug)NEWLINE logging.info('train_acc %f', train_acc)NEWLINENEWLINE valid_acc, valid_obj, _, _ = infer(valid_queue, task_model, criterion)NEWLINE logging.info('valid_acc %f', valid_acc)NEWLINENEWLINE scheduler.step()NEWLINENEWLINE if epoch % args.report_freq == 0:NEWLINE test_acc, test_obj, test_acc5, _ = infer(test_queue, task_model, criterion)NEWLINE logging.info('test_acc %f %f', test_acc, test_acc5)NEWLINENEWLINE utils.save_ckpt(task_model, optimizer, scheduler, epoch,NEWLINE os.path.join(args.save, 'weights.pt'))NEWLINENEWLINE adaaug.save_history(class2label)NEWLINE figure = adaaug.plot_history()NEWLINE test_acc, test_obj, test_acc5, _ = infer(test_queue, task_model, criterion)NEWLINENEWLINE logging.info('test_acc %f %f', test_acc, test_acc5)NEWLINE logging.info(f'save to {args.save}')NEWLINENEWLINENEWLINEdef train(train_queue, model, criterion, optimizer, epoch, grad_clip, adaaug):NEWLINE objs = utils.AvgrageMeter()NEWLINE top1 = utils.AvgrageMeter()NEWLINE top5 = utils.AvgrageMeter()NEWLINENEWLINE for step, (input, target) in enumerate(train_queue):NEWLINE target = target.cuda(non_blocking=True)NEWLINENEWLINE # get augmented training data from adaaugNEWLINE aug_images = adaaug(input, mode='exploit')NEWLINE model.train()NEWLINE optimizer.zero_grad()NEWLINE logits = model(aug_images)NEWLINE loss = criterion(logits, target)NEWLINE loss.backward()NEWLINE nn.utils.clip_grad_norm_(model.parameters(), grad_clip)NEWLINE optimizer.step()NEWLINENEWLINE prec1, prec5 = utils.accuracy(logits, target, topk=(1, 5))NEWLINE n = input.size(0)NEWLINE objs.update(loss.detach().item(), n)NEWLINE top1.update(prec1.detach().item(), n)NEWLINE top5.update(prec5.detach().item(), n)NEWLINENEWLINE global_step = step + epoch * len(train_queue)NEWLINE if global_step % args.report_freq == 0:NEWLINE logging.info('train %03d %e %f %f', global_step, objs.avg, top1.avg, top5.avg)NEWLINENEWLINE # log the policyNEWLINE if step == 0:NEWLINE adaaug.add_history(input, target)NEWLINENEWLINE return top1.avg, objs.avgNEWLINENEWLINENEWLINEdef infer(valid_queue, model, criterion):NEWLINE objs = utils.AvgrageMeter()NEWLINE top1 = utils.AvgrageMeter()NEWLINE top5 = utils.AvgrageMeter()NEWLINE model.eval()NEWLINE with torch.no_grad():NEWLINE for input, target in valid_queue:NEWLINE 
input = input.cuda()NEWLINE target = target.cuda(non_blocking=True)NEWLINENEWLINE logits = model(input)NEWLINE loss = criterion(logits, target)NEWLINENEWLINE prec1, prec5 = utils.accuracy(logits, target, topk=(1, 5))NEWLINE n = input.size(0)NEWLINE objs.update(loss.detach().item(), n)NEWLINE top1.update(prec1.detach().item(), n)NEWLINE top5.update(prec5.detach().item(), n)NEWLINENEWLINE return top1.avg, objs.avg, top5.avg, objs.avgNEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE main()NEWLINE |
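The scheduler wiring in main() composes two schedulers: GradualWarmupScheduler ramps the base learning rate up to `multiplier` times its initial value over `total_epoch` epochs, then hands off to the cosine schedule. A minimal sketch of that composition on a dummy parameter (assumes the same `warmup_scheduler` package is installed; `multiplier=2, total_epoch=5` are made-up warmup values standing in for get_warmup_config()):

import torch
from warmup_scheduler import GradualWarmupScheduler

param = torch.nn.Parameter(torch.zeros(1))
opt = torch.optim.SGD([param], lr=0.025, momentum=0.9, nesterov=True)
cosine = torch.optim.lr_scheduler.CosineAnnealingLR(opt, T_max=600, eta_min=1e-4)
sched = GradualWarmupScheduler(opt, multiplier=2, total_epoch=5, after_scheduler=cosine)

for epoch in range(10):
    # LR ramps up for the first 5 epochs, then follows the cosine curve.
    print(epoch, sched.get_last_lr()[0])
    opt.step()
    sched.step()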
#!/usr/bin/env pythonNEWLINE#NEWLINE# VMAccess extensionNEWLINE#NEWLINE# Copyright 2014 Microsoft CorporationNEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINEimport osNEWLINEimport platformNEWLINEimport reNEWLINEimport shutilNEWLINEimport sysNEWLINEimport tempfileNEWLINEimport timeNEWLINEimport tracebackNEWLINENEWLINEimport Utils.HandlerUtil as UtilNEWLINEfrom waagentloader import load_waagentNEWLINENEWLINEwaagent = load_waagent()NEWLINENEWLINE# Define global variablesNEWLINEExtensionShortName = 'VMAccess'NEWLINEBeginCertificateTag = '-----BEGIN CERTIFICATE-----'NEWLINEEndCertificateTag = '-----END CERTIFICATE-----'NEWLINEOutputSplitter = ';'NEWLINESshdConfigPath = '/etc/ssh/sshd_config'NEWLINENEWLINEdef main():NEWLINE waagent.LoggerInit('/var/log/waagent.log', '/dev/stdout')NEWLINE waagent.Log("%s started to handle." % (ExtensionShortName))NEWLINE waagent.MyDistro = waagent.GetMyDistro()NEWLINENEWLINE try:NEWLINE for a in sys.argv[1:]:NEWLINE if re.match("^([-/]*)(disable)", a):NEWLINE disable()NEWLINE elif re.match("^([-/]*)(uninstall)", a):NEWLINE uninstall()NEWLINE elif re.match("^([-/]*)(install)", a):NEWLINE install()NEWLINE elif re.match("^([-/]*)(enable)", a):NEWLINE enable()NEWLINE elif re.match("^([-/]*)(update)", a):NEWLINE update()NEWLINE except Exception as e:NEWLINE err_msg = "Failed with error: {0}, {1}".format(e, traceback.format_exc())NEWLINE waagent.Error(err_msg)NEWLINENEWLINENEWLINEdef install():NEWLINE hutil = Util.HandlerUtility(waagent.Log, waagent.Error)NEWLINE hutil.do_parse_context('Install')NEWLINE hutil.do_exit(0, 'Install', 'success', '0', 'Install Succeeded')NEWLINENEWLINENEWLINEdef enable():NEWLINE hutil = Util.HandlerUtility(waagent.Log, waagent.Error)NEWLINE hutil.do_parse_context('Enable')NEWLINE try:NEWLINE _forcibly_reset_chap(hutil)NEWLINENEWLINE reset_ssh = NoneNEWLINE remove_user = NoneNEWLINE protect_settings = hutil.get_protected_settings()NEWLINE if protect_settings:NEWLINE reset_ssh = protect_settings.get('reset_ssh')NEWLINE remove_user = protect_settings.get('remove_user')NEWLINENEWLINE if remove_user and _is_sshd_config_modified(protect_settings):NEWLINE waagent.AddExtensionEvent(name=hutil.get_name(),NEWLINE op=waagent.WALAEventOperation.Enable,NEWLINE isSuccess=False,NEWLINE message="(03002)Argument error, conflicting operations")NEWLINE raise Exception("Cannot reset sshd_config and remove a user in one operation.")NEWLINENEWLINE # check port each time the VM boots upNEWLINE if reset_ssh:NEWLINE _open_ssh_port()NEWLINE hutil.log("Succeeded in check and open ssh port.")NEWLINENEWLINE hutil.exit_if_enabled()NEWLINE if _is_sshd_config_modified(protect_settings):NEWLINE _backup_sshd_config(SshdConfigPath)NEWLINENEWLINE if reset_ssh:NEWLINE waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="reset-ssh")NEWLINE _reset_sshd_config(SshdConfigPath)NEWLINE hutil.log("Succeeded in reset sshd_config.")NEWLINENEWLINE if remove_user:NEWLINE 
waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="remove-user")NEWLINE _remove_user_account(remove_user, hutil)NEWLINENEWLINE _set_user_account_pub_key(protect_settings, hutil)NEWLINENEWLINE if _is_sshd_config_modified(protect_settings):NEWLINE waagent.MyDistro.restartSshService()NEWLINENEWLINE check_and_repair_disk(hutil)NEWLINE hutil.do_exit(0, 'Enable', 'success', '0', 'Enable succeeded.')NEWLINE except Exception as e:NEWLINE hutil.error(("Failed to enable the extension with error: {0}, "NEWLINE "stack trace: {1}").format(str(e), traceback.format_exc()))NEWLINE hutil.do_exit(1, 'Enable', 'error', '0', "Enable failed: {0}".format(str(e)))NEWLINENEWLINENEWLINEdef _forcibly_reset_chap(hutil):NEWLINE name = "ChallengeResponseAuthentication"NEWLINE config = waagent.GetFileContents(SshdConfigPath).split("\n")NEWLINE for i in range(0, len(config)):NEWLINE if config[i].startswith(name) and "no" in config[i].lower():NEWLINE waagent.AddExtensionEvent(name=hutil.get_name(), op="sshd", isSuccess=True, message="ChallengeResponseAuthentication no")NEWLINE returnNEWLINENEWLINE waagent.AddExtensionEvent(name=hutil.get_name(), op="sshd", isSuccess=True, message="ChallengeResponseAuthentication yes")NEWLINE _backup_sshd_config(SshdConfigPath)NEWLINE _set_sshd_config(config, name, "no")NEWLINE waagent.ReplaceFileContentsAtomic(SshdConfigPath, "\n".join(config))NEWLINE waagent.MyDistro.restartSshService()NEWLINENEWLINENEWLINEdef _is_sshd_config_modified(protected_settings):NEWLINE result = protected_settings.get('reset_ssh') or protected_settings.get('password')NEWLINE return result is not NoneNEWLINENEWLINENEWLINEdef uninstall():NEWLINE hutil = Util.HandlerUtility(waagent.Log, waagent.Error)NEWLINE hutil.do_parse_context('Uninstall')NEWLINE hutil.do_exit(0, 'Uninstall', 'success', '0', 'Uninstall succeeded')NEWLINENEWLINENEWLINEdef disable():NEWLINE hutil = Util.HandlerUtility(waagent.Log, waagent.Error)NEWLINE hutil.do_parse_context('Disable')NEWLINE hutil.do_exit(0, 'Disable', 'success', '0', 'Disable Succeeded')NEWLINENEWLINENEWLINEdef update():NEWLINE hutil = Util.HandlerUtility(waagent.Log, waagent.Error)NEWLINE hutil.do_parse_context('Update')NEWLINE hutil.do_exit(0, 'Update', 'success', '0', 'Update Succeeded')NEWLINENEWLINENEWLINEdef _remove_user_account(user_name, hutil):NEWLINE hutil.log("Removing user account")NEWLINENEWLINE try:NEWLINE sudoers = _get_other_sudoers(user_name)NEWLINE waagent.MyDistro.DeleteAccount(user_name)NEWLINE _save_other_sudoers(sudoers)NEWLINE except Exception as e:NEWLINE waagent.AddExtensionEvent(name=hutil.get_name(),NEWLINE op=waagent.WALAEventOperation.Enable,NEWLINE isSuccess=False,NEWLINE message="(02102)Failed to remove user.")NEWLINE raise Exception("Failed to remove user {0}".format(e))NEWLINENEWLINE waagent.AddExtensionEvent(name=hutil.get_name(),NEWLINE op=waagent.WALAEventOperation.Enable,NEWLINE isSuccess=True,NEWLINE message="Successfully removed user")NEWLINENEWLINENEWLINEdef _set_user_account_pub_key(protect_settings, hutil):NEWLINE ovf_xml = waagent.GetFileContents('/var/lib/waagent/ovf-env.xml')NEWLINE ovf_env = waagent.OvfEnv().Parse(ovf_xml)NEWLINENEWLINE # user name must be provided if set ssh key or passwordNEWLINE if not protect_settings or 'username' not in protect_settings:NEWLINE returnNEWLINENEWLINE user_name = protect_settings['username']NEWLINE user_pass = protect_settings.get('password')NEWLINE cert_txt = protect_settings.get('ssh_key')NEWLINE expiration = protect_settings.get('expiration')NEWLINE 
no_convert = FalseNEWLINE if not user_pass and not cert_txt and not ovf_env.SshPublicKeys:NEWLINE raise Exception("No password or ssh_key is specified.")NEWLINENEWLINE if user_pass is not None and len(user_pass) == 0:NEWLINE user_pass = NoneNEWLINE hutil.log("empty passwords are not allowed, ignoring password reset")NEWLINENEWLINE # Reset user account and password, password could be emptyNEWLINE sudoers = _get_other_sudoers(user_name)NEWLINE error_string = waagent.MyDistro.CreateAccount(NEWLINE user_name, user_pass, expiration, None)NEWLINE _save_other_sudoers(sudoers)NEWLINENEWLINE if error_string is not None:NEWLINE err_msg = "Failed to create the account or set the password"NEWLINE waagent.AddExtensionEvent(name=hutil.get_name(),NEWLINE op=waagent.WALAEventOperation.Enable,NEWLINE isSuccess=False,NEWLINE message="(02101)" + err_msg)NEWLINE raise Exception(err_msg + " with " + error_string)NEWLINE hutil.log("Succeeded in create the account or set the password.")NEWLINENEWLINE # Allow password authentication if user_pass is providedNEWLINE if user_pass is not None:NEWLINE waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="create-user-with-password")NEWLINE _allow_password_auth()NEWLINENEWLINE # Reset ssh key with the new public key passed in or reuse old public key.NEWLINE if cert_txt or len(ovf_env.SshPublicKeys) > 0:NEWLINE if cert_txt and cert_txt.strip().lower().startswith("ssh-rsa"):NEWLINE no_convert = TrueNEWLINE try:NEWLINE pub_path = os.path.join('/home/', user_name, '.ssh',NEWLINE 'authorized_keys')NEWLINE ovf_env.UserName = user_nameNEWLINE if no_convert:NEWLINE if cert_txt:NEWLINE pub_path = ovf_env.PrepareDir(pub_path)NEWLINE final_cert_txt = cert_txtNEWLINE if(not cert_txt.endswith("\n")):NEWLINE final_cert_txt = final_cert_txt+"\n"NEWLINE waagent.AppendFileContents(pub_path, final_cert_txt)NEWLINE waagent.MyDistro.setSelinuxContext(pub_path,NEWLINE 'unconfined_u:object_r:ssh_home_t:s0')NEWLINE waagent.ChangeOwner(pub_path, user_name)NEWLINE waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="create-user")NEWLINE hutil.log("Succeeded in resetting ssh_key.")NEWLINE else:NEWLINE err_msg = "Failed to reset ssh key because the cert content is empty."NEWLINE waagent.AddExtensionEvent(name=hutil.get_name(),NEWLINE op=waagent.WALAEventOperation.Enable,NEWLINE isSuccess=False,NEWLINE message="(02100)"+err_msg)NEWLINE else:NEWLINE if cert_txt:NEWLINE _save_cert_str_as_file(cert_txt, 'temp.crt')NEWLINE else:NEWLINE for pkey in ovf_env.SshPublicKeys:NEWLINE if pkey[1]:NEWLINE shutil.copy(NEWLINE os.path.join(waagent.LibDir, pkey[0] + '.crt'),NEWLINE os.path.join(os.getcwd(), 'temp.crt'))NEWLINE breakNEWLINE pub_path = ovf_env.PrepareDir(pub_path)NEWLINE retcode = waagent.Run(waagent.Openssl + " x509 -in temp.crt -noout -pubkey > temp.pub")NEWLINE if retcode > 0:NEWLINE raise Exception("Failed to generate public key file.")NEWLINE waagent.MyDistro.sshDeployPublicKey('temp.pub', pub_path)NEWLINE waagent.MyDistro.setSelinuxContext(pub_path,NEWLINE 'unconfined_u:object_r:ssh_home_t:s0')NEWLINE waagent.ChangeOwner(pub_path, user_name)NEWLINE os.remove('temp.pub')NEWLINE os.remove('temp.crt')NEWLINE waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="create-user")NEWLINE hutil.log("Succeeded in resetting ssh_key.")NEWLINE except Exception as e:NEWLINE hutil.log(str(e))NEWLINE waagent.AddExtensionEvent(name=hutil.get_name(),NEWLINE op=waagent.WALAEventOperation.Enable,NEWLINE 
isSuccess=False,NEWLINE message="(02100)Failed to reset ssh key.")NEWLINENEWLINENEWLINEdef _get_other_sudoers(user_name):NEWLINE sudoers_file = '/etc/sudoers.d/waagent'NEWLINE if not os.path.isfile(sudoers_file):NEWLINE return NoneNEWLINE sudoers = waagent.GetFileContents(sudoers_file).split("\n")NEWLINE pattern = r'^{0}\s'.format(user_name)NEWLINE sudoers = list(filter(lambda x: re.match(pattern, x) is None, sudoers))NEWLINE return sudoersNEWLINENEWLINENEWLINEdef _save_other_sudoers(sudoers):NEWLINE sudoersFile = '/etc/sudoers.d/waagent'NEWLINE if sudoers is None:NEWLINE returnNEWLINE waagent.AppendFileContents(sudoersFile, "\n".join(sudoers))NEWLINE os.chmod("/etc/sudoers.d/waagent", 0o440)NEWLINENEWLINENEWLINEdef _allow_password_auth():NEWLINE config = waagent.GetFileContents(SshdConfigPath).split("\n")NEWLINE _set_sshd_config(config, "PasswordAuthentication", "yes")NEWLINE waagent.ReplaceFileContentsAtomic(SshdConfigPath, "\n".join(config))NEWLINENEWLINENEWLINEdef _set_sshd_config(config, name, val):NEWLINE notfound = TrueNEWLINE for i in range(0, len(config)):NEWLINE if config[i].startswith(name):NEWLINE config[i] = "{0} {1}".format(name, val)NEWLINE notfound = FalseNEWLINE elif config[i].startswith("Match"):NEWLINE # Match block must be put in the end of sshd configNEWLINE breakNEWLINE if notfound:NEWLINE config.insert(i, "{0} {1}".format(name, val))NEWLINE return configNEWLINENEWLINENEWLINEdef _reset_sshd_config(sshd_file_path):NEWLINE distro = platform.dist()NEWLINE distro_name = distro[0]NEWLINE version = distro[1]NEWLINE config_file_path = os.path.join(os.getcwd(), 'resources', '%s_%s' % (distro_name, version))NEWLINE if not(os.path.exists(config_file_path)):NEWLINE config_file_path = os.path.join(os.getcwd(), 'resources', '%s_%s' % (distro_name, 'default'))NEWLINE if not(os.path.exists(config_file_path)):NEWLINE config_file_path = os.path.join(os.getcwd(), 'resources', 'default')NEWLINENEWLINE if distro_name == "CoreOS":NEWLINE # Parse sshd port from config_file_pathNEWLINE sshd_port = 22NEWLINE regex = re.compile(r"^Port\s+(\d+)", re.VERBOSE)NEWLINE with open(config_file_path) as f:NEWLINE for line in f:NEWLINE match = regex.match(line)NEWLINE if match:NEWLINE sshd_port = match.group(1)NEWLINE breakNEWLINENEWLINE # Prepare cloud init config for coreos-cloudinitNEWLINE f = tempfile.NamedTemporaryFile(delete=False)NEWLINE f.close()NEWLINE cfg_tempfile = f.nameNEWLINE cfg_content = "#cloud-config\n\n"NEWLINENEWLINE # Overwrite /etc/ssh/sshd_configNEWLINE cfg_content += "write_files:\n"NEWLINE cfg_content += " - path: {0}\n".format(sshd_file_path)NEWLINE cfg_content += " permissions: 0600\n"NEWLINE cfg_content += " owner: root:root\n"NEWLINE cfg_content += " content: |\n"NEWLINE for line in waagent.GetFileContents(config_file_path).split('\n'):NEWLINE cfg_content += " {0}\n".format(line)NEWLINENEWLINE # Change the sshd port in /etc/systemd/system/sshd.socketNEWLINE cfg_content += "\ncoreos:\n"NEWLINE cfg_content += " units:\n"NEWLINE cfg_content += " - name: sshd.socket\n"NEWLINE cfg_content += " command: restart\n"NEWLINE cfg_content += " content: |\n"NEWLINE cfg_content += " [Socket]\n"NEWLINE cfg_content += " ListenStream={0}\n".format(sshd_port)NEWLINE cfg_content += " Accept=yes\n"NEWLINENEWLINE waagent.SetFileContents(cfg_tempfile, cfg_content)NEWLINENEWLINE waagent.Run("coreos-cloudinit -from-file " + cfg_tempfile, chk_err=False)NEWLINE os.remove(cfg_tempfile)NEWLINE else:NEWLINE shutil.copyfile(config_file_path, sshd_file_path)NEWLINE 
waagent.MyDistro.restartSshService()NEWLINENEWLINENEWLINEdef _backup_sshd_config(sshd_file_path):NEWLINE if os.path.exists(sshd_file_path):NEWLINE backup_file_name = '%s_%s' % (NEWLINE sshd_file_path, time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()))NEWLINE shutil.copyfile(sshd_file_path, backup_file_name)NEWLINENEWLINENEWLINEdef _save_cert_str_as_file(cert_txt, file_name):NEWLINE cert_start = cert_txt.find(BeginCertificateTag)NEWLINE if cert_start >= 0:NEWLINE cert_txt = cert_txt[cert_start + len(BeginCertificateTag):]NEWLINE cert_end = cert_txt.find(EndCertificateTag)NEWLINE if cert_end >= 0:NEWLINE cert_txt = cert_txt[:cert_end]NEWLINE cert_txt = cert_txt.strip()NEWLINE cert_txt = "{0}\n{1}\n{2}\n".format(BeginCertificateTag, cert_txt, EndCertificateTag)NEWLINE waagent.SetFileContents(file_name, cert_txt)NEWLINENEWLINENEWLINEdef _open_ssh_port():NEWLINE _del_rule_if_exists('INPUT -p tcp -m tcp --dport 22 -j DROP')NEWLINE _del_rule_if_exists('INPUT -p tcp -m tcp --dport 22 -j REJECT')NEWLINE _del_rule_if_exists('INPUT -p -j DROP')NEWLINE _del_rule_if_exists('INPUT -p -j REJECT')NEWLINE _insert_rule_if_not_exists('INPUT -p tcp -m tcp --dport 22 -j ACCEPT')NEWLINENEWLINE _del_rule_if_exists('OUTPUT -p tcp -m tcp --sport 22 -j DROP')NEWLINE _del_rule_if_exists('OUTPUT -p tcp -m tcp --sport 22 -j REJECT')NEWLINE _del_rule_if_exists('OUTPUT -p -j DROP')NEWLINE _del_rule_if_exists('OUTPUT -p -j REJECT')NEWLINE # Reply traffic from the SSH server is sourced from port 22, so match --sport here.NEWLINE _insert_rule_if_not_exists('OUTPUT -p tcp -m tcp --sport 22 -j ACCEPT')NEWLINENEWLINENEWLINEdef _del_rule_if_exists(rule_string):NEWLINE cmd_result = waagent.RunGetOutput("iptables-save")NEWLINE while cmd_result[0] == 0 and (rule_string in cmd_result[1]):NEWLINE waagent.Run("iptables -D %s" % rule_string)NEWLINE cmd_result = waagent.RunGetOutput("iptables-save")NEWLINENEWLINENEWLINEdef _insert_rule_if_not_exists(rule_string):NEWLINE cmd_result = waagent.RunGetOutput("iptables-save")NEWLINE if cmd_result[0] == 0 and (rule_string not in cmd_result[1]):NEWLINE waagent.Run("iptables -I %s" % rule_string)NEWLINENEWLINENEWLINEdef check_and_repair_disk(hutil):NEWLINE public_settings = hutil.get_public_settings()NEWLINE if public_settings:NEWLINE check_disk = public_settings.get('check_disk')NEWLINE repair_disk = public_settings.get('repair_disk')NEWLINE disk_name = public_settings.get('disk_name')NEWLINENEWLINE if check_disk and repair_disk:NEWLINE err_msg = ("check_disk and repair_disk were both specified. "NEWLINE "Only one of them can be specified")NEWLINE hutil.error(err_msg)NEWLINE hutil.do_exit(1, 'Enable', 'error', '0', 'Enable failed.')NEWLINENEWLINE if check_disk:NEWLINE waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="check_disk")NEWLINE outretcode = _fsck_check(hutil)NEWLINE hutil.log("Successfully checked disk")NEWLINE return outretcodeNEWLINENEWLINE if repair_disk:NEWLINE waagent.AddExtensionEvent(name=hutil.get_name(), op="scenario", isSuccess=True, message="repair_disk")NEWLINE outdata = _fsck_repair(hutil, disk_name)NEWLINE hutil.log("Repaired and remounted disk")NEWLINE return outdataNEWLINENEWLINENEWLINEdef _fsck_check(hutil):NEWLINE try:NEWLINE retcode = waagent.Run("fsck -As -y")NEWLINE if retcode > 0:NEWLINE hutil.log(str(retcode))NEWLINE raise Exception("Disk check was not successful")NEWLINE else:NEWLINE return retcodeNEWLINE except Exception as e:NEWLINE hutil.error("Failed to run disk check with error: {0}, {1}".format(NEWLINE str(e), traceback.format_exc()))NEWLINE hutil.do_exit(1, 'Check', 'error', '0', 'Check 
failed.')NEWLINENEWLINENEWLINEdef _fsck_repair(hutil, disk_name):NEWLINE # First force-unmount the disk before running repairNEWLINE try:NEWLINE cmd_result = waagent.Run("umount -f /%s" % disk_name)NEWLINE if cmd_result != 0:NEWLINE # Log and continue; fsck may still be able to repairNEWLINE hutil.log("Failed to unmount disk: %s" % disk_name)NEWLINE # run repairNEWLINE retcode = waagent.Run("fsck -AR -y")NEWLINE hutil.log("Ran fsck with return code: %d" % retcode)NEWLINE if retcode == 0:NEWLINE retcode, output = waagent.RunGetOutput("mount")NEWLINE hutil.log(output)NEWLINE return outputNEWLINE else:NEWLINE raise Exception("Failed to repair disks")NEWLINE except Exception as e:NEWLINE hutil.error("{0}, {1}".format(str(e), traceback.format_exc()))NEWLINE hutil.do_exit(1, 'Repair', 'error', '0', 'Repair failed.')NEWLINENEWLINEif __name__ == '__main__':NEWLINE main()NEWLINE |
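_set_sshd_config above edits the config as a list of lines: it rewrites every existing occurrence of a directive, stops scanning at the first `Match` block, and inserts the directive before that block if it was never found. A standalone sketch of that behavior on a toy config (same logic, no waagent dependency):

def set_sshd_config(config, name, val):
    # Rewrite an existing directive; otherwise insert before the first Match block,
    # since Match blocks must stay at the end of sshd_config.
    notfound = True
    for i in range(0, len(config)):
        if config[i].startswith(name):
            config[i] = "{0} {1}".format(name, val)
            notfound = False
        elif config[i].startswith("Match"):
            break
    if notfound:
        config.insert(i, "{0} {1}".format(name, val))
    return config

lines = ["Port 22", "Match User backup", " PasswordAuthentication no"]
print(set_sshd_config(lines, "PasswordAuthentication", "yes"))
# -> ['Port 22', 'PasswordAuthentication yes', 'Match User backup', ' PasswordAuthentication no']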
#!/usr/bin/python3NEWLINENEWLINE'''NEWLINEConvert Japanese datasets to Hepburn RomanizationNEWLINECopyright 2016 Xiang ZhangNEWLINENEWLINEUsage: python3 construct_hepburn.py -i [input] -o [output]NEWLINE'''NEWLINENEWLINE# Input fileNEWLINEINPUT = '../data/rakuten/sentiment/full_train.csv'NEWLINE# Output fileNEWLINEOUTPUT = '../data/rakuten/sentiment/full_train_hepburn.csv'NEWLINENEWLINEimport argparseNEWLINEimport csvNEWLINEimport MeCabNEWLINEimport romkanNEWLINEimport unidecodeNEWLINENEWLINE# Main programNEWLINEdef main():NEWLINE global INPUTNEWLINE global OUTPUTNEWLINENEWLINE parser = argparse.ArgumentParser()NEWLINE parser.add_argument('-i', '--input', help = 'Input file', default = INPUT)NEWLINE parser.add_argument(NEWLINE '-o', '--output', help = 'Output file', default = OUTPUT)NEWLINENEWLINE args = parser.parse_args()NEWLINENEWLINE INPUT = args.inputNEWLINE OUTPUT = args.outputNEWLINENEWLINE mecab = MeCab.Tagger()NEWLINENEWLINE convertRoman(mecab)NEWLINENEWLINEdef romanizeText(mecab, text):NEWLINE parsed = mecab.parse(text)NEWLINE result = list()NEWLINE for token in parsed.split('\n'):NEWLINE splitted = token.split('\t')NEWLINE if len(splitted) == 2:NEWLINE word = splitted[0]NEWLINE features = splitted[1].split(',')NEWLINE if len(features) > 7 and features[7] != '*':NEWLINE result.append(romkan.to_hepburn(features[7]))NEWLINE else:NEWLINE result.append(word)NEWLINE return resultNEWLINENEWLINE# Convert the text in Japanese to Hepburn romanizationNEWLINEdef convertRoman(mecab):NEWLINE # Open the filesNEWLINE ifd = open(INPUT, encoding = 'utf-8', newline = '')NEWLINE ofd = open(OUTPUT, 'w', encoding = 'utf-8', newline = '')NEWLINE reader = csv.reader(ifd, quoting = csv.QUOTE_ALL)NEWLINE writer = csv.writer(ofd, quoting = csv.QUOTE_ALL, lineterminator = '\n')NEWLINE # Loop over the csv rowsNEWLINE n = 0NEWLINE for row in reader:NEWLINE new_row = list()NEWLINE new_row.append(row[0])NEWLINE for i in range(1, len(row)):NEWLINE new_row.append(' '.join(map(NEWLINE str.strip,NEWLINE map(lambda s: s.replace('\n', '\\n'),NEWLINE map(unidecode.unidecode,NEWLINE romanizeText(mecab, row[i]))))))NEWLINE writer.writerow(new_row)NEWLINE n = n + 1NEWLINE if n % 1000 == 0:NEWLINE print('\rProcessing line: {}'.format(n), end = '')NEWLINE print('\rProcessed lines: {}'.format(n))NEWLINENEWLINEif __name__ == '__main__':NEWLINE main()NEWLINE |
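The conversion relies on MeCab's reading field (features[7] in the IPADic feature layout, given in katakana) plus romkan for the kana-to-Hepburn mapping. A quick sketch of the romkan half in isolation (assumes the `romkan` package is installed; the outputs shown are what to_hepburn produces for these katakana readings):

import romkan

# MeCab's IPADic reading field is katakana; romkan maps kana to Hepburn romaji.
print(romkan.to_hepburn("ニホンゴ"))  # -> nihongo
print(romkan.to_hepburn("カンジ"))    # -> kanji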
# -*- coding: utf-8 -*-NEWLINE#NEWLINE# Copyright 2020 Google LLCNEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# https://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINE"""Accesses the google.cloud.irm.v1alpha2 IncidentService API."""NEWLINENEWLINEimport functoolsNEWLINEimport pkg_resourcesNEWLINEimport warningsNEWLINENEWLINEfrom google.oauth2 import service_accountNEWLINEimport google.api_core.client_optionsNEWLINEimport google.api_core.gapic_v1.client_infoNEWLINEimport google.api_core.gapic_v1.configNEWLINEimport google.api_core.gapic_v1.methodNEWLINEimport google.api_core.gapic_v1.routing_headerNEWLINEimport google.api_core.grpc_helpersNEWLINEimport google.api_core.page_iteratorNEWLINEimport google.api_core.path_templateNEWLINEimport google.api_core.protobuf_helpersNEWLINEimport grpcNEWLINENEWLINEfrom google.cloud.irm_v1alpha2.gapic import enumsNEWLINEfrom google.cloud.irm_v1alpha2.gapic import incident_service_client_configNEWLINEfrom google.cloud.irm_v1alpha2.gapic.transports import incident_service_grpc_transportNEWLINEfrom google.cloud.irm_v1alpha2.proto import incidents_pb2NEWLINEfrom google.cloud.irm_v1alpha2.proto import incidents_service_pb2NEWLINEfrom google.cloud.irm_v1alpha2.proto import incidents_service_pb2_grpcNEWLINEfrom google.protobuf import empty_pb2NEWLINEfrom google.protobuf import field_mask_pb2NEWLINENEWLINENEWLINE_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-irm",).versionNEWLINENEWLINENEWLINEclass IncidentServiceClient(object):NEWLINE """The Incident API for Incident Response & Management."""NEWLINENEWLINE SERVICE_ADDRESS = "irm.googleapis.com:443"NEWLINE """The default address of the service."""NEWLINENEWLINE # The name of the interface for this client. 
This is the key used toNEWLINE # find the method configuration in the client_config dictionary.NEWLINE _INTERFACE_NAME = "google.cloud.irm.v1alpha2.IncidentService"NEWLINENEWLINE @classmethodNEWLINE def from_service_account_file(cls, filename, *args, **kwargs):NEWLINE """Creates an instance of this client using the provided credentialsNEWLINE file.NEWLINENEWLINE Args:NEWLINE filename (str): The path to the service account private key jsonNEWLINE file.NEWLINE args: Additional arguments to pass to the constructor.NEWLINE kwargs: Additional arguments to pass to the constructor.NEWLINENEWLINE Returns:NEWLINE IncidentServiceClient: The constructed client.NEWLINE """NEWLINE credentials = service_account.Credentials.from_service_account_file(filename)NEWLINE kwargs["credentials"] = credentialsNEWLINE return cls(*args, **kwargs)NEWLINENEWLINE from_service_account_json = from_service_account_fileNEWLINENEWLINE @classmethodNEWLINE def annotation_path(cls, project, incident, annotation):NEWLINE """Return a fully-qualified annotation string."""NEWLINE return google.api_core.path_template.expand(NEWLINE "projects/{project}/incidents/{incident}/annotations/{annotation}",NEWLINE project=project,NEWLINE incident=incident,NEWLINE annotation=annotation,NEWLINE )NEWLINENEWLINE @classmethodNEWLINE def artifact_path(cls, project, incident, artifact):NEWLINE """Return a fully-qualified artifact string."""NEWLINE return google.api_core.path_template.expand(NEWLINE "projects/{project}/incidents/{incident}/artifacts/{artifact}",NEWLINE project=project,NEWLINE incident=incident,NEWLINE artifact=artifact,NEWLINE )NEWLINENEWLINE @classmethodNEWLINE def incident_path(cls, project, incident):NEWLINE """Return a fully-qualified incident string."""NEWLINE return google.api_core.path_template.expand(NEWLINE "projects/{project}/incidents/{incident}",NEWLINE project=project,NEWLINE incident=incident,NEWLINE )NEWLINENEWLINE @classmethodNEWLINE def incident_role_assignment_path(cls, project_id_or_number, incident_id, role_id):NEWLINE """Return a fully-qualified incident_role_assignment string."""NEWLINE return google.api_core.path_template.expand(NEWLINE "projects/{project_id_or_number}/incidents/{incident_id}/role_assignments/{role_id}",NEWLINE project_id_or_number=project_id_or_number,NEWLINE incident_id=incident_id,NEWLINE role_id=role_id,NEWLINE )NEWLINENEWLINE @classmethodNEWLINE def project_path(cls, project):NEWLINE """Return a fully-qualified project string."""NEWLINE return google.api_core.path_template.expand(NEWLINE "projects/{project}", project=project,NEWLINE )NEWLINENEWLINE @classmethodNEWLINE def signal_path(cls, project, signal):NEWLINE """Return a fully-qualified signal string."""NEWLINE return google.api_core.path_template.expand(NEWLINE "projects/{project}/signals/{signal}", project=project, signal=signal,NEWLINE )NEWLINENEWLINE @classmethodNEWLINE def subscription_path(cls, project, incident, subscription):NEWLINE """Return a fully-qualified subscription string."""NEWLINE return google.api_core.path_template.expand(NEWLINE "projects/{project}/incidents/{incident}/subscriptions/{subscription}",NEWLINE project=project,NEWLINE incident=incident,NEWLINE subscription=subscription,NEWLINE )NEWLINENEWLINE @classmethodNEWLINE def tag_path(cls, project, incident, tag):NEWLINE """Return a fully-qualified tag string."""NEWLINE return google.api_core.path_template.expand(NEWLINE "projects/{project}/incidents/{incident}/tags/{tag}",NEWLINE project=project,NEWLINE incident=incident,NEWLINE tag=tag,NEWLINE 
)NEWLINENEWLINE def __init__(NEWLINE self,NEWLINE transport=None,NEWLINE channel=None,NEWLINE credentials=None,NEWLINE client_config=None,NEWLINE client_info=None,NEWLINE client_options=None,NEWLINE ):NEWLINE """Constructor.NEWLINENEWLINE Args:NEWLINE transport (Union[~.IncidentServiceGrpcTransport,NEWLINE Callable[[~.Credentials, type], ~.IncidentServiceGrpcTransport]): A transportNEWLINE instance, responsible for actually making the API calls.NEWLINE The default transport uses the gRPC protocol.NEWLINE This argument may also be a callable which returns aNEWLINE transport instance. Callables will be sent the credentialsNEWLINE as the first argument and the default transport class asNEWLINE the second argument.NEWLINE channel (grpc.Channel): DEPRECATED. A ``Channel`` instanceNEWLINE through which to make calls. This argument is mutually exclusiveNEWLINE with ``credentials``; providing both will raise an exception.NEWLINE credentials (google.auth.credentials.Credentials): TheNEWLINE authorization credentials to attach to requests. TheseNEWLINE credentials identify this application to the service. If noneNEWLINE are specified, the client will attempt to ascertain theNEWLINE credentials from the environment.NEWLINE This argument is mutually exclusive with providing aNEWLINE transport instance to ``transport``; doing so will raiseNEWLINE an exception.NEWLINE client_config (dict): DEPRECATED. A dictionary of call options forNEWLINE each method. If not specified, the default configuration is used.NEWLINE client_info (google.api_core.gapic_v1.client_info.ClientInfo):NEWLINE The client info used to send a user-agent string along withNEWLINE API requests. If ``None``, then default info will be used.NEWLINE Generally, you only need to set this if you're developingNEWLINE your own client library.NEWLINE client_options (Union[dict, google.api_core.client_options.ClientOptions]):NEWLINE Client options used to set user options on the client. 
API EndpointNEWLINE should be set through client_options.NEWLINE """NEWLINE # Raise deprecation warnings for things we want to go away.NEWLINE if client_config is not None:NEWLINE warnings.warn(NEWLINE "The `client_config` argument is deprecated.",NEWLINE PendingDeprecationWarning,NEWLINE stacklevel=2,NEWLINE )NEWLINE else:NEWLINE client_config = incident_service_client_config.configNEWLINENEWLINE if channel:NEWLINE warnings.warn(NEWLINE "The `channel` argument is deprecated; use " "`transport` instead.",NEWLINE PendingDeprecationWarning,NEWLINE stacklevel=2,NEWLINE )NEWLINENEWLINE api_endpoint = self.SERVICE_ADDRESSNEWLINE if client_options:NEWLINE if type(client_options) == dict:NEWLINE client_options = google.api_core.client_options.from_dict(NEWLINE client_optionsNEWLINE )NEWLINE if client_options.api_endpoint:NEWLINE api_endpoint = client_options.api_endpointNEWLINENEWLINE # Instantiate the transport.NEWLINE # The transport is responsible for handling serialization andNEWLINE # deserialization and actually sending data to the service.NEWLINE if transport:NEWLINE if callable(transport):NEWLINE self.transport = transport(NEWLINE credentials=credentials,NEWLINE default_class=incident_service_grpc_transport.IncidentServiceGrpcTransport,NEWLINE address=api_endpoint,NEWLINE )NEWLINE else:NEWLINE if credentials:NEWLINE raise ValueError(NEWLINE "Received both a transport instance and "NEWLINE "credentials; these are mutually exclusive."NEWLINE )NEWLINE self.transport = transportNEWLINE else:NEWLINE self.transport = incident_service_grpc_transport.IncidentServiceGrpcTransport(NEWLINE address=api_endpoint, channel=channel, credentials=credentials,NEWLINE )NEWLINENEWLINE if client_info is None:NEWLINE client_info = google.api_core.gapic_v1.client_info.ClientInfo(NEWLINE gapic_version=_GAPIC_LIBRARY_VERSION,NEWLINE )NEWLINE else:NEWLINE client_info.gapic_version = _GAPIC_LIBRARY_VERSIONNEWLINE self._client_info = client_infoNEWLINENEWLINE # Parse out the default settings for retry and timeout for each RPCNEWLINE # from the client configuration.NEWLINE # (Ordinarily, these are the defaults specified in the `*_config.py`NEWLINE # file next to this one.)NEWLINE self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(NEWLINE client_config["interfaces"][self._INTERFACE_NAME],NEWLINE )NEWLINENEWLINE # Save a dictionary of cached API call functions.NEWLINE # These are the actual callables which invoke the properNEWLINE # transport methods, wrapped with `wrap_method` to add retry,NEWLINE # timeout, and the like.NEWLINE self._inner_api_calls = {}NEWLINENEWLINE # Service callsNEWLINE def delete_artifact(NEWLINE self,NEWLINE name,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Deletes an existing artifact.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> name = client.artifact_path('[PROJECT]', '[INCIDENT]', '[ARTIFACT]')NEWLINE >>>NEWLINE >>> client.delete_artifact(name)NEWLINENEWLINE Args:NEWLINE name (str): Required. Resource name of the artifact.NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "delete_artifact" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "delete_artifact"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.delete_artifact,NEWLINE default_retry=self._method_configs["DeleteArtifact"].retry,NEWLINE default_timeout=self._method_configs["DeleteArtifact"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.DeleteArtifactRequest(name=name,)NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("name", name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE self._inner_api_calls["delete_artifact"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def request_incident_role_handover(NEWLINE self,NEWLINE name,NEWLINE new_assignee,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Starts a role handover. The proposed assignee will receive an emailNEWLINE notifying them of the assignment. This will fail if a role handover isNEWLINE already pending.NEWLINE Handover to an oncall ladder is not permitted. UseNEWLINE CreateIncidentRoleAssignment instead.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> # TODO: Initialize `name`:NEWLINE >>> name = ''NEWLINE >>>NEWLINE >>> # TODO: Initialize `new_assignee`:NEWLINE >>> new_assignee = {}NEWLINE >>>NEWLINE >>> response = client.request_incident_role_handover(name, new_assignee)NEWLINENEWLINE Args:NEWLINE name (str): Required. Resource name of the role assignment.NEWLINE new_assignee (Union[dict, ~google.cloud.irm_v1alpha2.types.User]): Required. The proposed assignee.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.User`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "request_incident_role_handover" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "request_incident_role_handover"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.request_incident_role_handover,NEWLINE default_retry=self._method_configs["RequestIncidentRoleHandover"].retry,NEWLINE default_timeout=self._method_configs[NEWLINE "RequestIncidentRoleHandover"NEWLINE ].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.RequestIncidentRoleHandoverRequest(NEWLINE name=name, new_assignee=new_assignee,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("name", name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["request_incident_role_handover"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def confirm_incident_role_handover(NEWLINE self,NEWLINE name,NEWLINE new_assignee,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Confirms a role handover. This will fail if the 'proposed_assignee'NEWLINE field of the IncidentRoleAssignment is not equal to the 'new_assignee'NEWLINE field of the request. If the caller is not the new_assignee,NEWLINE ForceIncidentRoleHandover should be used instead.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> name = client.incident_role_assignment_path('[PROJECT_ID_OR_NUMBER]', '[INCIDENT_ID]', '[ROLE_ID]')NEWLINE >>>NEWLINE >>> # TODO: Initialize `new_assignee`:NEWLINE >>> new_assignee = {}NEWLINE >>>NEWLINE >>> response = client.confirm_incident_role_handover(name, new_assignee)NEWLINENEWLINE Args:NEWLINE name (str): Required. Resource name of the role assignment.NEWLINE new_assignee (Union[dict, ~google.cloud.irm_v1alpha2.types.User]): Required. The proposed assignee, who will now be the assignee. This should be theNEWLINE current user; otherwise ForceRoleHandover should be called.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.User`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "confirm_incident_role_handover" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "confirm_incident_role_handover"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.confirm_incident_role_handover,NEWLINE default_retry=self._method_configs["ConfirmIncidentRoleHandover"].retry,NEWLINE default_timeout=self._method_configs[NEWLINE "ConfirmIncidentRoleHandover"NEWLINE ].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.ConfirmIncidentRoleHandoverRequest(NEWLINE name=name, new_assignee=new_assignee,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("name", name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["confirm_incident_role_handover"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def force_incident_role_handover(NEWLINE self,NEWLINE name,NEWLINE new_assignee,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Forces a role handover. This will fail if the 'proposed_assignee'NEWLINE field of the IncidentRoleAssignment is not equal to the 'new_assignee'NEWLINE field of the request. If the caller is the new_assignee,NEWLINE ConfirmIncidentRoleHandover should be used instead.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> name = client.incident_role_assignment_path('[PROJECT_ID_OR_NUMBER]', '[INCIDENT_ID]', '[ROLE_ID]')NEWLINE >>>NEWLINE >>> # TODO: Initialize `new_assignee`:NEWLINE >>> new_assignee = {}NEWLINE >>>NEWLINE >>> response = client.force_incident_role_handover(name, new_assignee)NEWLINENEWLINE Args:NEWLINE name (str): Required. Resource name of the role assignment.NEWLINE new_assignee (Union[dict, ~google.cloud.irm_v1alpha2.types.User]): Required. The proposed assignee, who will now be the assignee. This should not beNEWLINE the current user; otherwise ConfirmRoleHandover should be called.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.User`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "force_incident_role_handover" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "force_incident_role_handover"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.force_incident_role_handover,NEWLINE default_retry=self._method_configs["ForceIncidentRoleHandover"].retry,NEWLINE default_timeout=self._method_configs[NEWLINE "ForceIncidentRoleHandover"NEWLINE ].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.ForceIncidentRoleHandoverRequest(NEWLINE name=name, new_assignee=new_assignee,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("name", name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["force_incident_role_handover"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def create_incident(NEWLINE self,NEWLINE incident,NEWLINE parent,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Creates a new incident.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> # TODO: Initialize `incident`:NEWLINE >>> incident = {}NEWLINE >>> parent = client.project_path('[PROJECT]')NEWLINE >>>NEWLINE >>> response = client.create_incident(incident, parent)NEWLINENEWLINE Args:NEWLINE incident (Union[dict, ~google.cloud.irm_v1alpha2.types.Incident]): Required. The incident to create.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Incident`NEWLINE parent (str): Required. The resource name of the hosting Stackdriver project whichNEWLINE the incident belongs to. The name is of the formNEWLINE ``projects/{project_id_or_number}`` .NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
    def create_incident(
        self,
        incident,
        parent,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Creates a new incident.

        Example:
            >>> from google.cloud import irm_v1alpha2
            >>>
            >>> client = irm_v1alpha2.IncidentServiceClient()
            >>>
            >>> # TODO: Initialize `incident`:
            >>> incident = {}
            >>> parent = client.project_path('[PROJECT]')
            >>>
            >>> response = client.create_incident(incident, parent)

        Args:
            incident (Union[dict, ~google.cloud.irm_v1alpha2.types.Incident]): Required. The incident to create.

                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.irm_v1alpha2.types.Incident`
            parent (str): Required. The resource name of the hosting Stackdriver project which
                the incident belongs to. The name is of the form
                ``projects/{project_id_or_number}`` .
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Returns:
            A :class:`~google.cloud.irm_v1alpha2.types.Incident` instance.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        if "create_incident" not in self._inner_api_calls:
            self._inner_api_calls[
                "create_incident"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.create_incident,
                default_retry=self._method_configs["CreateIncident"].retry,
                default_timeout=self._method_configs["CreateIncident"].timeout,
                client_info=self._client_info,
            )

        request = incidents_service_pb2.CreateIncidentRequest(
            incident=incident, parent=parent,
        )
        if metadata is None:
            metadata = []
        metadata = list(metadata)
        try:
            routing_header = [("parent", parent)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)

        return self._inner_api_calls["create_incident"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )
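
    # Illustrative sketch (editorial): creating an incident from a plain dict.
    # The field names ``title`` and ``severity`` and the enum path are assumed
    # from :class:`~google.cloud.irm_v1alpha2.types.Incident`, not confirmed by
    # this file.
    #
    #   incident = {
    #       "title": "Checkout latency spike",
    #       "severity": irm_v1alpha2.enums.Incident.Severity.SEVERITY_MAJOR,
    #   }
    #   created = client.create_incident(incident, client.project_path("my-project"))
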
    def get_incident(
        self,
        name,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Returns an incident by name.

        Example:
            >>> from google.cloud import irm_v1alpha2
            >>>
            >>> client = irm_v1alpha2.IncidentServiceClient()
            >>>
            >>> name = client.incident_path('[PROJECT]', '[INCIDENT]')
            >>>
            >>> response = client.get_incident(name)

        Args:
            name (str): Required. Resource name of the incident, for example,
                "projects/{project_id_or_number}/incidents/{incident_id}".
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Returns:
            A :class:`~google.cloud.irm_v1alpha2.types.Incident` instance.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        if "get_incident" not in self._inner_api_calls:
            self._inner_api_calls[
                "get_incident"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.get_incident,
                default_retry=self._method_configs["GetIncident"].retry,
                default_timeout=self._method_configs["GetIncident"].timeout,
                client_info=self._client_info,
            )

        request = incidents_service_pb2.GetIncidentRequest(name=name,)
        if metadata is None:
            metadata = []
        metadata = list(metadata)
        try:
            routing_header = [("name", name)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)

        return self._inner_api_calls["get_incident"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )

    def search_incidents(
        self,
        parent,
        query=None,
        page_size=None,
        time_zone=None,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Returns a list of incidents.
        Incidents are ordered by start time, with the most recent incidents first.

        Example:
            >>> from google.cloud import irm_v1alpha2
            >>>
            >>> client = irm_v1alpha2.IncidentServiceClient()
            >>>
            >>> parent = client.project_path('[PROJECT]')
            >>>
            >>> # Iterate over all results
            >>> for element in client.search_incidents(parent):
            ...     # process element
            ...     pass
            >>>
            >>>
            >>> # Alternatively:
            >>>
            >>> # Iterate over results one page at a time
            >>> for page in client.search_incidents(parent).pages:
            ...     for element in page:
            ...         # process element
            ...         pass

        Args:
            parent (str): Required. The resource name of the hosting Stackdriver project to which
                the requested incidents belong.
            query (str): An expression that defines which incidents to return.

                Search atoms can be used to match certain specific fields. Otherwise,
                plain text will match text fields in the incident.

                Search atoms:

                - ``start`` - (timestamp) The time the incident started.
                - ``stage`` - The stage of the incident, one of detected, triaged,
                  mitigated, resolved, documented, or duplicate (which correspond to
                  values in the Incident.Stage enum).
                  These are ordered, so
                  ``stage<resolved`` is equivalent to
                  ``stage:detected OR stage:triaged OR stage:mitigated``.
                - ``severity`` - (Incident.Severity) The severity of the incident.

                  - Supports matching on a specific severity (for example,
                    ``severity:major``) or on a range (for example,
                    ``severity>medium``, ``severity<=minor``, etc.).

                Timestamp formats:

                - yyyy-MM-dd - an absolute date, treated as a calendar-day-wide window.
                  In other words, the "<" operator will match dates before that date,
                  the ">" operator will match dates after that date, and the ":" or "="
                  operators will match the entire day.
                - Nd (for example, 7d) - a relative number of days ago, treated as a
                  moment in time (as opposed to a day-wide span), i.e. a multiple of 24
                  hours ago (as opposed to calendar days). In the case of daylight
                  saving time, it will apply the current timezone to both ends of the
                  range. Note that exact matching (for example, ``start:7d``) is
                  unlikely to be useful because that would only match incidents created
                  precisely at a particular instant in time.

                Examples:

                - ``foo`` - matches incidents containing the word "foo"
                - ``"foo bar"`` - matches incidents containing the phrase "foo bar"
                - ``foo bar`` or ``foo AND bar`` - matches incidents containing the
                  words "foo" and "bar"
                - ``foo -bar`` or ``foo AND NOT bar`` - matches incidents containing
                  the word "foo" but not the word "bar"
                - ``foo OR bar`` - matches incidents containing the word "foo" or the
                  word "bar"
                - ``start>2018-11-28`` - matches incidents which started after November
                  28, 2018.
                - ``start<=2018-11-28`` - matches incidents which started on or before
                  November 28, 2018.
                - ``start:2018-11-28`` - matches incidents which started on November
                  28, 2018.
                - ``start>7d`` - matches incidents which started after the point in
                  time 7*24 hours ago
                - ``start>180d`` - similar to 7d, but likely to cross the daylight
                  saving time boundary, so the end time will be 1 hour different from
                  "now."
                - ``foo AND start>90d AND stage<resolved`` - unresolved incidents from
                  the past 90 days containing the word "foo"
            page_size (int): The maximum number of resources contained in the
                underlying API response. If page streaming is performed per-
                resource, this parameter does not affect the return value. If page
                streaming is performed per-page, this determines the maximum number
                of resources in a page.
            time_zone (str): The time zone name. It should be an IANA TZ name, such as
                "America/Los_Angeles". For more information, see
                https://en.wikipedia.org/wiki/List_of_tz_database_time_zones. If no time
                zone is specified, the default is UTC.
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete.
                Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Returns:
            A :class:`~google.api_core.page_iterator.PageIterator` instance.
            An iterable of :class:`~google.cloud.irm_v1alpha2.types.Incident` instances.
            You can also iterate over the pages of the response
            using its `pages` property.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        if "search_incidents" not in self._inner_api_calls:
            self._inner_api_calls[
                "search_incidents"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.search_incidents,
                default_retry=self._method_configs["SearchIncidents"].retry,
                default_timeout=self._method_configs["SearchIncidents"].timeout,
                client_info=self._client_info,
            )

        request = incidents_service_pb2.SearchIncidentsRequest(
            parent=parent, query=query, page_size=page_size, time_zone=time_zone,
        )
        if metadata is None:
            metadata = []
        metadata = list(metadata)
        try:
            routing_header = [("parent", parent)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)

        iterator = google.api_core.page_iterator.GRPCIterator(
            client=None,
            method=functools.partial(
                self._inner_api_calls["search_incidents"],
                retry=retry,
                timeout=timeout,
                metadata=metadata,
            ),
            request=request,
            items_field="incidents",
            request_token_field="page_token",
            response_token_field="next_page_token",
        )
        return iterator
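
    # Illustrative sketch (editorial): combining the search atoms documented
    # above. ``incident.title`` is an assumed field name from types.Incident.
    #
    #   results = client.search_incidents(
    #       client.project_path("my-project"),
    #       query='stage<resolved AND start>7d',
    #       time_zone="America/Los_Angeles",
    #   )
    #   open_titles = [incident.title for incident in results]
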
    def update_incident(
        self,
        incident,
        update_mask=None,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Updates an existing incident.

        Example:
            >>> from google.cloud import irm_v1alpha2
            >>>
            >>> client = irm_v1alpha2.IncidentServiceClient()
            >>>
            >>> # TODO: Initialize `incident`:
            >>> incident = {}
            >>>
            >>> response = client.update_incident(incident)

        Args:
            incident (Union[dict, ~google.cloud.irm_v1alpha2.types.Incident]): Required. The incident to update with the new values.

                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.irm_v1alpha2.types.Incident`
            update_mask (Union[dict, ~google.cloud.irm_v1alpha2.types.FieldMask]): List of fields that should be updated.

                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.irm_v1alpha2.types.FieldMask`
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Returns:
            A :class:`~google.cloud.irm_v1alpha2.types.Incident` instance.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        if "update_incident" not in self._inner_api_calls:
            self._inner_api_calls[
                "update_incident"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.update_incident,
                default_retry=self._method_configs["UpdateIncident"].retry,
                default_timeout=self._method_configs["UpdateIncident"].timeout,
                client_info=self._client_info,
            )

        request = incidents_service_pb2.UpdateIncidentRequest(
            incident=incident, update_mask=update_mask,
        )
        if metadata is None:
            metadata = []
        metadata = list(metadata)
        try:
            routing_header = [("incident.name", incident.name)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)

        return self._inner_api_calls["update_incident"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )
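
    # Illustrative sketch (editorial): updating a single field with an explicit
    # field mask; ``{"paths": [...]}`` is the dict form of types.FieldMask per
    # the docstring above. The ``stage`` field and enum path are assumed from
    # the Incident.Stage enum the search docstring mentions.
    #
    #   incident = client.get_incident(name)
    #   incident.stage = irm_v1alpha2.enums.Incident.Stage.STAGE_MITIGATED
    #   updated = client.update_incident(incident, update_mask={"paths": ["stage"]})
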
    def search_similar_incidents(
        self,
        name,
        page_size=None,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Returns a list of incidents that are "similar" to the specified incident
        or signal. This functionality is provided on a best-effort basis and the
        definition of "similar" is subject to change.

        Example:
            >>> from google.cloud import irm_v1alpha2
            >>>
            >>> client = irm_v1alpha2.IncidentServiceClient()
            >>>
            >>> name = client.incident_path('[PROJECT]', '[INCIDENT]')
            >>>
            >>> # Iterate over all results
            >>> for element in client.search_similar_incidents(name):
            ...     # process element
            ...     pass
            >>>
            >>>
            >>> # Alternatively:
            >>>
            >>> # Iterate over results one page at a time
            >>> for page in client.search_similar_incidents(name).pages:
            ...     for element in page:
            ...         # process element
            ...         pass

        Args:
            name (str): Required. Resource name of the incident or signal, for example,
                "projects/{project_id_or_number}/incidents/{incident_id}".
            page_size (int): The maximum number of resources contained in the
                underlying API response. If page streaming is performed per-
                resource, this parameter does not affect the return value. If page
                streaming is performed per-page, this determines the maximum number
                of resources in a page.
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Returns:
            A :class:`~google.api_core.page_iterator.PageIterator` instance.
            An iterable of :class:`~google.cloud.irm_v1alpha2.types.Result` instances.
            You can also iterate over the pages of the response
            using its `pages` property.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        if "search_similar_incidents" not in self._inner_api_calls:
            self._inner_api_calls[
                "search_similar_incidents"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.search_similar_incidents,
                default_retry=self._method_configs["SearchSimilarIncidents"].retry,
                default_timeout=self._method_configs["SearchSimilarIncidents"].timeout,
                client_info=self._client_info,
            )

        request = incidents_service_pb2.SearchSimilarIncidentsRequest(
            name=name, page_size=page_size,
        )
        if metadata is None:
            metadata = []
        metadata = list(metadata)
        try:
            routing_header = [("name", name)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)

        iterator = google.api_core.page_iterator.GRPCIterator(
            client=None,
            method=functools.partial(
                self._inner_api_calls["search_similar_incidents"],
                retry=retry,
                timeout=timeout,
                metadata=metadata,
            ),
            request=request,
            items_field="results",
            request_token_field="page_token",
            response_token_field="next_page_token",
        )
        return iterator

    def create_annotation(
        self,
        parent,
        annotation,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Creates an annotation on an existing incident. Only 'text/plain' and
        'text/markdown' annotations can be created via this method.

        Example:
            >>> from google.cloud import irm_v1alpha2
            >>>
            >>> client = irm_v1alpha2.IncidentServiceClient()
            >>>
            >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')
            >>>
            >>> # TODO: Initialize `annotation`:
            >>> annotation = {}
            >>>
            >>> response = client.create_annotation(parent, annotation)

        Args:
            parent (str): Required. Resource name of the incident, for example,
                "projects/{project_id_or_number}/incidents/{incident_id}".
            annotation (Union[dict, ~google.cloud.irm_v1alpha2.types.Annotation]): Required. Only annotation.content is an input argument.

                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.irm_v1alpha2.types.Annotation`
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Returns:
            A :class:`~google.cloud.irm_v1alpha2.types.Annotation` instance.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        if "create_annotation" not in self._inner_api_calls:
            self._inner_api_calls[
                "create_annotation"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.create_annotation,
                default_retry=self._method_configs["CreateAnnotation"].retry,
                default_timeout=self._method_configs["CreateAnnotation"].timeout,
                client_info=self._client_info,
            )

        request = incidents_service_pb2.CreateAnnotationRequest(
            parent=parent, annotation=annotation,
        )
        if metadata is None:
            metadata = []
        metadata = list(metadata)
        try:
            routing_header = [("parent", parent)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)

        return self._inner_api_calls["create_annotation"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )
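
    # Illustrative sketch (editorial): per the docstring above, only
    # 'text/plain' and 'text/markdown' annotations can be created here, and
    # only annotation.content is read as input.
    #
    #   annotation = {"content": "Mitigation rolled out to 50% of traffic."}
    #   created = client.create_annotation(parent, annotation)
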
    def list_annotations(
        self,
        parent,
        page_size=None,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Lists annotations that are part of an incident. No assumptions should be
        made on the content-type of the annotation returned.

        Example:
            >>> from google.cloud import irm_v1alpha2
            >>>
            >>> client = irm_v1alpha2.IncidentServiceClient()
            >>>
            >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')
            >>>
            >>> # Iterate over all results
            >>> for element in client.list_annotations(parent):
            ...     # process element
            ...     pass
            >>>
            >>>
            >>> # Alternatively:
            >>>
            >>> # Iterate over results one page at a time
            >>> for page in client.list_annotations(parent).pages:
            ...     for element in page:
            ...         # process element
            ...         pass

        Args:
            parent (str): Required. Resource name of the incident, for example,
                "projects/{project_id_or_number}/incidents/{incident_id}".
            page_size (int): The maximum number of resources contained in the
                underlying API response. If page streaming is performed per-
                resource, this parameter does not affect the return value. If page
                streaming is performed per-page, this determines the maximum number
                of resources in a page.
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Returns:
            A :class:`~google.api_core.page_iterator.PageIterator` instance.
            An iterable of :class:`~google.cloud.irm_v1alpha2.types.Annotation` instances.
            You can also iterate over the pages of the response
            using its `pages` property.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        if "list_annotations" not in self._inner_api_calls:
            self._inner_api_calls[
                "list_annotations"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.list_annotations,
                default_retry=self._method_configs["ListAnnotations"].retry,
                default_timeout=self._method_configs["ListAnnotations"].timeout,
                client_info=self._client_info,
            )

        request = incidents_service_pb2.ListAnnotationsRequest(
            parent=parent, page_size=page_size,
        )
        if metadata is None:
            metadata = []
        metadata = list(metadata)
        try:
            routing_header = [("parent", parent)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)

        iterator = google.api_core.page_iterator.GRPCIterator(
            client=None,
            method=functools.partial(
                self._inner_api_calls["list_annotations"],
                retry=retry,
                timeout=timeout,
                metadata=metadata,
            ),
            request=request,
            items_field="annotations",
            request_token_field="page_token",
            response_token_field="next_page_token",
        )
        return iterator

    def create_tag(
        self,
        parent,
        tag,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Creates a tag on an existing incident.

        Example:
            >>> from google.cloud import irm_v1alpha2
            >>>
            >>> client = irm_v1alpha2.IncidentServiceClient()
            >>>
            >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')
            >>>
            >>> # TODO: Initialize `tag`:
            >>> tag = {}
            >>>
            >>> response = client.create_tag(parent, tag)

        Args:
            parent (str): Required. Resource name of the incident, for example,
                "projects/{project_id_or_number}/incidents/{incident_id}".
            tag (Union[dict, ~google.cloud.irm_v1alpha2.types.Tag]): Required. Tag to create. Only tag.display_name is an input argument.

                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.irm_v1alpha2.types.Tag`
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Returns:
            A :class:`~google.cloud.irm_v1alpha2.types.Tag` instance.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        if "create_tag" not in self._inner_api_calls:
            self._inner_api_calls[
                "create_tag"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.create_tag,
                default_retry=self._method_configs["CreateTag"].retry,
                default_timeout=self._method_configs["CreateTag"].timeout,
                client_info=self._client_info,
            )

        request = incidents_service_pb2.CreateTagRequest(parent=parent, tag=tag,)
        if metadata is None:
            metadata = []
        metadata = list(metadata)
        try:
            routing_header = [("parent", parent)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)

        return self._inner_api_calls["create_tag"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )

    def delete_tag(
        self,
        name,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Deletes an existing tag.

        Example:
            >>> from google.cloud import irm_v1alpha2
            >>>
            >>> client = irm_v1alpha2.IncidentServiceClient()
            >>>
            >>> name = client.tag_path('[PROJECT]', '[INCIDENT]', '[TAG]')
            >>>
            >>> client.delete_tag(name)

        Args:
            name (str): Required. Resource name of the tag.
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete.
                Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        if "delete_tag" not in self._inner_api_calls:
            self._inner_api_calls[
                "delete_tag"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.delete_tag,
                default_retry=self._method_configs["DeleteTag"].retry,
                default_timeout=self._method_configs["DeleteTag"].timeout,
                client_info=self._client_info,
            )

        request = incidents_service_pb2.DeleteTagRequest(name=name,)
        if metadata is None:
            metadata = []
        metadata = list(metadata)
        try:
            routing_header = [("name", name)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)

        self._inner_api_calls["delete_tag"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )
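
    # Illustrative sketch (editorial): tag lifecycle. Per the create_tag
    # docstring, only tag.display_name is an input argument; the returned tag
    # carries the server-assigned resource name used for deletion.
    #
    #   tag = client.create_tag(parent, {"display_name": "customer-facing"})
    #   client.delete_tag(tag.name)
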
    def list_tags(
        self,
        parent,
        page_size=None,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Lists tags that are part of an incident.

        Example:
            >>> from google.cloud import irm_v1alpha2
            >>>
            >>> client = irm_v1alpha2.IncidentServiceClient()
            >>>
            >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')
            >>>
            >>> # Iterate over all results
            >>> for element in client.list_tags(parent):
            ...     # process element
            ...     pass
            >>>
            >>>
            >>> # Alternatively:
            >>>
            >>> # Iterate over results one page at a time
            >>> for page in client.list_tags(parent).pages:
            ...     for element in page:
            ...         # process element
            ...         pass

        Args:
            parent (str): Required. Resource name of the incident, for example,
                "projects/{project_id_or_number}/incidents/{incident_id}".
            page_size (int): The maximum number of resources contained in the
                underlying API response. If page streaming is performed per-
                resource, this parameter does not affect the return value. If page
                streaming is performed per-page, this determines the maximum number
                of resources in a page.
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Returns:
            A :class:`~google.api_core.page_iterator.PageIterator` instance.
            An iterable of :class:`~google.cloud.irm_v1alpha2.types.Tag` instances.
            You can also iterate over the pages of the response
            using its `pages` property.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        if "list_tags" not in self._inner_api_calls:
            self._inner_api_calls[
                "list_tags"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.list_tags,
                default_retry=self._method_configs["ListTags"].retry,
                default_timeout=self._method_configs["ListTags"].timeout,
                client_info=self._client_info,
            )

        request = incidents_service_pb2.ListTagsRequest(
            parent=parent, page_size=page_size,
        )
        if metadata is None:
            metadata = []
        metadata = list(metadata)
        try:
            routing_header = [("parent", parent)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)

        iterator = google.api_core.page_iterator.GRPCIterator(
            client=None,
            method=functools.partial(
                self._inner_api_calls["list_tags"],
                retry=retry,
                timeout=timeout,
                metadata=metadata,
            ),
            request=request,
            items_field="tags",
            request_token_field="page_token",
            response_token_field="next_page_token",
        )
        return iterator

    def create_signal(
        self,
        parent,
        signal,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Creates a new signal.

        Example:
            >>> from google.cloud import irm_v1alpha2
            >>>
            >>> client = irm_v1alpha2.IncidentServiceClient()
            >>>
            >>> parent = client.project_path('[PROJECT]')
            >>>
            >>> # TODO: Initialize `signal`:
            >>> signal = {}
            >>>
            >>> response = client.create_signal(parent, signal)

        Args:
            parent (str): Required. The resource name of the hosting Stackdriver project to which
                the requested signal belongs.
            signal (Union[dict, ~google.cloud.irm_v1alpha2.types.Signal]): Required. The signal to create.

                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.irm_v1alpha2.types.Signal`
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete.
                Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Returns:
            A :class:`~google.cloud.irm_v1alpha2.types.Signal` instance.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        if "create_signal" not in self._inner_api_calls:
            self._inner_api_calls[
                "create_signal"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.create_signal,
                default_retry=self._method_configs["CreateSignal"].retry,
                default_timeout=self._method_configs["CreateSignal"].timeout,
                client_info=self._client_info,
            )

        request = incidents_service_pb2.CreateSignalRequest(
            parent=parent, signal=signal,
        )
        if metadata is None:
            metadata = []
        metadata = list(metadata)
        try:
            routing_header = [("parent", parent)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)

        return self._inner_api_calls["create_signal"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )
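
    # Illustrative sketch (editorial): creating a signal from a dict. Field
    # names (``title``, ``content``, ``content_type``) are assumed from
    # :class:`~google.cloud.irm_v1alpha2.types.Signal`, not confirmed here.
    #
    #   signal = {
    #       "title": "Elevated 5xx rate on checkout",
    #       "content_type": "text/plain",
    #       "content": "Error rate crossed 5% at 14:02 UTC.",
    #   }
    #   created = client.create_signal(client.project_path("my-project"), signal)
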
    def search_signals(
        self,
        parent,
        query=None,
        page_size=None,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Lists signals that are part of an incident.
        Signals are returned in reverse chronological order.
        Note that search should not be relied on for critical functionality. It
        has lower availability guarantees and might fail to return valid results.
        Returned results might include stale or extraneous entries.

        Example:
            >>> from google.cloud import irm_v1alpha2
            >>>
            >>> client = irm_v1alpha2.IncidentServiceClient()
            >>>
            >>> parent = client.project_path('[PROJECT]')
            >>>
            >>> # Iterate over all results
            >>> for element in client.search_signals(parent):
            ...     # process element
            ...     pass
            >>>
            >>>
            >>> # Alternatively:
            >>>
            >>> # Iterate over results one page at a time
            >>> for page in client.search_signals(parent).pages:
            ...     for element in page:
            ...         # process element
            ...         pass

        Args:
            parent (str): Required. The resource name of the hosting Stackdriver project to which
                the requested incidents belong.
            query (str): An expression that defines which signals to return.

                Search atoms can be used to match certain specific fields. Otherwise,
                plain text will match text fields in the signal.

                Search atoms:

                - ``start`` - (timestamp) The time the signal was created.
                - ``title`` - The title of the signal.
                - ``signal_state`` - The state of the signal, either ``open`` or
                  ``closed`` (e.g., ``signal_state:open``).

                Timestamp formats:

                - yyyy-MM-dd - an absolute date, treated as a calendar-day-wide window.
                  In other words, the "<" operator will match dates before that date,
                  the ">" operator will match dates after that date, and the ":"
                  operator will match the entire day.
                - yyyy-MM-ddTHH:mm - Same as above, but with minute resolution.
                - yyyy-MM-ddTHH:mm:ss - Same as above, but with second resolution.
                - Nd (e.g. 7d) - a relative number of days ago, treated as a moment in
                  time (as opposed to a day-wide span), i.e. a multiple of 24 hours ago
                  (as opposed to calendar days). In the case of daylight saving time,
                  it will apply the current timezone to both ends of the range. Note
                  that exact matching (e.g. ``start:7d``) is unlikely to be useful
                  because that would only match signals created precisely at a
                  particular instant in time.

                The absolute timestamp formats (everything starting with a year) can
                optionally be followed with a UTC offset in +/-hh:mm format. Also, the
                'T' separating dates and times can optionally be replaced with a space.
                Note that any timestamp containing a space or colon will need to be
                quoted.

                Examples:

                - ``foo`` - matches signals containing the word "foo"
                - ``"foo bar"`` - matches signals containing the phrase "foo bar"
                - ``foo bar`` or ``foo AND bar`` - matches signals containing the words
                  "foo" and "bar"
                - ``foo -bar`` or ``foo AND NOT bar`` - matches signals containing the
                  word "foo" but not the word "bar"
                - ``foo OR bar`` - matches signals containing the word "foo" or the
                  word "bar"
                - ``start>2018-11-28`` - matches signals which started after November
                  28, 2018.
                - ``start<=2018-11-28`` - matches signals which started on or before
                  November 28, 2018.
                - ``start:2018-11-28`` - matches signals which started on November 28,
                  2018.
                - ``start>"2018-11-28 01:02:03+04:00"`` - matches signals which started
                  after November 28, 2018 at 1:02:03 AM according to the UTC+04 time
                  zone.
                - ``start>7d`` - matches signals which started after the point in time
                  7*24 hours ago
                - ``start>180d`` - similar to 7d, but likely to cross the daylight
                  saving time boundary, so the end time will be 1 hour different from
                  "now."
                - ``foo AND start>90d AND stage<resolved`` - unresolved signals from
                  the past 90 days containing the word "foo"
            page_size (int): The maximum number of resources contained in the
                underlying API response. If page streaming is performed per-
                resource, this parameter does not affect the return value. If page
                streaming is performed per-page, this determines the maximum number
                of resources in a page.
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete.
                Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Returns:
            A :class:`~google.api_core.page_iterator.PageIterator` instance.
            An iterable of :class:`~google.cloud.irm_v1alpha2.types.Signal` instances.
            You can also iterate over the pages of the response
            using its `pages` property.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        if "search_signals" not in self._inner_api_calls:
            self._inner_api_calls[
                "search_signals"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.search_signals,
                default_retry=self._method_configs["SearchSignals"].retry,
                default_timeout=self._method_configs["SearchSignals"].timeout,
                client_info=self._client_info,
            )

        request = incidents_service_pb2.SearchSignalsRequest(
            parent=parent, query=query, page_size=page_size,
        )
        if metadata is None:
            metadata = []
        metadata = list(metadata)
        try:
            routing_header = [("parent", parent)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)

        iterator = google.api_core.page_iterator.GRPCIterator(
            client=None,
            method=functools.partial(
                self._inner_api_calls["search_signals"],
                retry=retry,
                timeout=timeout,
                metadata=metadata,
            ),
            request=request,
            items_field="signals",
            request_token_field="page_token",
            response_token_field="next_page_token",
        )
        return iterator
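
    # Illustrative sketch (editorial): the signal query grammar documented
    # above supports combining atoms with quoted free text.
    #
    #   signals = client.search_signals(
    #       client.project_path("my-project"),
    #       query='signal_state:open "connection refused"',
    #   )
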
    def lookup_signal(
        self,
        cscc_finding=None,
        stackdriver_notification_id=None,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Finds a signal by other unique IDs.

        Example:
            >>> from google.cloud import irm_v1alpha2
            >>>
            >>> client = irm_v1alpha2.IncidentServiceClient()
            >>>
            >>> response = client.lookup_signal()

        Args:
            cscc_finding (str): Required. Full resource name of the CSCC finding this signal refers to (e.g.
                "organizations/abc/sources/123/findings/xyz")
            stackdriver_notification_id (str): The ID from the Stackdriver Alerting notification.
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Returns:
            A :class:`~google.cloud.irm_v1alpha2.types.Signal` instance.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        if "lookup_signal" not in self._inner_api_calls:
            self._inner_api_calls[
                "lookup_signal"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.lookup_signal,
                default_retry=self._method_configs["LookupSignal"].retry,
                default_timeout=self._method_configs["LookupSignal"].timeout,
                client_info=self._client_info,
            )

        # Sanity check: We have some fields which are mutually exclusive;
        # raise ValueError if more than one is sent.
        google.api_core.protobuf_helpers.check_oneof(
            cscc_finding=cscc_finding,
            stackdriver_notification_id=stackdriver_notification_id,
        )

        request = incidents_service_pb2.LookupSignalRequest(
            cscc_finding=cscc_finding,
            stackdriver_notification_id=stackdriver_notification_id,
        )
        return self._inner_api_calls["lookup_signal"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )
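
    # Illustrative sketch (editorial): ``lookup_signal`` takes at most one of
    # the two IDs; the check_oneof call above raises ValueError if both are
    # passed. '[NOTIFICATION_ID]' is a placeholder.
    #
    #   signal = client.lookup_signal(
    #       stackdriver_notification_id='[NOTIFICATION_ID]'
    #   )
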
    def get_signal(
        self,
        name,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Returns a signal by name.

        Example:
            >>> from google.cloud import irm_v1alpha2
            >>>
            >>> client = irm_v1alpha2.IncidentServiceClient()
            >>>
            >>> name = client.signal_path('[PROJECT]', '[SIGNAL]')
            >>>
            >>> response = client.get_signal(name)

        Args:
            name (str): Required. Resource name of the Signal resource, for example,
                "projects/{project_id_or_number}/signals/{signal_id}".
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Returns:
            A :class:`~google.cloud.irm_v1alpha2.types.Signal` instance.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        if "get_signal" not in self._inner_api_calls:
            self._inner_api_calls[
                "get_signal"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.get_signal,
                default_retry=self._method_configs["GetSignal"].retry,
                default_timeout=self._method_configs["GetSignal"].timeout,
                client_info=self._client_info,
            )

        request = incidents_service_pb2.GetSignalRequest(name=name,)
        if metadata is None:
            metadata = []
        metadata = list(metadata)
        try:
            routing_header = [("name", name)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)

        return self._inner_api_calls["get_signal"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )

    def update_signal(
        self,
        signal,
        update_mask=None,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Updates an existing signal (for example, to assign/unassign it to an
        incident).

        Example:
            >>> from google.cloud import irm_v1alpha2
            >>>
            >>> client = irm_v1alpha2.IncidentServiceClient()
            >>>
            >>> # TODO: Initialize `signal`:
            >>> signal = {}
            >>>
            >>> response = client.update_signal(signal)

        Args:
            signal (Union[dict, ~google.cloud.irm_v1alpha2.types.Signal]): Required. The signal to update with the new values.

                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.irm_v1alpha2.types.Signal`
            update_mask (Union[dict, ~google.cloud.irm_v1alpha2.types.FieldMask]): List of fields that should be updated.

                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.irm_v1alpha2.types.FieldMask`
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete.
                Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Returns:
            A :class:`~google.cloud.irm_v1alpha2.types.Signal` instance.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        if "update_signal" not in self._inner_api_calls:
            self._inner_api_calls[
                "update_signal"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.update_signal,
                default_retry=self._method_configs["UpdateSignal"].retry,
                default_timeout=self._method_configs["UpdateSignal"].timeout,
                client_info=self._client_info,
            )

        request = incidents_service_pb2.UpdateSignalRequest(
            signal=signal, update_mask=update_mask,
        )
        if metadata is None:
            metadata = []
        metadata = list(metadata)
        try:
            routing_header = [("signal.name", signal.name)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)

        return self._inner_api_calls["update_signal"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )
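
    # Illustrative sketch (editorial): assigning a signal to an incident, per
    # the method's stated purpose. The ``incident`` field name is assumed from
    # :class:`~google.cloud.irm_v1alpha2.types.Signal`, not confirmed here.
    #
    #   signal = client.get_signal(signal_name)
    #   signal.incident = incident_name
    #   client.update_signal(signal, update_mask={"paths": ["incident"]})
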
    def escalate_incident(
        self,
        incident,
        update_mask=None,
        subscriptions=None,
        tags=None,
        roles=None,
        artifacts=None,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Escalates an incident.

        Example:
            >>> from google.cloud import irm_v1alpha2
            >>>
            >>> client = irm_v1alpha2.IncidentServiceClient()
            >>>
            >>> # TODO: Initialize `incident`:
            >>> incident = {}
            >>>
            >>> response = client.escalate_incident(incident)

        Args:
            incident (Union[dict, ~google.cloud.irm_v1alpha2.types.Incident]): Required. The incident to escalate with the new values.

                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.irm_v1alpha2.types.Incident`
            update_mask (Union[dict, ~google.cloud.irm_v1alpha2.types.FieldMask]): List of fields that should be updated.

                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.irm_v1alpha2.types.FieldMask`
            subscriptions (list[Union[dict, ~google.cloud.irm_v1alpha2.types.Subscription]]): Subscriptions to add or update. Existing subscriptions with the same
                channel and address as a subscription in the list will be updated.

                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.irm_v1alpha2.types.Subscription`
            tags (list[Union[dict, ~google.cloud.irm_v1alpha2.types.Tag]]): Tags to add. Tags identical to existing tags will be ignored.

                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.irm_v1alpha2.types.Tag`
            roles (list[Union[dict, ~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment]]): Roles to add or update. Existing roles with the same type (and
                title, for TYPE_OTHER roles) will be updated.

                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment`
            artifacts (list[Union[dict, ~google.cloud.irm_v1alpha2.types.Artifact]]): Artifacts to add. All artifacts are added without checking for duplicates.

                If a dict is provided, it must be of the same form as the protobuf
                message :class:`~google.cloud.irm_v1alpha2.types.Artifact`
            retry (Optional[google.api_core.retry.Retry]): A retry object used
                to retry requests. If ``None`` is specified, requests will
                be retried using a default configuration.
            timeout (Optional[float]): The amount of time, in seconds, to wait
                for the request to complete. Note that if ``retry`` is
                specified, the timeout applies to each individual attempt.
            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
                that is provided to the method.

        Returns:
            A :class:`~google.cloud.irm_v1alpha2.types.EscalateIncidentResponse` instance.

        Raises:
            google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
            google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
            ValueError: If the parameters are invalid.
        """
        # Wrap the transport method to add retry and timeout logic.
        if "escalate_incident" not in self._inner_api_calls:
            self._inner_api_calls[
                "escalate_incident"
            ] = google.api_core.gapic_v1.method.wrap_method(
                self.transport.escalate_incident,
                default_retry=self._method_configs["EscalateIncident"].retry,
                default_timeout=self._method_configs["EscalateIncident"].timeout,
                client_info=self._client_info,
            )

        request = incidents_service_pb2.EscalateIncidentRequest(
            incident=incident,
            update_mask=update_mask,
            subscriptions=subscriptions,
            tags=tags,
            roles=roles,
            artifacts=artifacts,
        )
        if metadata is None:
            metadata = []
        metadata = list(metadata)
        try:
            routing_header = [("incident.name", incident.name)]
        except AttributeError:
            pass
        else:
            routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
                routing_header
            )
            metadata.append(routing_metadata)

        return self._inner_api_calls["escalate_incident"](
            request, retry=retry, timeout=timeout, metadata=metadata
        )

    def create_artifact(
        self,
        parent,
        artifact,
        retry=google.api_core.gapic_v1.method.DEFAULT,
        timeout=google.api_core.gapic_v1.method.DEFAULT,
        metadata=None,
    ):
        """
        Creates a new artifact.

        Example:
            >>> from google.cloud import irm_v1alpha2
            >>>
            >>> client = irm_v1alpha2.IncidentServiceClient()
            >>>
            >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')
            >>>
            >>> # TODO: Initialize `artifact`:
            >>> artifact = {}
{}NEWLINE >>>NEWLINE >>> response = client.create_artifact(parent, artifact)NEWLINENEWLINE Args:NEWLINE parent (str): Required. Resource name of the incident, for example,NEWLINE "projects/{project_id_or_number}/incidents/{incident_id}".NEWLINE artifact (Union[dict, ~google.cloud.irm_v1alpha2.types.Artifact]): Required. The artifact to create.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Artifact`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.Artifact` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "create_artifact" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "create_artifact"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.create_artifact,NEWLINE default_retry=self._method_configs["CreateArtifact"].retry,NEWLINE default_timeout=self._method_configs["CreateArtifact"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.CreateArtifactRequest(NEWLINE parent=parent, artifact=artifact,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["create_artifact"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def list_artifacts(NEWLINE self,NEWLINE parent,NEWLINE page_size=None,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Returns a list of artifacts for an incident.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')NEWLINE >>>NEWLINE >>> # Iterate over all resultsNEWLINE >>> for element in client.list_artifacts(parent):NEWLINE ... # process elementNEWLINE ... passNEWLINE >>>NEWLINE >>>NEWLINE >>> # Alternatively:NEWLINE >>>NEWLINE >>> # Iterate over results one page at a timeNEWLINE >>> for page in client.list_artifacts(parent).pages:NEWLINE ... for element in page:NEWLINE ... # process elementNEWLINE ... passNEWLINENEWLINE Args:NEWLINE parent (str): Required. 
Resource name of the incident, for example,NEWLINE "projects/{project_id_or_number}/incidents/{incident_id}".NEWLINE page_size (int): The maximum number of resources contained in theNEWLINE underlying API response. If page streaming is performed per-NEWLINE resource, this parameter does not affect the return value. If pageNEWLINE streaming is performed per-page, this determines the maximum numberNEWLINE of resources in a page.NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.api_core.page_iterator.PageIterator` instance.NEWLINE An iterable of :class:`~google.cloud.irm_v1alpha2.types.Artifact` instances.NEWLINE You can also iterate over the pages of the responseNEWLINE using its `pages` property.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "list_artifacts" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "list_artifacts"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.list_artifacts,NEWLINE default_retry=self._method_configs["ListArtifacts"].retry,NEWLINE default_timeout=self._method_configs["ListArtifacts"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.ListArtifactsRequest(NEWLINE parent=parent, page_size=page_size,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE iterator = google.api_core.page_iterator.GRPCIterator(NEWLINE client=None,NEWLINE method=functools.partial(NEWLINE self._inner_api_calls["list_artifacts"],NEWLINE retry=retry,NEWLINE timeout=timeout,NEWLINE metadata=metadata,NEWLINE ),NEWLINE request=request,NEWLINE items_field="artifacts",NEWLINE request_token_field="page_token",NEWLINE response_token_field="next_page_token",NEWLINE )NEWLINE return iteratorNEWLINENEWLINE def update_artifact(NEWLINE self,NEWLINE artifact,NEWLINE update_mask=None,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Updates an existing artifact.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> # TODO: Initialize `artifact`:NEWLINE >>> artifact = {}NEWLINE >>>NEWLINE >>> response = client.update_artifact(artifact)NEWLINENEWLINE Args:NEWLINE artifact (Union[dict, ~google.cloud.irm_v1alpha2.types.Artifact]): Required. 
The artifact to update with the new values.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Artifact`NEWLINE update_mask (Union[dict, ~google.cloud.irm_v1alpha2.types.FieldMask]): List of fields that should be updated.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.FieldMask`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.Artifact` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "update_artifact" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "update_artifact"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.update_artifact,NEWLINE default_retry=self._method_configs["UpdateArtifact"].retry,NEWLINE default_timeout=self._method_configs["UpdateArtifact"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.UpdateArtifactRequest(NEWLINE artifact=artifact, update_mask=update_mask,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("artifact.name", artifact.name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["update_artifact"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def send_shift_handoff(NEWLINE self,NEWLINE parent,NEWLINE recipients,NEWLINE subject,NEWLINE cc=None,NEWLINE notes_content_type=None,NEWLINE notes_content=None,NEWLINE incidents=None,NEWLINE preview_only=None,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Sends a summary of the shift for oncall handoff.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> parent = client.project_path('[PROJECT]')NEWLINE >>>NEWLINE >>> # TODO: Initialize `recipients`:NEWLINE >>> recipients = []NEWLINE >>>NEWLINE >>> # TODO: Initialize `subject`:NEWLINE >>> subject = ''NEWLINE >>>NEWLINE >>> response = client.send_shift_handoff(parent, recipients, subject)NEWLINENEWLINE Args:NEWLINE parent (str): Required. The resource name of the Stackdriver project that theNEWLINE handoff is being sent from. 
for example,NEWLINE ``projects/{project_id_or_number}``NEWLINE recipients (list[str]): Required. Email addresses of the recipients of the handoff, for example,NEWLINE "[email protected]". Must contain at least one entry.NEWLINE subject (str): Required. The subject of the email.NEWLINE cc (list[str]): Optional. Email addresses that should be CC'd on the handoff.NEWLINE notes_content_type (str): Content type string, for example, 'text/plain' or 'text/html'.NEWLINE notes_content (str): Optional. Additional notes to be included in the handoff.NEWLINE incidents (list[Union[dict, ~google.cloud.irm_v1alpha2.types.Incident]]): Optional. The set of incidents that should be included in the handoff.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Incident`NEWLINE preview_only (bool): If set to true a ShiftHandoffResponse will be returned but the handoffNEWLINE will not actually be sent.NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.SendShiftHandoffResponse` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "send_shift_handoff" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "send_shift_handoff"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.send_shift_handoff,NEWLINE default_retry=self._method_configs["SendShiftHandoff"].retry,NEWLINE default_timeout=self._method_configs["SendShiftHandoff"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.SendShiftHandoffRequest(NEWLINE parent=parent,NEWLINE recipients=recipients,NEWLINE subject=subject,NEWLINE cc=cc,NEWLINE notes_content_type=notes_content_type,NEWLINE notes_content=notes_content,NEWLINE incidents=incidents,NEWLINE preview_only=preview_only,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["send_shift_handoff"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def create_subscription(NEWLINE self,NEWLINE parent,NEWLINE subscription,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Creates a new subscription.NEWLINE This will fail if:NEWLINE a. 
there are too many (50) subscriptions in the incident alreadyNEWLINE b. a subscription using the given channel already existsNEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')NEWLINE >>>NEWLINE >>> # TODO: Initialize `subscription`:NEWLINE >>> subscription = {}NEWLINE >>>NEWLINE >>> response = client.create_subscription(parent, subscription)NEWLINENEWLINE Args:NEWLINE parent (str): Required. Resource name of the incident, for example,NEWLINE "projects/{project_id_or_number}/incidents/{incident_id}".NEWLINE subscription (Union[dict, ~google.cloud.irm_v1alpha2.types.Subscription]): Required. The subscription to create.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Subscription`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.Subscription` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "create_subscription" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "create_subscription"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.create_subscription,NEWLINE default_retry=self._method_configs["CreateSubscription"].retry,NEWLINE default_timeout=self._method_configs["CreateSubscription"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.CreateSubscriptionRequest(NEWLINE parent=parent, subscription=subscription,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["create_subscription"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def update_subscription(NEWLINE self,NEWLINE subscription,NEWLINE update_mask=None,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Updates a subscription.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> # TODO: Initialize `subscription`:NEWLINE >>> subscription = {}NEWLINE >>>NEWLINE >>> response = client.update_subscription(subscription)NEWLINENEWLINE 
Args:NEWLINE subscription (Union[dict, ~google.cloud.irm_v1alpha2.types.Subscription]): Required. The subscription to update, with new values.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.Subscription`NEWLINE update_mask (Union[dict, ~google.cloud.irm_v1alpha2.types.FieldMask]): List of fields that should be updated.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.FieldMask`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.Subscription` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "update_subscription" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "update_subscription"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.update_subscription,NEWLINE default_retry=self._method_configs["UpdateSubscription"].retry,NEWLINE default_timeout=self._method_configs["UpdateSubscription"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.UpdateSubscriptionRequest(NEWLINE subscription=subscription, update_mask=update_mask,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("subscription.name", subscription.name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["update_subscription"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def list_subscriptions(NEWLINE self,NEWLINE parent,NEWLINE page_size=None,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Returns a list of subscriptions for an incident.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')NEWLINE >>>NEWLINE >>> # Iterate over all resultsNEWLINE >>> for element in client.list_subscriptions(parent):NEWLINE ... # process elementNEWLINE ... passNEWLINE >>>NEWLINE >>>NEWLINE >>> # Alternatively:NEWLINE >>>NEWLINE >>> # Iterate over results one page at a timeNEWLINE >>> for page in client.list_subscriptions(parent).pages:NEWLINE ... for element in page:NEWLINE ... # process elementNEWLINE ... 
passNEWLINENEWLINE Args:NEWLINE parent (str): Required. Resource name of the incident, for example,NEWLINE "projects/{project_id_or_number}/incidents/{incident_id}".NEWLINE page_size (int): The maximum number of resources contained in theNEWLINE underlying API response. If page streaming is performed per-NEWLINE resource, this parameter does not affect the return value. If pageNEWLINE streaming is performed per-page, this determines the maximum numberNEWLINE of resources in a page.NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.api_core.page_iterator.PageIterator` instance.NEWLINE An iterable of :class:`~google.cloud.irm_v1alpha2.types.Subscription` instances.NEWLINE You can also iterate over the pages of the responseNEWLINE using its `pages` property.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "list_subscriptions" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "list_subscriptions"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.list_subscriptions,NEWLINE default_retry=self._method_configs["ListSubscriptions"].retry,NEWLINE default_timeout=self._method_configs["ListSubscriptions"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.ListSubscriptionsRequest(NEWLINE parent=parent, page_size=page_size,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE iterator = google.api_core.page_iterator.GRPCIterator(NEWLINE client=None,NEWLINE method=functools.partial(NEWLINE self._inner_api_calls["list_subscriptions"],NEWLINE retry=retry,NEWLINE timeout=timeout,NEWLINE metadata=metadata,NEWLINE ),NEWLINE request=request,NEWLINE items_field="subscriptions",NEWLINE request_token_field="page_token",NEWLINE response_token_field="next_page_token",NEWLINE )NEWLINE return iteratorNEWLINENEWLINE def delete_subscription(NEWLINE self,NEWLINE name,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Deletes an existing subscription.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> name = client.subscription_path('[PROJECT]', '[INCIDENT]', '[SUBSCRIPTION]')NEWLINE >>>NEWLINE >>> client.delete_subscription(name)NEWLINENEWLINE Args:NEWLINE name (str): 
Required. Resource name of the subscription.NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "delete_subscription" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "delete_subscription"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.delete_subscription,NEWLINE default_retry=self._method_configs["DeleteSubscription"].retry,NEWLINE default_timeout=self._method_configs["DeleteSubscription"].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.DeleteSubscriptionRequest(name=name,)NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("name", name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE self._inner_api_calls["delete_subscription"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def create_incident_role_assignment(NEWLINE self,NEWLINE parent,NEWLINE incident_role_assignment,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Creates a role assignment on an existing incident. Normally, the user fieldNEWLINE will be set when assigning a role to oneself, and the next field will beNEWLINE set when proposing another user as the assignee. Setting the next fieldNEWLINE directly to a user other than oneself is equivalent to proposing andNEWLINE force-assigning the role to the user.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')NEWLINE >>>NEWLINE >>> # TODO: Initialize `incident_role_assignment`:NEWLINE >>> incident_role_assignment = {}NEWLINE >>>NEWLINE >>> response = client.create_incident_role_assignment(parent, incident_role_assignment)NEWLINENEWLINE Args:NEWLINE parent (str): Required. Resource name of the incident, for example,NEWLINE "projects/{project_id_or_number}/incidents/{incident_id}".NEWLINE incident_role_assignment (Union[dict, ~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment]): Required. Role assignment to create.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. 
If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "create_incident_role_assignment" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "create_incident_role_assignment"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.create_incident_role_assignment,NEWLINE default_retry=self._method_configs[NEWLINE "CreateIncidentRoleAssignment"NEWLINE ].retry,NEWLINE default_timeout=self._method_configs[NEWLINE "CreateIncidentRoleAssignment"NEWLINE ].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.CreateIncidentRoleAssignmentRequest(NEWLINE parent=parent, incident_role_assignment=incident_role_assignment,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["create_incident_role_assignment"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def delete_incident_role_assignment(NEWLINE self,NEWLINE name,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Deletes an existing role assignment.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> name = client.incident_path('[PROJECT]', '[INCIDENT]')NEWLINE >>>NEWLINE >>> client.delete_incident_role_assignment(name)NEWLINENEWLINE Args:NEWLINE name (str): Required. Resource name of the role assignment.NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "delete_incident_role_assignment" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "delete_incident_role_assignment"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.delete_incident_role_assignment,NEWLINE default_retry=self._method_configs[NEWLINE "DeleteIncidentRoleAssignment"NEWLINE ].retry,NEWLINE default_timeout=self._method_configs[NEWLINE "DeleteIncidentRoleAssignment"NEWLINE ].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.DeleteIncidentRoleAssignmentRequest(name=name,)NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("name", name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE self._inner_api_calls["delete_incident_role_assignment"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINENEWLINE def list_incident_role_assignments(NEWLINE self,NEWLINE parent,NEWLINE page_size=None,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Lists role assignments that are part of an incident.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> parent = client.incident_path('[PROJECT]', '[INCIDENT]')NEWLINE >>>NEWLINE >>> # Iterate over all resultsNEWLINE >>> for element in client.list_incident_role_assignments(parent):NEWLINE ... # process elementNEWLINE ... passNEWLINE >>>NEWLINE >>>NEWLINE >>> # Alternatively:NEWLINE >>>NEWLINE >>> # Iterate over results one page at a timeNEWLINE >>> for page in client.list_incident_role_assignments(parent).pages:NEWLINE ... for element in page:NEWLINE ... # process elementNEWLINE ... passNEWLINENEWLINE Args:NEWLINE parent (str): Required. Resource name of the incident, for example,NEWLINE "projects/{project_id_or_number}/incidents/{incident_id}".NEWLINE page_size (int): The maximum number of resources contained in theNEWLINE underlying API response. If page streaming is performed per-NEWLINE resource, this parameter does not affect the return value. If pageNEWLINE streaming is performed per-page, this determines the maximum numberNEWLINE of resources in a page.NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. 
Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.api_core.page_iterator.PageIterator` instance.NEWLINE An iterable of :class:`~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment` instances.NEWLINE You can also iterate over the pages of the responseNEWLINE using its `pages` property.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "list_incident_role_assignments" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "list_incident_role_assignments"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.list_incident_role_assignments,NEWLINE default_retry=self._method_configs["ListIncidentRoleAssignments"].retry,NEWLINE default_timeout=self._method_configs[NEWLINE "ListIncidentRoleAssignments"NEWLINE ].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.ListIncidentRoleAssignmentsRequest(NEWLINE parent=parent, page_size=page_size,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("parent", parent)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE iterator = google.api_core.page_iterator.GRPCIterator(NEWLINE client=None,NEWLINE method=functools.partial(NEWLINE self._inner_api_calls["list_incident_role_assignments"],NEWLINE retry=retry,NEWLINE timeout=timeout,NEWLINE metadata=metadata,NEWLINE ),NEWLINE request=request,NEWLINE items_field="incident_role_assignments",NEWLINE request_token_field="page_token",NEWLINE response_token_field="next_page_token",NEWLINE )NEWLINE return iteratorNEWLINENEWLINE def cancel_incident_role_handover(NEWLINE self,NEWLINE name,NEWLINE new_assignee,NEWLINE retry=google.api_core.gapic_v1.method.DEFAULT,NEWLINE timeout=google.api_core.gapic_v1.method.DEFAULT,NEWLINE metadata=None,NEWLINE ):NEWLINE """NEWLINE Cancels a role handover. This will fail if the 'proposed_assignee'NEWLINE field of the IncidentRoleAssignment is not equal to the 'new_assignee'NEWLINE field of the request.NEWLINENEWLINE Example:NEWLINE >>> from google.cloud import irm_v1alpha2NEWLINE >>>NEWLINE >>> client = irm_v1alpha2.IncidentServiceClient()NEWLINE >>>NEWLINE >>> name = client.incident_role_assignment_path('[PROJECT_ID_OR_NUMBER]', '[INCIDENT_ID]', '[ROLE_ID]')NEWLINE >>>NEWLINE >>> # TODO: Initialize `new_assignee`:NEWLINE >>> new_assignee = {}NEWLINE >>>NEWLINE >>> response = client.cancel_incident_role_handover(name, new_assignee)NEWLINENEWLINE Args:NEWLINE name (str): Required. Resource name of the role assignment.NEWLINE new_assignee (Union[dict, ~google.cloud.irm_v1alpha2.types.User]): Required. 
Person who was proposed as the next assignee (i.e.NEWLINE IncidentRoleAssignment.proposed_assignee) and whose proposal is beingNEWLINE cancelled.NEWLINENEWLINE If a dict is provided, it must be of the same form as the protobufNEWLINE message :class:`~google.cloud.irm_v1alpha2.types.User`NEWLINE retry (Optional[google.api_core.retry.Retry]): A retry object usedNEWLINE to retry requests. If ``None`` is specified, requests willNEWLINE be retried using a default configuration.NEWLINE timeout (Optional[float]): The amount of time, in seconds, to waitNEWLINE for the request to complete. Note that if ``retry`` isNEWLINE specified, the timeout applies to each individual attempt.NEWLINE metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadataNEWLINE that is provided to the method.NEWLINENEWLINE Returns:NEWLINE A :class:`~google.cloud.irm_v1alpha2.types.IncidentRoleAssignment` instance.NEWLINENEWLINE Raises:NEWLINE google.api_core.exceptions.GoogleAPICallError: If the requestNEWLINE failed for any reason.NEWLINE google.api_core.exceptions.RetryError: If the request failed dueNEWLINE to a retryable error and retry attempts failed.NEWLINE ValueError: If the parameters are invalid.NEWLINE """NEWLINE # Wrap the transport method to add retry and timeout logic.NEWLINE if "cancel_incident_role_handover" not in self._inner_api_calls:NEWLINE self._inner_api_calls[NEWLINE "cancel_incident_role_handover"NEWLINE ] = google.api_core.gapic_v1.method.wrap_method(NEWLINE self.transport.cancel_incident_role_handover,NEWLINE default_retry=self._method_configs["CancelIncidentRoleHandover"].retry,NEWLINE default_timeout=self._method_configs[NEWLINE "CancelIncidentRoleHandover"NEWLINE ].timeout,NEWLINE client_info=self._client_info,NEWLINE )NEWLINENEWLINE request = incidents_service_pb2.CancelIncidentRoleHandoverRequest(NEWLINE name=name, new_assignee=new_assignee,NEWLINE )NEWLINE if metadata is None:NEWLINE metadata = []NEWLINE metadata = list(metadata)NEWLINE try:NEWLINE routing_header = [("name", name)]NEWLINE except AttributeError:NEWLINE passNEWLINE else:NEWLINE routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(NEWLINE routing_headerNEWLINE )NEWLINE metadata.append(routing_metadata)NEWLINENEWLINE return self._inner_api_calls["cancel_incident_role_handover"](NEWLINE request, retry=retry, timeout=timeout, metadata=metadataNEWLINE )NEWLINE |
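Every method above follows the same pattern: build the request protobuf, derive a routing header from the resource name, and dispatch through the lazily wrapped transport call. A minimal end-to-end sketch of the artifact methods under that pattern; the project and incident IDs are placeholders, and the artifact fields are shown as commonly named in the Artifact message rather than taken from this document:

from google.cloud import irm_v1alpha2

client = irm_v1alpha2.IncidentServiceClient()
# Hypothetical project and incident IDs.
parent = client.incident_path('my-project', 'my-incident')

# Dicts are coerced into the corresponding protobuf message by the client.
artifact = client.create_artifact(
    parent, {'display_name': 'postmortem', 'url': 'https://example.com/doc'})

# The returned iterator fetches pages lazily via next_page_token.
for art in client.list_artifacts(parent, page_size=50):
    print(art.display_name)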
def DetectCollision(characterPosX, characterPosY, character, obstaclePosX, obstaclePosY, obstacle):
    # Axis-aligned bounding-box overlap test; MARGIN_ERROR shrinks the boxes
    # so transparent padding around the sprites does not register as a hit.
    MARGIN_ERROR = 18  # error margin in image pixels
    collision = False

    print("Character:", characterPosX, characterPosY, "\tObstacle", obstaclePosX, obstaclePosY)
    if (characterPosX + MARGIN_ERROR < (obstaclePosX + obstacle.getWidth())
            and (characterPosX + character[0]) > obstaclePosX + MARGIN_ERROR
            and characterPosY + MARGIN_ERROR < (obstaclePosY + obstacle.getHeight())
            and (characterPosY + character[1]) > obstaclePosY + MARGIN_ERROR):
        collision = True

    return collision
 |
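A quick way to exercise DetectCollision without game assets: `character` is indexed as a (width, height) pair and the obstacle is called through getWidth()/getHeight(), so a two-method stub is enough. The stub class and the numbers below are made up for the demo, and getHeight() is assumed to mirror the existing getWidth() accessor:

class StubObstacle:
    """Minimal stand-in exposing the two accessors DetectCollision calls."""
    def __init__(self, width, height):
        self._width, self._height = width, height
    def getWidth(self):
        return self._width
    def getHeight(self):
        return self._height

# A 64x64 character overlapping a 32x32 obstacle well past the 18px margin.
print(DetectCollision(100, 100, (64, 64), 120, 120, StubObstacle(32, 32)))  # True
print(DetectCollision(0, 0, (64, 64), 200, 200, StubObstacle(32, 32)))      # False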
import argparse
import ray

from wiki_search import Dataset, Engine

TOP_K = 8  # number of results to show per query


if __name__ == '__main__':
    if not ray.is_initialized():
        ray.init()

    parser = argparse.ArgumentParser()
    parser.add_argument('--data_dir', type=str, default='./data')
    parser.add_argument('--ranking', type=str, default='tfidf')
    args = parser.parse_args()

    dataset = Dataset(data_dir=args.data_dir)
    engine = Engine(dataset=dataset, ranking_algo=args.ranking)

    while True:
        query = input('> ')
        results = engine.search(query)[:TOP_K]

        print(f'Only showing top {TOP_K} results:')
        for result in results:
            print('=============')
            print(f'{result.document.title} :: {result.score:.4f}')
            print('https://en.wikipedia.org/wiki/' + result.document.name)
            # Truncate long descriptions so the terminal output stays readable.
            desc = result.document.raw_main_desc
            if len(desc) > 1000:
                desc = desc[:1000]
                desc += '...'
            print(desc)
            print()
 |
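The Engine above can also be driven programmatically instead of through the REPL loop; a small sketch under the same assumptions about Dataset, Engine, and the .document/.score attributes used above:

import ray
from wiki_search import Dataset, Engine

ray.init(ignore_reinit_error=True)
engine = Engine(dataset=Dataset(data_dir='./data'), ranking_algo='tfidf')

# Collect (title, score) pairs for a batch of queries instead of printing.
queries = ['alan turing', 'graph theory']
batch = {q: [(res.document.title, res.score) for res in engine.search(q)[:3]]
         for q in queries}
print(batch)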
from sympy import Point, Line
from skimage import feature, measure
import numpy as np
from scipy import ndimage as nd

def thr_calculator(filtered_img, min_distance, stringency):

    """
    Function used to calculate the threshold to use for the dots
    counting in a 2D image.

    Parameters:
    -----------

    filtered_img: np.array float64
        preprocessed image used to count the dots.
    min_distance: int
        minimum distance that two maxima need to have in order to be defined as
        separate peaks.
    stringency: int
        integer used to select the stringency of the generated
        threshold. By adding stringency to the thr_idx we can select a Thr with
        a higher value from the thr_array.

    Returns:
    -----------

    counting_dict : dict
        dictionary containing all the counting info:
        selected_thr: float64
            Thr used for counting after application of the stringency.
        calculated_thr: float64
            Calculated Thr.
        selected_peaks: int64
            2D coords of the peaks defined using the selected_thr.
        thr_array: float64
            Thr array of 100 points distributed between (Img.min(), Img.max()).
        peaks_coords: float64
            list of all the 2D coords calculated using the Thr array.
        total_peaks: list of int
            List of the peaks counts.
        thr_idx: int64
            index of the calculated threshold.
        stringency: int64
            stringency used for the identification of the selected_peaks
    """

    # List with the total peaks calculated for each threshold
    total_peaks = []

    # List of ndarrays with the coords of the peaks calculated for each threshold
    peaks_coords = []

    # Define the Thr array to be tested
    thr_array = np.linspace(filtered_img.min(), filtered_img.max(), num=100)
    # Calculate the number of peaks for each threshold. In this calculation
    # the size of the objects is not considered
    stop_thr = thr_array[-1]  # fallback in case the peak count never drops below 3
    for thr in thr_array:
        # The border is excluded from the counting
        peaks = feature.peak_local_max(filtered_img, min_distance=min_distance,
                                       threshold_abs=thr, exclude_border=False, indices=True,
                                       num_peaks=np.inf, footprint=None, labels=None)
        # Stop the counting when the number of peaks detected falls below 3
        if len(peaks) <= 3:
            stop_thr = thr  # remember the stopping Thr so the arrays can be trimmed back to it
            break
        else:
            peaks_coords.append(peaks)
            total_peaks.append(len(peaks))

    # Consider the case of no detected peaks, or of only one Thr that
    # creates peaks (the total_peaks list would have a single element)
    if len(total_peaks) > 1:

        # Trim the threshold array in order to match the stopping point
        # (the [0][0] extracts the first matching index)
        thr_array = thr_array[:np.where(thr_array == stop_thr)[0][0]]

        # Calculate the gradient of the number of peaks distribution
        grad = np.gradient(total_peaks)

        # Restructure the data so the min_peak is left out of the calculations

        # Coord of the gradient min_peak
        grad_min_peak_coord = np.argmin(grad)

        # Trim the data to remove the peak.
        trimmed_thr_array = thr_array[grad_min_peak_coord:]
        trimmed_grad = grad[grad_min_peak_coord:]

        if trimmed_thr_array.shape[0] > 1:

            # Trim the coords array so the thr and peaks arrays keep the same length
            trimmed_peaks_coords = peaks_coords[grad_min_peak_coord:]
            trimmed_total_peaks = total_peaks[grad_min_peak_coord:]

            # To determine the threshold we pick the Thr with the biggest
            # distance to the segment that joins the end points of the
            # calculated gradient

            # Distances list
            distances = []

            # Calculate the coords of the end points of the gradient
            p1 = Point(trimmed_thr_array[0], trimmed_grad[0])
            p2 = Point(trimmed_thr_array[-1], trimmed_grad[-1])

            # Create a line that joins the points
            s = Line(p1, p2)
            allpoints = np.arange(0, len(trimmed_thr_array))

            # Calculate the distance between all points and the line
            for p in allpoints:
                dst = s.distance(Point(trimmed_thr_array[p], trimmed_grad[p]))
                distances.append(dst.evalf())

            # Remove the end points from the lists
            trimmed_thr_array = trimmed_thr_array[1:-1]
            trimmed_grad = trimmed_grad[1:-1]
            trimmed_peaks_coords = trimmed_peaks_coords[1:-1]
            trimmed_total_peaks = trimmed_total_peaks[1:-1]
            trimmed_distances = distances[1:-1]

            # Determine the coords of the selected Thr
            # (trimmed_distances is converted to an array because argmax on the
            # raw list crashed on Sanger)
            if trimmed_distances:
                thr_idx = np.argmax(np.array(trimmed_distances))
                calculated_thr = trimmed_thr_array[thr_idx]
                # The selected threshold usually causes oversampling of the number of dots,
                # so the stringency parameter (int n) selects Thr+n for the counting.
                # The stringency is applied only if the trimmed_thr_array is long enough
                if thr_idx + stringency < len(trimmed_thr_array):
                    selected_thr = trimmed_thr_array[thr_idx + stringency]
                    selected_peaks = trimmed_peaks_coords[thr_idx + stringency]
                    thr_idx = thr_idx + stringency
                else:
                    selected_thr = trimmed_thr_array[thr_idx]
                    selected_peaks = trimmed_peaks_coords[thr_idx]

                # Calculate the selected peaks after removal of the big and small objects

                # Threshold the image using the selected threshold
                if selected_thr > 0:
                    img_mask = filtered_img > selected_thr

                    labels = nd.label(img_mask)[0]

                    properties = measure.regionprops(labels)

                    for ob in properties:
                        if ob.area < 6 or ob.area > 200:
                            img_mask[ob.coords[:, 0], ob.coords[:, 1]] = 0

                    labels = nd.label(img_mask)[0]
                    selected_peaks = feature.peak_local_max(filtered_img, min_distance=min_distance, threshold_abs=selected_thr, exclude_border=False, indices=True, num_peaks=np.inf, footprint=None, labels=labels)

                    if selected_peaks.size:
                        # Intensity counting of the max peaks
                        selected_peaks_int = filtered_img[selected_peaks[:, 0], selected_peaks[:, 1]]
                    else:
                        # Guard so the dict below never references an undefined name
                        selected_peaks_int = 0

                else:
                    selected_thr = 0
                    calculated_thr = 0
                    selected_peaks = 0
                    peaks_coords = 0
                    total_peaks = 0
                    thr_idx = 0
                    selected_peaks_int = 0
                    trimmed_thr_array = 0
                    trimmed_peaks_coords = 0

            else:
                selected_thr = 0
                calculated_thr = 0
                selected_peaks = 0
                peaks_coords = 0
                total_peaks = 0
                thr_idx = 0
                selected_peaks_int = 0
                trimmed_thr_array = 0
                trimmed_peaks_coords = 0
        else:
            selected_thr = 0
            calculated_thr = 0
            selected_peaks = 0
            peaks_coords = 0
            total_peaks = 0
            thr_idx = 0
            selected_peaks_int = 0
            trimmed_thr_array = 0
            trimmed_peaks_coords = 0

    else:
        selected_thr = 0
        calculated_thr = 0
        selected_peaks = 0
        peaks_coords = 0
        total_peaks = 0
        thr_idx = 0
        selected_peaks_int = 0
        trimmed_thr_array = 0
        trimmed_peaks_coords = 0

    counting_dict = {}

    counting_dict['selected_thr'] = selected_thr
    counting_dict['calculated_thr'] = calculated_thr
    counting_dict['selected_peaks'] = selected_peaks
    counting_dict['thr_array'] = thr_array
    counting_dict['trimmed_thr_array'] = trimmed_thr_array
    counting_dict['peaks_coords'] = peaks_coords
    counting_dict['trimmed_peaks_coords'] = trimmed_peaks_coords
    counting_dict['total_peaks'] = total_peaks
    counting_dict['thr_idx'] = thr_idx
    counting_dict['stringency'] = stringency
    counting_dict['selected_peaks_int'] = selected_peaks_int

    return counting_dict |
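The geometric core of thr_calculator, picking the threshold whose (Thr, gradient) point lies farthest from the chord joining the curve's endpoints, does not require sympy: the point-to-line distance reduces to a 2D cross product. A standalone NumPy sketch of just that step; the synthetic gradient below is made up for the demo:

import numpy as np

def knee_threshold(thr, grad):
    # Distance from each (thr, grad) point to the line through the endpoints:
    # |cross(p2 - p1, p - p1)| / |p2 - p1| is the perpendicular distance.
    p1 = np.array([thr[0], grad[0]])
    p2 = np.array([thr[-1], grad[-1]])
    pts = np.column_stack([thr, grad])
    d = np.abs(np.cross(p2 - p1, pts - p1)) / np.linalg.norm(p2 - p1)
    return thr[np.argmax(d)]

thr = np.linspace(0.1, 1.0, 50)
grad = -100 * np.exp(-5 * thr)   # synthetic elbow-shaped gradient
print(knee_threshold(thr, grad))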
import logging
import re
import time

from bs4 import BeautifulSoup
import requests

class BackEnd:

    @staticmethod
    def help():
        return [
            "url: ex https://rtblivedemo.herokuapp.com/scoreboard",
            "poll-interval: seconds",
        ]

    @staticmethod
    def supports(conf, url):
        # Return True if the url seems like a system we support
        resp = requests.get(url)

        lcase = resp.text.lower()
        return "abs0lut3pwn4g3" in lcase

    def __init__(self, conf, middleend):
        self.conf = conf
        self.middle = middleend
        self.log = logging.getLogger(__name__)

        if conf["url"] == "":
            raise RuntimeError("This backend requires a URL")

        # Help the user out a little bit: they can paste any of several links
        self.URL = self._baseurl(conf["url"])
        self.log.info(f"Attempting to use RTB-CTF instance at {self.URL}")

        self.session = requests.Session()

    def run(self):
        self.running = True
        while self.running:

            scoreboard = self._get_scoreboard()

            if scoreboard is not None:
                self.middle.handle_snapshot(("scoreboard", { "scores": scoreboard }))

            time.sleep(self.conf["poll-interval"])

    def stop(self):
        self.running = False

    def update(self):
        pass

    def _baseurl(self, url):
        url = re.sub("/scoreboard.*", "", url)
        return url

    def _get_scoreboard(self):

        teams = []

        # Sadly, we have to parse the table, but that's all right
        failed = False
        try:
            resp = self.session.get(self.URL + "/scoreboard")
        except requests.RequestException:
            failed = True

        if failed or resp.status_code != 200:
            self.log.warning("scoreboard fetch failed")
            return None

        # BS filters to find the elements that we are interested in
        filt_team_row = lambda tag: tag.name == "tr" and tag.parent.name == "tbody"

        # requests mis-detects ISO-8859 because of some German team names;
        # the page is explicitly encoded as utf-8, though.
        resp.encoding = "utf-8"

        soup = BeautifulSoup(resp.text, "html.parser")
        rows = soup.findAll(filt_team_row)

        # Important: BS4 objects have string representations, but they do not
        # belong in the data we pass to middle-end.
        # Convert strings to proper strings or face the consequences!

        for r in rows:
            heads = r.find_all("th")
            cells = r.find_all("td")

            t = {}
            t["name"] = str(cells[0].string)
            t["team_id"] = t["name"]
            t["place"] = int(heads[0].string)
            t["score"] = str(cells[1].string)

            teams.append(t)

        return teams
 |
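Wiring the backend up needs only the two configuration keys listed in help() and a middle end exposing handle_snapshot, the single method this class calls on it. A minimal harness with the middle end stubbed out for illustration:

import logging

class PrintMiddleEnd:
    def handle_snapshot(self, snapshot):
        kind, payload = snapshot
        print(kind, payload["scores"][:3])  # show the top three rows

logging.basicConfig(level=logging.INFO)
conf = {"url": "https://rtblivedemo.herokuapp.com/scoreboard", "poll-interval": 30}
backend = BackEnd(conf, PrintMiddleEnd())
backend.run()  # blocks; call backend.stop() from another thread to exit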
# pylint: disable=redefined-builtin, wildcard-importNEWLINE"""x86 specific declaration and schedules."""NEWLINEfrom __future__ import absolute_import as _absNEWLINENEWLINEfrom .conv2d import schedule_conv2d, schedule_conv2d_nhwcNEWLINEfrom .binarize_pack import schedule_binarize_packNEWLINEfrom .binary_dense import schedule_binary_denseNEWLINEfrom .nn import *NEWLINEfrom .injective import *NEWLINEfrom .pooling import schedule_pool, schedule_global_poolNEWLINE |
import math

r = float(input("Radius:"))
ri = math.ceil(r)  # smallest integer lattice bound covering the radius

def read_coord(name):
    # Prompt until the coordinate fits inside the radius.
    c = float(input(name + "-coordinate:"))
    while c > r or c < -r:
        print(name + "-coordinate cannot be larger than radius")
        c = float(input(name + "-coordinate:"))
    return c

x = read_coord("x")
y = read_coord("y")
z = read_coord("z")
rij = math.sqrt(x**2 + y**2 + z**2)
while rij > math.sqrt(3)*r:
    print("point is outside the cube")
    x = read_coord("x")
    y = read_coord("y")
    z = read_coord("z")
    rij = math.sqrt(x**2 + y**2 + z**2)
print('Point:(', x, ',', y, ',', z, ')')

# The field magnitude only depends on the absolute coordinates.
x = abs(x)
y = abs(y)
z = abs(z)

# Displacements from the point to the lattice sites run from ri-c down to
# -(ri+c) on each axis, so the sites themselves span the integer range [-ri, ri].
xone = ri - x
yone = ri - y
zone = ri - z
xtwo = -(x + ri)
ytwo = -(y + ri)
ztwo = -(z + ri)
totalx = 0
totaly = 0
totalz = 0

while xone >= xtwo:
    while yone >= ytwo:
        while zone >= ztwo:
            if xone == 0 and yone == 0 and zone == 0:
                zone = zone - 1  # skip the site at the point itself
            else:
                rij = math.sqrt(xone**2 + yone**2 + zone**2)
                rijc = math.sqrt((x + xone)**2 + (y + yone)**2 + (z + zone)**2)
                if rijc > math.sqrt(3)*r:
                    zone = zone - 1  # site outside the sphere of radius sqrt(3)*r
                else:
                    # Field of a unit dipole oriented along z.
                    Hx = (3*xone*zone)/(rij**5)
                    Hy = (3*yone*zone)/(rij**5)
                    Hz = (2*zone**2 - xone**2 - yone**2)/(rij**5)
                    totalx = totalx + Hx
                    totaly = totaly + Hy
                    totalz = totalz + Hz
                    zone = zone - 1
        yone = yone - 1
        zone = ri - z  # reset to the same starting displacement as the first pass
    xone = xone - 1
    yone = ri - y

H = math.sqrt(totalx**2 + totaly**2 + totalz**2)
if H < 10**(-10):
    print("total H: 0.0")
else:
    print("total H:", H)
 |
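The triple loop accumulates, over every lattice site inside the ball of radius sqrt(3)*r, the field of a unit dipole along z: Hx = 3xz/rij^5, Hy = 3yz/rij^5, Hz = (2z^2 - x^2 - y^2)/rij^5 in displacement coordinates. The same sum can be vectorized; a NumPy sketch intended to match the loop version up to floating-point ordering, under the same geometry assumptions:

import numpy as np

def dipole_field_magnitude(x, y, z, r, ri):
    x, y, z = abs(x), abs(y), abs(z)  # same symmetry reduction as above
    ax = np.arange(-ri, ri + 1)
    sx, sy, sz = np.meshgrid(ax, ax, ax, indexing='ij')  # lattice site coords
    dx, dy, dz = sx - x, sy - y, sz - z                  # displacements to the point
    dist = np.sqrt(dx**2 + dy**2 + dz**2)
    # Keep sites inside the sphere of radius sqrt(3)*r, excluding the point itself.
    keep = (np.sqrt(sx**2 + sy**2 + sz**2) <= np.sqrt(3) * r) & (dist > 0)
    d5 = dist[keep]**5
    Hx = np.sum(3 * dx[keep] * dz[keep] / d5)
    Hy = np.sum(3 * dy[keep] * dz[keep] / d5)
    Hz = np.sum((2 * dz[keep]**2 - dx[keep]**2 - dy[keep]**2) / d5)
    return float(np.sqrt(Hx**2 + Hy**2 + Hz**2))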
# Copyright (c) Microsoft Corporation.NEWLINE# Licensed under the MIT license.NEWLINENEWLINEimport timeNEWLINEfrom typing import IterableNEWLINENEWLINEfrom ..graph import Model, ModelStatusNEWLINEfrom .interface import AbstractExecutionEngineNEWLINEfrom .listener import DefaultListenerNEWLINENEWLINE_execution_engine = NoneNEWLINE_default_listener = NoneNEWLINENEWLINE__all__ = ['get_execution_engine', 'get_and_register_default_listener',NEWLINE 'list_models', 'submit_models', 'wait_models', 'query_available_resources',NEWLINE 'set_execution_engine', 'is_stopped_exec', 'budget_exhausted']NEWLINENEWLINENEWLINEdef set_execution_engine(engine: AbstractExecutionEngine) -> None:NEWLINE global _execution_engineNEWLINE if _execution_engine is None:NEWLINE _execution_engine = engineNEWLINE else:NEWLINE raise RuntimeError('Execution engine is already set. 'NEWLINE 'You should avoid instantiating RetiariiExperiment twice in one process. 'NEWLINE 'If you are running in a Jupyter notebook, please restart the kernel.')NEWLINENEWLINENEWLINEdef get_execution_engine() -> AbstractExecutionEngine:NEWLINE global _execution_engineNEWLINE assert _execution_engine is not None, 'You need to set execution engine, before using it.'NEWLINE return _execution_engineNEWLINENEWLINENEWLINEdef get_and_register_default_listener(engine: AbstractExecutionEngine) -> DefaultListener:NEWLINE global _default_listenerNEWLINE if _default_listener is None:NEWLINE _default_listener = DefaultListener()NEWLINE engine.register_graph_listener(_default_listener)NEWLINE return _default_listenerNEWLINENEWLINENEWLINEdef submit_models(*models: Model) -> None:NEWLINE engine = get_execution_engine()NEWLINE get_and_register_default_listener(engine)NEWLINE engine.submit_models(*models)NEWLINENEWLINENEWLINEdef list_models(*models: Model) -> Iterable[Model]:NEWLINE engine = get_execution_engine()NEWLINE get_and_register_default_listener(engine)NEWLINE return engine.list_models()NEWLINENEWLINENEWLINEdef wait_models(*models: Model) -> None:NEWLINE get_and_register_default_listener(get_execution_engine())NEWLINE while True:NEWLINE time.sleep(1)NEWLINE left_models = [g for g in models if not g.status in (ModelStatus.Trained, ModelStatus.Failed)]NEWLINE if not left_models:NEWLINE breakNEWLINENEWLINENEWLINEdef query_available_resources() -> int:NEWLINE engine = get_execution_engine()NEWLINE resources = engine.query_available_resource()NEWLINE return resources if isinstance(resources, int) else len(resources)NEWLINENEWLINENEWLINEdef is_stopped_exec(model: Model) -> bool:NEWLINE return model.status in (ModelStatus.Trained, ModelStatus.Failed)NEWLINENEWLINENEWLINEdef budget_exhausted() -> bool:NEWLINE engine = get_execution_engine()NEWLINE return engine.budget_exhausted()NEWLINE |
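The module is a small process-global registry: one engine, one default listener, and free functions that route through them. A usage sketch; MyEngine and sample_model are hypothetical stand-ins for an AbstractExecutionEngine implementation and a search strategy's model source, not names from this module:

# `MyEngine` and `sample_model` are placeholders, not part of this module.
set_execution_engine(MyEngine())

while not budget_exhausted():
    if query_available_resources() > 0:
        model = sample_model()
        submit_models(model)   # registers the default listener, then submits
        wait_models(model)     # polls once per second until Trained or Failed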
# Copyright 2018 IguazioNEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINEimport timeNEWLINENEWLINEfrom kubernetes import clientNEWLINEfrom kubernetes.client.rest import ApiExceptionNEWLINENEWLINEimport mlrun.errorsNEWLINEfrom mlrun.runtimes.base import BaseRuntimeHandlerNEWLINENEWLINEfrom ..builder import build_runtimeNEWLINEfrom ..db import RunDBErrorNEWLINEfrom ..kfpops import build_opNEWLINEfrom ..model import RunObjectNEWLINEfrom ..utils import get_in, loggerNEWLINEfrom .base import RunErrorNEWLINEfrom .pod import KubeResource, kube_resource_spec_to_pod_specNEWLINEfrom .utils import AsyncLogWriterNEWLINENEWLINENEWLINEclass KubejobRuntime(KubeResource):NEWLINE kind = "job"NEWLINE _is_nested = TrueNEWLINENEWLINE _is_remote = TrueNEWLINENEWLINE @propertyNEWLINE def is_deployed(self):NEWLINE """check if the function is deployed (have a valid container)"""NEWLINE if self.spec.image:NEWLINE return TrueNEWLINENEWLINE if self._is_remote_api():NEWLINE db = self._get_db()NEWLINE try:NEWLINE db.get_builder_status(self, logs=False)NEWLINE except RunDBError:NEWLINE passNEWLINENEWLINE if self.spec.image:NEWLINE return TrueNEWLINE if self.status.state and self.status.state == "ready":NEWLINE return TrueNEWLINE return FalseNEWLINENEWLINE def with_source_archive(self, source, pythonpath=None, pull_at_runtime=True):NEWLINE """load the code from git/tar/zip archive at runtime or buildNEWLINENEWLINE :param source: valid path to git, zip, or tar file, e.g.NEWLINE git://github.com/mlrun/something.gitNEWLINE http://some/url/file.zipNEWLINE :param pythonpath: python search path relative to the archive root or absolute (e.g. './subdir')NEWLINE :param pull_at_runtime: load the archive into the container at job runtime vs on build/deployNEWLINE """NEWLINE self.spec.build.load_source_on_run = pull_at_runtimeNEWLINE self.spec.build.source = sourceNEWLINE if pythonpath:NEWLINE self.spec.pythonpath = pythonpathNEWLINENEWLINE def build_config(NEWLINE self,NEWLINE image="",NEWLINE base_image=None,NEWLINE commands: list = None,NEWLINE secret=None,NEWLINE source=None,NEWLINE extra=None,NEWLINE load_source_on_run=None,NEWLINE ):NEWLINE """specify builder configuration for the deploy operationNEWLINENEWLINE :param image: target image name/pathNEWLINE :param base_image: base image name/pathNEWLINE :param commands: list of docker build (RUN) commands e.g. ['pip install pandas']NEWLINE :param secret: k8s secret for accessing the docker registryNEWLINE :param source: source git/tar archive to load code from in to the context/workdirNEWLINE e.g. 
git://github.com/mlrun/something.git#developmentNEWLINE :param extra: extra Dockerfile linesNEWLINE :param load_source_on_run: load the archive code into the container at runtime vs at build timeNEWLINE """NEWLINE if image:NEWLINE self.spec.build.image = imageNEWLINE if commands:NEWLINE if not isinstance(commands, list):NEWLINE raise ValueError("commands must be a string list")NEWLINE self.spec.build.commands = self.spec.build.commands or []NEWLINE self.spec.build.commands += commandsNEWLINE if extra:NEWLINE self.spec.build.extra = extraNEWLINE if secret:NEWLINE self.spec.build.secret = secretNEWLINE if base_image:NEWLINE self.spec.build.base_image = base_imageNEWLINE if source:NEWLINE self.spec.build.source = sourceNEWLINE if load_source_on_run:NEWLINE self.spec.build.load_source_on_run = load_source_on_runNEWLINENEWLINE def deploy(NEWLINE self,NEWLINE watch=True,NEWLINE with_mlrun=True,NEWLINE skip_deployed=False,NEWLINE is_kfp=False,NEWLINE mlrun_version_specifier=None,NEWLINE ):NEWLINE """deploy function, build container with dependenciesNEWLINENEWLINE :param watch: wait for the deploy to complete (and print build logs)NEWLINE :param with_mlrun: add the current mlrun package to the container buildNEWLINE :param skip_deployed: skip the build if we already have an image for the functionNEWLINE :param mlrun_version_specifier: which mlrun package version to include (if not current)NEWLINE """NEWLINENEWLINE build = self.spec.buildNEWLINENEWLINE if not build.source and not build.commands and not build.extra and with_mlrun:NEWLINE logger.info(NEWLINE "running build to add mlrun package, set "NEWLINE "with_mlrun=False to skip if its already in the image"NEWLINE )NEWLINE self.status.state = ""NEWLINENEWLINE # When we're in pipelines context we must watch otherwise the pipelines pod will exit before the operationNEWLINE # is actually done. 
(when a pipelines pod exits, the pipeline step marked as done)NEWLINE if is_kfp:NEWLINE watch = TrueNEWLINENEWLINE if self._is_remote_api():NEWLINE db = self._get_db()NEWLINE data = db.remote_builder(NEWLINE self, with_mlrun, mlrun_version_specifier, skip_deployedNEWLINE )NEWLINE logger.info(NEWLINE f"Started building image: {data.get('data', {}).get('spec', {}).get('build', {}).get('image')}"NEWLINE )NEWLINE self.status = data["data"].get("status", None)NEWLINE self.spec.image = get_in(data, "data.spec.image")NEWLINE ready = data.get("ready", False)NEWLINE if watch and not ready:NEWLINE state = self._build_watch(watch)NEWLINE ready = state == "ready"NEWLINE self.status.state = stateNEWLINE else:NEWLINE self.save(versioned=False)NEWLINE ready = build_runtime(NEWLINE self, with_mlrun, mlrun_version_specifier, skip_deployed, watchNEWLINE )NEWLINE self.save(versioned=False)NEWLINENEWLINE if watch and not ready:NEWLINE raise mlrun.errors.MLRunRuntimeError("Deploy failed")NEWLINE return readyNEWLINENEWLINE def _build_watch(self, watch=True, logs=True):NEWLINE db = self._get_db()NEWLINE offset = 0NEWLINE try:NEWLINE text, _ = db.get_builder_status(self, 0, logs=logs)NEWLINE except RunDBError:NEWLINE raise ValueError("function or build process not found")NEWLINENEWLINE if text:NEWLINE print(text)NEWLINE if watch:NEWLINE while self.status.state in ["pending", "running"]:NEWLINE offset += len(text)NEWLINE time.sleep(2)NEWLINE text, _ = db.get_builder_status(self, offset, logs=logs)NEWLINE if text:NEWLINE print(text, end="")NEWLINENEWLINE return self.status.stateNEWLINENEWLINE def builder_status(self, watch=True, logs=True):NEWLINE if self._is_remote_api():NEWLINE return self._build_watch(watch, logs)NEWLINENEWLINE else:NEWLINE pod = self.status.build_podNEWLINE if not self.status.state == "ready" and pod:NEWLINE k8s = self._get_k8s()NEWLINE status = k8s.get_pod_status(pod)NEWLINE if logs:NEWLINE if watch:NEWLINE status = k8s.watch(pod)NEWLINE else:NEWLINE resp = k8s.logs(pod)NEWLINE if resp:NEWLINE print(resp.encode())NEWLINENEWLINE if status == "succeeded":NEWLINE self.status.build_pod = NoneNEWLINE self.status.state = "ready"NEWLINE logger.info("build completed successfully")NEWLINE return "ready"NEWLINE if status in ["failed", "error"]:NEWLINE self.status.state = statusNEWLINE logger.error(f" build {status}, watch the build pod logs: {pod}")NEWLINE return statusNEWLINENEWLINE logger.info(f"builder status is: {status}, wait for it to complete")NEWLINE return NoneNEWLINENEWLINE def deploy_step(NEWLINE self,NEWLINE image=None,NEWLINE base_image=None,NEWLINE commands: list = None,NEWLINE secret_name="",NEWLINE with_mlrun=True,NEWLINE skip_deployed=False,NEWLINE ):NEWLINE function_name = self.metadata.name or "function"NEWLINE name = f"deploy_{function_name}"NEWLINE return build_op(NEWLINE name,NEWLINE self,NEWLINE image=image,NEWLINE base_image=base_image,NEWLINE commands=commands,NEWLINE secret_name=secret_name,NEWLINE with_mlrun=with_mlrun,NEWLINE skip_deployed=skip_deployed,NEWLINE )NEWLINENEWLINE def _run(self, runobj: RunObject, execution):NEWLINENEWLINE command, args, extra_env = self._get_cmd_args(runobj)NEWLINENEWLINE if runobj.metadata.iteration:NEWLINE self.store_run(runobj)NEWLINE k8s = self._get_k8s()NEWLINE new_meta = self._get_meta(runobj)NEWLINENEWLINE if self._secrets:NEWLINE if self._secrets.has_vault_source():NEWLINE self._add_vault_params_to_spec(runobj)NEWLINE if self._secrets.has_azure_vault_source():NEWLINE self._add_azure_vault_params_to_spec(NEWLINE 
self._secrets.get_azure_vault_k8s_secret()NEWLINE )NEWLINE k8s_secrets = self._secrets.get_k8s_secrets()NEWLINE if k8s_secrets:NEWLINE self._add_project_k8s_secrets_to_spec(k8s_secrets, runobj)NEWLINENEWLINE pod_spec = func_to_pod(NEWLINE self.full_image_path(), self, extra_env, command, args, self.spec.workdirNEWLINE )NEWLINE pod = client.V1Pod(metadata=new_meta, spec=pod_spec)NEWLINE try:NEWLINE pod_name, namespace = k8s.create_pod(pod)NEWLINE except ApiException as exc:NEWLINE raise RunError(str(exc))NEWLINENEWLINE if pod_name and self.kfp:NEWLINE writer = AsyncLogWriter(self._db_conn, runobj)NEWLINE status = k8s.watch(pod_name, namespace, writer=writer)NEWLINENEWLINE if status in ["failed", "error"]:NEWLINE raise RunError(f"pod exited with {status}, check logs")NEWLINE else:NEWLINE txt = f"Job is running in the background, pod: {pod_name}"NEWLINE logger.info(txt)NEWLINE runobj.status.status_text = txtNEWLINENEWLINE return NoneNEWLINENEWLINENEWLINEdef func_to_pod(image, runtime, extra_env, command, args, workdir):NEWLINE container = client.V1Container(NEWLINE name="base",NEWLINE image=image,NEWLINE env=extra_env + runtime.spec.env,NEWLINE command=[command],NEWLINE args=args,NEWLINE working_dir=workdir,NEWLINE image_pull_policy=runtime.spec.image_pull_policy,NEWLINE volume_mounts=runtime.spec.volume_mounts,NEWLINE resources=runtime.spec.resources,NEWLINE )NEWLINENEWLINE pod_spec = kube_resource_spec_to_pod_spec(runtime.spec, container)NEWLINENEWLINE if runtime.spec.image_pull_secret:NEWLINE pod_spec.image_pull_secrets = [NEWLINE client.V1LocalObjectReference(name=runtime.spec.image_pull_secret)NEWLINE ]NEWLINENEWLINE return pod_specNEWLINENEWLINENEWLINEclass KubeRuntimeHandler(BaseRuntimeHandler):NEWLINE @staticmethodNEWLINE def _are_resources_coupled_to_run_object() -> bool:NEWLINE return TrueNEWLINENEWLINE @staticmethodNEWLINE def _get_object_label_selector(object_id: str) -> str:NEWLINE return f"mlrun/uid={object_id}"NEWLINENEWLINE @staticmethodNEWLINE def _get_default_label_selector() -> str:NEWLINE return "mlrun/class in (build, job)"NEWLINE |
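A hedged sketch of the build/deploy flow implemented above. The repo, file, and image names are placeholders, and `code_to_function` is mlrun's public helper for constructing a function object; exact arguments may differ by mlrun version.

import mlrun

fn = mlrun.code_to_function("trainer", filename="train.py", kind="job")
fn.with_source_archive("git://github.com/example/repo.git", pull_at_runtime=True)
fn.build_config(image="example/trainer:latest", commands=["pip install pandas"])
fn.deploy(watch=True)            # builds the container, streaming build logs
# run = fn.run(handler="train")  # then executes as a Kubernetes job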
# title.pyNEWLINENEWLINEimport htmlNEWLINEimport reNEWLINENEWLINEfrom bobbit.utils import strip_htmlNEWLINENEWLINE# MetadataNEWLINENEWLINENAME = 'title'NEWLINEENABLE = TrueNEWLINEPATTERN = r'.*(?P<url>http[^\s]+).*'NEWLINEUSAGE = '''Usage: <url>NEWLINELooks up title of URL.NEWLINEExample:NEWLINE > http://www.insidehighered.com/quicktakes/2019/06/24/uc-santa-cruz-removes-catholic-mission-bellNEWLINE Title: UC Santa Cruz Removes Catholic Mission BellNEWLINE'''NEWLINENEWLINE# ConstantsNEWLINENEWLINEBLACKLIST = []NEWLINEAVOID_EXTENSIONS = ('.gif', '.jpg', '.mkv', '.mov', '.mp4', '.png')NEWLINENEWLINE# CommandNEWLINENEWLINEasync def title(bot, message, url=None):NEWLINE if message.channel in BLACKLIST or \NEWLINE any(url.lower().endswith(extension) for extension in AVOID_EXTENSIONS):NEWLINE returnNEWLINENEWLINE async with bot.http_client.get(url) as response:NEWLINE try:NEWLINE text = (await response.text()).replace('\n', ' ')NEWLINE html_title = re.findall(r'<title[^>]*>([^<]+)</title>', text)[0]NEWLINE response = bot.client.format_text(NEWLINE '{color}{green}Title{color}: {bold}{title}{bold}',NEWLINE title = strip_html(html.unescape(html_title)).strip()NEWLINE )NEWLINE except (IndexError, ValueError):NEWLINE returnNEWLINENEWLINE return message.with_body(response)NEWLINENEWLINE# RegisterNEWLINENEWLINEdef register(bot):NEWLINE global BLACKLISTNEWLINENEWLINE config = bot.config.load_module_config('title')NEWLINE BLACKLIST = config.get('blacklist', BLACKLIST)NEWLINENEWLINE if config.get('disabled', False):NEWLINE return []NEWLINENEWLINE return (NEWLINE ('command', PATTERN, title),NEWLINE )NEWLINENEWLINE# vim: set sts=4 sw=4 ts=8 expandtab ft=python:NEWLINE |
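The title lookup boils down to one regex plus HTML unescaping; here is a standalone sketch of that step with made-up HTML (strip_html is omitted):

import html
import re

text = '<html><head><title>UC Santa Cruz &amp; the Bell</title></head></html>'
html_title = re.findall(r'<title[^>]*>([^<]+)</title>', text)[0]
print(html.unescape(html_title))  # -> UC Santa Cruz & the Bell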
import torchNEWLINEimport torch.nn as nnNEWLINEimport torch.nn.functional as FNEWLINEimport randomNEWLINENEWLINEfrom torch.distributions import NormalNEWLINENEWLINENEWLINEclass Retina:NEWLINE """A visual retina.NEWLINENEWLINE Extracts a foveated glimpse `phi` around location `l`NEWLINE from an image `x`.NEWLINENEWLINE Concretely, encodes the region around `l` at aNEWLINE high-resolution but uses a progressively lowerNEWLINE resolution for pixels further from `l`, resultingNEWLINE in a compressed representation of the originalNEWLINE image `x`.NEWLINENEWLINE Args:NEWLINE x: a 4D Tensor of shape (B, H, W, C). The minibatchNEWLINE of images.NEWLINE l: a 2D Tensor of shape (B, 2). Contains normalizedNEWLINE coordinates in the range [-1, 1].NEWLINE g: size of the first square patch.NEWLINE k: number of patches to extract in the glimpse.NEWLINE s: scaling factor that controls the size ofNEWLINE successive patches.NEWLINENEWLINE Returns:NEWLINE phi: a 5D tensor of shape (B, k, g, g, C). TheNEWLINE foveated glimpse of the image.NEWLINE """NEWLINENEWLINE def __init__(self, g, k, s):NEWLINE self.g = gNEWLINE self.k = kNEWLINE self.s = sNEWLINENEWLINE def foveate(self, x, l):NEWLINE """Extract `k` square patches of size `g`, centeredNEWLINE at location `l`. The initial patch is a square ofNEWLINE size `g`, and each subsequent patch is a squareNEWLINE whose side is `s` times the size of the previousNEWLINE patch.NEWLINENEWLINE The `k` patches are finally resized to (g, g) andNEWLINE concatenated into a tensor of shape (B, k, g, g, C).NEWLINE """NEWLINE phi = []NEWLINE size = self.gNEWLINENEWLINE # extract k patches of increasing sizeNEWLINE for i in range(self.k):NEWLINE phi.append(self.extract_patch(x, l, size))NEWLINE size = int(self.s * size)NEWLINENEWLINE # resize the patches to squares of size gNEWLINE for i in range(1, len(phi)):NEWLINE k = phi[i].shape[-1] // self.gNEWLINE phi[i] = F.avg_pool2d(phi[i], k)NEWLINENEWLINE # concatenate into a single tensor and flattenNEWLINE phi = torch.cat(phi, 1)NEWLINE phi = phi.view(phi.shape[0], -1)NEWLINENEWLINE return phiNEWLINENEWLINE def extract_patch(self, x, l, size):NEWLINE """Extract a single patch for each image in `x`.NEWLINENEWLINE Args:NEWLINE x: a 4D Tensor of shape (B, H, W, C). 
The minibatchNEWLINE of images.NEWLINE l: a 2D Tensor of shape (B, 2).NEWLINE size: a scalar defining the size of the extracted patch.NEWLINENEWLINE Returns:NEWLINE patch: a 4D Tensor of shape (B, size, size, C)NEWLINE """NEWLINE B, C, H, W = x.shapeNEWLINENEWLINE start = self.denormalize(H, l)NEWLINE end = start + sizeNEWLINENEWLINE # pad with zerosNEWLINE x = F.pad(x, (size, size, size, size))NEWLINENEWLINE # loop through mini-batch and extract patchesNEWLINE patch = []NEWLINE for i in range(B):NEWLINE patch.append(x[i, :, start[i, 1] : end[i, 1], start[i, 0] : end[i, 0]])NEWLINE return torch.stack(patch)NEWLINENEWLINE def denormalize(self, T, coords):NEWLINE """Convert coordinates in the range [-1, 1] toNEWLINE coordinates in the range [0, T] where `T` isNEWLINE the size of the image.NEWLINE """NEWLINE return (0.5 * ((coords + 1.0) * T)).long()NEWLINENEWLINE def exceeds(self, from_x, to_x, from_y, to_y, T):NEWLINE """Check whether the extracted patch will exceedNEWLINE the boundaries of the image of size `T`.NEWLINE """NEWLINE if (from_x < 0) or (from_y < 0) or (to_x > T) or (to_y > T):NEWLINE return TrueNEWLINE return FalseNEWLINENEWLINENEWLINEclass HeadNet(nn.Module):NEWLINE def __init__(self, ):NEWLINE super(HeadNet, self).__init__()NEWLINE mult = 256NEWLINE self.fc1 = nn.Linear(mult, 128)NEWLINE self.fc11 = nn.Linear(2, 128)NEWLINE self.fc2 = nn.Linear(128, 1)NEWLINE self.fc1.apply(weights_init)NEWLINE self.fc2.apply(weights_init)NEWLINENEWLINE def forward(self, h_t, a_t):NEWLINE x = F.relu(self.fc1(h_t)+self.fc11(a_t))NEWLINE x = F.relu(self.fc2(x))NEWLINE return xNEWLINENEWLINEclass EnsembleNet(nn.Module):NEWLINE def __init__(self, n_ensemble, hidden_size):NEWLINE super(EnsembleNet, self).__init__()NEWLINE self.n_emseble = n_ensembleNEWLINE self.corenet_list = nn.ModuleList([LocationNetwork(input_size=hidden_size, output_size=2, std=0.1) for k in range(n_ensemble)])NEWLINENEWLINE self.net_list = nn.ModuleList([HeadNet() for k in range(n_ensemble)])NEWLINENEWLINE def _core(self, x, epoch, t, is_test):NEWLINE return [net(x, epoch, t, is_test) for net in self.corenet_list]NEWLINENEWLINE def forward(self, h_t, k, epoch, t, is_test, refer_action=None):NEWLINE if k is not None:NEWLINE if refer_action is not None:NEWLINE return refer_action.detach(), self.net_list[k](h_t.detach(), refer_action.detach())NEWLINE else:NEWLINE return self.corenet_list[k](h_t, epoch, t, is_test), self.net_list[k](h_t.detach(), self.corenet_list[k](h_t, epoch, t, is_test))NEWLINE else:NEWLINE if refer_action is not None:NEWLINE net_heads = []NEWLINE for k in range(self.n_emseble):NEWLINE net_heads.append(self.net_list[k](h_t.detach(), refer_action.detach()))NEWLINE refer_action = [refer_action.detach() for k in range(self.n_emseble)]NEWLINE return refer_action, net_headsNEWLINE else:NEWLINE core_cache = self._core(h_t, epoch, t, is_test)NEWLINE net_heads = []NEWLINE for k, core in enumerate(core_cache):NEWLINE net_heads.append(self.net_list[k](h_t.detach(), core_cache[k]))NEWLINE return core_cache, net_headsNEWLINENEWLINENEWLINEclass GlimpseNetwork(nn.Module):NEWLINE """The glimpse network.NEWLINENEWLINE Combines the "what" and the "where" into a glimpseNEWLINE feature vector `g_t`.NEWLINENEWLINE - "what": glimpse extracted from the retina.NEWLINE - "where": location tuple where glimpse was extracted.NEWLINENEWLINE Concretely, feeds the output of the retina `phi` toNEWLINE a fc layer and the glimpse location vector `l_t_prev`NEWLINE to a fc layer. 
Finally, these outputs are fed eachNEWLINE through a fc layer and their sum is rectified.NEWLINENEWLINE In other words:NEWLINENEWLINE `g_t = relu( fc( fc(l) ) + fc( fc(phi) ) )`NEWLINENEWLINE Args:NEWLINE h_g: hidden layer size of the fc layer for `phi`.NEWLINE h_l: hidden layer size of the fc layer for `l`.NEWLINE g: size of the square patches in the glimpses extractedNEWLINE by the retina.NEWLINE k: number of patches to extract per glimpse.NEWLINE s: scaling factor that controls the size of successive patches.NEWLINE c: number of channels in each image.NEWLINE x: a 4D Tensor of shape (B, H, W, C). The minibatchNEWLINE of images.NEWLINE l_t_prev: a 2D tensor of shape (B, 2). Contains the glimpseNEWLINE coordinates [x, y] for the previous timestep `t-1`.NEWLINENEWLINE Returns:NEWLINE g_t: a 2D tensor of shape (B, hidden_size).NEWLINE The glimpse representation returned byNEWLINE the glimpse network for the currentNEWLINE timestep `t`.NEWLINE """NEWLINENEWLINE def __init__(self, h_g, h_l, g, k, s, c):NEWLINE super().__init__()NEWLINENEWLINE self.retina = Retina(g, k, s)NEWLINENEWLINE # glimpse layerNEWLINE D_in = k * g * g * cNEWLINE self.fc1 = nn.Linear(D_in, h_g)NEWLINENEWLINE # location layerNEWLINE D_in = 2NEWLINE self.fc2 = nn.Linear(D_in, h_l)NEWLINENEWLINE self.fc3 = nn.Linear(h_g, h_g + h_l)NEWLINE self.fc4 = nn.Linear(h_l, h_g + h_l)NEWLINENEWLINE def forward(self, x, l_t_prev):NEWLINE # generate glimpse phi from image xNEWLINE phi = self.retina.foveate(x, l_t_prev)NEWLINENEWLINE # flatten location vectorNEWLINE l_t_prev = l_t_prev.view(l_t_prev.size(0), -1)NEWLINENEWLINE # feed phi and l to respective fc layersNEWLINE phi_out = F.relu(self.fc1(phi))NEWLINE l_out = F.relu(self.fc2(l_t_prev))NEWLINENEWLINE what = self.fc3(phi_out)NEWLINE where = self.fc4(l_out)NEWLINENEWLINE # feed to fc layerNEWLINE g_t = F.relu(what + where)NEWLINE # print('g_t',g_t)NEWLINE # print("g_t_norm",torch.norm(g_t))NEWLINENEWLINE return g_tNEWLINENEWLINENEWLINEclass CoreNetwork(nn.Module):NEWLINE """The core network.NEWLINENEWLINE An RNN that maintains an internal state by integratingNEWLINE information extracted from the history of past observations.NEWLINE It encodes the agent's knowledge of the environment throughNEWLINE a state vector `h_t` that gets updated at every time step `t`.NEWLINENEWLINE Concretely, it takes the glimpse representation `g_t` as input,NEWLINE and combines it with its internal state `h_t_prev` at the previousNEWLINE time step, to produce the new internal state `h_t` at the currentNEWLINE time step.NEWLINENEWLINE In other words:NEWLINENEWLINE `h_t = relu( fc(h_t_prev) + fc(g_t) )`NEWLINENEWLINE Args:NEWLINE input_size: input size of the rnn.NEWLINE hidden_size: hidden size of the rnn.NEWLINE g_t: a 2D tensor of shape (B, hidden_size). The glimpseNEWLINE representation returned by the glimpse network for theNEWLINE current timestep `t`.NEWLINE h_t_prev: a 2D tensor of shape (B, hidden_size). TheNEWLINE hidden state vector for the previous timestep `t-1`.NEWLINENEWLINE Returns:NEWLINE h_t: a 2D tensor of shape (B, hidden_size). 
The hiddenNEWLINE state vector for the current timestep `t`.NEWLINE """NEWLINENEWLINE def __init__(self, input_size, hidden_size):NEWLINE super().__init__()NEWLINENEWLINE self.input_size = input_sizeNEWLINE self.hidden_size = hidden_sizeNEWLINENEWLINE self.i2h = nn.Linear(input_size, hidden_size)NEWLINE self.h2h = nn.Linear(hidden_size, hidden_size)NEWLINENEWLINE def forward(self, g_t, h_t_prev):NEWLINE h1 = self.i2h(g_t)NEWLINE # np.set_printoptions(threshold=np.inf)NEWLINE # print('h1', h1.detach().cpu().numpy())NEWLINE h2 = self.h2h(h_t_prev)NEWLINE h_t = F.relu(h1 + h2)NEWLINE # print('h_t',h_t)NEWLINE # print('h_t_norm',torch.norm(h_t))NEWLINE return h_tNEWLINENEWLINENEWLINEclass ActionNetwork(nn.Module):NEWLINE """The action network.NEWLINENEWLINE Uses the internal state `h_t` of the core network toNEWLINE produce the final output classification.NEWLINENEWLINE Concretely, feeds the hidden state `h_t` through a fcNEWLINE layer followed by a softmax to create a vector ofNEWLINE output probabilities over the possible classes.NEWLINENEWLINE Hence, the environment action `a_t` is drawn from aNEWLINE distribution conditioned on an affine transformationNEWLINE of the hidden state vector `h_t`, or in other words,NEWLINE the action network is simply a linear softmax classifier.NEWLINENEWLINE Args:NEWLINE input_size: input size of the fc layer.NEWLINE output_size: output size of the fc layer.NEWLINE h_t: the hidden state vector of the core networkNEWLINE for the current time step `t`.NEWLINENEWLINE Returns:NEWLINE a_t: output probability vector over the classes.NEWLINE """NEWLINENEWLINE def __init__(self, input_size, output_size, log_std=0):NEWLINE super().__init__()NEWLINENEWLINE self.fc = nn.Linear(input_size, output_size)NEWLINE # self.action_log_std = nn.Parameter(torch.ones(output_size) * log_std)NEWLINENEWLINE def forward(self, h_t):NEWLINE a_t = self.fc(h_t)NEWLINE # m = torch.distributions.Normal(a_t, torch.exp(0.5 * self.action_log_std))NEWLINE # sketch_anchor_embedding = m.sample()NEWLINE # log_prob = m.log_prob(sketch_anchor_embedding).sum()NEWLINENEWLINE return a_tNEWLINENEWLINEclass BootNetwork(nn.Module):NEWLINENEWLINE def __init__(self, input_size, output_size):NEWLINE super().__init__()NEWLINENEWLINE self.fc = nn.Linear(input_size, output_size)NEWLINE # self.action_log_std = nn.Parameter(torch.ones(output_size) * log_std)NEWLINENEWLINE def forward(self, h_t):NEWLINE h_t = self.fc(h_t)NEWLINE # m = torch.distributions.Normal(a_t, torch.exp(0.5 * self.action_log_std))NEWLINE # sketch_anchor_embedding = m.sample()NEWLINE # log_prob = m.log_prob(sketch_anchor_embedding).sum()NEWLINENEWLINE return h_tNEWLINENEWLINEdef weights_init(m):NEWLINE """custom weights initialization"""NEWLINE classtype = m.__class__NEWLINE if classtype == nn.Linear or classtype == nn.Conv2d:NEWLINE print("default init")NEWLINE #m.weight.data.normal_(0.0, 0.02)NEWLINE #m.bias.data.fill_(0)NEWLINE elif classtype == nn.BatchNorm2d:NEWLINE m.weight.data.normal_(1.0, 0.02)NEWLINE m.bias.data.fill_(0)NEWLINE else:NEWLINE print('%s is not initialized.' %classtype)NEWLINENEWLINEclass LocationNetwork(nn.Module):NEWLINE """The location network.NEWLINENEWLINE Uses the internal state `h_t` of the core network toNEWLINE produce the location coordinates `l_t` for the nextNEWLINE time step.NEWLINENEWLINE Concretely, feeds the hidden state `h_t` through a fcNEWLINE layer followed by a tanh to clamp the output beweenNEWLINE [-1, 1]. 
This produces a 2D vector of means used toNEWLINE parametrize a two-component Gaussian with a fixedNEWLINE variance from which the location coordinates `l_t`NEWLINE for the next time step are sampled.NEWLINENEWLINE Hence, the location `l_t` is chosen stochasticallyNEWLINE from a distribution conditioned on an affineNEWLINE transformation of the hidden state vector `h_t`.NEWLINENEWLINE Args:NEWLINE input_size: input size of the fc layer.NEWLINE output_size: output size of the fc layer.NEWLINE std: standard deviation of the normal distribution.NEWLINE h_t: the hidden state vector of the core network forNEWLINE the current time step `t`.NEWLINENEWLINE Returns:NEWLINE mu: a 2D vector of shape (B, 2).NEWLINE l_t: a 2D vector of shape (B, 2).NEWLINE """NEWLINENEWLINE def __init__(self, input_size, output_size, std):NEWLINE super().__init__()NEWLINENEWLINE self.std = stdNEWLINE # self.std = nn.Parameter(torch.ones(2) * std)NEWLINENEWLINE hid_size = input_size // 2NEWLINE self.fc = nn.Linear(input_size, hid_size)NEWLINE self.fc.apply(weights_init)NEWLINE self.fc_lt = nn.Linear(hid_size, output_size)NEWLINE self.fc_lt.apply(weights_init)NEWLINENEWLINE def forward(self, h_t, epoch, t, is_test):NEWLINE # compute meanNEWLINE feat = F.relu(self.fc(h_t.detach()))NEWLINE l_t = torch.tanh(self.fc_lt(feat))NEWLINENEWLINENEWLINE # log_pi = Normal(mu, self.std).log_prob(l_t)NEWLINE # we assume both dimensions are independentNEWLINE # 1. pdf of the joint is the product of the pdfsNEWLINE # 2. log of the product is the sum of the logsNEWLINE # log_pi = torch.sum(log_pi, dim=1)NEWLINENEWLINE # bound between [-1, 1]NEWLINENEWLINE # entropy = Normal(mu, self.std).entropy()NEWLINENEWLINE return l_tNEWLINENEWLINENEWLINEclass BaselineNetwork(nn.Module):NEWLINE """The baseline network.NEWLINENEWLINE This network regresses the baseline in theNEWLINE reward function to reduce the variance ofNEWLINE the gradient update.NEWLINENEWLINE Args:NEWLINE input_size: input size of the fc layer.NEWLINE output_size: output size of the fc layer.NEWLINE h_t: the hidden state vector of the core networkNEWLINE for the current time step `t`.NEWLINENEWLINE Returns:NEWLINE b_t: a 2D vector of shape (B, 1). The baselineNEWLINE for the current time step `t`.NEWLINE """NEWLINENEWLINE def __init__(self, input_size, output_size):NEWLINE super().__init__()NEWLINENEWLINE self.fc = nn.Linear(input_size, output_size)NEWLINENEWLINE def forward(self, h_t):NEWLINE b_t = self.fc(h_t.detach())NEWLINE return b_tNEWLINE |
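A hedged smoke test wiring the modules above on random tensors. Sizes are arbitrary; note that the code consumes images as (B, C, H, W), as `extract_patch` unpacks, despite the (B, H, W, C) docstrings.

import torch

B, C, H = 4, 1, 64
x = torch.randn(B, C, H, H)
l_t = torch.rand(B, 2) * 2 - 1                     # locations in [-1, 1]

glimpse = GlimpseNetwork(h_g=128, h_l=128, g=8, k=3, s=2, c=C)
core = CoreNetwork(input_size=256, hidden_size=256)
locator = LocationNetwork(input_size=256, output_size=2, std=0.1)

h_t = torch.zeros(B, 256)
g_t = glimpse(x, l_t)                              # (B, 256)
h_t = core(g_t, h_t)                               # (B, 256)
l_t = locator(h_t, epoch=0, t=0, is_test=False)    # next location, (B, 2)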
# Generated by Django 2.2.4 on 2021-01-15 01:18NEWLINENEWLINEfrom django.db import migrations, modelsNEWLINENEWLINENEWLINEclass Migration(migrations.Migration):NEWLINENEWLINE dependencies = [NEWLINE ('users', '0019_puzzlesolution'),NEWLINE ]NEWLINENEWLINE operations = [NEWLINE migrations.AddField(NEWLINE model_name='puzzlesolution',NEWLINE name='previous_attempts',NEWLINE field=models.CharField(default='', max_length=2000),NEWLINE ),NEWLINE ]NEWLINE |
from pathlib import Path

import pytest

nplg = pytest.importorskip("ome_types._napari_plugin")

DATA = Path(__file__).parent / "data"


@pytest.mark.parametrize("fname", DATA.iterdir(), ids=lambda x: x.stem)
def test_widget(fname, qtbot):
    if fname.stem in ("bad.ome", "timestampannotation.ome"):
        pytest.xfail()
    nplg.OMETree(str(fname))
#!/usr/bin/env pythonNEWLINE# Copyright (C) 2012-2013, The CyanogenMod ProjectNEWLINE# (C) 2017-2018,2020-2021, The LineageOS ProjectNEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINEfrom __future__ import print_functionNEWLINENEWLINEimport base64NEWLINEimport jsonNEWLINEimport netrcNEWLINEimport osNEWLINEimport reNEWLINEimport sysNEWLINEtry:NEWLINE # For python3NEWLINE import urllib.errorNEWLINE import urllib.parseNEWLINE import urllib.requestNEWLINEexcept ImportError:NEWLINE # For python2NEWLINE import impNEWLINE import urllib2NEWLINE import urlparseNEWLINE urllib = imp.new_module('urllib')NEWLINE urllib.error = urllib2NEWLINE urllib.parse = urlparseNEWLINE urllib.request = urllib2NEWLINENEWLINEfrom xml.etree import ElementTreeNEWLINENEWLINEproduct = sys.argv[1]NEWLINENEWLINEif len(sys.argv) > 2:NEWLINE depsonly = sys.argv[2]NEWLINEelse:NEWLINE depsonly = NoneNEWLINENEWLINEtry:NEWLINE device = product[product.index("_") + 1:]NEWLINEexcept:NEWLINE device = productNEWLINENEWLINEif not depsonly:NEWLINE print("Device %s not found. Attempting to retrieve device repository from Havoc-OS Github (http://github.com/Havoc-Devices)." % device)NEWLINENEWLINErepositories = []NEWLINENEWLINEtry:NEWLINE authtuple = netrc.netrc().authenticators("api.github.com")NEWLINENEWLINE if authtuple:NEWLINE auth_string = ('%s:%s' % (authtuple[0], authtuple[2])).encode()NEWLINE githubauth = base64.encodestring(auth_string).decode().replace('\n', '')NEWLINE else:NEWLINE githubauth = NoneNEWLINEexcept:NEWLINE githubauth = NoneNEWLINENEWLINEdef add_auth(githubreq):NEWLINE if githubauth:NEWLINE githubreq.add_header("Authorization","Basic %s" % githubauth)NEWLINENEWLINEif not depsonly:NEWLINE githubreq = urllib.request.Request("https://api.github.com/search/repositories?q=%s+user:Havoc-Devices+in:name+fork:true" % device)NEWLINE add_auth(githubreq)NEWLINE try:NEWLINE result = json.loads(urllib.request.urlopen(githubreq).read().decode())NEWLINE except urllib.error.URLError:NEWLINE print("Failed to search GitHub")NEWLINE sys.exit(1)NEWLINE except ValueError:NEWLINE print("Failed to parse return data from GitHub")NEWLINE sys.exit(1)NEWLINE for res in result.get('items', []):NEWLINE repositories.append(res)NEWLINENEWLINElocal_manifests = r'.repo/local_manifests'NEWLINEif not os.path.exists(local_manifests): os.makedirs(local_manifests)NEWLINENEWLINEdef exists_in_tree(lm, path):NEWLINE for child in lm.getchildren():NEWLINE if child.attrib['path'] == path:NEWLINE return TrueNEWLINE return FalseNEWLINENEWLINE# in-place prettyprint formatterNEWLINEdef indent(elem, level=0):NEWLINE i = "\n" + level*" "NEWLINE if len(elem):NEWLINE if not elem.text or not elem.text.strip():NEWLINE elem.text = i + " "NEWLINE if not elem.tail or not elem.tail.strip():NEWLINE elem.tail = iNEWLINE for elem in elem:NEWLINE indent(elem, level+1)NEWLINE if not elem.tail or not elem.tail.strip():NEWLINE elem.tail = iNEWLINE else:NEWLINE if level and (not elem.tail or not 
elem.tail.strip()):NEWLINE elem.tail = iNEWLINENEWLINEdef get_manifest_path():NEWLINE '''Find the current manifest pathNEWLINE In old versions of repo this is at .repo/manifest.xmlNEWLINE In new versions, .repo/manifest.xml includes an includeNEWLINE to some arbitrary file in .repo/manifests'''NEWLINENEWLINE m = ElementTree.parse(".repo/manifest.xml")NEWLINE try:NEWLINE m.findall('default')[0]NEWLINE return '.repo/manifest.xml'NEWLINE except IndexError:NEWLINE return ".repo/manifests/{}".format(m.find("include").get("name"))NEWLINENEWLINEdef get_default_revision():NEWLINE m = ElementTree.parse(get_manifest_path())NEWLINE d = m.findall('default')[0]NEWLINE r = d.get('revision')NEWLINE return r.replace('refs/heads/', '').replace('refs/tags/', '')NEWLINENEWLINEdef get_from_manifest(devicename):NEWLINE try:NEWLINE lm = ElementTree.parse(".repo/local_manifests/roomservice.xml")NEWLINE lm = lm.getroot()NEWLINE except:NEWLINE lm = ElementTree.Element("manifest")NEWLINENEWLINE for localpath in lm.findall("project"):NEWLINE if re.search("android_device_.*_%s$" % device, localpath.get("name")):NEWLINE return localpath.get("path")NEWLINENEWLINE return NoneNEWLINENEWLINEdef is_in_manifest(projectpath):NEWLINE try:NEWLINE lm = ElementTree.parse(".repo/local_manifests/roomservice.xml")NEWLINE lm = lm.getroot()NEWLINE except:NEWLINE lm = ElementTree.Element("manifest")NEWLINENEWLINE for localpath in lm.findall("project"):NEWLINE if localpath.get("path") == projectpath:NEWLINE return TrueNEWLINENEWLINE # Search in main manifest, tooNEWLINE try:NEWLINE lm = ElementTree.parse(get_manifest_path())NEWLINE lm = lm.getroot()NEWLINE except:NEWLINE lm = ElementTree.Element("manifest")NEWLINENEWLINE for localpath in lm.findall("project"):NEWLINE if localpath.get("path") == projectpath:NEWLINE return TrueNEWLINENEWLINE # ... 
and don't forget the havoc snippetNEWLINE try:NEWLINE lm = ElementTree.parse(".repo/manifests/snippets/havoc.xml")NEWLINE lm = lm.getroot()NEWLINE except:NEWLINE lm = ElementTree.Element("manifest")NEWLINENEWLINE for localpath in lm.findall("project"):NEWLINE if localpath.get("path") == projectpath:NEWLINE return TrueNEWLINENEWLINE return FalseNEWLINENEWLINEdef add_to_manifest(repositories, fallback_branch = None):NEWLINE try:NEWLINE lm = ElementTree.parse(".repo/local_manifests/roomservice.xml")NEWLINE lm = lm.getroot()NEWLINE except:NEWLINE lm = ElementTree.Element("manifest")NEWLINENEWLINE for repository in repositories:NEWLINE repo_name = repository['repository']NEWLINE repo_target = repository['target_path']NEWLINE print('Checking if %s is fetched from %s' % (repo_target, repo_name))NEWLINE if is_in_manifest(repo_target):NEWLINE print('Havoc-Devices/%s already fetched to %s' % (repo_name, repo_target))NEWLINE continueNEWLINENEWLINE print('Adding dependency: Havoc-Devices/%s -> %s' % (repo_name, repo_target))NEWLINE project = ElementTree.Element("project", attrib = { "path": repo_target,NEWLINE "remote": "havoc-devices", "name": "%s" % repo_name })NEWLINENEWLINE if 'branch' in repository:NEWLINE project.set('revision',repository['branch'])NEWLINE elif fallback_branch:NEWLINE print("Using fallback branch %s for %s" % (fallback_branch, repo_name))NEWLINE project.set('revision', fallback_branch)NEWLINE else:NEWLINE print("Using default branch for %s" % repo_name)NEWLINENEWLINE lm.append(project)NEWLINENEWLINE indent(lm, 0)NEWLINE raw_xml = ElementTree.tostring(lm).decode()NEWLINE raw_xml = '<?xml version="1.0" encoding="UTF-8"?>\n' + raw_xmlNEWLINENEWLINE f = open('.repo/local_manifests/roomservice.xml', 'w')NEWLINE f.write(raw_xml)NEWLINE f.close()NEWLINENEWLINEdef fetch_dependencies(repo_path, fallback_branch = None):NEWLINE print('Looking for dependencies in %s' % repo_path)NEWLINE dependencies_path = repo_path + '/havoc.dependencies'NEWLINE syncable_repos = []NEWLINE verify_repos = []NEWLINENEWLINE if os.path.exists(dependencies_path):NEWLINE dependencies_file = open(dependencies_path, 'r')NEWLINE dependencies = json.loads(dependencies_file.read())NEWLINE fetch_list = []NEWLINENEWLINE for dependency in dependencies:NEWLINE if not is_in_manifest(dependency['target_path']):NEWLINE fetch_list.append(dependency)NEWLINE syncable_repos.append(dependency['target_path'])NEWLINE verify_repos.append(dependency['target_path'])NEWLINE else:NEWLINE verify_repos.append(dependency['target_path'])NEWLINENEWLINE if not os.path.isdir(dependency['target_path']):NEWLINE syncable_repos.append(dependency['target_path'])NEWLINENEWLINE dependencies_file.close()NEWLINENEWLINE if len(fetch_list) > 0:NEWLINE print('Adding dependencies to manifest')NEWLINE add_to_manifest(fetch_list, fallback_branch)NEWLINE else:NEWLINE print('%s has no additional dependencies.' 
% repo_path)NEWLINENEWLINE if len(syncable_repos) > 0:NEWLINE print('Syncing dependencies')NEWLINE os.system('repo sync --force-sync %s' % ' '.join(syncable_repos))NEWLINENEWLINE for deprepo in verify_repos:NEWLINE fetch_dependencies(deprepo)NEWLINENEWLINEdef has_branch(branches, revision):NEWLINE return revision in [branch['name'] for branch in branches]NEWLINENEWLINEif depsonly:NEWLINE repo_path = get_from_manifest(device)NEWLINE if repo_path:NEWLINE fetch_dependencies(repo_path)NEWLINE else:NEWLINE print("Trying dependencies-only mode on a non-existing device tree?")NEWLINENEWLINE sys.exit()NEWLINENEWLINEelse:NEWLINE for repository in repositories:NEWLINE repo_name = repository['name']NEWLINE if re.match(r"^android_device_[^_]*_" + device + "$", repo_name):NEWLINE print("Found repository: %s" % repository['name'])NEWLINE NEWLINE manufacturer = repo_name.replace("android_device_", "").replace("_" + device, "")NEWLINE NEWLINE default_revision = get_default_revision()NEWLINE print("Default revision: %s" % default_revision)NEWLINE print("Checking branch info")NEWLINE githubreq = urllib.request.Request(repository['branches_url'].replace('{/branch}', ''))NEWLINE add_auth(githubreq)NEWLINE result = json.loads(urllib.request.urlopen(githubreq).read().decode())NEWLINENEWLINE ## Try tags, too, since that's what releases useNEWLINE if not has_branch(result, default_revision):NEWLINE githubreq = urllib.request.Request(repository['tags_url'].replace('{/tag}', ''))NEWLINE add_auth(githubreq)NEWLINE result.extend (json.loads(urllib.request.urlopen(githubreq).read().decode()))NEWLINE NEWLINE repo_path = "device/%s/%s" % (manufacturer, device)NEWLINE adding = {'repository':repo_name,'target_path':repo_path}NEWLINE NEWLINE fallback_branch = NoneNEWLINE if not has_branch(result, default_revision):NEWLINE if os.getenv('ROOMSERVICE_BRANCHES'):NEWLINE fallbacks = list(filter(bool, os.getenv('ROOMSERVICE_BRANCHES').split(' ')))NEWLINE for fallback in fallbacks:NEWLINE if has_branch(result, fallback):NEWLINE print("Using fallback branch: %s" % fallback)NEWLINE fallback_branch = fallbackNEWLINE breakNEWLINENEWLINE if not fallback_branch:NEWLINE print("Default revision %s not found in %s. Bailing." % (default_revision, repo_name))NEWLINE print("Branches found:")NEWLINE for branch in [branch['name'] for branch in result]:NEWLINE print(branch)NEWLINE print("Use the ROOMSERVICE_BRANCHES environment variable to specify a list of fallback branches.")NEWLINE sys.exit()NEWLINENEWLINE add_to_manifest([adding], fallback_branch)NEWLINENEWLINE print("Syncing repository to retrieve project.")NEWLINE os.system('repo sync --force-sync %s' % repo_path)NEWLINE print("Repository synced!")NEWLINENEWLINE fetch_dependencies(repo_path, fallback_branch)NEWLINE print("Done")NEWLINE sys.exit()NEWLINENEWLINEprint("Repository for %s not found in the Havoc-Devices Github repository list. If this is in error, you may need to manually add it to your local_manifests/roomservice.xml." % device)NEWLINE |
from .test_auth import enable_ad, load_setup_data, enable_openldap, \NEWLINE OPENLDAP_AUTH_USER_PASSWORD, enable_freeipa, FREEIPA_AUTH_USER_PASSWORDNEWLINEfrom .common import * # NOQANEWLINEimport astNEWLINENEWLINEAGENT_REG_CMD = os.environ.get('RANCHER_AGENT_REG_CMD', "")NEWLINEHOST_COUNT = int(os.environ.get('RANCHER_HOST_COUNT', 1))NEWLINEHOST_NAME = os.environ.get('RANCHER_HOST_NAME', "testsa")NEWLINERANCHER_SERVER_VERSION = os.environ.get('RANCHER_SERVER_VERSION',NEWLINE "master-head")NEWLINErke_config = {"authentication": {"type": "authnConfig", "strategy": "x509"},NEWLINE "ignoreDockerVersion": False,NEWLINE "network": {"type": "networkConfig", "plugin": "canal"},NEWLINE "type": "rancherKubernetesEngineConfig"NEWLINE }NEWLINEAUTO_DEPLOY_CUSTOM_CLUSTER = ast.literal_eval(NEWLINE os.environ.get('RANCHER_AUTO_DEPLOY_CUSTOM_CLUSTER', "True"))NEWLINEKEYPAIR_NAME_PREFIX = os.environ.get('RANCHER_KEYPAIR_NAME_PREFIX', "")NEWLINERANCHER_CLUSTER_NAME = os.environ.get('RANCHER_CLUSTER_NAME', "")NEWLINERANCHER_ELASTIC_SEARCH_ENDPOINT = os.environ.get(NEWLINE 'RANCHER_ELASTIC_SEARCH_ENDPOINT', "")NEWLINEK8S_VERSION = os.environ.get('RANCHER_K8S_VERSION', "")NEWLINENEWLINENEWLINEdef test_add_custom_host():NEWLINE aws_nodes = AmazonWebServices().create_multiple_nodes(NEWLINE HOST_COUNT, random_test_name("testsa" + HOST_NAME))NEWLINE if AGENT_REG_CMD != "":NEWLINE for aws_node in aws_nodes:NEWLINE additional_options = " --address " + aws_node.public_ip_address + \NEWLINE " --internal-address " + \NEWLINE aws_node.private_ip_addressNEWLINE if 'Administrator' == aws_node.ssh_user:NEWLINE agent_cmd_temp = AGENT_REG_CMD.replace('| iex', ' ' + additional_options + ' | iex ')NEWLINE agent_cmd = agent_cmd_temp + additional_optionsNEWLINE else:NEWLINE agent_cmd = AGENT_REG_CMD + additional_optionsNEWLINE aws_node.execute_command(agent_cmd)NEWLINE print("Nodes: " + aws_node.public_ip_address)NEWLINENEWLINENEWLINEdef test_delete_keypair():NEWLINE AmazonWebServices().delete_keypairs(KEYPAIR_NAME_PREFIX)NEWLINENEWLINENEWLINEdef test_deploy_rancher_server():NEWLINE RANCHER_SERVER_CMD = \NEWLINE 'sudo docker run -d --name="rancher-server" ' \NEWLINE '--restart=unless-stopped -p 80:80 -p 443:443 ' \NEWLINE 'rancher/rancher'NEWLINE RANCHER_SERVER_CMD += ":" + RANCHER_SERVER_VERSIONNEWLINE aws_nodes = AmazonWebServices().create_multiple_nodes(NEWLINE 1, random_test_name("testsa" + HOST_NAME))NEWLINE aws_nodes[0].execute_command(RANCHER_SERVER_CMD)NEWLINE time.sleep(120)NEWLINE RANCHER_SERVER_URL = "https://" + aws_nodes[0].public_ip_addressNEWLINE print(RANCHER_SERVER_URL)NEWLINE wait_until_active(RANCHER_SERVER_URL, timeout=300)NEWLINENEWLINE RANCHER_SET_DEBUG_CMD = \NEWLINE "sudo docker exec rancher-server loglevel --set debug"NEWLINE aws_nodes[0].execute_command(RANCHER_SET_DEBUG_CMD)NEWLINENEWLINE token = set_url_password_token(RANCHER_SERVER_URL)NEWLINE admin_client = rancher.Client(url=RANCHER_SERVER_URL + "/v3",NEWLINE token=token, verify=False)NEWLINE if AUTH_PROVIDER:NEWLINE enable_url = \NEWLINE RANCHER_SERVER_URL + "/v3/" + AUTH_PROVIDER + \NEWLINE "Configs/" + AUTH_PROVIDER.lower() + "?action=testAndApply"NEWLINE auth_admin_user = load_setup_data()["admin_user"]NEWLINE auth_user_login_url = \NEWLINE RANCHER_SERVER_URL + "/v3-public/" + AUTH_PROVIDER + "Providers/" \NEWLINE + AUTH_PROVIDER.lower() + "?action=login"NEWLINENEWLINE if AUTH_PROVIDER == "activeDirectory":NEWLINENEWLINE enable_ad(auth_admin_user, token, enable_url=enable_url,NEWLINE password=AUTH_USER_PASSWORD, 
nested=NESTED_GROUP_ENABLED)NEWLINE user_token = login_as_auth_user(NEWLINE load_setup_data()["standard_user"],NEWLINE AUTH_USER_PASSWORD,NEWLINE login_url=auth_user_login_url)["token"]NEWLINE elif AUTH_PROVIDER == "openLdap":NEWLINENEWLINE enable_openldap(auth_admin_user, token, enable_url=enable_url,NEWLINE password=OPENLDAP_AUTH_USER_PASSWORD,NEWLINE nested=NESTED_GROUP_ENABLED)NEWLINE user_token = login_as_auth_user(NEWLINE load_setup_data()["standard_user"],NEWLINE OPENLDAP_AUTH_USER_PASSWORD,NEWLINE login_url=auth_user_login_url)["token"]NEWLINE elif AUTH_PROVIDER == "freeIpa":NEWLINENEWLINE enable_freeipa(auth_admin_user, token, enable_url=enable_url,NEWLINE password=FREEIPA_AUTH_USER_PASSWORD,NEWLINE nested=NESTED_GROUP_ENABLED)NEWLINE user_token = login_as_auth_user(NEWLINE load_setup_data()["standard_user"],NEWLINE FREEIPA_AUTH_USER_PASSWORD,NEWLINE login_url=auth_user_login_url)["token"]NEWLINE else:NEWLINE AUTH_URL = \NEWLINE RANCHER_SERVER_URL + "/v3-public/localproviders/local?action=login"NEWLINE user, user_token = create_user(admin_client, AUTH_URL)NEWLINENEWLINE env_details = "env.CATTLE_TEST_URL='" + RANCHER_SERVER_URL + "'\n"NEWLINE env_details += "env.ADMIN_TOKEN='" + token + "'\n"NEWLINE env_details += "env.USER_TOKEN='" + user_token + "'\n"NEWLINENEWLINE if AUTO_DEPLOY_CUSTOM_CLUSTER:NEWLINE aws_nodes = \NEWLINE AmazonWebServices().create_multiple_nodes(NEWLINE 5, random_test_name("testcustom"))NEWLINE node_roles = [["controlplane"], ["etcd"],NEWLINE ["worker"], ["worker"], ["worker"]]NEWLINE client = rancher.Client(url=RANCHER_SERVER_URL + "/v3",NEWLINE token=user_token, verify=False)NEWLINE if K8S_VERSION != "":NEWLINE rke_config["kubernetesVersion"] = K8S_VERSIONNEWLINE print("the rke config for creating the cluster:")NEWLINE print(rke_config)NEWLINE cluster = client.create_cluster(NEWLINE name=random_name(),NEWLINE driver="rancherKubernetesEngine",NEWLINE rancherKubernetesEngineConfig=rke_config)NEWLINE assert cluster.state == "provisioning"NEWLINE i = 0NEWLINE for aws_node in aws_nodes:NEWLINE docker_run_cmd = \NEWLINE get_custom_host_registration_cmd(NEWLINE client, cluster, node_roles[i], aws_node)NEWLINE aws_node.execute_command(docker_run_cmd)NEWLINE i += 1NEWLINE validate_cluster_state(client, cluster)NEWLINE env_details += "env.CLUSTER_NAME='" + cluster.name + "'\n"NEWLINE create_config_file(env_details)NEWLINENEWLINENEWLINEdef test_delete_rancher_server():NEWLINE client = get_admin_client()NEWLINE clusters = client.list_cluster().dataNEWLINE for cluster in clusters:NEWLINE delete_cluster(client, cluster)NEWLINE clusters = client.list_cluster().dataNEWLINE start = time.time()NEWLINE while len(clusters) > 0:NEWLINE time.sleep(30)NEWLINE clusters = client.list_cluster().dataNEWLINE if time.time() - start > MACHINE_TIMEOUT:NEWLINE exceptionMsg = 'Timeout waiting for clusters to be removed'NEWLINE raise Exception(exceptionMsg)NEWLINE ip_address = CATTLE_TEST_URL[8:]NEWLINE print("Ip Address:" + ip_address)NEWLINE filters = [NEWLINE {'Name': 'network-interface.addresses.association.public-ip',NEWLINE 'Values': [ip_address]}]NEWLINE aws_nodes = AmazonWebServices().get_nodes(filters)NEWLINE assert len(aws_nodes) == 1NEWLINE AmazonWebServices().delete_nodes(aws_nodes, wait_for_deleted=True)NEWLINENEWLINENEWLINEdef test_cluster_enable_logging_elasticsearch():NEWLINE client = get_user_client()NEWLINE cluster = get_cluster_by_name(client, RANCHER_CLUSTER_NAME)NEWLINE cluster_name = cluster.nameNEWLINE 
client.create_cluster_logging(name=random_test_name("elasticsearch"),NEWLINE clusterId=cluster.id,NEWLINE elasticsearchConfig={NEWLINE "dateFormat": "YYYY-MM-DD",NEWLINE "sslVerify": False,NEWLINE "sslVersion": "TLSv1_2",NEWLINE "indexPrefix": cluster_name,NEWLINE "endpoint":NEWLINE RANCHER_ELASTIC_SEARCH_ENDPOINT}NEWLINE )NEWLINE projects = client.list_project(name="System",NEWLINE clusterId=cluster.id).dataNEWLINE assert len(projects) == 1NEWLINE project = projects[0]NEWLINE p_client = get_project_client_for_token(project, USER_TOKEN)NEWLINE wait_for_app_to_active(p_client, "rancher-logging")NEWLINE |
# Read N prices and print the total with the single most expensive
# item sold at half price (integer division halves the largest price).
N = int(input())
p = [int(input()) for i in range(N)]
print(sum(p) - max(p) // 2)
# Slicing
frase = 'Curso em video python'
#Ex01 | Note: shows the single character at the given index
print(frase[3])
#Ex02 | Note: the last position is always excluded from the output
print(frase[3:15])
#Ex03 | Note: selection with a step defined by the parameters
print(frase[3:21:2])
#Ex04 | Note: when the start is omitted it defaults to 0
print(frase[:4])
#Ex05 | Note: when the end is omitted it defaults to the last position
print(frase[15:])
#Ex06 | Note: selection omitting one of the bounds, with a step
print(frase[9::3])
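Two further slicing examples in the same spirit (added for illustration, not part of the original lesson):

#Ex07 | Note: negative indices count from the end of the string
print(frase[-6:])
#Ex08 | Note: a negative step walks the string backwards
print(frase[::-1])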
import osNEWLINEfrom threading import BrokenBarrierErrorNEWLINEimport timeNEWLINENEWLINEfrom multiprocess.context import TimeoutErrorNEWLINEfrom multiprocess.managers import StateNEWLINEimport numpy as npNEWLINEimport pytestNEWLINENEWLINEfrom catkit.multiprocessing import Process, SharedMemoryManagerNEWLINENEWLINETIMEOUT = 10 # Use a shorter timeout for testing.NEWLINENEWLINENEWLINEdef test_child_exception():NEWLINE def client_func():NEWLINE raise RuntimeError("123456789")NEWLINENEWLINE with SharedMemoryManager() as manager:NEWLINE client = Process(target=client_func)NEWLINE client.start()NEWLINE with pytest.raises(RuntimeError, match="123456789"):NEWLINE client.join()NEWLINENEWLINENEWLINEdef test_pid():NEWLINE def client_func(pid):NEWLINE manager = SharedMemoryManager()NEWLINE manager.connect()NEWLINE server_pid = manager.getpid()NEWLINENEWLINE client_pid = os.getpid()NEWLINE assert server_pid != client_pid, f"Server ({server_pid}) shouldn't be running on client ({client_pid})"NEWLINE assert server_pid == pid, f"Server ({server_pid}) connected to from client ({client_pid}) should be same as that started by manager ({pid})"NEWLINENEWLINE with SharedMemoryManager() as manager:NEWLINE parent_pid = os.getpid()NEWLINE server_pid = manager.getpid()NEWLINENEWLINE assert server_pid != parent_pidNEWLINENEWLINE n_clients = 2NEWLINE clients = [Process(target=client_func, args=(server_pid,)) for x in range(n_clients)]NEWLINENEWLINE for client in clients:NEWLINE client.start()NEWLINENEWLINE for client in clients:NEWLINE client.join()NEWLINENEWLINENEWLINEdef test_no_persistent_server():NEWLINE manager = SharedMemoryManager()NEWLINE with pytest.raises(ConnectionRefusedError):NEWLINE manager.connect()NEWLINENEWLINENEWLINEdef test_locks():NEWLINE def client_func2():NEWLINE manager = SharedMemoryManager()NEWLINE manager.connect()NEWLINE with pytest.raises(TimeoutError):NEWLINE with manager.get_lock("test_lock"): # This will timeout as the parent process has already acquired this.NEWLINE passNEWLINENEWLINE client = Process(target=client_func2)NEWLINENEWLINE with SharedMemoryManager() as manager:NEWLINE assert manager._state.value == State.STARTED # Oddly manager._state is State.Started doesn't work.NEWLINE with manager.get_lock("test_lock", timeout=TIMEOUT) as is_locked:NEWLINE assert is_lockedNEWLINE client.start()NEWLINE client.join()NEWLINENEWLINENEWLINEdef client_barrier(sleep, parties, a_list, name_mangle=False):NEWLINE manager = SharedMemoryManager()NEWLINE manager.connect()NEWLINE name = f"test_barrier_{sleep}" if name_mangle else "test_barrier"NEWLINE barrier = manager.get_barrier(name, parties)NEWLINE t0 = time.time()NEWLINE time.sleep(sleep)NEWLINE barrier.wait(timeout=TIMEOUT) # NOTE: The barrier release order is not guaranteed.NEWLINE a_list.append(np.rint(time.time() - t0))NEWLINENEWLINENEWLINEdef test_single_barrier():NEWLINE with SharedMemoryManager() as manager:NEWLINE a_list = manager.list()NEWLINENEWLINE clients = [Process(target=client_barrier, args=(6, 3, a_list)),NEWLINE Process(target=client_barrier, args=(0, 3, a_list)),NEWLINE Process(target=client_barrier, args=(0, 3, a_list))]NEWLINENEWLINE for client in clients:NEWLINE client.start()NEWLINENEWLINE for client in clients:NEWLINE client.join()NEWLINENEWLINE # We Expect to see that the timer wrapping the sleep and the barrier for each client to be that of the longest.NEWLINE assert a_list._getvalue() == [6, 6, 6], a_list._getvalue()NEWLINENEWLINENEWLINEdef test_multiple_barriers():NEWLINE with SharedMemoryManager() as manager:NEWLINE 
a_list = manager.list()NEWLINENEWLINE clients = [Process(target=client_barrier, args=(6, 1, a_list, True)),NEWLINE Process(target=client_barrier, args=(0, 1, a_list, True)),NEWLINE Process(target=client_barrier, args=(0, 1, a_list, True))]NEWLINENEWLINE for client in clients:NEWLINE client.start()NEWLINENEWLINE for client in clients:NEWLINE client.join()NEWLINENEWLINE # We Expect to see that the timer wrapping the sleep and the barrier for each client to be that of their sleep.NEWLINE assert a_list._getvalue() == [0, 0, 6], a_list._getvalue()NEWLINENEWLINENEWLINEdef test_broken_barrier():NEWLINE with SharedMemoryManager() as manager:NEWLINE a_list = manager.list()NEWLINENEWLINE # More parties than process will cause barrier.wait() to timeout.NEWLINE client = Process(target=client_barrier, args=(6, 3, a_list))NEWLINE client.start()NEWLINE with pytest.raises(BrokenBarrierError):NEWLINE client.join()NEWLINE |
from __future__ import print_function, absolute_import, division

from timeit import default_timer as timer
import numpy as np

from .reduction import device_reduce_sum


def benchmark_intp(nelem):
    data = np.random.randint(0, 100, nelem).astype(np.intp)

    ts = timer()
    expected_res = data.sum()
    cpu_time = timer() - ts

    ts = timer()
    got_res = device_reduce_sum(data)
    gpu_time = timer() - ts

    assert got_res == expected_res
    return cpu_time, gpu_time


def benchmark_float64(nelem):
    data = np.random.random(nelem).astype(np.float64)

    ts = timer()
    expected_res = data.sum()
    cpu_time = timer() - ts

    ts = timer()
    got_res = device_reduce_sum(data)
    gpu_time = timer() - ts

    # verify the GPU result matches the CPU sum
    assert np.allclose(got_res, expected_res)
    return cpu_time, gpu_time


def main():
    print('benchmark intp'.center(80, '='))
    for n in [100, 1000, 10000, 100000, 1000000, 10000000]:
        print('n = {0}'.format(n))
        for t in range(3):
            print(benchmark_intp(n))

    print('benchmark float64'.center(80, '='))
    for n in [100, 1000, 10000, 100000, 1000000, 10000000]:
        print('n = {0}'.format(n))
        for t in range(3):
            print(benchmark_float64(n))

    # Note: On Carrizo, speedup is attained at n=1,000,000


if __name__ == '__main__':
    main()
"""NEWLINEProcess missing data within a dataset. NEWLINE"""NEWLINEimport missingno as msnoNEWLINEfrom pandas import DataFrameNEWLINEfrom pandas import SeriesNEWLINEfrom typing import ListNEWLINENEWLINENEWLINEdef visualize(df):NEWLINE """Plot missing cells heatmap"""NEWLINE msno.matrix(df)NEWLINENEWLINENEWLINEdef removeRows(df: DataFrame) -> DataFrame:NEWLINE """Removes all rows with NaN cells"""NEWLINE return(df.dropna().reset_index(drop=True))NEWLINENEWLINENEWLINEdef removeRowsByCol(df: DataFrame, col: str) -> DataFrame:NEWLINE """Removes all rows with missing cells in specified column"""NEWLINE return(df[~df[col].isna()].reset_index(drop=True))NEWLINENEWLINENEWLINEdef impute(df: DataFrame, col: str, strategy: str = "zero") -> DataFrame:NEWLINE """NEWLINE Impute missing data in column.NEWLINE df - data dataframeNEWLINE col - target column labelNEWLINE strategy - imputation strategyNEWLINE zero: replaces NA with 0NEWLINE mean: replaces NA with the meanNEWLINE median: replaces NA with the medianNEWLINE most frequent: replaces NA with one the modeNEWLINE empty: replaces NA with an empty str i.e. ""NEWLINE hot deck: replaces NA with a random sample of non-NA dataNEWLINE """NEWLINE data = df.copy()NEWLINENEWLINE if strategy == "zero":NEWLINE # works only with quant dataNEWLINE filler_data = 0NEWLINE elif strategy == "mean":NEWLINE # works only with quant dataNEWLINE filler_data = data[col].mean()NEWLINE elif strategy == "median":NEWLINE # works only with quant dataNEWLINE filler_data = data[col].median()NEWLINE elif strategy == "most frequent":NEWLINE filler_data = data[col].mode().sample()NEWLINE elif strategy == "empty":NEWLINE filler_data = ""NEWLINE elif strategy == "hot deck":NEWLINE # replaces NaNs with random samples from the valid data pool.NEWLINE # The sampling is with replacement incase the valid sampleNEWLINE # size is too smallNEWLINE valid_data = data[col][~data[col].isnull()]NEWLINE sample_len = len(data[col][data[col].isnull()])NEWLINE filler_data = valid_data.sample(sample_len, replace=True).valuesNEWLINE else:NEWLINE raise Exception("Not a valid impute strategy")NEWLINE data[col][data[col].isnull()] = filler_dataNEWLINE return(data)NEWLINENEWLINENEWLINEdef generateBinaries(df:DataFrame, cols: List[str]) -> DataFrame:NEWLINE """Add binary variables to specify whether cell is na"""NEWLINE data = df.copy()NEWLINE for col in cols:NEWLINE data[col+"_na"] = ~data[col].isnull()NEWLINE return(data)NEWLINENEWLINENEWLINEdef noMissingByCol(df: DataFrame) -> Series:NEWLINE """Count the number of missing cells in each column"""NEWLINE return(df.isna().sum())NEWLINENEWLINENEWLINEdef replaceDefects(df: DataFrame, col: str, replacement_pairs: dict) -> DataFrame:NEWLINE """Row replacement for str based columns"""NEWLINE data = df.copy()NEWLINE for key, item in replacement_pairs.items():NEWLINE data[col] = data[col].apply(lambda x: x.replace(key, item))NEWLINE return(data)NEWLINE |
from __future__ import absolute_import, divisionNEWLINE__author__ = 'katharine'NEWLINENEWLINEfrom six import indexbytesNEWLINEfrom six.moves import rangeNEWLINENEWLINEfrom libpebble2.events.mixin import EventSourceMixinNEWLINEfrom libpebble2.exceptions import ScreenshotErrorNEWLINEfrom libpebble2.protocol.screenshots import *NEWLINENEWLINENEWLINEclass Screenshot(EventSourceMixin):NEWLINE """NEWLINE Takes a screenshot from the watch.NEWLINENEWLINE :param pebble: The pebble of which to take a screenshot.NEWLINE :type pebble: .PebbleConnectionNEWLINE """NEWLINE def __init__(self, pebble):NEWLINE self._pebble = pebbleNEWLINE super(Screenshot, self).__init__()NEWLINENEWLINE def grab_image(self):NEWLINE """NEWLINE Takes a screenshot. Blocks until completion, or raises a :exc:`.ScreenshotError` on failure.NEWLINENEWLINE While this method is executing, "progress" events will periodically be emitted with the following signature: ::NEWLINENEWLINE (downloaded_so_far, total_size)NEWLINENEWLINE :return: A list of bytearrays in RGB8 format, where each bytearray is one row of the image.NEWLINE """NEWLINE # We have to open this queue before we make the request, to ensure we don't miss the response.NEWLINE queue = self._pebble.get_endpoint_queue(ScreenshotResponse)NEWLINE self._pebble.send_packet(ScreenshotRequest())NEWLINE return self._read_screenshot(queue)NEWLINENEWLINE def _read_screenshot(self, queue):NEWLINE data = queue.get().dataNEWLINE header = ScreenshotHeader.parse(data)[0]NEWLINE if header.response_code != ScreenshotHeader.ResponseCode.OK:NEWLINE queue.close()NEWLINE raise ScreenshotError("Screenshot failed: {!s}".format(header.response_code))NEWLINE data = header.dataNEWLINE expected_size = self._get_expected_bytes(header)NEWLINE while len(data) < expected_size:NEWLINE data += queue.get().dataNEWLINE self._broadcast_event("progress", len(data), expected_size)NEWLINE queue.close()NEWLINE return self._decode_image(header, data)NEWLINENEWLINE @classmethodNEWLINE def _get_expected_bytes(cls, header):NEWLINE if header.version == 1:NEWLINE return (header.width * header.height) // 8NEWLINE elif header.version == 2:NEWLINE return header.width * header.heightNEWLINE else:NEWLINE raise ScreenshotError("Unknown screenshot version: {}".format(header.version))NEWLINENEWLINE @classmethodNEWLINE def _decode_image(cls, header, data):NEWLINE if header.version == 1:NEWLINE return cls._decode_1bit(header, data)NEWLINE elif header.version == 2:NEWLINE return cls._decode_8bit(header, data)NEWLINENEWLINE @classmethodNEWLINE def _decode_1bit(cls, header, data):NEWLINE output = []NEWLINE row_bytes = header.width // 8NEWLINE for row in range(header.height):NEWLINE row_values = []NEWLINE for column in range(header.width):NEWLINE pixel = (indexbytes(data, row*row_bytes + column//8) >> (column % 8)) & 1NEWLINE row_values.extend([pixel * 255] * 3)NEWLINE output.append(bytearray(row_values))NEWLINE return outputNEWLINENEWLINE @classmethodNEWLINE def _decode_8bit(cls, header, data):NEWLINE output = []NEWLINE for row in range(header.height):NEWLINE row_values = []NEWLINE for column in range(header.width):NEWLINE pixel = indexbytes(data, row*header.width + column)NEWLINE row_values.extend([NEWLINE ((pixel >> 4) & 0b11) * 85,NEWLINE ((pixel >> 2) & 0b11) * 85,NEWLINE ((pixel >> 0) & 0b11) * 85,NEWLINE ])NEWLINE output.append(bytearray(row_values))NEWLINE return outputNEWLINE |
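A hedged usage sketch: transport setup is omitted, `register_handler` is the event-registration method provided by libpebble2's EventSourceMixin, and everything else below is a placeholder.

from libpebble2.communication import PebbleConnection

def on_progress(received, total):
    print("screenshot: %d/%d bytes" % (received, total))

# pebble = PebbleConnection(<some transport>)  # left abstract here
# shot = Screenshot(pebble)
# shot.register_handler("progress", on_progress)
# rows = shot.grab_image()  # list of bytearrays, one RGB8 row each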
"""NEWLINEUse this module directly:NEWLINE import xarray.plot as xpltNEWLINENEWLINEOr use the methods on a DataArray:NEWLINE DataArray.plot._____NEWLINE"""NEWLINEimport functoolsNEWLINENEWLINEimport numpy as npNEWLINEimport pandas as pdNEWLINENEWLINEfrom .facetgrid import _easy_facetgridNEWLINEfrom .utils import (NEWLINE _add_colorbar, _ensure_plottable, _infer_interval_breaks, _infer_xy_labels,NEWLINE _interval_to_double_bound_points, _interval_to_mid_points,NEWLINE _process_cmap_cbar_kwargs, _rescale_imshow_rgb, _resolve_intervals_2dplot,NEWLINE _update_axes, _valid_other_type, get_axis, import_matplotlib_pyplot,NEWLINE label_from_attrs)NEWLINENEWLINENEWLINEdef _infer_line_data(darray, x, y, hue):NEWLINE error_msg = ('must be either None or one of ({0:s})'NEWLINE .format(', '.join([repr(dd) for dd in darray.dims])))NEWLINE ndims = len(darray.dims)NEWLINENEWLINE if x is not None and x not in darray.dims and x not in darray.coords:NEWLINE raise ValueError('x ' + error_msg)NEWLINENEWLINE if y is not None and y not in darray.dims and y not in darray.coords:NEWLINE raise ValueError('y ' + error_msg)NEWLINENEWLINE if x is not None and y is not None:NEWLINE raise ValueError('You cannot specify both x and y kwargs'NEWLINE 'for line plots.')NEWLINENEWLINE if ndims == 1:NEWLINE huename = NoneNEWLINE hueplt = NoneNEWLINE huelabel = ''NEWLINENEWLINE if x is not None:NEWLINE xplt = darray[x]NEWLINE yplt = darrayNEWLINENEWLINE elif y is not None:NEWLINE xplt = darrayNEWLINE yplt = darray[y]NEWLINENEWLINE else: # Both x & y are NoneNEWLINE dim = darray.dims[0]NEWLINE xplt = darray[dim]NEWLINE yplt = darrayNEWLINENEWLINE else:NEWLINE if x is None and y is None and hue is None:NEWLINE raise ValueError('For 2D inputs, please'NEWLINE 'specify either hue, x or y.')NEWLINENEWLINE if y is None:NEWLINE xname, huename = _infer_xy_labels(darray=darray, x=x, y=hue)NEWLINE xplt = darray[xname]NEWLINE if xplt.ndim > 1:NEWLINE if huename in darray.dims:NEWLINE otherindex = 1 if darray.dims.index(huename) == 0 else 0NEWLINE otherdim = darray.dims[otherindex]NEWLINE yplt = darray.transpose(NEWLINE otherdim, huename, transpose_coords=False)NEWLINE xplt = xplt.transpose(NEWLINE otherdim, huename, transpose_coords=False)NEWLINE else:NEWLINE raise ValueError('For 2D inputs, hue must be a dimension'NEWLINE + ' i.e. one of ' + repr(darray.dims))NEWLINENEWLINE else:NEWLINE yplt = darray.transpose(xname, huename)NEWLINENEWLINE else:NEWLINE yname, huename = _infer_xy_labels(darray=darray, x=y, y=hue)NEWLINE yplt = darray[yname]NEWLINE if yplt.ndim > 1:NEWLINE if huename in darray.dims:NEWLINE otherindex = 1 if darray.dims.index(huename) == 0 else 0NEWLINE otherdim = darray.dims[otherindex]NEWLINE xplt = darray.transpose(NEWLINE otherdim, huename, transpose_coords=False)NEWLINE else:NEWLINE raise ValueError('For 2D inputs, hue must be a dimension'NEWLINE + ' i.e. 
one of ' + repr(darray.dims))NEWLINENEWLINE else:NEWLINE xplt = darray.transpose(yname, huename)NEWLINENEWLINE huelabel = label_from_attrs(darray[huename])NEWLINE hueplt = darray[huename]NEWLINENEWLINE xlabel = label_from_attrs(xplt)NEWLINE ylabel = label_from_attrs(yplt)NEWLINENEWLINE return xplt, yplt, hueplt, xlabel, ylabel, huelabelNEWLINENEWLINENEWLINEdef plot(darray, row=None, col=None, col_wrap=None, ax=None, hue=None,NEWLINE rtol=0.01, subplot_kws=None, **kwargs):NEWLINE """NEWLINE Default plot of DataArray using matplotlib.pyplot.NEWLINENEWLINE Calls xarray plotting function based on the dimensions ofNEWLINE darray.squeeze()NEWLINENEWLINE =============== ===========================NEWLINE Dimensions Plotting functionNEWLINE --------------- ---------------------------NEWLINE 1 :py:func:`xarray.plot.line`NEWLINE 2 :py:func:`xarray.plot.pcolormesh`NEWLINE Anything else :py:func:`xarray.plot.hist`NEWLINE =============== ===========================NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE darray : DataArrayNEWLINE row : string, optionalNEWLINE If passed, make row faceted plots on this dimension nameNEWLINE col : string, optionalNEWLINE If passed, make column faceted plots on this dimension nameNEWLINE hue : string, optionalNEWLINE If passed, make faceted line plots with hue on this dimension nameNEWLINE col_wrap : integer, optionalNEWLINE Use together with ``col`` to wrap faceted plotsNEWLINE ax : matplotlib axes, optionalNEWLINE If None, uses the current axis. Not applicable when using facets.NEWLINE rtol : number, optionalNEWLINE Relative tolerance used to determine if the indexesNEWLINE are uniformly spaced. Usually a small positive number.NEWLINE subplot_kws : dict, optionalNEWLINE Dictionary of keyword arguments for matplotlib subplots. Only appliesNEWLINE to FacetGrid plotting.NEWLINE **kwargs : optionalNEWLINE Additional keyword arguments to matplotlibNEWLINENEWLINE """NEWLINE darray = darray.squeeze().compute()NEWLINENEWLINE plot_dims = set(darray.dims)NEWLINE plot_dims.discard(row)NEWLINE plot_dims.discard(col)NEWLINE plot_dims.discard(hue)NEWLINENEWLINE ndims = len(plot_dims)NEWLINENEWLINE error_msg = ('Only 1d and 2d plots are supported for facets in xarray. 
'NEWLINE 'See the package `Seaborn` for more options.')NEWLINENEWLINE if ndims in [1, 2]:NEWLINE if row or col:NEWLINE kwargs['row'] = rowNEWLINE kwargs['col'] = colNEWLINE kwargs['col_wrap'] = col_wrapNEWLINE kwargs['subplot_kws'] = subplot_kwsNEWLINE if ndims == 1:NEWLINE plotfunc = lineNEWLINE kwargs['hue'] = hueNEWLINE elif ndims == 2:NEWLINE if hue:NEWLINE plotfunc = lineNEWLINE kwargs['hue'] = hueNEWLINE else:NEWLINE plotfunc = pcolormeshNEWLINE else:NEWLINE if row or col or hue:NEWLINE raise ValueError(error_msg)NEWLINE plotfunc = histNEWLINENEWLINE kwargs['ax'] = axNEWLINENEWLINE return plotfunc(darray, **kwargs)NEWLINENEWLINENEWLINE# This function signature should not change so that it can useNEWLINE# matplotlib format stringsNEWLINEdef line(darray, *args, row=None, col=None, figsize=None, aspect=None,NEWLINE size=None, ax=None, hue=None, x=None, y=None, xincrease=None,NEWLINE yincrease=None, xscale=None, yscale=None, xticks=None, yticks=None,NEWLINE xlim=None, ylim=None, add_legend=True, _labels=True, **kwargs):NEWLINE """NEWLINE Line plot of DataArray index against valuesNEWLINENEWLINE Wraps :func:`matplotlib:matplotlib.pyplot.plot`NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE darray : DataArrayNEWLINE Must be 1 dimensionalNEWLINE figsize : tuple, optionalNEWLINE A tuple (width, height) of the figure in inches.NEWLINE Mutually exclusive with ``size`` and ``ax``.NEWLINE aspect : scalar, optionalNEWLINE Aspect ratio of plot, so that ``aspect * size`` gives the width inNEWLINE inches. Only used if a ``size`` is provided.NEWLINE size : scalar, optionalNEWLINE If provided, create a new figure for the plot with the given size.NEWLINE Height (in inches) of each plot. See also: ``aspect``.NEWLINE ax : matplotlib axes object, optionalNEWLINE Axis on which to plot this figure. By default, use the current axis.NEWLINE Mutually exclusive with ``size`` and ``figsize``.NEWLINE hue : string, optionalNEWLINE Dimension or coordinate for which you want multiple lines plotted.NEWLINE If plotting against a 2D coordinate, ``hue`` must be a dimension.NEWLINE x, y : string, optionalNEWLINE Dimensions or coordinates for x, y axis.NEWLINE Only one of these may be specified.NEWLINE The other coordinate plots values from the DataArray on which thisNEWLINE plot method is called.NEWLINE xscale, yscale : 'linear', 'symlog', 'log', 'logit', optionalNEWLINE Specifies scaling for the x- and y-axes respectivelyNEWLINE xticks, yticks : Specify tick locations for x- and y-axesNEWLINE xlim, ylim : Specify x- and y-axes limitsNEWLINE xincrease : None, True, or False, optionalNEWLINE Should the values on the x axes be increasing from left to right?NEWLINE if None, use the default for the matplotlib function.NEWLINE yincrease : None, True, or False, optionalNEWLINE Should the values on the y axes be increasing from top to bottom?NEWLINE if None, use the default for the matplotlib function.NEWLINE add_legend : boolean, optionalNEWLINE Add legend with y axis coordinates (2D inputs only).NEWLINE *args, **kwargs : optionalNEWLINE Additional arguments to matplotlib.pyplot.plotNEWLINE """NEWLINE # Handle facetgrids firstNEWLINE if row or col:NEWLINE allargs = locals().copy()NEWLINE allargs.update(allargs.pop('kwargs'))NEWLINE allargs.pop('darray')NEWLINE return _easy_facetgrid(darray, line, kind='line', **allargs)NEWLINENEWLINE ndims = len(darray.dims)NEWLINE if ndims > 2:NEWLINE raise ValueError('Line plots are for 1- or 2-dimensional DataArrays. 
'NEWLINE 'Passed DataArray has {ndims} 'NEWLINE 'dimensions'.format(ndims=ndims))NEWLINENEWLINE # The allargs dict passed to _easy_facetgrid above contains argsNEWLINE if args == ():NEWLINE args = kwargs.pop('args', ())NEWLINE else:NEWLINE assert 'args' not in kwargsNEWLINENEWLINE ax = get_axis(figsize, size, aspect, ax)NEWLINE xplt, yplt, hueplt, xlabel, ylabel, huelabel = \NEWLINE _infer_line_data(darray, x, y, hue)NEWLINENEWLINE # Remove pd.Intervals if contained in xplt.values.NEWLINE if _valid_other_type(xplt.values, [pd.Interval]):NEWLINE # Is it a step plot? (see matplotlib.Axes.step)NEWLINE if kwargs.get('linestyle', '').startswith('steps-'):NEWLINE xplt_val, yplt_val = _interval_to_double_bound_points(xplt.values,NEWLINE yplt.values)NEWLINE # Remove steps-* to be sure that matplotlib is not confusedNEWLINE kwargs['linestyle'] = (kwargs['linestyle']NEWLINE .replace('steps-pre', '')NEWLINE .replace('steps-post', '')NEWLINE .replace('steps-mid', ''))NEWLINE if kwargs['linestyle'] == '':NEWLINE del kwargs['linestyle']NEWLINE else:NEWLINE xplt_val = _interval_to_mid_points(xplt.values)NEWLINE yplt_val = yplt.valuesNEWLINE xlabel += '_center'NEWLINE else:NEWLINE xplt_val = xplt.valuesNEWLINE yplt_val = yplt.valuesNEWLINENEWLINE _ensure_plottable(xplt_val, yplt_val)NEWLINENEWLINE primitive = ax.plot(xplt_val, yplt_val, *args, **kwargs)NEWLINENEWLINE if _labels:NEWLINE if xlabel is not None:NEWLINE ax.set_xlabel(xlabel)NEWLINENEWLINE if ylabel is not None:NEWLINE ax.set_ylabel(ylabel)NEWLINENEWLINE ax.set_title(darray._title_for_slice())NEWLINENEWLINE if darray.ndim == 2 and add_legend:NEWLINE ax.legend(handles=primitive,NEWLINE labels=list(hueplt.values),NEWLINE title=huelabel)NEWLINENEWLINE # Rotate dates on xlabelsNEWLINE # Do this without calling autofmt_xdate so that x-axes ticksNEWLINE # on other subplots (if any) are not deleted.NEWLINE # https://stackoverflow.com/questions/17430105/autofmt-xdate-deletes-x-axis-labels-of-all-subplotsNEWLINE if np.issubdtype(xplt.dtype, np.datetime64):NEWLINE for xlabels in ax.get_xticklabels():NEWLINE xlabels.set_rotation(30)NEWLINE xlabels.set_ha('right')NEWLINENEWLINE _update_axes(ax, xincrease, yincrease, xscale, yscale,NEWLINE xticks, yticks, xlim, ylim)NEWLINENEWLINE return primitiveNEWLINENEWLINENEWLINEdef step(darray, *args, where='pre', linestyle=None, ls=None, **kwargs):NEWLINE """NEWLINE Step plot of DataArray index against valuesNEWLINENEWLINE Similar to :func:`matplotlib:matplotlib.pyplot.step`NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE where : {'pre', 'post', 'mid'}, optional, default 'pre'NEWLINE Define where the steps should be placed:NEWLINE - 'pre': The y value is continued constantly to the left fromNEWLINE every *x* position, i.e. the interval ``(x[i-1], x[i]]`` has theNEWLINE value ``y[i]``.NEWLINE - 'post': The y value is continued constantly to the right fromNEWLINE every *x* position, i.e. the interval ``[x[i], x[i+1])`` has theNEWLINE value ``y[i]``.NEWLINE - 'mid': Steps occur half-way between the *x* positions.NEWLINE Note that this parameter is ignored if the x coordinate consists ofNEWLINE :py:func:`pandas.Interval` values, e.g. as a result ofNEWLINE :py:func:`xarray.Dataset.groupby_bins`. 
In this case, the actualNEWLINE boundaries of the interval are used.NEWLINENEWLINE *args, **kwargs : optionalNEWLINE Additional arguments following :py:func:`xarray.plot.line`NEWLINE """NEWLINE if where not in {'pre', 'post', 'mid'}:NEWLINE raise ValueError("'where' argument to step must be "NEWLINE "'pre', 'post' or 'mid'")NEWLINENEWLINE if ls is not None:NEWLINE if linestyle is None:NEWLINE linestyle = lsNEWLINE else:NEWLINE raise TypeError('ls and linestyle are mutually exclusive')NEWLINE if linestyle is None:NEWLINE linestyle = ''NEWLINE linestyle = 'steps-' + where + linestyleNEWLINENEWLINE return line(darray, *args, linestyle=linestyle, **kwargs)NEWLINENEWLINENEWLINEdef hist(darray, figsize=None, size=None, aspect=None, ax=None,NEWLINE xincrease=None, yincrease=None, xscale=None, yscale=None,NEWLINE xticks=None, yticks=None, xlim=None, ylim=None, **kwargs):NEWLINE """NEWLINE Histogram of DataArrayNEWLINENEWLINE Wraps :func:`matplotlib:matplotlib.pyplot.hist`NEWLINENEWLINE Plots N dimensional arrays by first flattening the array.NEWLINENEWLINE ParametersNEWLINE ----------NEWLINE darray : DataArrayNEWLINE Can be any dimensionNEWLINE figsize : tuple, optionalNEWLINE A tuple (width, height) of the figure in inches.NEWLINE Mutually exclusive with ``size`` and ``ax``.NEWLINE aspect : scalar, optionalNEWLINE Aspect ratio of plot, so that ``aspect * size`` gives the width inNEWLINE inches. Only used if a ``size`` is provided.NEWLINE size : scalar, optionalNEWLINE If provided, create a new figure for the plot with the given size.NEWLINE Height (in inches) of each plot. See also: ``aspect``.NEWLINE ax : matplotlib axes object, optionalNEWLINE Axis on which to plot this figure. By default, use the current axis.NEWLINE Mutually exclusive with ``size`` and ``figsize``.NEWLINE **kwargs : optionalNEWLINE Additional keyword arguments to matplotlib.pyplot.histNEWLINENEWLINE """NEWLINE ax = get_axis(figsize, size, aspect, ax)NEWLINENEWLINE no_nan = np.ravel(darray.values)NEWLINE no_nan = no_nan[pd.notnull(no_nan)]NEWLINENEWLINE primitive = ax.hist(no_nan, **kwargs)NEWLINENEWLINE ax.set_title('Histogram')NEWLINE ax.set_xlabel(label_from_attrs(darray))NEWLINENEWLINE _update_axes(ax, xincrease, yincrease, xscale, yscale,NEWLINE xticks, yticks, xlim, ylim)NEWLINENEWLINE return primitiveNEWLINENEWLINENEWLINE# MUST run before any 2d plotting functions are defined sinceNEWLINE# _plot2d decorator adds them as methods here.NEWLINEclass _PlotMethods:NEWLINE """NEWLINE Enables use of xarray.plot functions as attributes on a DataArray.NEWLINE For example, DataArray.plot.imshowNEWLINE """NEWLINENEWLINE def __init__(self, darray):NEWLINE self._da = darrayNEWLINENEWLINE def __call__(self, **kwargs):NEWLINE return plot(self._da, **kwargs)NEWLINENEWLINE @functools.wraps(hist)NEWLINE def hist(self, ax=None, **kwargs):NEWLINE return hist(self._da, ax=ax, **kwargs)NEWLINENEWLINE @functools.wraps(line)NEWLINE def line(self, *args, **kwargs):NEWLINE return line(self._da, *args, **kwargs)NEWLINENEWLINE @functools.wraps(step)NEWLINE def step(self, *args, **kwargs):NEWLINE return step(self._da, *args, **kwargs)NEWLINENEWLINENEWLINEdef _plot2d(plotfunc):NEWLINE """NEWLINE Decorator for common 2d plotting logicNEWLINENEWLINE Also adds the 2d plot method to class _PlotMethodsNEWLINE """NEWLINE commondoc = """NEWLINE ParametersNEWLINE ----------NEWLINE darray : DataArrayNEWLINE Must be 2 dimensional, unless creating faceted plotsNEWLINE x : string, optionalNEWLINE Coordinate for x axis. 
If None use darray.dims[1]NEWLINE y : string, optionalNEWLINE Coordinate for y axis. If None use darray.dims[0]NEWLINE figsize : tuple, optionalNEWLINE A tuple (width, height) of the figure in inches.NEWLINE Mutually exclusive with ``size`` and ``ax``.NEWLINE aspect : scalar, optionalNEWLINE Aspect ratio of plot, so that ``aspect * size`` gives the width inNEWLINE inches. Only used if a ``size`` is provided.NEWLINE size : scalar, optionalNEWLINE If provided, create a new figure for the plot with the given size.NEWLINE Height (in inches) of each plot. See also: ``aspect``.NEWLINE ax : matplotlib axes object, optionalNEWLINE Axis on which to plot this figure. By default, use the current axis.NEWLINE Mutually exclusive with ``size`` and ``figsize``.NEWLINE row : string, optionalNEWLINE If passed, make row faceted plots on this dimension nameNEWLINE col : string, optionalNEWLINE If passed, make column faceted plots on this dimension nameNEWLINE col_wrap : integer, optionalNEWLINE Use together with ``col`` to wrap faceted plotsNEWLINE xscale, yscale : 'linear', 'symlog', 'log', 'logit', optionalNEWLINE Specifies scaling for the x- and y-axes respectivelyNEWLINE xticks, yticks : Specify tick locations for x- and y-axesNEWLINE xlim, ylim : Specify x- and y-axes limitsNEWLINE xincrease : None, True, or False, optionalNEWLINE Should the values on the x axes be increasing from left to right?NEWLINE if None, use the default for the matplotlib function.NEWLINE yincrease : None, True, or False, optionalNEWLINE Should the values on the y axes be increasing from top to bottom?NEWLINE if None, use the default for the matplotlib function.NEWLINE add_colorbar : Boolean, optionalNEWLINE Adds colorbar to axisNEWLINE add_labels : Boolean, optionalNEWLINE Use xarray metadata to label axesNEWLINE norm : ``matplotlib.colors.Normalize`` instance, optionalNEWLINE If the ``norm`` has vmin or vmax specified, the corresponding kwargNEWLINE must be None.NEWLINE vmin, vmax : floats, optionalNEWLINE Values to anchor the colormap, otherwise they are inferred from theNEWLINE data and other keyword arguments. When a diverging dataset is inferred,NEWLINE setting one of these values will fix the other by symmetry aroundNEWLINE ``center``. Setting both values prevents use of a diverging colormap.NEWLINE If discrete levels are provided as an explicit list, both of theseNEWLINE values are ignored.NEWLINE cmap : matplotlib colormap name or object, optionalNEWLINE The mapping from data values to color space. If not provided, thisNEWLINE will be either ``viridis`` (if the function infers a sequentialNEWLINE dataset) or ``RdBu_r`` (if the function infers a diverging dataset).NEWLINE When `Seaborn` is installed, ``cmap`` may also be a `seaborn`NEWLINE color palette. If ``cmap`` is a seaborn color palette and the plot typeNEWLINE is not ``contour`` or ``contourf``, ``levels`` must also be specified.NEWLINE colors : discrete colors to plot, optionalNEWLINE A single color or a list of colors. If the plot type is not ``contour``NEWLINE or ``contourf``, the ``levels`` argument is required.NEWLINE center : float, optionalNEWLINE The value at which to center the colormap. Passing this value impliesNEWLINE use of a diverging colormap. 
Setting it to ``False`` prevents use of aNEWLINE diverging colormap.NEWLINE robust : bool, optionalNEWLINE If True and ``vmin`` or ``vmax`` are absent, the colormap range isNEWLINE computed with 2nd and 98th percentiles instead of the extreme values.NEWLINE extend : {'neither', 'both', 'min', 'max'}, optionalNEWLINE How to draw arrows extending the colorbar beyond its limits. If notNEWLINE provided, extend is inferred from vmin, vmax and the data limits.NEWLINE levels : int or list-like object, optionalNEWLINE Split the colormap (cmap) into discrete color intervals. If an integerNEWLINE is provided, "nice" levels are chosen based on the data range: this canNEWLINE imply that the final number of levels is not exactly the expected one.NEWLINE Setting ``vmin`` and/or ``vmax`` with ``levels=N`` is equivalent toNEWLINE setting ``levels=np.linspace(vmin, vmax, N)``.NEWLINE infer_intervals : bool, optionalNEWLINE Only applies to pcolormesh. If True, the coordinate intervals areNEWLINE passed to pcolormesh. If False, the original coordinates are usedNEWLINE (this can be useful for certain map projections). The default is toNEWLINE always infer intervals, unless the mesh is irregular and plotted onNEWLINE a map projection.NEWLINE subplot_kws : dict, optionalNEWLINE Dictionary of keyword arguments for matplotlib subplots. Only appliesNEWLINE to FacetGrid plotting.NEWLINE cbar_ax : matplotlib Axes, optionalNEWLINE Axes in which to draw the colorbar.NEWLINE cbar_kwargs : dict, optionalNEWLINE Dictionary of keyword arguments to pass to the colorbar.NEWLINE **kwargs : optionalNEWLINE Additional arguments to wrapped matplotlib functionNEWLINENEWLINE ReturnsNEWLINE -------NEWLINE artist :NEWLINE The same type of primitive artist that the wrapped matplotlibNEWLINE function returnsNEWLINE """NEWLINENEWLINE # Build on the original docstringNEWLINE plotfunc.__doc__ = '%s\n%s' % (plotfunc.__doc__, commondoc)NEWLINENEWLINE @functools.wraps(plotfunc)NEWLINE def newplotfunc(darray, x=None, y=None, figsize=None, size=None,NEWLINE aspect=None, ax=None, row=None, col=None,NEWLINE col_wrap=None, xincrease=True, yincrease=True,NEWLINE add_colorbar=None, add_labels=True, vmin=None, vmax=None,NEWLINE cmap=None, center=None, robust=False, extend=None,NEWLINE levels=None, infer_intervals=None, colors=None,NEWLINE subplot_kws=None, cbar_ax=None, cbar_kwargs=None,NEWLINE xscale=None, yscale=None, xticks=None, yticks=None,NEWLINE xlim=None, ylim=None, norm=None, **kwargs):NEWLINE # All 2d plots in xarray share this function signature.NEWLINE # Method signature below should be consistent.NEWLINENEWLINE # Decide on a default for the colorbar before facetgridsNEWLINE if add_colorbar is None:NEWLINE add_colorbar = plotfunc.__name__ != 'contour'NEWLINE imshow_rgb = (NEWLINE plotfunc.__name__ == 'imshow' andNEWLINE darray.ndim == (3 + (row is not None) + (col is not None)))NEWLINE if imshow_rgb:NEWLINE # Don't add a colorbar when showing an image with explicit colorsNEWLINE add_colorbar = FalseNEWLINE # Matplotlib does not support normalising RGB data, so do it here.NEWLINE # See eg. 
https://github.com/matplotlib/matplotlib/pull/10220NEWLINE if robust or vmax is not None or vmin is not None:NEWLINE darray = _rescale_imshow_rgb(darray, vmin, vmax, robust)NEWLINE vmin, vmax, robust = None, None, FalseNEWLINENEWLINE # Handle facetgrids firstNEWLINE if row or col:NEWLINE allargs = locals().copy()NEWLINE del allargs['darray']NEWLINE del allargs['imshow_rgb']NEWLINE allargs.update(allargs.pop('kwargs'))NEWLINE # Need the decorated plotting functionNEWLINE allargs['plotfunc'] = globals()[plotfunc.__name__]NEWLINE return _easy_facetgrid(darray, kind='dataarray', **allargs)NEWLINENEWLINE plt = import_matplotlib_pyplot()NEWLINENEWLINE rgb = kwargs.pop('rgb', None)NEWLINE if rgb is not None and plotfunc.__name__ != 'imshow':NEWLINE raise ValueError('The "rgb" keyword is only valid for imshow()')NEWLINE elif rgb is not None and not imshow_rgb:NEWLINE raise ValueError('The "rgb" keyword is only valid for imshow() 'NEWLINE 'with a three-dimensional array (per facet)')NEWLINENEWLINE xlab, ylab = _infer_xy_labels(NEWLINE darray=darray, x=x, y=y, imshow=imshow_rgb, rgb=rgb)NEWLINENEWLINE # better to pass the ndarrays directly to plotting functionsNEWLINE xval = darray[xlab].valuesNEWLINE yval = darray[ylab].valuesNEWLINENEWLINE # check if we need to broadcast one dimensionNEWLINE if xval.ndim < yval.ndim:NEWLINE xval = np.broadcast_to(xval, yval.shape)NEWLINENEWLINE if yval.ndim < xval.ndim:NEWLINE yval = np.broadcast_to(yval, xval.shape)NEWLINENEWLINE # May need to transpose for correct x, y labelsNEWLINE # xlab may be the name of a coord, we have to check for dim namesNEWLINE if imshow_rgb:NEWLINE # For RGB[A] images, matplotlib requires the color dimensionNEWLINE # to be last. In Xarray the order should be unimportant, soNEWLINE # we transpose to (y, x, color) to make this work.NEWLINE yx_dims = (ylab, xlab)NEWLINE dims = yx_dims + tuple(d for d in darray.dims if d not in yx_dims)NEWLINE if dims != darray.dims:NEWLINE darray = darray.transpose(*dims, transpose_coords=True)NEWLINE elif darray[xlab].dims[-1] == darray.dims[0]:NEWLINE darray = darray.transpose(transpose_coords=True)NEWLINENEWLINE # Pass the data as a masked ndarray tooNEWLINE zval = darray.to_masked_array(copy=False)NEWLINENEWLINE # Replace pd.Intervals if contained in xval or yval.NEWLINE xplt, xlab_extra = _resolve_intervals_2dplot(xval, plotfunc.__name__)NEWLINE yplt, ylab_extra = _resolve_intervals_2dplot(yval, plotfunc.__name__)NEWLINENEWLINE _ensure_plottable(xplt, yplt)NEWLINENEWLINE cmap_params, cbar_kwargs = _process_cmap_cbar_kwargs(NEWLINE plotfunc, zval.data, **locals())NEWLINENEWLINE if 'contour' in plotfunc.__name__:NEWLINE # extend is a keyword argument only for contour and contourf, butNEWLINE # passing it to the colorbar is sufficient for imshow andNEWLINE # pcolormeshNEWLINE kwargs['extend'] = cmap_params['extend']NEWLINE kwargs['levels'] = cmap_params['levels']NEWLINE # if colors == a single color, matplotlib draws dashed negativeNEWLINE # contours. 
we lose this feature if we pass cmap and not colorsNEWLINE if isinstance(colors, str):NEWLINE cmap_params['cmap'] = NoneNEWLINE kwargs['colors'] = colorsNEWLINENEWLINE if 'pcolormesh' == plotfunc.__name__:NEWLINE kwargs['infer_intervals'] = infer_intervalsNEWLINENEWLINE if 'imshow' == plotfunc.__name__ and isinstance(aspect, str):NEWLINE # forbid usage of mpl stringsNEWLINE raise ValueError("plt.imshow's `aspect` kwarg is not available "NEWLINE "in xarray")NEWLINENEWLINE ax = get_axis(figsize, size, aspect, ax)NEWLINE primitive = plotfunc(xplt, yplt, zval, ax=ax, cmap=cmap_params['cmap'],NEWLINE vmin=cmap_params['vmin'],NEWLINE vmax=cmap_params['vmax'],NEWLINE norm=cmap_params['norm'],NEWLINE **kwargs)NEWLINENEWLINE # Label the plot with metadataNEWLINE if add_labels:NEWLINE ax.set_xlabel(label_from_attrs(darray[xlab], xlab_extra))NEWLINE ax.set_ylabel(label_from_attrs(darray[ylab], ylab_extra))NEWLINE ax.set_title(darray._title_for_slice())NEWLINENEWLINE if add_colorbar:NEWLINE if add_labels and 'label' not in cbar_kwargs:NEWLINE cbar_kwargs['label'] = label_from_attrs(darray)NEWLINE cbar = _add_colorbar(primitive, ax, cbar_ax, cbar_kwargs,NEWLINE cmap_params)NEWLINENEWLINE elif cbar_ax is not None or cbar_kwargs:NEWLINE # inform the user about keywords which aren't usedNEWLINE raise ValueError("cbar_ax and cbar_kwargs can't be used with "NEWLINE "add_colorbar=False.")NEWLINENEWLINE # origin kwarg overrides yincreaseNEWLINE if 'origin' in kwargs:NEWLINE yincrease = NoneNEWLINENEWLINE _update_axes(ax, xincrease, yincrease, xscale, yscale,NEWLINE xticks, yticks, xlim, ylim)NEWLINENEWLINE # Rotate dates on xlabelsNEWLINE # Do this without calling autofmt_xdate so that x-axes ticksNEWLINE # on other subplots (if any) are not deleted.NEWLINE # https://stackoverflow.com/questions/17430105/autofmt-xdate-deletes-x-axis-labels-of-all-subplotsNEWLINE if np.issubdtype(xplt.dtype, np.datetime64):NEWLINE for xlabels in ax.get_xticklabels():NEWLINE xlabels.set_rotation(30)NEWLINE xlabels.set_ha('right')NEWLINENEWLINE return primitiveNEWLINENEWLINE # For use as DataArray.plot.plotmethodNEWLINE @functools.wraps(newplotfunc)NEWLINE def plotmethod(_PlotMethods_obj, x=None, y=None, figsize=None, size=None,NEWLINE aspect=None, ax=None, row=None, col=None, col_wrap=None,NEWLINE xincrease=True, yincrease=True, add_colorbar=None,NEWLINE add_labels=True, vmin=None, vmax=None, cmap=None,NEWLINE colors=None, center=None, robust=False, extend=None,NEWLINE levels=None, infer_intervals=None, subplot_kws=None,NEWLINE cbar_ax=None, cbar_kwargs=None,NEWLINE xscale=None, yscale=None, xticks=None, yticks=None,NEWLINE xlim=None, ylim=None, norm=None, **kwargs):NEWLINE """NEWLINE The method should have the same signature as the function.NEWLINENEWLINE This just makes the method work on Plotmethods objects,NEWLINE and passes all the other arguments straight through.NEWLINE """NEWLINE allargs = locals()NEWLINE allargs['darray'] = _PlotMethods_obj._daNEWLINE allargs.update(kwargs)NEWLINE for arg in ['_PlotMethods_obj', 'newplotfunc', 'kwargs']:NEWLINE del allargs[arg]NEWLINE return newplotfunc(**allargs)NEWLINENEWLINE # Add to class _PlotMethodsNEWLINE setattr(_PlotMethods, plotmethod.__name__, plotmethod)NEWLINENEWLINE return newplotfuncNEWLINENEWLINENEWLINE@_plot2dNEWLINEdef imshow(x, y, z, ax, **kwargs):NEWLINE """NEWLINE Image plot of 2d DataArray using matplotlib.pyplotNEWLINENEWLINE Wraps :func:`matplotlib:matplotlib.pyplot.imshow`NEWLINENEWLINE While other plot methods require the DataArray to be strictlyNEWLINE 
two-dimensional, ``imshow`` also accepts a 3D array where someNEWLINE dimension can be interpreted as RGB or RGBA color channels andNEWLINE allows this dimension to be specified via the kwarg ``rgb=``.NEWLINENEWLINE Unlike matplotlib, Xarray can apply ``vmin`` and ``vmax`` to RGB or RGBANEWLINE data, by applying a single scaling factor and offset to all bands.NEWLINE Passing ``robust=True`` infers ``vmin`` and ``vmax``NEWLINE :ref:`in the usual way <robust-plotting>`.NEWLINENEWLINE .. note::NEWLINE This function needs uniformly spaced coordinates toNEWLINE properly label the axes. Call DataArray.plot() to check.NEWLINENEWLINE The pixels are centered on the coordinate values. I.e., if the coordinateNEWLINE value is 3.2 then the pixels for those coordinates will be centered on 3.2.NEWLINE """NEWLINENEWLINE if x.ndim != 1 or y.ndim != 1:NEWLINE raise ValueError('imshow requires 1D coordinates, try using 'NEWLINE 'pcolormesh or contour(f)')NEWLINENEWLINE # Centering the pixels - assumes uniform spacingNEWLINE try:NEWLINE xstep = (x[1] - x[0]) / 2.0NEWLINE except IndexError:NEWLINE # Arbitrary default value, similar to matplotlib behaviourNEWLINE xstep = .1NEWLINE try:NEWLINE ystep = (y[1] - y[0]) / 2.0NEWLINE except IndexError:NEWLINE ystep = .1NEWLINE left, right = x[0] - xstep, x[-1] + xstepNEWLINE bottom, top = y[-1] + ystep, y[0] - ystepNEWLINENEWLINE defaults = {'origin': 'upper',NEWLINE 'interpolation': 'nearest'}NEWLINENEWLINE if not hasattr(ax, 'projection'):NEWLINE # not for cartopy geoaxesNEWLINE defaults['aspect'] = 'auto'NEWLINENEWLINE # Allow user to override these defaultsNEWLINE defaults.update(kwargs)NEWLINENEWLINE if defaults['origin'] == 'upper':NEWLINE defaults['extent'] = [left, right, bottom, top]NEWLINE else:NEWLINE defaults['extent'] = [left, right, top, bottom]NEWLINENEWLINE if z.ndim == 3:NEWLINE # matplotlib imshow uses black for missing data, but Xarray makesNEWLINE # missing data transparent. 
We therefore add an alpha channel ifNEWLINE # there isn't one, and set it to transparent where data is masked.NEWLINE if z.shape[-1] == 3:NEWLINE alpha = np.ma.ones(z.shape[:2] + (1,), dtype=z.dtype)NEWLINE if np.issubdtype(z.dtype, np.integer):NEWLINE alpha *= 255NEWLINE z = np.ma.concatenate((z, alpha), axis=2)NEWLINE else:NEWLINE z = z.copy()NEWLINE z[np.any(z.mask, axis=-1), -1] = 0NEWLINENEWLINE primitive = ax.imshow(z, **defaults)NEWLINENEWLINE return primitiveNEWLINENEWLINENEWLINE@_plot2dNEWLINEdef contour(x, y, z, ax, **kwargs):NEWLINE """NEWLINE Contour plot of 2d DataArrayNEWLINENEWLINE Wraps :func:`matplotlib:matplotlib.pyplot.contour`NEWLINE """NEWLINE primitive = ax.contour(x, y, z, **kwargs)NEWLINE return primitiveNEWLINENEWLINENEWLINE@_plot2dNEWLINEdef contourf(x, y, z, ax, **kwargs):NEWLINE """NEWLINE Filled contour plot of 2d DataArrayNEWLINENEWLINE Wraps :func:`matplotlib:matplotlib.pyplot.contourf`NEWLINE """NEWLINE primitive = ax.contourf(x, y, z, **kwargs)NEWLINE return primitiveNEWLINENEWLINENEWLINE@_plot2dNEWLINEdef pcolormesh(x, y, z, ax, infer_intervals=None, **kwargs):NEWLINE """NEWLINE Pseudocolor plot of 2d DataArrayNEWLINENEWLINE Wraps :func:`matplotlib:matplotlib.pyplot.pcolormesh`NEWLINE """NEWLINENEWLINE # decide on a default for infer_intervals (GH781)NEWLINE x = np.asarray(x)NEWLINE if infer_intervals is None:NEWLINE if hasattr(ax, 'projection'):NEWLINE if len(x.shape) == 1:NEWLINE infer_intervals = TrueNEWLINE else:NEWLINE infer_intervals = FalseNEWLINE else:NEWLINE infer_intervals = TrueNEWLINENEWLINE if (infer_intervals andNEWLINE ((np.shape(x)[0] == np.shape(z)[1]) orNEWLINE ((x.ndim > 1) and (np.shape(x)[1] == np.shape(z)[1])))):NEWLINE if len(x.shape) == 1:NEWLINE x = _infer_interval_breaks(x, check_monotonic=True)NEWLINE else:NEWLINE # we have to infer the intervals on both axesNEWLINE x = _infer_interval_breaks(x, axis=1)NEWLINE x = _infer_interval_breaks(x, axis=0)NEWLINENEWLINE if (infer_intervals andNEWLINE (np.shape(y)[0] == np.shape(z)[0])):NEWLINE if len(y.shape) == 1:NEWLINE y = _infer_interval_breaks(y, check_monotonic=True)NEWLINE else:NEWLINE # we have to infer the intervals on both axesNEWLINE y = _infer_interval_breaks(y, axis=1)NEWLINE y = _infer_interval_breaks(y, axis=0)NEWLINENEWLINE primitive = ax.pcolormesh(x, y, z, **kwargs)NEWLINENEWLINE # by default, pcolormesh picks "round" values for boundsNEWLINE # this results in ugly looking plots with lots of surrounding whitespaceNEWLINE if not hasattr(ax, 'projection') and x.ndim == 1 and y.ndim == 1:NEWLINE # not a cartopy geoaxisNEWLINE ax.set_xlim(x[0], x[-1])NEWLINE ax.set_ylim(y[0], y[-1])NEWLINENEWLINE return primitiveNEWLINE |
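For orientation, a sketch of how these entry points are typically reached through the `DataArray.plot` accessor; the sample data below is made up for illustration:

```python
import numpy as np
import xarray as xr

da = xr.DataArray(np.random.rand(4, 5), dims=("y", "x"),
                  coords={"y": np.arange(4), "x": np.arange(5)})

da.plot()              # 2D without hue -> dispatches to pcolormesh
da.plot.line(hue="y")  # 2D with hue -> one line per value of the "y" dim
da.isel(y=0).plot()    # squeezes to 1D -> dispatches to line
```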
# vim: set et sw=4 sts=4 fileencoding=utf-8:NEWLINE#NEWLINE# Raspberry Pi Sense HAT Emulator library for the Raspberry PiNEWLINE# Copyright (c) 2016 Raspberry Pi Foundation <[email protected]>NEWLINE#NEWLINE# This package is free software; you can redistribute it and/or modify it underNEWLINE# the terms of the GNU Lesser General Public License as published by the FreeNEWLINE# Software Foundation; either version 2.1 of the License, or (at your option)NEWLINE# any later version.NEWLINE#NEWLINE# This package is distributed in the hope that it will be useful, but WITHOUTNEWLINE# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESSNEWLINE# FOR A PARTICULAR PURPOSE. See the GNU General Public License for moreNEWLINE# details.NEWLINE#NEWLINE# You should have received a copy of the GNU Lesser General Public LicenseNEWLINE# along with this program. If not, see <http://www.gnu.org/licenses/>NEWLINENEWLINEfrom __future__ import (NEWLINE unicode_literals,NEWLINE absolute_import,NEWLINE print_function,NEWLINE division,NEWLINE )NEWLINEnstr = strNEWLINEstr = type('')NEWLINENEWLINEimport sysNEWLINEimport osNEWLINEimport ioNEWLINEimport mmapNEWLINEimport errnoNEWLINEfrom struct import StructNEWLINEfrom collections import namedtupleNEWLINEfrom random import RandomNEWLINEfrom time import timeNEWLINEfrom threading import Thread, EventNEWLINEfrom math import isnanNEWLINENEWLINEimport numpy as npNEWLINENEWLINEfrom .common import clampNEWLINENEWLINENEWLINE# See LPS25H data-sheet for details of register valuesNEWLINEPRESSURE_FACTOR = 4096NEWLINETEMP_OFFSET = 37NEWLINETEMP_FACTOR = 480NEWLINEPRESSURE_DATA = Struct(nstr(NEWLINE '@' # native modeNEWLINE 'B' # pressure sensor typeNEWLINE '6p' # pressure sensor nameNEWLINE 'l' # P_REFNEWLINE 'l' # P_OUTNEWLINE 'h' # T_OUTNEWLINE 'B' # P_VALIDNEWLINE 'B' # T_VALIDNEWLINE ))NEWLINENEWLINEPressureData = namedtuple('PressureData',NEWLINE ('type', 'name', 'P_REF', 'P_OUT', 'T_OUT', 'P_VALID', 'T_VALID'))NEWLINENEWLINENEWLINEdef pressure_filename():NEWLINE """NEWLINE Return the filename used to represent the state of the emulated sense HAT'sNEWLINE pressure sensor. On UNIX we try ``/dev/shm`` then fall back to ``/tmp``; onNEWLINE Windows we use whatever ``%TEMP%`` containsNEWLINE """NEWLINE fname = 'rpi-sense-emu-pressure'NEWLINE if sys.platform.startswith('win'):NEWLINE # just use a temporary file on WindowsNEWLINE return os.path.join(os.environ['TEMP'], fname)NEWLINE else:NEWLINE if os.path.exists('/dev/shm'):NEWLINE return os.path.join('/dev/shm', fname)NEWLINE else:NEWLINE return os.path.join('/tmp', fname)NEWLINENEWLINENEWLINEdef init_pressure():NEWLINE """NEWLINE Opens the file representing the state of the pressure sensors. TheNEWLINE file-like object is returned.NEWLINENEWLINE If the file already exists we simply make sure it is the right size. 
IfNEWLINE the file does not already exist, it is created and zeroed.NEWLINE """NEWLINE try:NEWLINE # Attempt to open the pressure sensor's device file and ensure it's the right sizeNEWLINE fd = io.open(pressure_filename(), 'r+b', buffering=0)NEWLINE fd.seek(PRESSURE_DATA.size)NEWLINE fd.truncate()NEWLINE except IOError as e:NEWLINE # If the sensor's device file doesn't exist, create it with reasonableNEWLINE # initial valuesNEWLINE if e.errno == errno.ENOENT:NEWLINE fd = io.open(pressure_filename(), 'w+b', buffering=0)NEWLINE fd.write(b'\x00' * PRESSURE_DATA.size)NEWLINE else:NEWLINE raiseNEWLINE return fdNEWLINENEWLINENEWLINEclass PressureServer(object):NEWLINE def __init__(self, simulate_noise=True):NEWLINE self._random = Random()NEWLINE self._fd = init_pressure()NEWLINE self._map = mmap.mmap(self._fd.fileno(), 0, access=mmap.ACCESS_WRITE)NEWLINE data = self._read()NEWLINE if data.type != 3:NEWLINE self._write(PressureData(3, b'LPS25H', 0, 0, 0, 0, 0))NEWLINE self._pressure = 1013.0NEWLINE self._temperature = 20.0NEWLINE else:NEWLINE self._pressure = data.P_OUT / 4096NEWLINE self._temperature = data.T_OUT / 480 + 42.5NEWLINE self._noise_thread = NoneNEWLINE self._noise_event = Event()NEWLINE self._noise_write()NEWLINE # The queue lengths are selected to accurately represent the responseNEWLINE # time of the sensorsNEWLINE self._pressures = np.full((25,), self._pressure, dtype=float)NEWLINE self._temperatures = np.full((25,), self._temperature, dtype=float)NEWLINE self.simulate_noise = simulate_noiseNEWLINENEWLINE def close(self):NEWLINE if self._fd:NEWLINE self.simulate_noise = FalseNEWLINE self._map.close()NEWLINE self._fd.close()NEWLINE self._fd = NoneNEWLINE self._map = NoneNEWLINENEWLINE def _perturb(self, value, error):NEWLINE """NEWLINE Return *value* perturbed by +/- *error* which is derived from aNEWLINE gaussian random generator.NEWLINE """NEWLINE # We use an internal Random() instance here to avoid a threading issueNEWLINE # with the gaussian generator (could use locks, but an instance ofNEWLINE # Random is easier and faster)NEWLINE return value + self._random.gauss(0, 0.2) * errorNEWLINENEWLINE def _read(self):NEWLINE return PressureData(*PRESSURE_DATA.unpack_from(self._map))NEWLINENEWLINE def _write(self, value):NEWLINE PRESSURE_DATA.pack_into(self._map, 0, *value)NEWLINENEWLINE @propertyNEWLINE def pressure(self):NEWLINE return self._pressureNEWLINENEWLINE @propertyNEWLINE def temperature(self):NEWLINE return self._temperatureNEWLINENEWLINE def set_values(self, pressure, temperature):NEWLINE self._pressure = pressureNEWLINE self._temperature = temperatureNEWLINE if not self._noise_thread:NEWLINE self._noise_write()NEWLINENEWLINE @propertyNEWLINE def simulate_noise(self):NEWLINE return self._noise_thread is not NoneNEWLINENEWLINE @simulate_noise.setterNEWLINE def simulate_noise(self, value):NEWLINE if value and not self._noise_thread:NEWLINE self._noise_event.clear()NEWLINE self._noise_thread = Thread(target=self._noise_loop)NEWLINE self._noise_thread.daemon = TrueNEWLINE self._noise_thread.start()NEWLINE elif self._noise_thread and not value:NEWLINE self._noise_event.set()NEWLINE self._noise_thread.join()NEWLINE self._noise_thread = NoneNEWLINE self._noise_write()NEWLINENEWLINE def _noise_loop(self):NEWLINE while not self._noise_event.wait(0.04):NEWLINE self._noise_write()NEWLINENEWLINE def _noise_write(self):NEWLINE if self.simulate_noise:NEWLINE self._pressures[1:] = self._pressures[:-1]NEWLINE self._pressures[0] = self._perturb(self.pressure, (NEWLINE 0.2 if 800 <= 
self.pressure <= 1100 and 20 <= self.temperature <= 60 elseNEWLINE 1.0))NEWLINE self._temperatures[1:] = self._temperatures[:-1]NEWLINE self._temperatures[0] = self._perturb(self.temperature, (NEWLINE 2.0 if 0 <= self.temperature <= 65 elseNEWLINE 4.0))NEWLINE pressure = self._pressures.mean()NEWLINE temperature = self._temperatures.mean()NEWLINE else:NEWLINE pressure = self.pressureNEWLINE temperature = self.temperatureNEWLINE self._write(self._read()._replace(NEWLINE P_VALID=not isnan(pressure),NEWLINE T_VALID=not isnan(temperature),NEWLINE P_OUT=0 if isnan(pressure) else int(clamp(pressure, 260, 1260) * PRESSURE_FACTOR),NEWLINE T_OUT=0 if isnan(temperature) else int((clamp(temperature, -30, 105) - TEMP_OFFSET) * TEMP_FACTOR),NEWLINE ))NEWLINENEWLINENEWLINE |
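A small sketch of driving the emulated sensor, assuming the module is importable (e.g. as `sense_emu.pressure`); the round-trip below simply re-applies the register scaling constants defined at the top of the file, and `_read()` is used here purely for inspection:

```python
server = PressureServer(simulate_noise=False)
server.set_values(pressure=1005.0, temperature=22.5)

data = server._read()  # PressureData namedtuple from the shared mmap
print(data.P_OUT / PRESSURE_FACTOR)            # ~1005.0 hPa
print(data.T_OUT / TEMP_FACTOR + TEMP_OFFSET)  # ~22.5 degC
server.close()
```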
import argparseNEWLINEimport jsonNEWLINEimport pandas as pdNEWLINEpd.options.display.float_format = '{:,.2f}'.formatNEWLINEimport randomNEWLINEimport numpy as npNEWLINEimport tqdmNEWLINENEWLINEfrom src.sim import SimNEWLINENEWLINEdef run(params):NEWLINE """simulates investing in the S&P 500 index, similar toNEWLINE investing in index fundsNEWLINENEWLINE ParametersNEWLINE ----------NEWLINE params : dictNEWLINE contains the parameters to run the simulationNEWLINE """NEWLINE # load data sourceNEWLINE data = pd.read_csv('./data/sp500.csv')NEWLINENEWLINE # create empty dataframe to store resultsNEWLINE res = pd.DataFrame(NEWLINE columns=['length','mean','median','std','iqr',NEWLINE 'wins','losses','zero','total','wins/losses',NEWLINE 'a_r_mean','a_r_median','a_r_std'])NEWLINE res_all = pd.DataFrame(NEWLINE columns=['len', 'year', 'month',NEWLINE 'gain', 'annualized_returns'])NEWLINE NEWLINE for i_l, length in enumerate(params['lengths']):NEWLINE for i_y, year in enumerate(params['years']):NEWLINE for i_m, month in enumerate(params['months']):NEWLINE # calculate the right row to store results before theNEWLINE # try block, so it is defined in the except branch tooNEWLINE i_res_all = i_l*len(params['years'])*len(params['months']) + \NEWLINE i_y*len(params['months']) + i_mNEWLINE try:NEWLINE config={'buy': params['buy'],NEWLINE 'buy_year': year,NEWLINE 'buy_month': month,NEWLINE 'sell_year': year+length,NEWLINE 'sell_month': month,NEWLINE 'dividends': params['dividends'],NEWLINE 'inflation_corrected': False}NEWLINE NEWLINE sim = Sim(config, data)NEWLINE sim.run()NEWLINE res_all.at[i_res_all, 'len'] = lengthNEWLINE res_all.at[i_res_all, 'year'] = yearNEWLINE res_all.at[i_res_all, 'month'] = monthNEWLINE res_all.at[i_res_all, 'gain'] = sim.gainNEWLINE res_all.at[i_res_all, 'annualized_returns'] = sim.annualized_returnsNEWLINE except Exception as e:NEWLINE # happens usually when the length goes beyond the data (2021+)NEWLINE print(length, year, month, e)NEWLINE res_all.at[i_res_all, :] = np.nanNEWLINE NEWLINE res.at[i_l, 'length'] = lengthNEWLINE res.at[i_l, 'mean'] = np.mean(res_all[res_all['len']==length]['gain'])NEWLINE res.at[i_l, 'median'] = np.median(res_all[res_all['len']==length]['gain'])NEWLINE res.at[i_l, 'std'] = np.std(res_all[res_all['len']==length]['gain'])NEWLINE res.at[i_l, 'iqr'] = np.quantile(NEWLINE res_all[res_all['len']==length]['gain'], 0.75) - \NEWLINE np.quantile(res_all[res_all['len']==length]['gain'], 0.25)NEWLINE res.at[i_l, 'wins'] = np.sum(res_all[res_all['len']==length]['gain'] > 0)NEWLINE res.at[i_l, 'losses'] = np.sum(res_all[res_all['len']==length]['gain'] < 0)NEWLINE res.at[i_l, 'zero'] = np.sum(res_all[res_all['len']==length]['gain'] == 0)NEWLINE res.at[i_l, 'total'] = res.at[i_l, 'wins'] + res.at[i_l, 'losses'] + res.at[i_l, 'zero']NEWLINE res.at[i_l, 'wins/losses'] = res.at[i_l, 'wins'] / res.at[i_l, 'losses']NEWLINE res.at[i_l, 'a_r_mean'] = np.mean(np.vstack(res_all[res_all['len']==length]['annualized_returns']))NEWLINE res.at[i_l, 'a_r_median'] = np.median(np.vstack(res_all[res_all['len']==length]['annualized_returns']))NEWLINE res.at[i_l, 'a_r_std'] = np.std(np.vstack(res_all[res_all['len']==length]['annualized_returns']))NEWLINE res_all.to_csv(f'./results/res_all_buy_{params["buy"]}_dividends_{params["dividends"]}.csv')NEWLINE res.to_csv(f'./results/res_buy_{params["buy"]}_dividends_{params["dividends"]}.csv')NEWLINENEWLINEif __name__ == '__main__':NEWLINE NEWLINE parser = argparse.ArgumentParser()NEWLINE parser.add_argument("config_file", help="path to config file")NEWLINE args = parser.parse_args()NEWLINE params = 
json.load(open('./config/'+args.config_file+'.json', 'r'))NEWLINE run(params) |
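The config file itself is not shown; judging from the keys `run()` reads, an illustrative params dict would look like this (all values hypothetical, and `./data/sp500.csv` must exist):

```python
params = {
    "buy": 100,          # passed through to Sim's config as the buy amount
    "dividends": True,   # passed through to Sim's config
    "lengths": [5, 10, 20],            # holding periods in years
    "years": list(range(1950, 2001)),  # candidate buy years
    "months": [1, 4, 7, 10],           # candidate buy months
}
run(params)
```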
# proxy moduleNEWLINEfrom traitsui.wx.boolean_editor import *NEWLINE |
"""Support for getting statistical data from a Pi-hole system."""NEWLINEimport loggingNEWLINENEWLINEfrom homeassistant.const import CONF_NAMENEWLINEfrom homeassistant.helpers.entity import EntityNEWLINENEWLINEfrom .const import (NEWLINE ATTR_BLOCKED_DOMAINS,NEWLINE DATA_KEY_API,NEWLINE DATA_KEY_COORDINATOR,NEWLINE DOMAIN as PIHOLE_DOMAIN,NEWLINE SENSOR_DICT,NEWLINE SENSOR_LIST,NEWLINE)NEWLINENEWLINELOGGER = logging.getLogger(__name__)NEWLINENEWLINENEWLINEasync def async_setup_entry(hass, entry, async_add_entities):NEWLINE """Set up the Pi-hole sensor."""NEWLINE name = entry.data[CONF_NAME]NEWLINE hole_data = hass.data[PIHOLE_DOMAIN][name]NEWLINE sensors = [NEWLINE PiHoleSensor(NEWLINE hole_data[DATA_KEY_API],NEWLINE hole_data[DATA_KEY_COORDINATOR],NEWLINE name,NEWLINE sensor_name,NEWLINE entry.entry_id,NEWLINE )NEWLINE for sensor_name in SENSOR_LISTNEWLINE ]NEWLINE async_add_entities(sensors, True)NEWLINENEWLINENEWLINEclass PiHoleSensor(Entity):NEWLINE """Representation of a Pi-hole sensor."""NEWLINENEWLINE def __init__(self, api, coordinator, name, sensor_name, server_unique_id):NEWLINE """Initialize a Pi-hole sensor."""NEWLINE self.api = apiNEWLINE self.coordinator = coordinatorNEWLINE self._name = nameNEWLINE self._condition = sensor_nameNEWLINE self._server_unique_id = server_unique_idNEWLINENEWLINE variable_info = SENSOR_DICT[sensor_name]NEWLINE self._condition_name = variable_info[0]NEWLINE self._unit_of_measurement = variable_info[1]NEWLINE self._icon = variable_info[2]NEWLINENEWLINE async def async_added_to_hass(self):NEWLINE """When entity is added to hass."""NEWLINE self.async_on_remove(NEWLINE self.coordinator.async_add_listener(self.async_write_ha_state)NEWLINE )NEWLINENEWLINE @propertyNEWLINE def name(self):NEWLINE """Return the name of the sensor."""NEWLINE return f"{self._name} {self._condition_name}"NEWLINENEWLINE @propertyNEWLINE def unique_id(self):NEWLINE """Return the unique id of the sensor."""NEWLINE return f"{self._server_unique_id}/{self._condition_name}"NEWLINENEWLINE @propertyNEWLINE def device_info(self):NEWLINE """Return the device information of the sensor."""NEWLINE return {NEWLINE "identifiers": {(PIHOLE_DOMAIN, self._server_unique_id)},NEWLINE "name": self._name,NEWLINE "manufacturer": "Pi-hole",NEWLINE }NEWLINENEWLINE @propertyNEWLINE def icon(self):NEWLINE """Icon to use in the frontend, if any."""NEWLINE return self._iconNEWLINENEWLINE @propertyNEWLINE def unit_of_measurement(self):NEWLINE """Return the unit the value is expressed in."""NEWLINE return self._unit_of_measurementNEWLINENEWLINE @propertyNEWLINE def state(self):NEWLINE """Return the state of the device."""NEWLINE try:NEWLINE return round(self.api.data[self._condition], 2)NEWLINE except TypeError:NEWLINE return self.api.data[self._condition]NEWLINENEWLINE @propertyNEWLINE def device_state_attributes(self):NEWLINE """Return the state attributes of the Pi-hole."""NEWLINE return {ATTR_BLOCKED_DOMAINS: self.api.data["domains_being_blocked"]}NEWLINENEWLINE @propertyNEWLINE def available(self):NEWLINE """Could the device be accessed during the last update call."""NEWLINE return self.coordinator.last_update_successNEWLINENEWLINE @propertyNEWLINE def should_poll(self):NEWLINE """No need to poll. Coordinator notifies entity of updates."""NEWLINE return FalseNEWLINENEWLINE async def async_update(self):NEWLINE """Get the latest data from the Pi-hole API."""NEWLINE await self.coordinator.async_request_refresh()NEWLINE |
#!/usr/bin/pythonNEWLINE'''NEWLINEExtract _("...") strings for translation and convert to Qt4 stringdefs so thatNEWLINEthey can be picked up by Qt linguist.NEWLINE'''NEWLINEfrom subprocess import Popen, PIPENEWLINEimport globNEWLINEimport operatorNEWLINENEWLINEOUT_CPP="src/qt/yiffcoinstrings.cpp"NEWLINEEMPTY=['""']NEWLINENEWLINEdef parse_po(text):NEWLINE """NEWLINE Parse 'po' format produced by xgettext.NEWLINE Return a list of (msgid,msgstr) tuples.NEWLINE """NEWLINE messages = []NEWLINE msgid = []NEWLINE msgstr = []NEWLINE in_msgid = FalseNEWLINE in_msgstr = FalseNEWLINENEWLINE for line in text.split('\n'):NEWLINE line = line.rstrip('\r')NEWLINE if line.startswith('msgid '):NEWLINE if in_msgstr:NEWLINE messages.append((msgid, msgstr))NEWLINE in_msgstr = FalseNEWLINE # message startNEWLINE in_msgid = TrueNEWLINE NEWLINE msgid = [line[6:]]NEWLINE elif line.startswith('msgstr '):NEWLINE in_msgid = FalseNEWLINE in_msgstr = TrueNEWLINE msgstr = [line[7:]]NEWLINE elif line.startswith('"'):NEWLINE if in_msgid:NEWLINE msgid.append(line)NEWLINE if in_msgstr:NEWLINE msgstr.append(line)NEWLINENEWLINE if in_msgstr:NEWLINE messages.append((msgid, msgstr))NEWLINENEWLINE return messagesNEWLINENEWLINEfiles = glob.glob('src/*.cpp') + glob.glob('src/*.h') NEWLINENEWLINE# xgettext -n --keyword=_ $FILESNEWLINEchild = Popen(['xgettext','--output=-','-n','--keyword=_'] + files, stdout=PIPE)NEWLINE(out, err) = child.communicate()NEWLINENEWLINEmessages = parse_po(out) NEWLINENEWLINEf = open(OUT_CPP, 'w')NEWLINEf.write("""#include <QtGlobal>NEWLINE// Automatically generated by extract_strings.pyNEWLINE#ifdef __GNUC__NEWLINE#define UNUSED __attribute__((unused))NEWLINE#elseNEWLINE#define UNUSEDNEWLINE#endifNEWLINE""")NEWLINEf.write('static const char UNUSED *yiffcoin_strings[] = {\n')NEWLINEmessages.sort(key=operator.itemgetter(0))NEWLINEfor (msgid, msgstr) in messages:NEWLINE if msgid != EMPTY:NEWLINE f.write('QT_TRANSLATE_NOOP("yiffcoin-core", %s),\n' % ('\n'.join(msgid)))NEWLINEf.write('};')NEWLINEf.close()NEWLINE |
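A quick sanity check of `parse_po` on a hand-written fragment of xgettext output, showing how multi-line msgids are accumulated:

```python
sample = '''msgid "Send coins"
msgstr ""
msgid ""
"multi-line "
"entry"
msgstr ""'''

for msgid, msgstr in parse_po(sample):
    print(msgid)
# ['"Send coins"']
# ['""', '"multi-line "', '"entry"']
```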
# -*- coding: utf-8 -*-NEWLINENEWLINEimport pymysqlNEWLINEimport structNEWLINENEWLINEfrom pymysql.constants.COMMAND import COM_BINLOG_DUMP, COM_REGISTER_SLAVENEWLINEfrom pymysql.cursors import DictCursorNEWLINEfrom pymysql.util import int2byteNEWLINENEWLINEfrom .packet import BinLogPacketWrapperNEWLINEfrom .constants.BINLOG import TABLE_MAP_EVENT, ROTATE_EVENTNEWLINEfrom .gtid import GtidSetNEWLINEfrom .event import (NEWLINE QueryEvent, RotateEvent, FormatDescriptionEvent,NEWLINE XidEvent, GtidEvent, StopEvent,NEWLINE BeginLoadQueryEvent, ExecuteLoadQueryEvent,NEWLINE HeartbeatLogEvent, NotImplementedEvent)NEWLINEfrom .exceptions import BinLogNotEnabledNEWLINEfrom .row_event import (NEWLINE UpdateRowsEvent, WriteRowsEvent, DeleteRowsEvent, TableMapEvent)NEWLINENEWLINEtry:NEWLINE from pymysql.constants.COMMAND import COM_BINLOG_DUMP_GTIDNEWLINEexcept ImportError:NEWLINE # Handle old pymysql versionsNEWLINE # See: https://github.com/PyMySQL/PyMySQL/pull/261NEWLINE COM_BINLOG_DUMP_GTID = 0x1eNEWLINENEWLINE# 2013 Connection LostNEWLINE# 2006 MySQL server has gone awayNEWLINEMYSQL_EXPECTED_ERROR_CODES = [2013, 2006]NEWLINENEWLINENEWLINEclass ReportSlave(object):NEWLINENEWLINE """Represent the values that you may report when connecting as a slaveNEWLINE to a master. SHOW SLAVE HOSTS related"""NEWLINENEWLINE hostname = ''NEWLINE username = ''NEWLINE password = ''NEWLINE port = 0NEWLINENEWLINE def __init__(self, value):NEWLINE """NEWLINE Attributes:NEWLINE value: string, tuple or dictNEWLINE if string, it will be used as the hostnameNEWLINE if tuple, it will be used as (hostname, username, password, port)NEWLINE if dict, the keys hostname/username/password/port are readNEWLINE """NEWLINENEWLINE if isinstance(value, (tuple, list)):NEWLINE try:NEWLINE self.hostname = value[0]NEWLINE self.username = value[1]NEWLINE self.password = value[2]NEWLINE self.port = int(value[3])NEWLINE except IndexError:NEWLINE passNEWLINE elif isinstance(value, dict):NEWLINE for key in ['hostname', 'username', 'password', 'port']:NEWLINE try:NEWLINE setattr(self, key, value[key])NEWLINE except KeyError:NEWLINE passNEWLINE else:NEWLINE self.hostname = valueNEWLINENEWLINE def __repr__(self):NEWLINE return '<ReportSlave hostname=%s username=%s password=%s port=%d>' %\NEWLINE (self.hostname, self.username, self.password, self.port)NEWLINENEWLINE def encoded(self, server_id, master_id=0):NEWLINE """NEWLINE server_id: the slave server-idNEWLINE master_id: usually 0. Appears as "master id" in SHOW SLAVE HOSTSNEWLINE on the master. 
Unknown what else it impacts.NEWLINE """NEWLINENEWLINE # 1 [15] COM_REGISTER_SLAVENEWLINE # 4 server-idNEWLINE # 1 slaves hostname lengthNEWLINE # string[$len] slaves hostnameNEWLINE # 1 slaves user lenNEWLINE # string[$len] slaves userNEWLINE # 1 slaves password lenNEWLINE # string[$len] slaves passwordNEWLINE # 2 slaves mysql-portNEWLINE # 4 replication rankNEWLINE # 4 master-idNEWLINENEWLINE lhostname = len(self.hostname.encode())NEWLINE lusername = len(self.username.encode())NEWLINE lpassword = len(self.password.encode())NEWLINENEWLINE packet_len = (1 + # commandNEWLINE 4 + # server-idNEWLINE 1 + # hostname lengthNEWLINE lhostname +NEWLINE 1 + # username lengthNEWLINE lusername +NEWLINE 1 + # password lengthNEWLINE lpassword +NEWLINE 2 + # slave mysql portNEWLINE 4 + # replication rankNEWLINE 4) # master-idNEWLINENEWLINE MAX_STRING_LEN = 257 # one byte for length + 256 charsNEWLINENEWLINE return (struct.pack('<i', packet_len) +NEWLINE int2byte(COM_REGISTER_SLAVE) +NEWLINE struct.pack('<L', server_id) +NEWLINE struct.pack('<%dp' % min(MAX_STRING_LEN, lhostname + 1),NEWLINE self.hostname.encode()) +NEWLINE struct.pack('<%dp' % min(MAX_STRING_LEN, lusername + 1),NEWLINE self.username.encode()) +NEWLINE struct.pack('<%dp' % min(MAX_STRING_LEN, lpassword + 1),NEWLINE self.password.encode()) +NEWLINE struct.pack('<H', self.port) +NEWLINE struct.pack('<l', 0) +NEWLINE struct.pack('<l', master_id))NEWLINENEWLINENEWLINEclass BinLogStreamReader(object):NEWLINENEWLINE """Connect to replication stream and read eventNEWLINE """NEWLINE report_slave = NoneNEWLINENEWLINE def __init__(self, connection_settings, server_id, ctl_connection_settings=None, resume_stream=False,NEWLINE blocking=False, only_events=None, log_file=None, log_pos=None,NEWLINE filter_non_implemented_events=True,NEWLINE ignored_events=None, auto_position=None,NEWLINE only_tables=None, ignored_tables=None,NEWLINE only_schemas=None, ignored_schemas=None,NEWLINE freeze_schema=False, skip_to_timestamp=None,NEWLINE report_slave=None, slave_uuid=None,NEWLINE pymysql_wrapper=None,NEWLINE fail_on_table_metadata_unavailable=False,NEWLINE slave_heartbeat=None):NEWLINE """NEWLINE Attributes:NEWLINE ctl_connection_settings: Connection settings for cluster holding schema informationNEWLINE resume_stream: Start for event from position or the latest event ofNEWLINE binlog or from older available eventNEWLINE blocking: Read on stream is blockingNEWLINE only_events: Array of allowed eventsNEWLINE ignored_events: Array of ignored eventsNEWLINE log_file: Set replication start log fileNEWLINE log_pos: Set replication start log pos (resume_stream should be true)NEWLINE auto_position: Use master_auto_position gtid to set positionNEWLINE only_tables: An array with the tables you want to watch (only worksNEWLINE in binlog_format ROW)NEWLINE ignored_tables: An array with the tables you want to skipNEWLINE only_schemas: An array with the schemas you want to watchNEWLINE ignored_schemas: An array with the schemas you want to skipNEWLINE freeze_schema: If true do not support ALTER TABLE. It's faster.NEWLINE skip_to_timestamp: Ignore all events until reaching specified timestamp.NEWLINE report_slave: Report slave in SHOW SLAVE HOSTS.NEWLINE slave_uuid: Report slave_uuid in SHOW SLAVE HOSTS.NEWLINE fail_on_table_metadata_unavailable: Should raise exception if we can't getNEWLINE table information on row_eventsNEWLINE slave_heartbeat: (seconds) Should master actively send heartbeat onNEWLINE connection. 
This also reduces traffic in GTID replicationNEWLINE on replication resumption (in case there are many events to skipNEWLINE in the binlog). See MASTER_HEARTBEAT_PERIOD in mysql documentationNEWLINE for semanticsNEWLINE """NEWLINENEWLINE self.__connection_settings = connection_settingsNEWLINE self.__connection_settings.setdefault("charset", "utf8")NEWLINENEWLINE self.__connected_stream = FalseNEWLINE self.__connected_ctl = FalseNEWLINE self.__resume_stream = resume_streamNEWLINE self.__blocking = blockingNEWLINE self._ctl_connection_settings = ctl_connection_settingsNEWLINE if ctl_connection_settings:NEWLINE self._ctl_connection_settings.setdefault("charset", "utf8")NEWLINENEWLINE self.__only_tables = only_tablesNEWLINE self.__ignored_tables = ignored_tablesNEWLINE self.__only_schemas = only_schemasNEWLINE self.__ignored_schemas = ignored_schemasNEWLINE self.__freeze_schema = freeze_schemaNEWLINE self.__allowed_events = self._allowed_event_list(NEWLINE only_events, ignored_events, filter_non_implemented_events)NEWLINE self.__fail_on_table_metadata_unavailable = fail_on_table_metadata_unavailableNEWLINENEWLINE # We can't filter on packet level TABLE_MAP and rotate event becauseNEWLINE # we need them for handling other operationsNEWLINE self.__allowed_events_in_packet = frozenset(NEWLINE [TableMapEvent, RotateEvent]).union(self.__allowed_events)NEWLINENEWLINE self.__server_id = server_idNEWLINE self.__use_checksum = FalseNEWLINENEWLINE # Store table meta informationNEWLINE self.table_map = {}NEWLINE self.log_pos = log_posNEWLINE self.log_file = log_fileNEWLINE self.auto_position = auto_positionNEWLINE self.skip_to_timestamp = skip_to_timestampNEWLINENEWLINE if report_slave:NEWLINE self.report_slave = ReportSlave(report_slave)NEWLINE self.slave_uuid = slave_uuidNEWLINE self.slave_heartbeat = slave_heartbeatNEWLINENEWLINE if pymysql_wrapper:NEWLINE self.pymysql_wrapper = pymysql_wrapperNEWLINE else:NEWLINE self.pymysql_wrapper = pymysql.connectNEWLINENEWLINE def close(self):NEWLINE if self.__connected_stream:NEWLINE self._stream_connection.close()NEWLINE self.__connected_stream = FalseNEWLINE if self.__connected_ctl:NEWLINE # break reference cycle between stream reader and underlyingNEWLINE # mysql connection objectNEWLINE self._ctl_connection._get_table_information = NoneNEWLINE self._ctl_connection.close()NEWLINE self.__connected_ctl = FalseNEWLINENEWLINE def __connect_to_ctl(self):NEWLINE if not self._ctl_connection_settings:NEWLINE self._ctl_connection_settings = dict(self.__connection_settings)NEWLINE self._ctl_connection_settings["db"] = "information_schema"NEWLINE self._ctl_connection_settings["cursorclass"] = DictCursorNEWLINE self._ctl_connection = self.pymysql_wrapper(**self._ctl_connection_settings)NEWLINE self._ctl_connection._get_table_information = self.__get_table_informationNEWLINE self.__connected_ctl = TrueNEWLINENEWLINE def __checksum_enabled(self):NEWLINE """Return True if binlog-checksum = CRC32. 
Only for MySQL > 5.6"""NEWLINE cur = self._stream_connection.cursor()NEWLINE cur.execute("SHOW GLOBAL VARIABLES LIKE 'BINLOG_CHECKSUM'")NEWLINE result = cur.fetchone()NEWLINE cur.close()NEWLINENEWLINE if result is None:NEWLINE return FalseNEWLINE var, value = result[:2]NEWLINE if value == 'NONE':NEWLINE return FalseNEWLINE return TrueNEWLINENEWLINE def _register_slave(self):NEWLINE if not self.report_slave:NEWLINE returnNEWLINENEWLINE packet = self.report_slave.encoded(self.__server_id)NEWLINENEWLINE if pymysql.__version__ < "0.6":NEWLINE self._stream_connection.wfile.write(packet)NEWLINE self._stream_connection.wfile.flush()NEWLINE self._stream_connection.read_packet()NEWLINE else:NEWLINE self._stream_connection._write_bytes(packet)NEWLINE self._stream_connection._next_seq_id = 1NEWLINE self._stream_connection._read_packet()NEWLINENEWLINE def __connect_to_stream(self):NEWLINE # log_pos (4) -- position in the binlog-file to start the stream withNEWLINE # flags (2) BINLOG_DUMP_NON_BLOCK (0 or 1)NEWLINE # server_id (4) -- server id of this slaveNEWLINE # log_file (string.EOF) -- filename of the binlog on the masterNEWLINE self._stream_connection = self.pymysql_wrapper(**self.__connection_settings)NEWLINENEWLINE self.__use_checksum = self.__checksum_enabled()NEWLINENEWLINE # If checksum is enabled, we need to inform the server thatNEWLINE # we support itNEWLINE if self.__use_checksum:NEWLINE cur = self._stream_connection.cursor()NEWLINE cur.execute("set @master_binlog_checksum= @@global.binlog_checksum")NEWLINE cur.close()NEWLINENEWLINE if self.slave_uuid:NEWLINE cur = self._stream_connection.cursor()NEWLINE cur.execute("set @slave_uuid= '%s'" % self.slave_uuid)NEWLINE cur.close()NEWLINENEWLINE if self.slave_heartbeat:NEWLINE # 4294967 is documented as the max value for heartbeatsNEWLINE net_timeout = float(self.__connection_settings.get('read_timeout',NEWLINE 4294967))NEWLINE # If the heartbeat period were longer than the read timeout, theNEWLINE # connection would disconnect before a heartbeat arrives; cappingNEWLINE # it at half the timeout is also the behavior in mysqlNEWLINE heartbeat = float(min(net_timeout/2., self.slave_heartbeat))NEWLINE if heartbeat > 4294967:NEWLINE heartbeat = 4294967NEWLINENEWLINE # master_heartbeat_period is in nanosecondsNEWLINE heartbeat = int(heartbeat * 1000000000)NEWLINE cur = self._stream_connection.cursor()NEWLINE cur.execute("set @master_heartbeat_period= %d" % heartbeat)NEWLINE cur.close()NEWLINENEWLINE self._register_slave()NEWLINENEWLINE if not self.auto_position:NEWLINE # The position info is valid only when log_file and log_pos areNEWLINE # both provided; if not, get the current position from the masterNEWLINE if self.log_file is None or self.log_pos is None:NEWLINE cur = self._stream_connection.cursor()NEWLINE cur.execute("SHOW MASTER STATUS")NEWLINE master_status = cur.fetchone()NEWLINE if master_status is None:NEWLINE raise BinLogNotEnabled()NEWLINE self.log_file, self.log_pos = master_status[:2]NEWLINE cur.close()NEWLINENEWLINE prelude = struct.pack('<i', len(self.log_file) + 11) \NEWLINE + int2byte(COM_BINLOG_DUMP)NEWLINENEWLINE if self.__resume_stream:NEWLINE prelude += struct.pack('<I', self.log_pos)NEWLINE else:NEWLINE prelude += struct.pack('<I', 4)NEWLINENEWLINE if self.__blocking:NEWLINE prelude += struct.pack('<h', 0)NEWLINE else:NEWLINE prelude += struct.pack('<h', 1)NEWLINENEWLINE prelude += struct.pack('<I', self.__server_id)NEWLINE prelude += self.log_file.encode()NEWLINE else:NEWLINE # Format for mysql packet master_auto_positionNEWLINE #NEWLINE # All fields are little endianNEWLINE # All fields are unsignedNEWLINENEWLINE 
# Packet length uint 4bytesNEWLINE # Packet type byte 1byte == 0x1eNEWLINE # Binlog flags ushort 2bytes == 0 (for retrocompatibility)NEWLINE # Server id uint 4bytesNEWLINE # binlognamesize uint 4bytesNEWLINE # binlogname str Nbytes N = binlognamesizeNEWLINE # ZeroifiedNEWLINE # binlog position uint 4bytes == 4NEWLINE # payload_size uint 4bytesNEWLINENEWLINE # What comes next is the payload, where the slave's gtid_executedNEWLINE # is sent to the masterNEWLINE # n_sid ulong 8bytes == how many sids are in the gtid_setNEWLINE # | sid uuid 16bytes UUID as a binaryNEWLINE # | n_intervals ulong 8bytes == how many intervals are sent for this gtidNEWLINE # | | start ulong 8bytes Start position of this intervalNEWLINE # | | stop ulong 8bytes Stop position of this intervalNEWLINENEWLINE # A gtid set looks like:NEWLINE # 19d69c1e-ae97-4b8c-a1ef-9e12ba966457:1-3:8-10,NEWLINE # 1c2aad49-ae92-409a-b4df-d05a03e4702e:42-47:80-100:130-140NEWLINE #NEWLINE # In this particular gtid set, 19d69c1e-ae97-4b8c-a1ef-9e12ba966457:1-3:8-10NEWLINE # is the first member of the set; it is called a gtid.NEWLINE # In this gtid, 19d69c1e-ae97-4b8c-a1ef-9e12ba966457 is the sid,NEWLINE # and it has two intervals, 1-3 and 8-10; 1 is the start position of the first intervalNEWLINE # and 3 is the stop position of the first interval.NEWLINENEWLINE gtid_set = GtidSet(self.auto_position)NEWLINE encoded_data_size = gtid_set.encoded_lengthNEWLINENEWLINE header_size = (2 + # binlog_flagsNEWLINE 4 + # server_idNEWLINE 4 + # binlog_name_info_sizeNEWLINE 4 + # empty binlog nameNEWLINE 8 + # binlog_pos_info_sizeNEWLINE 4) # encoded_data_sizeNEWLINENEWLINE prelude = b'' + struct.pack('<i', header_size + encoded_data_size) \NEWLINE + int2byte(COM_BINLOG_DUMP_GTID)NEWLINENEWLINE # binlog_flags = 0 (2 bytes)NEWLINE prelude += struct.pack('<H', 0)NEWLINE # server_id (4 bytes)NEWLINE prelude += struct.pack('<I', self.__server_id)NEWLINE # binlog_name_info_size (4 bytes)NEWLINE prelude += struct.pack('<I', 3)NEWLINE # empty_binlog_name (4 bytes)NEWLINE prelude += b'\0\0\0'NEWLINE # binlog_pos_info (8 bytes)NEWLINE prelude += struct.pack('<Q', 4)NEWLINENEWLINE # encoded_data_size (4 bytes)NEWLINE prelude += struct.pack('<I', gtid_set.encoded_length)NEWLINE # encoded_dataNEWLINE prelude += gtid_set.encoded()NEWLINENEWLINE if pymysql.__version__ < "0.6":NEWLINE self._stream_connection.wfile.write(prelude)NEWLINE self._stream_connection.wfile.flush()NEWLINE else:NEWLINE self._stream_connection._write_bytes(prelude)NEWLINE self._stream_connection._next_seq_id = 1NEWLINE self.__connected_stream = TrueNEWLINENEWLINE def fetchone(self):NEWLINE while True:NEWLINE if not self.__connected_stream:NEWLINE self.__connect_to_stream()NEWLINENEWLINE if not self.__connected_ctl:NEWLINE self.__connect_to_ctl()NEWLINENEWLINE try:NEWLINE if pymysql.__version__ < "0.6":NEWLINE pkt = self._stream_connection.read_packet()NEWLINE else:NEWLINE pkt = self._stream_connection._read_packet()NEWLINE except pymysql.OperationalError as error:NEWLINE code, message = error.argsNEWLINE if code in MYSQL_EXPECTED_ERROR_CODES:NEWLINE self._stream_connection.close()NEWLINE self.__connected_stream = FalseNEWLINE continueNEWLINE raiseNEWLINENEWLINE if pkt.is_eof_packet():NEWLINE self.close()NEWLINE return NoneNEWLINENEWLINE if not pkt.is_ok_packet():NEWLINE continueNEWLINENEWLINE binlog_event = BinLogPacketWrapper(pkt, self.table_map,NEWLINE self._ctl_connection,NEWLINE self.__use_checksum,NEWLINE self.__allowed_events_in_packet,NEWLINE self.__only_tables,NEWLINE self.__ignored_tables,NEWLINE 
self.__only_schemas,NEWLINE self.__ignored_schemas,NEWLINE self.__freeze_schema,NEWLINE self.__fail_on_table_metadata_unavailable)NEWLINENEWLINE if binlog_event.event_type == ROTATE_EVENT:NEWLINE self.log_pos = binlog_event.event.positionNEWLINE self.log_file = binlog_event.event.next_binlogNEWLINE # Table ids in the binlog are NOT persistent in MySQL - they are in-memory identifiers.NEWLINE # That means that when the MySQL master restarts, it will reuse the same table id for different tables,NEWLINE # which would cause errors for us since our in-memory map would try to decode row data with theNEWLINE # wrong table schema.NEWLINE # The fix is to rely on the fact that MySQL will also rotate to a new binlog file every time itNEWLINE # restarts. That means every rotation we see *could* be a sign of a restart and so potentiallyNEWLINE # invalidates all our cached table-id-to-schema mappings. This means we have to load them allNEWLINE # again for each logfile, which is potentially wasted effort, but we can't really do much betterNEWLINE # without being broken in the restart caseNEWLINE self.table_map = {}NEWLINE elif binlog_event.log_pos:NEWLINE self.log_pos = binlog_event.log_posNEWLINENEWLINE # This check must not occur before clearing the ``table_map`` as aNEWLINE # result of a RotateEvent.NEWLINE #NEWLINE # The first RotateEvent in a binlog file has a timestamp ofNEWLINE # zero. If the server has moved to a new log and not written aNEWLINE # timestamped RotateEvent at the end of the previous log, theNEWLINE # RotateEvent at the beginning of the new log will be ignoredNEWLINE # if the caller provided a positive ``skip_to_timestamp``NEWLINE # value. This will result in the ``table_map`` becomingNEWLINE # corrupt.NEWLINE #NEWLINE # https://dev.mysql.com/doc/internals/en/event-data-for-specific-event-types.htmlNEWLINE # From the MySQL Internals Manual:NEWLINE #NEWLINE # ROTATE_EVENT is generated locally and written to the binaryNEWLINE # log on the master. It is written to the relay log on theNEWLINE # slave when FLUSH LOGS occurs, and when receiving aNEWLINE # ROTATE_EVENT from the master. In the latter case, thereNEWLINE # will be two rotate events in total originating on differentNEWLINE # servers.NEWLINE #NEWLINE # There are conditions under which the terminatingNEWLINE # log-rotation event does not occur. 
For example, the serverNEWLINE # might crash.NEWLINE if self.skip_to_timestamp and binlog_event.timestamp < self.skip_to_timestamp:NEWLINE continueNEWLINENEWLINE if binlog_event.event_type == TABLE_MAP_EVENT and \NEWLINE binlog_event.event is not None:NEWLINE self.table_map[binlog_event.event.table_id] = \NEWLINE binlog_event.event.get_table()NEWLINENEWLINE # event is None if we filtered it at the packet level;NEWLINE # we also filter out events that are not allowedNEWLINE if binlog_event.event is None or (binlog_event.event.__class__ not in self.__allowed_events):NEWLINE continueNEWLINENEWLINE return binlog_event.eventNEWLINENEWLINE def _allowed_event_list(self, only_events, ignored_events,NEWLINE filter_non_implemented_events):NEWLINE if only_events is not None:NEWLINE events = set(only_events)NEWLINE else:NEWLINE events = set((NEWLINE QueryEvent,NEWLINE RotateEvent,NEWLINE StopEvent,NEWLINE FormatDescriptionEvent,NEWLINE XidEvent,NEWLINE GtidEvent,NEWLINE BeginLoadQueryEvent,NEWLINE ExecuteLoadQueryEvent,NEWLINE UpdateRowsEvent,NEWLINE WriteRowsEvent,NEWLINE DeleteRowsEvent,NEWLINE TableMapEvent,NEWLINE HeartbeatLogEvent,NEWLINE NotImplementedEvent,NEWLINE ))NEWLINE if ignored_events is not None:NEWLINE for e in ignored_events:NEWLINE events.remove(e)NEWLINE if filter_non_implemented_events:NEWLINE try:NEWLINE events.remove(NotImplementedEvent)NEWLINE except KeyError:NEWLINE passNEWLINE return frozenset(events)NEWLINENEWLINE def __get_table_information(self, schema, table):NEWLINE for i in range(1, 3):NEWLINE try:NEWLINE if not self.__connected_ctl:NEWLINE self.__connect_to_ctl()NEWLINENEWLINE cur = self._ctl_connection.cursor()NEWLINE cur.execute("""NEWLINE SELECTNEWLINE COLUMN_NAME, COLLATION_NAME, CHARACTER_SET_NAME,NEWLINE COLUMN_COMMENT, COLUMN_TYPE, COLUMN_KEYNEWLINE FROMNEWLINE information_schema.columnsNEWLINE WHERENEWLINE table_schema = %s AND table_name = %sNEWLINE ORDER BY ORDINAL_POSITIONNEWLINE """, (schema, table))NEWLINENEWLINE return cur.fetchall()NEWLINE except pymysql.OperationalError as error:NEWLINE code, message = error.argsNEWLINE if code in MYSQL_EXPECTED_ERROR_CODES:NEWLINE self.__connected_ctl = FalseNEWLINE continueNEWLINE else:NEWLINE raise errorNEWLINENEWLINE def __iter__(self):NEWLINE return iter(self.fetchone, None)NEWLINE |
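# ------------------------------------------------------------------------------
# Usage sketch (not part of the original module): how the BinLogStreamReader
# defined above is typically driven. The connection settings are illustrative
# placeholders; the event classes are the ones this module already imports.
MYSQL_SETTINGS = {"host": "127.0.0.1", "port": 3306, "user": "repl", "passwd": "secret"}

stream = BinLogStreamReader(
    connection_settings=MYSQL_SETTINGS,
    server_id=100,          # must be unique among the replicas of this master
    blocking=True,          # block and wait for new events instead of stopping at EOF
    resume_stream=True,     # honour log_file/log_pos if they were provided
    only_events=[WriteRowsEvent, UpdateRowsEvent, DeleteRowsEvent],
)
try:
    for binlogevent in stream:   # __iter__ calls fetchone() until it returns None
        binlogevent.dump()
finally:
    stream.close()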
# ==============================================================================NEWLINE# zero.pyNEWLINE# ==============================================================================NEWLINENEWLINEimport osNEWLINEimport sysNEWLINENEWLINEERROR = FalseNEWLINENEWLINEdef main(function):NEWLINE try:NEWLINE arguments = sys.argv[1:]NEWLINE assert argumentsNEWLINE for path in arguments:NEWLINE assert os.path.isdir(path)NEWLINE for path in arguments:NEWLINE engine(path, function)NEWLINE except:NEWLINE sys.stdout.write('Usage: %s <directory>' % os.path.basename(sys.argv[0]))NEWLINENEWLINEdef engine(path, function):NEWLINE global ERRORNEWLINE for root, dirs, files in os.walk(path):NEWLINE for name in files:NEWLINE path = os.path.join(root, name)NEWLINE try:NEWLINE function(path)NEWLINE except:NEWLINE sys.stderr.write('%sError: %s' % (ERROR and '\n' or '', path))NEWLINE ERROR = TrueNEWLINENEWLINEdef zero(path):NEWLINE size = os.path.getsize(path)NEWLINE if size:NEWLINE data = open(path, 'wb')NEWLINE todo = sizeNEWLINE if todo >= 2 ** 20:NEWLINE buff = '\x00' * 2 ** 20NEWLINE while todo >= 2 ** 20:NEWLINE data.write(buff)NEWLINE todo = size - data.tell()NEWLINE data.write('\x00' * todo)NEWLINE data.close()NEWLINENEWLINEif __name__ == '__main__':NEWLINE main(zero)NEWLINENEWLINE# ==============================================================================NEWLINE# upper.pyNEWLINE# ==============================================================================NEWLINENEWLINEimport zeroNEWLINENEWLINEdef upper(path):NEWLINE root, ext = zero.os.path.splitext(path)NEWLINE upper = ext.upper()NEWLINE if ext != upper:NEWLINE zero.os.rename(path, root + upper)NEWLINENEWLINEif __name__ == '__main__':NEWLINE zero.main(upper)NEWLINENEWLINE# ==============================================================================NEWLINE# untar.pyNEWLINE# ==============================================================================NEWLINENEWLINEimport zeroNEWLINEimport tarfileNEWLINENEWLINEif __name__ == '__main__':NEWLINE zero.main(lambda path: tarfile.open(path).extractall(NEWLINE zero.os.path.dirname(path)))NEWLINENEWLINE# ==============================================================================NEWLINE# remove.pyNEWLINE# ==============================================================================NEWLINENEWLINEimport zeroNEWLINENEWLINEif __name__ == '__main__':NEWLINE zero.main(zero.os.remove)NEWLINENEWLINE# ==============================================================================NEWLINE# one.pyNEWLINE# ==============================================================================NEWLINENEWLINEimport zeroNEWLINENEWLINEdef one(path):NEWLINE size = zero.os.path.getsize(path)NEWLINE if size:NEWLINE data = open(path, 'wb')NEWLINE todo = sizeNEWLINE if todo >= 2 ** 20:NEWLINE buff = '\xFF' * 2 ** 20NEWLINE while todo >= 2 ** 20:NEWLINE data.write(buff)NEWLINE todo = size - data.tell()NEWLINE data.write('\xFF' * todo)NEWLINE data.close()NEWLINENEWLINEif __name__ == '__main__':NEWLINE zero.main(one)NEWLINENEWLINE# ==============================================================================NEWLINE# lower.pyNEWLINE# ==============================================================================NEWLINENEWLINEimport zeroNEWLINENEWLINEdef lower(path):NEWLINE root, ext = zero.os.path.splitext(path)NEWLINE lower = ext.lower()NEWLINE if ext != lower:NEWLINE zero.os.rename(path, root + lower)NEWLINENEWLINEif __name__ == '__main__':NEWLINE zero.main(lower)NEWLINENEWLINE# 
==============================================================================NEWLINE# random.pyNEWLINE# ==============================================================================NEWLINENEWLINEimport zeroNEWLINENEWLINEdef kaos(path):NEWLINE size = zero.os.path.getsize(path)NEWLINE if size:NEWLINE data = open(path, 'wb')NEWLINE todo = sizeNEWLINE while todo:NEWLINE data.write(zero.os.urandom(min(todo, 2 ** 20)))NEWLINE todo = size - data.tell()NEWLINE data.close()NEWLINENEWLINEif __name__ == '__main__':NEWLINE zero.main(kaos)NEWLINENEWLINE# ==============================================================================NEWLINE# name.pyNEWLINE# ==============================================================================NEWLINENEWLINEimport zeroNEWLINEimport randomNEWLINENEWLINESTRING = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'NEWLINENEWLINEdef ident(path):NEWLINE d, b = zero.os.path.split(path)NEWLINE zero.os.rename(path, zero.os.path.join(d, ''.join(random.sample(NEWLINE STRING, len(STRING))) + zero.os.path.splitext(b)[1]))NEWLINENEWLINEif __name__ == '__main__':NEWLINE zero.main(ident)NEWLINENEWLINE# ==============================================================================NEWLINE# newlines.pyNEWLINE# ==============================================================================NEWLINENEWLINEimport zeroNEWLINENEWLINETABLE = ''.join(map(chr, range(256)))NEWLINEDELETECHARS = ''.join(c for c in TABLE if len(repr(c)) != 6)NEWLINENEWLINEdef convert(path):NEWLINE if not file(path, 'rb').read(2 ** 20).translate(TABLE, DELETECHARS):NEWLINE data = file(path, 'r').read()NEWLINE file(path, 'w').write(data)NEWLINENEWLINEif __name__ == '__main__':NEWLINE zero.main(convert)NEWLINENEWLINE# ==============================================================================NEWLINE# extension.pyNEWLINE# ==============================================================================NEWLINENEWLINEimport zeroNEWLINENEWLINEdef bias(path):NEWLINE root, ext = zero.os.path.splitext(path)NEWLINE if not ext[1:]:NEWLINE zero.os.rename(path, root + '.txt')NEWLINENEWLINEif __name__ == '__main__':NEWLINE zero.main(bias)NEWLINE |
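# ==============================================================================
# checksum.py (illustrative addition, not one of the original scripts)
# ==============================================================================
# All the scripts above share one pattern: define a function that takes a
# single file path and hand it to zero.main(), which walks every directory
# named on the command line and applies the function to each file, reporting
# failures on stderr. This hypothetical member of the family prints a SHA-1
# digest per file, reading in the same 2 ** 20 byte chunks the originals use.

import zero
import hashlib

def checksum(path):
    digest = hashlib.sha1()
    data = open(path, 'rb')
    chunk = data.read(2 ** 20)
    while chunk:
        digest.update(chunk)
        chunk = data.read(2 ** 20)
    data.close()
    zero.sys.stdout.write('%s %s\n' % (digest.hexdigest(), path))

if __name__ == '__main__':
    zero.main(checksum)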
"""NEWLINEA WSGI application which lists available versionsNEWLINEof APIs understood by another WSGI application.NEWLINE"""NEWLINENEWLINEfrom reporting_api.common.apiversion import APIVersionNEWLINEfrom reporting_api.common.application import ApplicationNEWLINENEWLINENEWLINEclass VersionsApp(Application):NEWLINE """A WSGI application which lists available versionsNEWLINE of APIs understood by another WSGI application.NEWLINE """NEWLINENEWLINE def __init__(self):NEWLINE super(VersionsApp, self).__init__(None)NEWLINENEWLINE def operation_api_version_list(self, req, params):NEWLINE """Return a list of available API versions.NEWLINE """NEWLINE return (NEWLINE [NEWLINE version.api_version_detail(req, params)NEWLINE for version in APIVersion.version_classesNEWLINE ],NEWLINE NoneNEWLINE )NEWLINENEWLINE def operation_api_version_details(self, req, params):NEWLINE """Return details of one API version.NEWLINE FIXME: This calls an abstract base class method.NEWLINE """NEWLINE return (APIVersion.api_version_detail(req, params), None)NEWLINENEWLINENEWLINEdef app_factory(global_config, **settings):NEWLINE """A factory function which returns WSGI version-list applications.NEWLINE """NEWLINE return VersionsApp()NEWLINE |
from django.test import TestCaseNEWLINEfrom django.contrib.auth import get_user_modelNEWLINEfrom django.urls import reverseNEWLINENEWLINEfrom rest_framework.test import APIClientNEWLINEfrom rest_framework import statusNEWLINENEWLINENEWLINECREATE_USER_URL = reverse('user:create')NEWLINETOKEN_URL = reverse('user:token')NEWLINEME_URL = reverse('user:me')NEWLINENEWLINENEWLINEdef create_user(**params):NEWLINE return get_user_model().objects.create_user(**params)NEWLINENEWLINENEWLINEclass PublicUserApiTests(TestCase):NEWLINE """Test the users API (public)"""NEWLINENEWLINE def setUp(self):NEWLINE self.client = APIClient()NEWLINENEWLINE def test_create_valid_user_success(self):NEWLINE """Test creating user with valid payload is successful"""NEWLINE payload = {NEWLINE 'email': '[email protected]',NEWLINE 'password': 'testpass',NEWLINE 'name': 'Test name'NEWLINE }NEWLINE res = self.client.post(CREATE_USER_URL, payload)NEWLINENEWLINE self.assertEqual(res.status_code, status.HTTP_201_CREATED)NEWLINE user = get_user_model().objects.get(**res.data)NEWLINE self.assertTrue(user.check_password(payload['password']))NEWLINE self.assertNotIn('password', res.data)NEWLINENEWLINE def test_user_exists(self):NEWLINE """Test creating a user that already exists fails"""NEWLINE payload = {'email': '[email protected]', 'password': 'testpass'}NEWLINE create_user(**payload)NEWLINENEWLINE res = self.client.post(CREATE_USER_URL, payload)NEWLINENEWLINE self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)NEWLINENEWLINE def test_password_too_short(self):NEWLINE """Test that the password must be more than 5 characters"""NEWLINE payload = {'email': '[email protected]', 'password': 'pw'}NEWLINE res = self.client.post(CREATE_USER_URL, payload)NEWLINENEWLINE self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)NEWLINE user_exists = get_user_model().objects.filter(NEWLINE email=payload['email']NEWLINE ).exists()NEWLINE self.assertFalse(user_exists)NEWLINENEWLINE def test_create_token_for_user(self):NEWLINE """Test that a token is created for the user"""NEWLINE payload = {'email': '[email protected]', 'password': 'testpass'}NEWLINE create_user(**payload)NEWLINE res = self.client.post(TOKEN_URL, payload)NEWLINENEWLINE self.assertIn('token', res.data)NEWLINE self.assertEqual(res.status_code, status.HTTP_200_OK)NEWLINENEWLINE def test_create_token_invalid_credentials(self):NEWLINE """Test that token is not created if invalid credentials are given"""NEWLINE create_user(email='[email protected]', password='testpass')NEWLINE payload = {'email': '[email protected]', 'password': 'wrongpass'}NEWLINE res = self.client.post(TOKEN_URL, payload)NEWLINENEWLINE self.assertNotIn('token', res.data)NEWLINE self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)NEWLINENEWLINE def test_create_token_no_user(self):NEWLINE """Test that token is not created if user doesn't exist"""NEWLINE payload = {'email': '[email protected]', 'password': 'testpass'}NEWLINE res = self.client.post(TOKEN_URL, payload)NEWLINENEWLINE self.assertNotIn('token', res.data)NEWLINE self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)NEWLINENEWLINE def test_create_token_missing_field(self):NEWLINE """Test that email and password are required"""NEWLINE res = self.client.post(TOKEN_URL, {'email': 'one', 'password': ''})NEWLINENEWLINE self.assertNotIn('token', res.data)NEWLINE self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)NEWLINENEWLINE def test_retrive_user_unauthorized(self):NEWLINE """Test that authentication is required for 
users"""NEWLINE res = self.client.get(ME_URL)NEWLINENEWLINE self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)NEWLINENEWLINENEWLINEclass PrivateUserApiTests(TestCase):NEWLINE """Test API requests that require authentication"""NEWLINENEWLINE def setUp(self):NEWLINE self.user = create_user(NEWLINE email='[email protected]',NEWLINE password='testpass',NEWLINE name='name'NEWLINE )NEWLINE self.client = APIClient()NEWLINE self.client.force_authenticate(user=self.user)NEWLINENEWLINE def test_retrieve_profile_success(self):NEWLINE """Test retrieving profile for logged in used"""NEWLINE res = self.client.get(ME_URL)NEWLINENEWLINE self.assertEqual(res.status_code, status.HTTP_200_OK)NEWLINE self.assertEqual(res.data, {NEWLINE 'name': self.user.name,NEWLINE 'email': self.user.emailNEWLINE })NEWLINENEWLINE def test_post_me_not_allowed(self):NEWLINE """Test that POST is not allowed on the me url"""NEWLINE res = self.client.post(ME_URL, {})NEWLINENEWLINE self.assertEqual(res.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)NEWLINENEWLINE def test_update_user_profile(self):NEWLINE """Test updating the user profile for authenticated user"""NEWLINE payload = {'name': 'new name', 'password': 'newpassword123'}NEWLINENEWLINE res = self.client.patch(ME_URL, payload)NEWLINENEWLINE self.user.refresh_from_db()NEWLINE self.assertEqual(self.user.name, payload['name'])NEWLINE self.assertTrue(self.user.check_password(payload['password']))NEWLINE self.assertEqual(res.status_code, status.HTTP_200_OK)NEWLINE |
#!/usr/bin/env python3NEWLINE# -*- coding: utf-8 -*-NEWLINE"""NEWLINECreated on Thu Jun 6 12:02:09 2019NEWLINENEWLINE@author: smrakNEWLINE"""NEWLINEfrom gpstec import gpstecNEWLINEimport h5pyNEWLINEimport os, globNEWLINEfrom datetime import datetime, timedeltaNEWLINEfrom dateutil import parserNEWLINEimport numpy as npNEWLINEimport cartopy.crs as ccrsNEWLINEimport cartomap.geogmap as cmNEWLINEimport matplotlib.pyplot as pltNEWLINEfrom argparse import ArgumentParserNEWLINEimport platformNEWLINEfrom sunrise import terminator as terNEWLINEfrom apexpy import ApexNEWLINENEWLINEA = Apex()NEWLINEDPI = 150NEWLINESAVE = 0NEWLINENEWLINEdate = '2015-12-21'NEWLINEdatedt = [parser.parse(date), parser.parse(date) + timedelta(days=1)]NEWLINEtlim = [datetime(2015,12,21,9), datetime(2015,12,21,9,5)]NEWLINEresolution = 10NEWLINEtrange = 2.5NEWLINENEWLINElatlim = [-40, 40]NEWLINElonlim = [-125, -45]NEWLINEclim = [0, 50]NEWLINEcmap = 'gray'NEWLINENEWLINEnightshade = FalseNEWLINEterminator = TrueNEWLINEterminator_conjugate = TrueNEWLINENEWLINEroot = 'G:\\My Drive\\scintillation_data\\'NEWLINEodir = 'G:\\My Drive\\scintillation_data\\20151221\\grl\\maps\\'NEWLINEscintfn = 'G:\\My Drive\\scintillation_data\\20151221\\ix_2015_1221T0000-1222T0000_grleq1221_yaml_30el_1s_350km.h5'NEWLINENEWLINEif trange is None:NEWLINE trange = resolution / 2NEWLINEif latlim is None:NEWLINE latlim=[-10, 75]NEWLINEif lonlim is None:NEWLINE lonlim=[-160, -50]NEWLINEif clim is None:NEWLINE tecclim = [0, 20]NEWLINEelse:NEWLINE tecclim = climNEWLINEif root is None:NEWLINE if platform.system() == 'Windows':NEWLINE root = 'G:\\My Drive\\scintillation_data\\'NEWLINE else:NEWLINE root = '/home/smrak/Documents/scintillation/'NEWLINEif odir is None:NEWLINE if platform.system() == 'Windows':NEWLINE odir = root + '{}\\{}-{}-{}\\'.format(parser.parse(date).strftime("%Y%m%d"), NEWLINE cmap, tecclim[0], tecclim[1])NEWLINE else:NEWLINE odir = root + '/maps/{}/{}-{}-{}/'.format(parser.parse(date).strftime("%Y%m%d"), NEWLINE cmap, tecclim[0], tecclim[1])NEWLINENEWLINEif platform.system() == 'Windows':NEWLINE TECFN = root + 'tid\\{}\\conv_{}T0000-{}T0000.h5'.format(parser.parse(date).strftime("%Y%m%d"), NEWLINE datedt[0].strftime("%Y%m%d"), datedt[1].strftime("%Y%m%d"))NEWLINEelse:NEWLINE TECFN = '/media/smrak/figures/gpstec/{}/{}/conv_{}T0000-{}T0000.h5'.format(parser.parse(date).year, NEWLINE parser.parse(date).strftime("%m%d"),NEWLINE datedt[0].strftime("%Y%m%d"), datedt[1].strftime("%Y%m%d"))NEWLINEassert os.path.isfile(TECFN), TECFNNEWLINEif scintfn is None:NEWLINE if platform.system() == 'Windows':NEWLINE scint_root = root + '\\hdf\\{}\\'.format(datedt[0].year)NEWLINE NEWLINE else:NEWLINE scint_root = root + '/hdf/'NEWLINE scint_fn_list = sorted(glob.glob(scint_root + "ix_{}_{}T*.h5".format(datedt[0].year, datedt[0].strftime("%m%d"))))NEWLINE assert len(scint_fn_list) > 0NEWLINE scintfn = scint_fn_list[0]NEWLINEassert os.path.isfile(scintfn)NEWLINENEWLINE#TECNEWLINETEC = gpstec.readFromHDF(TECFN)NEWLINEtectime = TEC['time']NEWLINExgrid = TEC['xgrid']NEWLINEygrid = TEC['ygrid']NEWLINE#SCINT DATANEWLINEscintdata = h5py.File(scintfn, 'r')NEWLINEscint_time = scintdata['data/time'][:]NEWLINEscint_dt = np.array([datetime.utcfromtimestamp(t) for t in scint_time])NEWLINENEWLINEif tlim is None:NEWLINE tlim = [parser.parse(date), parser.parse(date) + timedelta(days=1)]NEWLINEif isinstance(tlim[0], str):NEWLINE dirnametime = scint_dt[0].strftime('%Y%m%d')NEWLINE if dirnametime != parser.parse(tlim[0]).strftime('%Y%m%d'):NEWLINE t0 = parser.parse(dirnametime 
+ 'T' + tlim[0])NEWLINE t1 = parser.parse(dirnametime + 'T' + tlim[1])NEWLINE else:NEWLINE t0 = parser.parse(tlim[0])NEWLINE t1 = parser.parse(tlim[1])NEWLINE tlim = [t0, t1]NEWLINEassert isinstance(tlim[0], datetime) and isinstance(tlim[1], datetime)NEWLINEobstimes = []NEWLINEt = tlim[0]NEWLINEwhile t <= tlim[1]:NEWLINE obstimes.append(t)NEWLINE t += timedelta(minutes=resolution)NEWLINENEWLINE# --------------------------------------------------------------------------- #NEWLINEfor ii, it in enumerate(obstimes):NEWLINE # TEC dataNEWLINE idt_tec = abs(tectime - it).argmin()NEWLINE if idt_tec < tectime.size-2:NEWLINE tecim = np.nanmean(TEC['tecim'][idt_tec:idt_tec+2], axis=0)NEWLINE else:NEWLINE tecim = TEC['tecim'][idt_tec]NEWLINE # Scintillation dataNEWLINE # Filter out time range of interestNEWLINE scint_idt = np.zeros(scint_dt.size, dtype=bool)NEWLINE time_range = np.where( (scint_dt >= it-timedelta(minutes=trange)) & (scint_dt <= it+timedelta(minutes=trange)) )[0]NEWLINE scint_idt[time_range[0]:time_range[-1]+1] = TrueNEWLINE# scint_idt[time_range[0]] = TrueNEWLINE # Read in dataNEWLINE ipp_lat = scintdata['data/ipp'][scint_idt, :, :, 0]NEWLINE ipp_lon = scintdata['data/ipp'][scint_idt, :, :, 1]NEWLINE sigma_tec = scintdata['data/sigma_tec'][scint_idt, :, :]NEWLINE snr4 = scintdata['data/snr4'][scint_idt, :, :]NEWLINE roti = scintdata['data/roti'][scint_idt, :, :]NEWLINE # PlotNEWLINE fig = plt.figure(figsize=[15,6])NEWLINE ax0 = plt.subplot(121, projection=ccrs.Stereographic(central_longitude=(sum(lonlim)/2)))NEWLINE ax1 = plt.subplot(122, projection=ccrs.Stereographic(central_longitude=(sum(lonlim)/2)))NEWLINENEWLINE ax0 = cm.plotCartoMap(latlim=latlim, lonlim=lonlim, projection='stereo',NEWLINE meridians=None, parallels=None, ax=ax0,NEWLINE grid_linewidth=1, states = False,NEWLINE title=it, background_color='grey',NEWLINE apex=True, mlat_levels=[-40,-20,0,20,40,60,80,90],NEWLINE mlat_colors='w', mgrid_width=1, mgrid_style='--',NEWLINE nightshade=nightshade, terminator=terminator,NEWLINE terminator_altkm=350, ter_color='r', ter_style='-',NEWLINE ter_width=2, mlon_cs='mlt', date=it,NEWLINE mlon_levels=np.arange(0,24.1,4), mlat_labels=False,NEWLINE mlon_colors='w', mlon_labels=False)NEWLINE NEWLINE ax1 = cm.plotCartoMap(latlim=latlim, lonlim=lonlim, projection='stereo',NEWLINE meridians=None, parallels=None, ax=ax1,NEWLINE grid_linewidth=1, states = False,NEWLINE title=it, background_color='grey',NEWLINE apex=True, mlat_levels=[-40,-20,0,20,40,60,80,90],NEWLINE nightshade=nightshade, terminator=terminator,NEWLINE terminator_altkm=350, ter_color='r', ter_style='-',NEWLINE ter_width=2, mlon_cs='mlt', date=it,NEWLINE mlat_colors='w', mgrid_width=1, mgrid_style='--',NEWLINE mlon_levels=np.arange(0,24.1,4), mlat_labels=False,NEWLINE mlon_colors='w', mlon_labels=False)NEWLINE if terminator_conjugate:NEWLINE glon_ter, glat_ter = ter.get_terminator(it, alt_km = 350)NEWLINE idlon = (glon_ter > -160) & (glon_ter < 0)NEWLINE mlat_ter, mlon_ter = A.convert(glat_ter[idlon], glon_ter[idlon], 'geo', 'apex', height=350)NEWLINE mlat_south = (mlat_ter < 0)NEWLINE glat_ter_conj, glon_ter_conj = A.convert(-mlat_ter[mlat_south], mlon_ter[mlat_south], 'apex', 'geo', height=350)NEWLINE ax0.plot(np.unwrap(glon_ter_conj,180), glat_ter_conj, NEWLINE '--r', lw=2, transform=ccrs.PlateCarree())NEWLINE # ------------------------------------------------------------------------- - #NEWLINE ax0.pcolormesh(xgrid, ygrid, tecim.T, cmap=cmap, NEWLINE vmin = tecclim[0], vmax = tecclim[1], NEWLINE 
transform=ccrs.PlateCarree())NEWLINE im1 = ax1.pcolormesh(xgrid, ygrid, tecim.T, cmap=cmap, #'nipy_spectral'NEWLINE vmin = tecclim[0], vmax = tecclim[1], NEWLINE transform=ccrs.PlateCarree())NEWLINE # Scint with amplitudeNEWLINE# if np.sum(np.isfinite(sigma_tec)) > 0:NEWLINE if trange >= 1:NEWLINE idf0 = np.isfinite(sigma_tec)NEWLINE if np.sum(np.isfinite(idf0)) == 0:NEWLINE idf0[0]=TrueNEWLINE imst = ax0.scatter(ipp_lon[idf0], ipp_lat[idf0],NEWLINE c = sigma_tec[idf0],NEWLINE s = 30, #(sigma_tec)**2 * 1000000,NEWLINE marker = 'o',NEWLINE cmap='Reds',NEWLINE alpha=0.8,NEWLINE vmin=0, vmax=0.05,NEWLINE # facecolors = 'none',NEWLINE transform = ccrs.PlateCarree())NEWLINE idf0 = np.isfinite(snr4)NEWLINE if np.sum(np.isfinite(idf0)) == 0:NEWLINE idf0[0]=TrueNEWLINE# if np.sum(np.isfinite(snr4)) > 0:NEWLINE imsnr4 = ax0.scatter(ipp_lon[idf0], ipp_lat[idf0],NEWLINE c = snr4[idf0],NEWLINE s = 30, #np.square(snr4) * 1000,NEWLINE# linewidth = 0.8,NEWLINE marker = 'o',NEWLINE alpha = 0.8,NEWLINE cmap='Blues',NEWLINE vmin=0, vmax=1.2,NEWLINE# facecolors = 'none',NEWLINE transform = ccrs.PlateCarree())NEWLINE # Scint locationsNEWLINE if np.sum(np.isfinite(roti)) > 0:NEWLINE idf0 = np.isfinite(roti)NEWLINE imroti = ax1.scatter(ipp_lon[idf0], ipp_lat[idf0],NEWLINE c = roti[idf0],NEWLINE s = 15,NEWLINE marker = 'o',NEWLINE alpha=0.8,NEWLINE vmin=0,vmax=0.02,NEWLINE cmap='jet',NEWLINE transform = ccrs.PlateCarree())NEWLINE NEWLINE posn0 = ax0.get_position()NEWLINE cax = fig.add_axes([posn0.x0, posn0.y0-0.03, posn0.width, 0.02])NEWLINE fig.colorbar(imsnr4, cax=cax, label='$SNR_4$', orientation='horizontal')NEWLINE posn1 = ax1.get_position()NEWLINE cax = fig.add_axes([posn1.x0, posn1.y0-0.03, posn1.width, 0.02])NEWLINE fig.colorbar(imroti, cax=cax, label='ROTI [TECu]', orientation='horizontal')NEWLINE cax = fig.add_axes([posn1.x0+posn1.width+0.01, posn1.y0, 0.02, posn1.height])NEWLINE fig.colorbar(im1, cax=cax, label='TEC [TECu]')NEWLINE posn0 = ax0.get_position()NEWLINE cax = fig.add_axes([posn0.x0+posn0.width+0.01, posn0.y0, 0.02, posn0.height])NEWLINE fig.colorbar(imst, cax=cax, label=r'$\sigma_{TEC}$ [TECu]')NEWLINE else:NEWLINE posn0 = ax0.get_position()NEWLINE cax = fig.add_axes([posn0.x0+posn0.width+0.01, posn0.y0, 0.02, posn0.height])NEWLINE fig.colorbar(im1, cax=cax, label='TEC [TECu]')NEWLINE NEWLINE if SAVE:NEWLINE# plt.tight_layout()NEWLINE if not os.path.exists(odir):NEWLINE import subprocessNEWLINE if platform.system() == 'Linux':NEWLINE subprocess.call('mkdir -p "{}"'.format(odir), shell=True, timeout=2)NEWLINE elif platform.system() == 'Windows':NEWLINE subprocess.call('mkdir "{}"'.format(odir), shell=True, timeout=2)NEWLINE print ("Plotting {}/{} - {}".format(ii+1, len(obstimes), it))NEWLINE fig.savefig(odir+'{}.png'.format(it.strftime('%m%d_%H%M')), dpi=DPI)NEWLINE plt.close(fig)NEWLINE NEWLINE del figNEWLINE del sigma_tecNEWLINE del snr4NEWLINE scintdata.close() |
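# A minimal, self-contained sketch (with synthetic times) of the two selection
# patterns the loop above relies on: nearest-neighbour matching to pick the TEC
# frame closest to each observation time, and a +/- trange window mask for the
# scintillation samples.
import numpy as np
from datetime import datetime, timedelta

times = np.array([datetime(2015, 12, 21) + timedelta(minutes=5 * i) for i in range(12)])
it = datetime(2015, 12, 21, 0, 17)
trange = 2.5  # minutes, as in the script above

nearest = abs(times - it).argmin()   # index of the closest TEC frame
window = (times >= it - timedelta(minutes=trange)) & (times <= it + timedelta(minutes=trange))
print(nearest, np.where(window)[0])  # -> 3 [3]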
#!/usr/bin/env python2NEWLINE#NEWLINE# Distributed under the MIT/X11 software license, see the accompanyingNEWLINE# file COPYING or http://www.opensource.org/licenses/mit-license.php.NEWLINE#NEWLINENEWLINEfrom test_framework.mininode import *NEWLINEfrom test_framework.test_framework import BitcoinTestFrameworkNEWLINEfrom test_framework.util import *NEWLINEimport loggingNEWLINENEWLINE'''NEWLINEIn this test we connect to one node over p2p, send it numerous inv's, andNEWLINEcompare the resulting number of getdata requests to a max allowed value. WeNEWLINEtest for exceeding 128 blocks in flight, which was the limit an 0.9 client willNEWLINEreach. [0.10 clients shouldn't request more than 16 from a single peer.]NEWLINE'''NEWLINEMAX_REQUESTS = 128NEWLINENEWLINEclass TestManager(NodeConnCB):NEWLINE # set up NodeConnCB callbacks, overriding base classNEWLINE def on_getdata(self, conn, message):NEWLINE self.log.debug("got getdata %s" % repr(message))NEWLINE # Log the requestsNEWLINE for inv in message.inv:NEWLINE if inv.hash not in self.blockReqCounts:NEWLINE self.blockReqCounts[inv.hash] = 0NEWLINE self.blockReqCounts[inv.hash] += 1NEWLINENEWLINE def on_close(self, conn):NEWLINE if not self.disconnectOkay:NEWLINE raise EarlyDisconnectError(0)NEWLINENEWLINE def __init__(self):NEWLINE NodeConnCB.__init__(self)NEWLINE self.log = logging.getLogger("BlockRelayTest")NEWLINE self.create_callback_map()NEWLINENEWLINE def add_new_connection(self, connection):NEWLINE self.connection = connectionNEWLINE self.blockReqCounts = {}NEWLINE self.disconnectOkay = FalseNEWLINENEWLINE def run(self):NEWLINE try:NEWLINE fail = FalseNEWLINE self.connection.rpc.generate(1) # Leave IBDNEWLINENEWLINE numBlocksToGenerate = [ 8, 16, 128, 1024 ]NEWLINE for count in range(len(numBlocksToGenerate)):NEWLINE current_invs = []NEWLINE for i in range(numBlocksToGenerate[count]):NEWLINE current_invs.append(CInv(2, random.randrange(0, 1<<256)))NEWLINE if len(current_invs) >= 50000:NEWLINE self.connection.send_message(msg_inv(current_invs))NEWLINE current_invs = []NEWLINE if len(current_invs) > 0:NEWLINE self.connection.send_message(msg_inv(current_invs))NEWLINE NEWLINE # Wait and see how many blocks were requestedNEWLINE time.sleep(2)NEWLINENEWLINE total_requests = 0NEWLINE with mininode_lock:NEWLINE for key in self.blockReqCounts:NEWLINE total_requests += self.blockReqCounts[key]NEWLINE if self.blockReqCounts[key] > 1:NEWLINE raise AssertionError("Error, test failed: block %064x requested more than once" % key)NEWLINE if total_requests > MAX_REQUESTS:NEWLINE raise AssertionError("Error, too many blocks (%d) requested" % total_requests)NEWLINE print "Round %d: success (total requests: %d)" % (count, total_requests)NEWLINE except AssertionError as e:NEWLINE print "TEST FAILED: ", e.argsNEWLINENEWLINE self.disconnectOkay = TrueNEWLINE self.connection.disconnect_node()NEWLINENEWLINE NEWLINEclass MaxBlocksInFlightTest(BitcoinTestFramework):NEWLINE def add_options(self, parser):NEWLINE parser.add_option("--testbinary", dest="testbinary",NEWLINE default=os.getenv("LEMONCOIND", "lemoncoind"),NEWLINE help="Binary to test max block requests behavior")NEWLINENEWLINE def setup_chain(self):NEWLINE print "Initializing test directory "+self.options.tmpdirNEWLINE initialize_chain_clean(self.options.tmpdir, 1)NEWLINENEWLINE def setup_network(self):NEWLINE self.nodes = start_nodes(1, self.options.tmpdir, NEWLINE extra_args=[['-debug', '-whitelist=127.0.0.1']],NEWLINE binary=[self.options.testbinary])NEWLINENEWLINE def run_test(self):NEWLINE test = 
TestManager()NEWLINE test.add_new_connection(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test))NEWLINE NetworkThread().start() # Start up network handling in another threadNEWLINE test.run()NEWLINENEWLINEif __name__ == '__main__':NEWLINE MaxBlocksInFlightTest().main()NEWLINE |
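# The 50000 figure in run() above is the protocol's cap on inv entries per
# message, so large batches have to be split before sending. A small sketch of
# that batching idiom, factored out for clarity; chunks() is a hypothetical
# helper, not part of the original test.
def chunks(items, size=50000):
    for i in range(0, len(items), size):
        yield items[i:i + size]

# for batch in chunks(current_invs):
#     self.connection.send_message(msg_inv(batch))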
"""Application-specific settings."""NEWLINEimport osNEWLINEfrom django.conf import settings as _settingsNEWLINEfrom django.core.exceptions import ImproperlyConfiguredNEWLINENEWLINENEWLINE###############################################################################NEWLINE# Single settings.NEWLINE###############################################################################NEWLINEclass Setting(object):NEWLINE """Settings option helper class."""NEWLINE def __init__(self, **kwargs):NEWLINE """Initializer.NEWLINENEWLINE :kwarg default: Override default for getting.NEWLINE :type default: ``object``NEWLINE :kwarg from_env: Allow variable from evironment.NEWLINE :type from_env: ``bool``NEWLINE :kwarg valid_set: Set of valid values for setting.NEWLINE :type valid_set: ``set``NEWLINE """NEWLINE self.from_env = kwargs.get('from_env', False)NEWLINE self.default = kwargs.get('default', None)NEWLINE self.valid_set = kwargs.get('valid_set', None)NEWLINENEWLINE def validate(self, name, value):NEWLINE """Validate and return a value."""NEWLINENEWLINE if self.valid_set and value not in self.valid_set:NEWLINE raise ImproperlyConfigured(NEWLINE "%s: \"%s\" is not a valid setting (choose between %s)." %NEWLINE (name, value, ", ".join("\"%s\"" % x for x in self.valid_set)))NEWLINENEWLINE return valueNEWLINENEWLINE def env_clean(self, value): # pylint: disable=R0201NEWLINE """Clean / convert environment variable to proper type."""NEWLINE return valueNEWLINENEWLINE def get(self, name, default=None):NEWLINE """Get value."""NEWLINE default = default if default is not None else self.defaultNEWLINE try:NEWLINE value = getattr(_settings, name)NEWLINE except AttributeError:NEWLINE value = os.environ.get(name, default) if self.from_env else defaultNEWLINE # Convert env variable.NEWLINE if value != default:NEWLINE value = self.env_clean(value)NEWLINENEWLINE return self.validate(name, value)NEWLINENEWLINENEWLINEclass BoolSetting(Setting):NEWLINE """Boolean setting.."""NEWLINE def env_clean(self, value):NEWLINE """Clean / convert environment variable to proper type."""NEWLINE return self.parse_bool(value)NEWLINENEWLINE @classmethodNEWLINE def parse_bool(cls, value, default=None):NEWLINE """Convert ``string`` or ``bool`` to ``bool``."""NEWLINE if value is None:NEWLINE return defaultNEWLINENEWLINE elif isinstance(value, bool):NEWLINE return valueNEWLINENEWLINE elif isinstance(value, basestring):NEWLINE if value == 'True':NEWLINE return TrueNEWLINE elif value == 'False':NEWLINE return FalseNEWLINENEWLINE raise Exception("Value %s is not boolean." % value)NEWLINENEWLINENEWLINE###############################################################################NEWLINE# Settings wrapper.NEWLINE###############################################################################NEWLINEclass Settings(object):NEWLINE """Cloud Browser application settings.NEWLINENEWLINE This class wraps the "real" Django settings object, so can be used instead.NEWLINE The additional cloud browser settings are as follows:NEWLINENEWLINE .. note::NEWLINE **Environment Variables**: Certain credential settings can come from OSNEWLINE environment variables instead of from a settings file value to open upNEWLINE more options for secrets management. Values that can be set in theNEWLINE environment are designated with an "(*Env*)" notation.NEWLINENEWLINE Setting a value this way could be done, e.g.::NEWLINENEWLINE $ export CLOUD_BROWSER_AWS_ACCOUNT="my_account"NEWLINE $ export CLOUD_BROWSER_AWS_SECRET_KEY="my_secret"NEWLINE $ # ... 
start django application with environment variables.NEWLINENEWLINE **Datastore Settings**:NEWLINENEWLINE * ``CLOUD_BROWSER_DATASTORE``: Choice of datastore (see values below).NEWLINENEWLINE **Amazon Web Services**: Configure AWS S3 as backing datastore.NEWLINENEWLINE * ``CLOUD_BROWSER_DATASTORE = "AWS"``NEWLINE * ``CLOUD_BROWSER_AWS_ACCOUNT``: Account name. (*Env*)NEWLINE * ``CLOUD_BROWSER_AWS_SECRET_KEY``: Account API secret key. (*Env*)NEWLINENEWLINE **Google Storage for Developers**: Configure Google Storage as backingNEWLINE datastore.NEWLINENEWLINE * ``CLOUD_BROWSER_DATASTORE = "Google"``NEWLINE * ``CLOUD_BROWSER_GS_ACCOUNT``: Account name. (*Env*)NEWLINE * ``CLOUD_BROWSER_GS_SECRET_KEY``: Account API secret key. (*Env*)NEWLINENEWLINE **Rackspace**: Configure Rackspace Cloud Files as backing datastore.NEWLINENEWLINE * ``CLOUD_BROWSER_DATASTORE = "Rackspace"``NEWLINE * ``CLOUD_BROWSER_RACKSPACE_ACCOUNT``: Account name. (*Env*)NEWLINE * ``CLOUD_BROWSER_RACKSPACE_SECRET_KEY``: Account API secret key. (*Env*)NEWLINE * ``CLOUD_BROWSER_RACKSPACE_SERVICENET``: Boolean designating whether orNEWLINE not to use Rackspace's servicenet (i.e., the private interface on aNEWLINE Cloud Server). (*Env*)NEWLINE * ``CLOUD_BROWSER_RACKSPACE_AUTHURL``: Alternative authorization server,NEWLINE for use, e.g., with `OpenStack <http://www.openstack.org/>`_ instead ofNEWLINE Rackspace. (*Env*)NEWLINENEWLINE **Filesystem**: Configure simple filesystem mock datastore.NEWLINENEWLINE * ``CLOUD_BROWSER_DATASTORE = "Filesystem"``NEWLINE * ``CLOUD_BROWSER_FILESYSTEM_ROOT``: Filesystem root to serve from.NEWLINENEWLINE **View Permissions**: A standard Django view decorator object can beNEWLINE specified, which is wrapped for all browsing / viewing view -- for example,NEWLINE to limit views to logged in members, use ``login_required`` and for staffNEWLINE only, use ``staff_member_required``. Note that either a real decoratorNEWLINE function or a fully-qualifid string path are acceptable, so you can use,NEWLINE e.g., "django.contrib.admin.views.decorators.staff_member_required" insteadNEWLINE which might help with certain settings.py import-order-related issues.NEWLINENEWLINE * ``CLOUD_BROWSER_VIEW_DECORATOR``: View decorator or fully-qualifiedNEWLINE string path.NEWLINENEWLINE **Container Permissions**: Cloud browser allows a very rudimentary formNEWLINE of access control at the container level with white and black lists.NEWLINE If the white list is set, only container names in the white list areNEWLINE allowed. If the white list is unset, then any container name *not* inNEWLINE the black list is permitted. All name matching is exact (no regularNEWLINE expressions, etc.).NEWLINENEWLINE * ``CLOUD_BROWSER_CONTAINER_WHITELIST``: White list of names. (Iterable)NEWLINE * ``CLOUD_BROWSER_CONTAINER_BLACKLIST``: Black list of names. 
(Iterable)NEWLINENEWLINE **General**: Other settings.NEWLINENEWLINE * ``CLOUD_BROWSER_DEFAULT_LIST_LIMIT``: Default number of objects toNEWLINE diplay per browser page.NEWLINE * ``CLOUD_BROWSER_STATIC_MEDIA_DIR``: If this applications static mediaNEWLINE (found in ``app_media``) is served up under the ``settings.MEDIA_ROOT``,NEWLINE then set a relative path from the root, and the static media will be usedNEWLINE instead of a Django-based static view fallback.NEWLINE """NEWLINE #: Valid datastore types.NEWLINE DATASTORES = set((NEWLINE 'AWS',NEWLINE 'Google',NEWLINE 'Rackspace',NEWLINE 'Filesystem',NEWLINE ))NEWLINENEWLINE #: Settings dictionary of accessor callables.NEWLINE SETTINGS = {NEWLINE # Datastore choice.NEWLINE 'CLOUD_BROWSER_DATASTORE': Setting(NEWLINE default='Filesystem',NEWLINE valid_set=DATASTORESNEWLINE ),NEWLINENEWLINE # Amazon Web Services S3 datastore settings.NEWLINE 'CLOUD_BROWSER_AWS_ACCOUNT': Setting(from_env=True),NEWLINE 'CLOUD_BROWSER_AWS_SECRET_KEY': Setting(from_env=True),NEWLINENEWLINE # Google Storage for Developers datastore settings.NEWLINE 'CLOUD_BROWSER_GS_ACCOUNT': Setting(from_env=True),NEWLINE 'CLOUD_BROWSER_GS_SECRET_KEY': Setting(from_env=True),NEWLINENEWLINE # Rackspace datastore settings.NEWLINE 'CLOUD_BROWSER_RACKSPACE_ACCOUNT': Setting(from_env=True),NEWLINE 'CLOUD_BROWSER_RACKSPACE_SECRET_KEY': Setting(from_env=True),NEWLINE 'CLOUD_BROWSER_RACKSPACE_SERVICENET': BoolSetting(from_env=True),NEWLINE 'CLOUD_BROWSER_RACKSPACE_AUTHURL': BoolSetting(from_env=True),NEWLINENEWLINE # Filesystem datastore settings.NEWLINE 'CLOUD_BROWSER_FILESYSTEM_ROOT': Setting(),NEWLINENEWLINE # View permissions.NEWLINE 'CLOUD_BROWSER_VIEW_DECORATOR': Setting(),NEWLINENEWLINE # Permissions lists for containers.NEWLINE 'CLOUD_BROWSER_CONTAINER_WHITELIST': Setting(),NEWLINE 'CLOUD_BROWSER_CONTAINER_BLACKLIST': Setting(),NEWLINENEWLINE # Browser settings.NEWLINE 'CLOUD_BROWSER_DEFAULT_LIST_LIMIT': Setting(default=20),NEWLINENEWLINE # Static media root.NEWLINE 'CLOUD_BROWSER_STATIC_MEDIA_DIR': Setting(),NEWLINE }NEWLINENEWLINE def __init__(self):NEWLINE """Initializer."""NEWLINE self.__container_whitelist = NoneNEWLINE self.__container_blacklist = NoneNEWLINENEWLINE def __getattr__(self, name, default=None):NEWLINE """Get setting."""NEWLINE if name in self.SETTINGS:NEWLINE return self.SETTINGS[name].get(name, default)NEWLINENEWLINE # Use real Django settings.NEWLINE return getattr(_settings, name, default)NEWLINENEWLINE @propertyNEWLINE def _container_whitelist(self):NEWLINE """Container whitelist."""NEWLINE if self.__container_whitelist is None:NEWLINE self.__container_whitelist = \NEWLINE set(self.CLOUD_BROWSER_CONTAINER_WHITELIST or [])NEWLINE return self.__container_whitelistNEWLINENEWLINE @propertyNEWLINE def _container_blacklist(self):NEWLINE """Container blacklist."""NEWLINE if self.__container_blacklist is None:NEWLINE self.__container_blacklist = \NEWLINE set(self.CLOUD_BROWSER_CONTAINER_BLACKLIST or [])NEWLINE return self.__container_blacklistNEWLINENEWLINE def container_permitted(self, name):NEWLINE """Return whether or not a container is permitted.NEWLINENEWLINE :param name: Container name.NEWLINE :return: ``True`` if container is permitted.NEWLINE :rtype: ``bool``NEWLINE """NEWLINE white = self._container_whitelistNEWLINE black = self._container_blacklistNEWLINE return name not in black and (not white or name in white)NEWLINENEWLINE @propertyNEWLINE def app_media_url(self):NEWLINE """Get application media root from real media root URL."""NEWLINE url = NoneNEWLINE 
media_dir = self.CLOUD_BROWSER_STATIC_MEDIA_DIRNEWLINE if media_dir:NEWLINE url = os.path.join(self.MEDIA_URL, media_dir).rstrip('/') + '/'NEWLINENEWLINE return urlNEWLINENEWLINE @propertyNEWLINE def app_media_doc_root(self): # pylint: disable=R0201NEWLINE """Get application media document (file) root."""NEWLINE app_dir = os.path.abspath(os.path.dirname(__file__))NEWLINE media_root = os.path.join(app_dir, 'media')NEWLINENEWLINE return media_rootNEWLINENEWLINENEWLINEsettings = Settings() # pylint: disable=C0103NEWLINE |
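# A short usage sketch for the wrapper above: code imports this `settings`
# object in place of Django's own, cloud-browser keys are resolved through the
# SETTINGS accessors (with environment fallback where allowed), and every other
# attribute is proxied to the real Django settings. The import path and the
# container name are illustrative assumptions.
from cloud_browser.app_settings import settings

datastore = settings.CLOUD_BROWSER_DATASTORE       # "Filesystem" unless overridden
limit = settings.CLOUD_BROWSER_DEFAULT_LIST_LIMIT  # 20 by default
if settings.container_permitted("public-assets"):
    pass  # container passed the white/black list check; proceed to list it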
"""NEWLINE Copyright (c) 2020-2022 Intel CorporationNEWLINENEWLINE Licensed under the Apache License, Version 2.0 (the "License");NEWLINE you may not use this file except in compliance with the License.NEWLINE You may obtain a copy of the License atNEWLINENEWLINE http://www.apache.org/licenses/LICENSE-2.0NEWLINENEWLINE Unless required by applicable law or agreed to in writing, softwareNEWLINE distributed under the License is distributed on an "AS IS" BASIS,NEWLINE WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE See the License for the specific language governing permissions andNEWLINE limitations under the License.NEWLINE"""NEWLINENEWLINEimport logging as logNEWLINENEWLINEimport numpy as npNEWLINENEWLINEfrom utils.text_preprocessing import text_to_sequence, _symbol_to_idNEWLINEfrom utils.embeddings_processing import PCANEWLINENEWLINENEWLINEdef check_input_name(model, input_tensor_name):NEWLINE try:NEWLINE model.input(input_tensor_name)NEWLINE return TrueNEWLINE except RuntimeError:NEWLINE return FalseNEWLINENEWLINENEWLINEclass ForwardTacotronIE:NEWLINE def __init__(self, model_duration, model_forward, core, device='CPU', verbose=False):NEWLINE self.verbose = verboseNEWLINE self.device = deviceNEWLINE self.core = coreNEWLINENEWLINE self.duration_predictor_model = self.load_network(model_duration)NEWLINE self.duration_predictor_request = self.create_infer_request(self.duration_predictor_model, model_duration)NEWLINENEWLINE self.forward_model = self.load_network(model_forward)NEWLINE self.forward_request = self.create_infer_request(self.forward_model, model_forward)NEWLINENEWLINE # fixed length of the sequence of symbolsNEWLINE self.duration_len = self.duration_predictor_model.input('input_seq').shape[1]NEWLINE # fixed length of the input embeddings for forwardNEWLINE self.forward_len = self.forward_model.input('data').shape[1]NEWLINE if self.verbose:NEWLINE log.debug('Forward limitations : {0} symbols and {1} embeddings'.format(self.duration_len, self.forward_len))NEWLINE self.is_attention = check_input_name(self.forward_model, "pos_mask")NEWLINE if self.is_attention:NEWLINE self.init_pos_mask()NEWLINE else:NEWLINE self.pos_mask = NoneNEWLINENEWLINE self.is_multi_speaker = check_input_name(self.duration_predictor_model, "speaker_embedding")NEWLINE if self.is_multi_speaker:NEWLINE self.init_speaker_information()NEWLINE else:NEWLINE self.male_idx = NoneNEWLINE self.female_idx = NoneNEWLINE self.speaker_embeddings = NoneNEWLINE self.female_embeddings = NoneNEWLINE self.male_embeddings = NoneNEWLINENEWLINE def init_pos_mask(self, mask_sz=6000, window_size=4):NEWLINE mask_arr = np.zeros((1, 1, mask_sz, mask_sz), dtype=np.float32)NEWLINE width = 2 * window_size + 1NEWLINE for i in range(mask_sz - width):NEWLINE mask_arr[0][0][i][i:i + width] = 1.0NEWLINENEWLINE self.pos_mask = mask_arrNEWLINENEWLINE @staticmethodNEWLINE def sequence_mask(length, max_length=None):NEWLINE if max_length is None:NEWLINE max_length = np.max(length)NEWLINE x = np.arange(max_length, dtype=length.dtype)NEWLINE x = np.expand_dims(x, axis=(0))NEWLINE length = np.expand_dims(length, axis=(1))NEWLINE return x < lengthNEWLINENEWLINE def seq_to_indexes(self, text):NEWLINE res = text_to_sequence(text)NEWLINE if self.verbose:NEWLINE log.debug(res)NEWLINE return resNEWLINENEWLINE @staticmethodNEWLINE def build_index(duration, x):NEWLINE duration[np.where(duration < 0)] = 0NEWLINE tot_duration = np.cumsum(duration, 1)NEWLINE max_duration = int(tot_duration.max().item())NEWLINE index = 
np.zeros([x.shape[0], max_duration, x.shape[2]], dtype='long')

        for i in range(tot_duration.shape[0]):
            pos = 0
            for j in range(tot_duration.shape[1]):
                pos1 = tot_duration[i, j]
                index[i, pos:pos1, :] = j
                pos = pos1
            # fill the tail of the time axis with the index of the last symbol
            index[i, pos:, :] = j
        return index

    @staticmethod
    def gather(a, dim, index):
        # numpy analogue of torch.gather: pick elements of `a` along `dim`
        # using `index`, broadcasting positional indices on the other axes
        expanded_index = [index if dim == i else np.arange(a.shape[i]).reshape(
            [-1 if i == j else 1 for j in range(a.ndim)]) for i in range(a.ndim)]
        return a[tuple(expanded_index)]

    def load_network(self, model_path):
        log.info('Reading ForwardTacotron model {}'.format(model_path))
        return self.core.read_model(model_path)

    def create_infer_request(self, model, path):
        compiled_model = self.core.compile_model(model, device_name=self.device)
        log.info('The ForwardTacotron model {} is loaded to {}'.format(path, self.device))
        return compiled_model.create_infer_request()

    def infer_duration(self, sequence, speaker_embedding=None, alpha=1.0, non_empty_symbols=None):
        if self.is_attention:
            input_mask = self.sequence_mask(np.array([[non_empty_symbols]]), sequence.shape[1])
            pos_mask = self.pos_mask[:, :, :sequence.shape[1], :sequence.shape[1]]
            inputs = {"input_seq": sequence,
                      "input_mask": input_mask,
                      "pos_mask": pos_mask}
            if speaker_embedding is not None:
                inputs["speaker_embedding"] = np.array(speaker_embedding)
            self.duration_predictor_request.infer(inputs)
        else:
            self.duration_predictor_request.infer(inputs={"input_seq": sequence})
        duration = self.duration_predictor_request.get_tensor("duration").data[:] * alpha

        # round predicted durations to whole frames
        duration = (duration + 0.5).astype('int').flatten()
        duration = np.expand_dims(duration, axis=0)
        preprocessed_embeddings = self.duration_predictor_request.get_tensor("embeddings").data[:]

        if non_empty_symbols is not None:
            duration = duration[:, :non_empty_symbols]
            preprocessed_embeddings = preprocessed_embeddings[:, :non_empty_symbols]
        indexes = self.build_index(duration, preprocessed_embeddings)
        if self.verbose:
            log.debug("Index: {0}, duration: {1}, embeddings: {2}, non_empty_symbols: {3}"
                      .format(indexes.shape, duration.shape, preprocessed_embeddings.shape, non_empty_symbols))

        return self.gather(preprocessed_embeddings, 1, indexes)

    def infer_mel(self, aligned_emb, non_empty_symbols, speaker_embedding=None):
        if self.is_attention:
            data_mask = self.sequence_mask(np.array([[non_empty_symbols]]), aligned_emb.shape[1])
            pos_mask = self.pos_mask[:, :, :aligned_emb.shape[1], :aligned_emb.shape[1]]
            inputs = {"data": aligned_emb,
                      "data_mask": data_mask,
                      "pos_mask": pos_mask}
            if speaker_embedding is not None:
                inputs["speaker_embedding"] = np.array(speaker_embedding)
            self.forward_request.infer(inputs)
        else:
            self.forward_request.infer(inputs={"data": aligned_emb})
        return self.forward_request.get_tensor('mel').data[:, :non_empty_symbols]

    def find_optimal_delimiters_position(self, sequence, delimiters, idx, window=20):
        # search a window left of `idx` for the last occurrence of each delimiter
        res = {d: -1 for d in delimiters}
        for i in range(max(0, idx - window), idx):
            if sequence[i] in delimiters:
                res[sequence[i]] = i + 1
        return res

    def forward_duration_prediction_by_delimiters(self, text, speaker_embedding, alpha):
        sequence = self.seq_to_indexes(text)
        seq_len = len(sequence)
        outputs = []

        if seq_len <= self.duration_len:
            non_empty_symbols = len(sequence) + min(1, self.duration_len - seq_len)
            sequence = sequence + [_symbol_to_id[' ']] * (self.duration_len - seq_len)
            sequence = np.array(sequence)
            sequence = np.expand_dims(sequence, axis=0)
            outputs.append(self.infer_duration(sequence, speaker_embedding, alpha, non_empty_symbols=non_empty_symbols))
        else:
            # split long sequences at punctuation so each chunk fits the model input
            punctuation = '.!?,;: '
            delimiters = [_symbol_to_id[p] for p in punctuation]

            start_idx = 0
            while start_idx < seq_len:
                if start_idx + self.duration_len < seq_len:
                    positions = self.find_optimal_delimiters_position(sequence, delimiters,
                                                                      start_idx + self.duration_len,
                                                                      window=self.duration_len // 10)
                else:
                    positions = {delimiters[0]: seq_len}
                edge = -1
                for d in delimiters:
                    if positions[d] > 0:
                        edge = positions[d]
                        break
                if edge < 0:
                    raise RuntimeError("Bad delimiter position {0} for sequence with length {1}".format(edge, seq_len))

                sub_sequence = sequence[start_idx:edge]
                non_empty_symbols = len(sub_sequence) + min(1, self.duration_len - len(sub_sequence))
                sub_sequence += [_symbol_to_id[' ']] * (self.duration_len - len(sub_sequence))
                sub_sequence = np.array(sub_sequence)
                sub_sequence = np.expand_dims(sub_sequence, axis=0)
                outputs.append(self.infer_duration(sub_sequence, speaker_embedding, alpha, non_empty_symbols=non_empty_symbols))
                start_idx = edge

        aligned_emb = np.concatenate(outputs, axis=1)
        return aligned_emb

    def forward(self, text, alpha=1.0, speaker_id=19, speaker_emb=None):
        speaker_embedding = None
        if self.is_multi_speaker:
            if speaker_emb is not None:
                speaker_embedding = speaker_emb
            else:
                speaker_embedding = [self.speaker_embeddings[speaker_id, :]]

        aligned_emb = self.forward_duration_prediction_by_delimiters(text, speaker_embedding, alpha)

        # run the mel decoder over fixed-size windows of the aligned embeddings
        mels = []
        start_idx = 0
        end_idx = 0
        while start_idx < aligned_emb.shape[1] and end_idx < aligned_emb.shape[1]:
            end_idx = min(start_idx + self.forward_len, aligned_emb.shape[1])
            sub_aligned_emb = aligned_emb[:, start_idx:end_idx, :]
            if sub_aligned_emb.shape[1] < self.forward_len:
                sub_aligned_emb = np.pad(sub_aligned_emb,
                                         ((0, 0), (0, self.forward_len - sub_aligned_emb.shape[1]), (0, 0)),
                                         'constant', constant_values=0)
            if self.verbose:
                log.debug("SAEmb shape: {0}".format(sub_aligned_emb.shape))
            mel = self.infer_mel(sub_aligned_emb, end_idx - start_idx, speaker_embedding)
            mels.append(np.copy(mel))
            start_idx += self.forward_len

        res = np.concatenate(mels, axis=1)
        if self.verbose:
            log.debug("MEL shape: {0}".format(res.shape))

        return res

    def get_speaker_embeddings(self):
        if self.is_multi_speaker:
            return self.speaker_embeddings
        return None

    def get_pca_speaker_embedding(self, gender, alpha):
        if not self.is_multi_speaker:
            return None

        emb = self.male_embeddings if gender == "Male" else self.female_embeddings
        pca = PCA()
        projection = pca.build(emb)
        x1 = min(projection)
        x2 = max(projection)
        # interpolate along the first principal component between the extremes
        pca_component = x1 + alpha * (x2 - x1)
        emb = pca.iproject(np.array([pca_component]))
        return emb

    def init_speaker_information(self):
        self.male_idx = [2, 3, 7, 11, 12, 15, 16, 19, 20, 21, 25, 26, 27, 29, 32, 33, 34, 35, 36, 38]
        self.female_idx = [0, 1, 4, 5, 6, 8, 9, 10, 13, 14, 17, 18, 22, 23, 24, 28, 30, 31, 37, 39]
        self.speaker_embeddings = np.array([[-0.4327550530433655, -0.5420686602592468],
                                            [-0.5264465808868408, -0.6281864643096924],
                                            [0.15513141453266144, 0.7856010794639587],
                                            [0.3424123525619507, 0.8129010200500488],
                                            [-0.6081429719924927, -0.6511518359184265],
                                            [-0.49752333760261536, -0.8568740487098694],
                                            [-0.005007751286029816, -1.3364707231521606],
                                            [0.14275427162647247, 1.121581792831421],
                                            [-0.45601722598075867, -0.9648892283439636],
                                            [-0.26137179136276245, -1.1388417482376099],
                                            [0.12628738582134247, -1.149622917175293],
                                            [0.34105026721954346, 1.0184416770935059],
                                            [0.3222722113132477, 1.070836067199707],
                                            [-0.2694351375102997, -0.9980007410049438],
                                            [-0.11780811846256256, -1.0476068258285522],
                                            [0.2472933977842331, 1.1816325187683105],
                                            [0.04263993725180626, 1.4357256889343262],
                                            [0.05275965854525566, -1.0010212659835815],
                                            [-0.17100927233695984, -1.1538763046264648],
                                            [0.09288709610700607, 1.296027660369873],
                                            [0.13041983544826508, 1.1497610807418823],
                                            [0.11197542399168015, 1.0537633895874023],
                                            [-0.13089995086193085, -1.2036861181259155],
                                            [0.055261872708797455, -1.338423728942871],
                                            [0.20335668325424194, -1.2085381746292114],
                                            [-0.038247253745794296, 1.268439769744873],
                                            [-0.11069679260253906, 1.050403356552124],
                                            [-0.19113299250602722, 1.0872247219085693],
                                            [0.17568981647491455, -1.247299075126648],
                                            [-0.34791627526283264, 1.0054986476898193],
                                            [0.2401651293039322, -1.1724580526351929],
                                            [0.30263951420783997, -1.043319582939148],
                                            [-0.3040805160999298, 1.1061657667160034],
                                            [-0.27853792905807495, 1.145222544670105],
                                            [-0.49230968952178955, 0.9106340408325195],
                                            [-0.45115727186203003, 0.9025603532791138],
                                            [-0.49153658747673035, 0.7804651260375977],
                                            [0.253637433052063, -1.014277696609497],
                                            [-0.48516881465911865, 0.6745203137397766],
                                            [0.3036082983016968, -0.8406648635864258]])
        mask = np.array([i in self.male_idx for i in range(self.speaker_embeddings.shape[0])])
        self.male_embeddings = self.speaker_embeddings[mask, :]
        mask = np.array([i in self.female_idx for i in range(self.speaker_embeddings.shape[0])])
        self.female_embeddings = self.speaker_embeddings[mask, :]
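
# --- Hedged usage sketch (not part of the original file above) ---
# A minimal NumPy-only illustration of the duration-to-frame expansion that
# build_index() and gather() implement: each symbol embedding is repeated
# `duration` times along the time axis. All shapes and values below are
# invented for illustration.
import numpy as np

emb = np.arange(6, dtype=float).reshape(1, 3, 2)   # (batch=1, symbols=3, dim=2)
duration = np.array([[2, 1, 3]])                   # predicted frames per symbol

tot_duration = np.cumsum(duration, axis=1)         # [[2, 3, 6]]
max_frames = int(tot_duration[0, -1])
index = np.zeros((1, max_frames, emb.shape[2]), dtype=np.int64)
pos = 0
for j in range(tot_duration.shape[1]):
    index[0, pos:tot_duration[0, j], :] = j
    pos = tot_duration[0, j]

# equivalent to gather(emb, 1, index): a torch.gather-style lookup in NumPy
aligned = np.take_along_axis(emb, index, axis=1)
assert aligned.shape == (1, 6, 2)                  # 2 + 1 + 3 frames in total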
# -*- coding: utf-8 -*-

import os
import argparse
from lxml import etree, html
from lxml.html.clean import Cleaner
import fnmatch  # to match files by pattern
import regex as re  # provides the \p{...} Unicode property classes used below
import time
import dateparser
import json


def timeit(method):
    """Time methods."""
    def timed(*args, **kw):
        ts = time.time()
        result = method(*args, **kw)
        te = time.time()

        print('%r %2.2f sec' %
              (method.__name__, te - ts))
        return result

    return timed


class TransformHtmlProceedingsToXml(object):
    """Get proceedings of the European Parliament."""

    @timeit
    def __init__(self):
        self.cli()
        self.infiles = self.get_files(self.indir, self.pattern)
        self.n_proceedings = 0
        self.ns = {'re': 'http://exslt.org/regular-expressions'}
        self.loc = self.get_localized_vars()
        self.explanations_of_vote = re.compile(r' *EXPLANATIONS? OF VOTES?')
        self.langs = [
            "BG",
            "ES",
            "CS",
            "DA",
            "DE",
            "ET",
            "EL",
            "EN",
            "FR",
            "GA",
            "HR",
            "IT",
            "LV",
            "LT",
            "HU",
            "MT",
            "NL",
            "PL",
            "PT",
            "RO",
            "SK",
            "SL",
            "FI",
            "SV",
        ]
        self.main()

    def __str__(self):
        message = "{} EuroParl {} proceedings transformed!".format(
            str(self.n_proceedings),
            self.language)
        return message

    def get_files(self, directory, fileclue):
        """Get all files in a directory matching a pattern.

        Keyword arguments:
        directory -- a string for the input folder path
        fileclue -- a string as glob pattern
        """
        matches = []
        for root, dirnames, filenames in os.walk(directory):
            for filename in fnmatch.filter(filenames, fileclue):
                matches.append(os.path.join(root, filename))
        return matches

    def get_localized_vars(self):
        fname = self.language + ".json"
        fpath = os.path.join('localization', fname)
        with open(fpath, mode="r", encoding="utf-8") as jfile:
            content = jfile.read()
        localized_vars = json.loads(content)
        return localized_vars

    def read_html(self, infile):
        """Parse an HTML file."""
        with open(infile, encoding='utf-8', mode='r') as input_file:
            return html.parse(input_file)

    def regextract(self, content, a_pattern, target_dic, dic_attrib):
        """Extract information with a regular expression.

        Keyword arguments:
        content -- the string to search
        a_pattern -- the regular expression as a string
        target_dic -- a dictionary where the extraction has to be stored
        dic_attrib -- dictionary key where to store extraction
        """
        # match a_pattern in content
        is_match = re.match(r'{}'.format(a_pattern), content)
        # if it matches, store group 1 and strip the match from the content
        if is_match is not None:
            if dic_attrib not in target_dic.keys():
                target_dic[dic_attrib] = is_match.group(1)
            content = re.sub(r'{}'.format(a_pattern), r'', content)
        return content, target_dic

    def get_speaker_name(self, intervention):
        speaker_name = intervention.xpath(
            './/span[@class="doc_subtitle_level1_bis"]//text()')
        speaker_name = ''.join(speaker_name)
        speaker_name = re.sub(r'\n', r'', speaker_name)
        speaker_name = re.sub(r'\&', r'&', speaker_name)
        speaker_name = re.sub(r'\([\p{Lu}\&/\-–\s]+\)', r'', speaker_name)
        speaker_name = re.sub(r'\(\p{Lu}\p{Ll}+[/-]ALE\)', r'', speaker_name)
        speaker_name = re.sub(r' +', r' ', speaker_name)
        speaker_name = re.sub(r'\A[\xad\s\.—–\-−,\)]+', r'', speaker_name)
        speaker_name = re.sub(
            r'([ \.]\p{Lu}\.)[\xad\s\.—–\-−,:]+\Z',
            r'\1',
            speaker_name)
        speaker_name = re.sub(
            r'(\p{L}\p{L})[\xad\s\.—–\-−,\):]+\Z',
            r'\1',
            speaker_name)
        speaker_name = re.sub(r'(\p{L}\p{L}) . —\Z', r'\1', speaker_name)
        speaker_name = re.sub(
            r'(Figel’)[\xad\s\.—–\-−,\):]+\Z',
            r'\1',
            speaker_name)
        speaker_name = re.sub(r' \.\Z', r'', speaker_name)
        speaker_name = re.sub(r'\([\p{Lu}/\xad\-–]+\Z', r'', speaker_name)
        speaker_name = re.sub(r' +\Z', r'', speaker_name)
        speaker_name = re.sub(r', +,', r',', speaker_name)
        speaker_name = re.sub(r' +, +', r',', speaker_name)
        speaker_name = re.sub(r',+', r',', speaker_name)
        speaker_name = re.sub(r' *,(\S)', r', \1', speaker_name)
        speaker_name = re.sub(r',\Z', r'', speaker_name)
        speaker_name = re.sub(r'(Bartholomeos I)\.', r'\1', speaker_name)
        speaker_name = re.sub(
            r', im Namen der Delegation der britischen Konservativen',
            r'',
            speaker_name)
        return speaker_name

    def get_speaker_id(self, intervention):
        speaker_id = intervention.xpath('.//img[@alt="MPphoto"]')
        speaker_id = speaker_id[0].attrib['src']
        speaker_id = os.path.split(speaker_id)[1]
        speaker_id = os.path.splitext(speaker_id)[0]
        return speaker_id

    def get_is_mep(self, speaker_id):
        # speakers shown with the generic placeholder photo are not MEPs
        return speaker_id != 'photo_generic'

    def get_mode(self, intervention):
        in_writing = intervention.xpath(
            './/span[@class="italic"][text()[re:test(.,"{}")]]'.format(
                self.loc['in_writing']),
            namespaces=self.ns)
        if len(in_writing) > 0:
            output = 'written'
            for writing in in_writing:
                writing.drop_tree()
        else:
            output = 'spoken'
        return output

    def get_role(self, intervention):
        roles = intervention.xpath(
            r'.//span[@class="italic"][text()[re:test(.,"^[\s\xad\-–−—\.]*(?:{})[\s\xad\-–−\.]*(?:\([A-Z][A-Z]\))?[\s\xad\-–−—\.]*$", "m")]]'.format(
                '|'.join(self.loc['roles'])),
            namespaces=self.ns)
        if len(roles) > 0:
            output = []
            for role in roles:
                if isinstance(role, str):
                    output.append(role)
                elif isinstance(role, html.HtmlElement):
                    output.append(role.text)
            for role in roles:
                lang = re.match(
                    r'.*({}).*'.format('|'.join(self.langs)),
                    role.text)
                if lang is not None:
                    i_lang = lang.group(1)
                else:
                    i_lang = None
                role.drop_tree()
        else:
            output = None
            i_lang = None
        if output is not None:
            output = " ".join(output)
            output = re.sub(r'\n', r' ', output)
            output = re.sub(r' +', r' ', output)
            output = re.sub(r'\([\p{Lu}\&/\-–]+\)', r'', output)
            output = re.sub(r'(\p{Ll})[\s\.\xad–\-−—,\)]+\Z', r'\1', output)
            output = re.sub(r'\A[\xad\s\.—–\-−,\)\(]+', r'', output)
            output = re.sub(r'[\xad\s\.—–\-−,\)]+\Z', r'', output)
        return output, i_lang

    def get_heading(self, section):
        heading = section.xpath('.//td[@class="doc_title"]//text()')
        heading = ''.join(heading)
        heading = heading.strip()
        heading = re.sub(r'\(\n', r'(', heading)
        heading = re.sub(r'\n,', r',', heading)
        return heading

    def get_language(self, s_intervention, p, i_lang, new_paragraphs):
        language = p.xpath(
            r'.//span[@class="italic"][text()[re:test(.,"^[\xad\s\.—–\-−,\(]*({})[\xad\s\.—–\-−,\)]*")]]'.format(
                '|'.join(self.langs)),
            namespaces=self.ns)
        if len(language) > 0 and not self.explanations_of_vote.match(language[0].text):
            lang = re.match(
                r'.*({}).*'.format('|'.join(self.langs)),
                language[0].text)
            output = lang.group(1)
            for el in language:
                el.drop_tree()
        else:
            p = html.tostring(p, with_tail=True, encoding='utf-8').decode('utf-8')
            lang_in_text = re.search(
                r'\(({})\)'.format('|'.join(self.langs)),
                p)
            if lang_in_text is not None:
                output = lang_in_text.group(1)
                p = re.sub(r'\(({})\) *'.format('|'.join(self.langs)), r'', p)
            else:
                if len(new_paragraphs) == 0:
                    if 'role' in s_intervention.keys():
                        president_pattern = '|'.join(self.loc['president'])
                        if re.match(r'{}\Z'.format(president_pattern), s_intervention['role']):
                            output = 'unknown'
                        else:
                            if i_lang is None:
                                output = self.language.upper()
                            else:
                                output = i_lang
                    else:
                        if i_lang is None:
                            output = self.language.upper()
                        else:
                            output = i_lang
                else:
                    # inherit the language of the previous paragraph
                    output = new_paragraphs[-1]['language']
            p = html.fromstring(p)
        return output, p

    def clean_paragraph(self, p):
        cleaner = Cleaner(remove_tags=['a'], kill_tags=['sup', 'img'])
        p = cleaner.clean_html(p)
        doc_subtitle = p.xpath('.//span[@class="doc_subtitle_level1_bis"]')
        for d in doc_subtitle:
            d.drop_tree()
        return p

    def get_paragraphs(self, intervention, s_intervention, i_lang):
        paragraphs = intervention.xpath(
            './/p[@class="contents" or @class="doc_subtitle_level1"]')
        new_paragraphs = []
        for p in paragraphs:
            new_p = {}
            p = html.tostring(
                p,
                with_tail=True,
                encoding='utf-8').decode('utf-8')
            p = re.sub(r'\n+', r' ', p)
            p = re.sub(r'<br ?/?>', r' ', p)
            p = html.fromstring(p)
            p = self.clean_paragraph(p)
            new_p['language'], p = self.get_language(
                s_intervention,
                p,
                i_lang,
                new_paragraphs)
            content = p.text_content()
            content = content.strip()
            content = re.sub(r'\t', r' ', content)
            content = re.sub(r'\xad', r'-', content)  # revise
            content = re.sub(r'\xa0', r' ', content)
            content = re.sub(r' +', r' ', content)
            content = re.sub(r'\. \. \.', r'...', content)
            content = re.sub(r'\.{3,}', r'…', content)
            content = re.sub(r'…\.\.', r'…', content)
            content = re.sub(r'^([\s\.—–\-−,\)]+)', r'', content)
            content = re.sub(r'([^\.])(…)', r'\1 \2', content)
            content = re.sub(r'\.…', r' …', content)
            content = re.sub(r'\( ?… ?\)', r'(…)', content)
            content = re.sub(r'(…)(\.)(\w)', r'\1\2 \3', content)
            content = re.sub(r'([\w”])(…)', r'\1 \2', content)
            content = re.sub(r'(…)(\w)', r'\1 \2', content)
            content = re.sub(r'\( +\)', r'', content)
            content = re.sub(r'\( +?', r'(', content)
            content = re.sub(r' +\)', r')', content)
            content = re.sub(r'(\[lt\]|<) ?BRK ?(\[gt\]|>)?', r'', content)
            content = re.sub(r'>(.+?)=', r'"\1"', content)
            content = re.sub(r's= ', r"s' ", content)
            content = re.sub(r'<Titre>', r'Titre', content)
            content = re.sub(r'<0', r'', content)
            content = re.sub(r'>', r'', content)
            content = re.sub(r'<', r'', content)
            content = re.sub(r'^,? *Neil,? +\. +– +', r'', content)
            content = re.sub(r'^\(PPE-DE\), +\. +– +', r'', content)
            content = re.sub(r'^\(Verts/ALE\), +\. +– +', r'', content)
            content = re.sub(r'\A\([\p{Lu}\&/\-–]+\)', r'', content)
            content = re.sub(r' +', r' ', content)
            content = re.sub(r'\A([\s\.—–\-−,\)]+)', r'', content)
            content = re.sub(r'^\((Madam President)', r'\1', content)
            content = re.sub(r'^\((Mr President)', r'\1', content)
            for pattern in self.loc['more_roles']:
                content, s_intervention = self.regextract(
                    content,
                    pattern,
                    s_intervention,
                    'role')
            content = re.sub(r'\*{3,}', r'', content)
            new_p['content'] = content
            new_paragraphs.append(new_p)
        s_intervention['contents'] = new_paragraphs
        return s_intervention

    def add_root_attributes(self, root, tree, infile):
        root.attrib['id'] = os.path.splitext(os.path.basename(infile))[0]
        root.attrib['lang'] = self.language.lower()
        date_string = re.match(
            r'^(.+?,? \d.+?) - (.+)$',
            tree.xpath('//td[@class="doc_title" and @align="left" and @valign="top"]')[0].text)
        date = dateparser.parse(date_string.group(1)).date()
        place = date_string.group(2)
        root.attrib['date'] = str(date)
        root.attrib['place'] = place
        root.attrib['edition'] = tree.xpath('//td[@class="doc_title" and @align="right" and @valign="top"]')[0].text

    def intervention_to_xml(self, x_section, s_intervention):
        x_intervention = etree.SubElement(x_section, 'intervention')
        if 'id' in s_intervention.keys():
            x_intervention.attrib['id'] = s_intervention['id']
        if 'speaker_id' in s_intervention.keys():
            x_intervention.attrib['speaker_id'] = s_intervention['speaker_id']
        if 'name' in s_intervention.keys():
            x_intervention.attrib['name'] = s_intervention['name']
        if 'is_mep' in s_intervention.keys():
            x_intervention.attrib['is_mep'] = str(s_intervention['is_mep'])
        if 'mode' in s_intervention.keys():
            x_intervention.attrib['mode'] = s_intervention['mode']
        if 'role' in s_intervention.keys():
            x_intervention.attrib['role'] = s_intervention['role']
        for paragraph in s_intervention['contents']:
            if len(paragraph['content']) > 0:
                if not re.match(r'^\(.+?\)$', paragraph['content']):
                    x_p = etree.SubElement(
                        x_intervention,
                        'p',
                        sl=paragraph['language'].lower())
                    x_p.text = paragraph['content']
                else:
                    etree.SubElement(
                        x_intervention,
                        'a',
                        text=paragraph['content'])

    def serialize(self, infile, root):
        ofile_name = os.path.splitext(os.path.basename(infile))[0]
        ofile_path = os.path.join(self.outdir, ofile_name + '.xml')
        xml = etree.tostring(
            root,
            encoding='utf-8',
            xml_declaration=True,
            pretty_print=True).decode('utf-8')
        with open(ofile_path, mode='w', encoding='utf-8') as ofile:
            ofile.write(xml)

    def get_element_id(self, element):
        output = element.getprevious().attrib['name']
        return output

    def main(self):
        for infile in self.infiles:
            print(infile)
            tree = self.read_html(infile)
            root = etree.Element('text')
            self.add_root_attributes(root, tree, infile)
            sections = tree.xpath(
                '//table[@class="doc_box_header" and @cellpadding="0"]')
            for section in sections:
                heading = self.get_heading(section)
                section_id = self.get_element_id(section)
                x_section = etree.SubElement(root, 'section')
                x_section.attrib['id'] = section_id
                x_section.attrib['title'] = heading
                interventions = section.xpath(
                    './/table[@cellpadding="5"][.//img[@alt="MPphoto"]]')
                for idx, intervention in enumerate(interventions):
                    s_intervention = {}
                    intervention_id = self.get_element_id(intervention)
                    s_intervention['id'] = intervention_id
                    i_lang = None
                    s_intervention['speaker_id'] = self.get_speaker_id(intervention)
                    s_intervention['is_mep'] = self.get_is_mep(
                        s_intervention['speaker_id'])
                    s_intervention['mode'] = self.get_mode(intervention)
                    speaker_name = self.get_speaker_name(intervention)
                    president_pattern = '|'.join(self.loc['president'])
                    if re.match(r'{}\Z'.format(president_pattern), speaker_name):
                        s_intervention['role'] = speaker_name
                    else:
                        s_intervention['name'] = speaker_name
                        role, i_lang = self.get_role(intervention)
                        if role is not None:
                            s_intervention['role'] = role
                    s_intervention = self.get_paragraphs(
                        intervention,
                        s_intervention,
                        i_lang)
                    self.intervention_to_xml(x_section, s_intervention)
            self.serialize(infile, root)
            self.n_proceedings += 1

    def cli(self):
        """Parse command-line arguments."""
        parser = argparse.ArgumentParser()
        parser.add_argument(
            "-i", "--input",
            required=True,
            help="path to the input directory.")
        parser.add_argument(
            "-o", "--output",
            required=True,
            help="path to the output directory.")
        parser.add_argument(
            "-l", "--language",
            required=True,
            choices=['en', 'es', 'de'],
            help="language of the version to be processed.")
        parser.add_argument(
            '-p', "--pattern",
            required=False,
            default="*.html",
            help="glob pattern to filter files.")
        args = parser.parse_args()
        self.indir = args.input
        self.outdir = args.output
        if not os.path.exists(self.outdir):
            os.makedirs(self.outdir)
        self.language = args.language
        self.pattern = args.pattern


print(TransformHtmlProceedingsToXml())
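
# --- Hedged note (not part of the original script above) ---
# The transformer reads per-language variables from localization/<lang>.json
# (see get_localized_vars). The keys below are exactly the ones the code
# accesses ('in_writing' is formatted into a regex; 'roles' and 'president'
# are joined with '|'; 'more_roles' patterns are iterated and must contain a
# capture group for regextract). The values are illustrative placeholders,
# not the real localization data:
#
#   localization/en.json
#   {
#     "in_writing": "in writing",
#     "roles": ["President", "rapporteur"],
#     "president": ["President"],
#     "more_roles": ["^(rapporteur)[,\\.] +"]
#   }
#
# Typical invocation (paths are hypothetical):
#   python transform_html_proceedings_to_xml.py -i ./html -o ./xml -l en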
import asyncio
import logging
import pickle

import time
from functools import partial
from typing import Callable, Any, TypeVar

from aio_pika.exchange import ExchangeType
from aio_pika.channel import Channel
from aio_pika.exceptions import UnroutableError
from aio_pika.message import (
    Message, IncomingMessage, DeliveryMode, ReturnedMessage
)
from .base import Proxy, Base

log = logging.getLogger(__name__)

R = TypeVar('R')
P = TypeVar('P')
CallbackType = Callable[[P], R]


class RPC(Base):
    # "futures" and "queues" are assigned in __init__, so they belong in
    # __slots__ as well
    __slots__ = ("channel", "loop", "proxy", "result_queue", "futures",
                 "result_consumer_tag", "routes", "queues", "consumer_tags",
                 "dlx_exchange",)

    DLX_NAME = 'rpc.dlx'
    DELIVERY_MODE = DeliveryMode.NOT_PERSISTENT

    __doc__ = """
    Remote Procedure Call helper.

    Create an instance ::

        rpc = await RPC.create(channel)

    Register a python function ::

        # RPC instance passes only keyword arguments
        def multiply(*, x, y):
            return x * y

        await rpc.register("multiply", multiply)

    Call a function through the proxy ::

        assert await rpc.proxy.multiply(x=2, y=3) == 6

    Call a function explicitly ::

        assert await rpc.call('multiply', dict(x=2, y=3)) == 6

    """

    def __init__(self, channel: Channel):
        self.channel = channel
        self.loop = self.channel.loop
        self.proxy = Proxy(self.call)
        self.result_queue = None
        self.futures = dict()
        self.result_consumer_tag = None
        self.routes = {}
        self.queues = {}
        self.consumer_tags = {}
        self.dlx_exchange = None

    def create_future(self) -> asyncio.Future:
        future = self.loop.create_future()
        future_id = id(future)
        self.futures[future_id] = future
        future.add_done_callback(lambda f: self.futures.pop(future_id, None))
        return future

    def close(self) -> asyncio.Task:
        """ Cancels pending calls and deletes the result queue. """
        async def closer():
            if self.result_queue is None:
                return

            for future in self.futures.values():
                future.set_exception(asyncio.CancelledError())

            await self.result_queue.unbind(
                self.dlx_exchange, "",
                arguments={
                    "From": self.result_queue.name,
                    'x-match': 'any',
                }
            )

            await self.result_queue.cancel(self.result_consumer_tag)
            self.result_consumer_tag = None

            await self.result_queue.delete()
            self.result_queue = None

        return self.loop.create_task(closer())

    async def initialize(self, **kwargs):
        if self.result_queue is not None:
            return

        self.result_queue = await self.channel.declare_queue(None, **kwargs)

        self.dlx_exchange = await self.channel.declare_exchange(
            self.DLX_NAME,
            type=ExchangeType.HEADERS,
            auto_delete=True,
        )

        await self.result_queue.bind(
            self.dlx_exchange, "",
            arguments={
                "From": self.result_queue.name,
                'x-match': 'any',
            }
        )

        self.result_consumer_tag = await self.result_queue.consume(
            self.on_result_message, exclusive=True, no_ack=True
        )

        self.channel.add_on_return_callback(self.on_message_returned)

    @classmethod
    async def create(cls, channel: Channel, **kwargs) -> "RPC":
        """ Creates a new instance of :class:`aio_pika.patterns.RPC`.
        You should use this method instead of :func:`__init__`,
        because :func:`create` is a coroutine and performs the asynchronous
        initialization.

        :param channel: initialized instance of :class:`aio_pika.Channel`
        :returns: :class:`RPC`

        """
        rpc = cls(channel)
        await rpc.initialize(**kwargs)
        return rpc

    def on_message_returned(self, message: ReturnedMessage):
        correlation_id = int(
            message.correlation_id
        ) if message.correlation_id else None

        future = self.futures.pop(correlation_id, None)  # type: asyncio.Future

        if not future or future.done():
            log.warning("Unknown message was returned: %r", message)
            return

        future.set_exception(UnroutableError([message]))

    async def on_result_message(self, message: IncomingMessage):
        correlation_id = int(
            message.correlation_id
        ) if message.correlation_id else None

        future = self.futures.pop(correlation_id, None)  # type: asyncio.Future

        if future is None:
            log.warning("Unknown message: %r", message)
            return

        try:
            payload = self.deserialize(message.body)
        except Exception as e:
            log.error("Failed to deserialize response on message: %r", message)
            future.set_exception(e)
            return

        if message.type == 'result':
            future.set_result(payload)
        elif message.type == 'error':
            future.set_exception(payload)
        elif message.type == 'call':
            # a 'call' message coming back through the DLX means it expired
            future.set_exception(
                asyncio.TimeoutError("Message timed-out", message)
            )
        else:
            future.set_exception(
                RuntimeError("Unknown message type %r" % message.type)
            )

    async def on_call_message(self, method_name: str, message: IncomingMessage):
        if method_name not in self.routes:
            log.warning("Method %r not registered in %r", method_name, self)
            return

        try:
            payload = self.deserialize(message.body)
            func = self.routes[method_name]

            result = await self.execute(func, payload)
            result = self.serialize(result)
            message_type = 'result'
        except Exception as e:
            result = self.serialize_exception(e)
            message_type = 'error'

        result_message = Message(
            result,
            delivery_mode=message.delivery_mode,
            correlation_id=message.correlation_id,
            timestamp=time.time(),
            type=message_type,
        )

        await self.channel.default_exchange.publish(
            result_message,
            message.reply_to,
            mandatory=False
        )

        message.ack()

    def serialize(self, data: Any) -> bytes:
        """ Serialize data to bytes.
        Uses `pickle` by default.
        Override this method to change the serializer.

        :param data: Data which will be serialized
        :returns: bytes
        """
        return super().serialize(data)

    def deserialize(self, data: bytes) -> Any:
        """ Deserialize data from bytes.
        Uses `pickle` by default.
        Override this method to change the serializer.

        :param data: Data which will be deserialized
        :returns: :class:`Any`
        """
        return super().deserialize(data)

    def serialize_exception(self, exception: Exception) -> bytes:
        """ Serialize python exception to bytes

        :param exception: :class:`Exception`
        :return: bytes
        """
        return pickle.dumps(exception)

    async def execute(self, func: CallbackType, payload: P) -> R:
        """ Executes the rpc call. May be overridden. """
        return await func(**payload)

    async def call(self, method_name, kwargs: dict = None, *,
                   expiration: int = None, priority: int = 128,
                   delivery_mode: DeliveryMode = DELIVERY_MODE):

        """ Call a remote method and await the result.

        :param method_name: Name of method
        :param kwargs: Method kwargs
        :param expiration:
            If not `None`, messages which stay in the queue longer
            will be returned and :class:`asyncio.TimeoutError` will be raised.
        :param priority: Message priority
        :param delivery_mode: Call message delivery mode
        :raises asyncio.TimeoutError: when message expired
        :raises CancelledError: when :func:`RPC.close` was called
        :raises RuntimeError: internal error
        """

        future = self.create_future()

        message = Message(
            body=self.serialize(kwargs or {}),
            type='call',
            timestamp=time.time(),
            priority=priority,
            correlation_id=id(future),
            delivery_mode=delivery_mode,
            reply_to=self.result_queue.name,
            headers={
                'From': self.result_queue.name
            }
        )

        if expiration is not None:
            message.expiration = expiration

        await self.channel.default_exchange.publish(
            message, routing_key=method_name, mandatory=True
        )

        return await future

    async def register(self, method_name, func: CallbackType, **kwargs):
        """ Creates a queue named `method_name` and starts consuming from it.

        :param method_name: Method name
        :param func:
            target function. Function **MUST** accept only keyword arguments.
        :param kwargs: arguments which will be passed to `queue_declare`
        :raises RuntimeError:
            Function already registered in this :class:`RPC` instance
            or method_name already used.
        """
        arguments = kwargs.pop('arguments', {})
        arguments.update({
            'x-dead-letter-exchange': self.DLX_NAME,
        })

        kwargs['arguments'] = arguments

        queue = await self.channel.declare_queue(method_name, **kwargs)

        if func in self.consumer_tags:
            raise RuntimeError('Function already registered')

        if method_name in self.routes:
            raise RuntimeError(
                'Method name already used for %r' % self.routes[method_name]
            )

        self.consumer_tags[func] = await queue.consume(
            partial(self.on_call_message, method_name)
        )

        # wrap plain callables so execute() can always await the route
        # (asyncio.coroutine is unavailable on modern Python)
        if asyncio.iscoroutinefunction(func):
            self.routes[method_name] = func
        else:
            async def route(**fkwargs):
                return func(**fkwargs)
            self.routes[method_name] = route
        self.queues[func] = queue

    async def unregister(self, func):
        """ Cancels the subscription to the method queue.

        :param func: Function
        """
        if func not in self.consumer_tags:
            return

        consumer_tag = self.consumer_tags.pop(func)
        queue = self.queues.pop(func)

        await queue.cancel(consumer_tag)

        self.routes.pop(queue.name)
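
# --- Hedged usage sketch (not part of the original module above) ---
# End-to-end example for the RPC helper, mirroring the examples in its
# docstring; the broker URL is an assumption.
import asyncio

import aio_pika
from aio_pika.patterns import RPC


def multiply(*, x, y):
    # the RPC helper passes keyword arguments only
    return x * y


async def main():
    connection = await aio_pika.connect_robust("amqp://guest:guest@localhost/")
    channel = await connection.channel()
    rpc = await RPC.create(channel)

    await rpc.register("multiply", multiply, auto_delete=True)

    assert await rpc.proxy.multiply(x=2, y=3) == 6            # via the proxy
    assert await rpc.call("multiply", dict(x=2, y=3)) == 6    # explicit call

    await connection.close()


asyncio.run(main())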
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# fine-tune on different dataset
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#

import argparse
import os
import time
import glob
from collections import defaultdict

import numpy as np
import torch
import torch.nn as nn
import torch.optim
import torch.utils.data
import torchvision.transforms as transforms
import torch.backends.cudnn as cudnn
from sklearn import metrics
from models.resnet import resnet50, resnet18
from PIL import Image
from PIL import ImageFile
ImageFile.LOAD_TRUNCATED_IMAGES = True

try:
    import accimage  # optional fast image backend; PIL is used if absent
except ImportError:
    accimage = None


class AverageMeter(object):
    """Computes and stores the average and current value"""
    def __init__(self):
        self.reset()

    def reset(self):
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count


def pil_loader(path):
    # open path as file to avoid ResourceWarning (https://github.com/python-pillow/Pillow/issues/835)
    with open(path, 'rb') as f:
        img = Image.open(f)
        return img.convert('RGB')


def accimage_loader(path):
    if accimage is None:
        return pil_loader(path)
    try:
        return accimage.Image(path)
    except IOError:
        # potentially a decoding problem, fall back to PIL.Image
        return pil_loader(path)


parser = argparse.ArgumentParser()
parser.add_argument('--vocdir', type=str, required=False,
                    default=os.path.expanduser('/data/VOC/VOCdevkit/VOC2007/'), help='pascal voc 2007 dataset')
parser.add_argument('--split', type=str, required=False, default='trainval',
                    choices=['train', 'trainval'], help='training split')

parser.add_argument('--pretrain_path', default='', type=str)
parser.add_argument('--model', default='resnet50', type=str)
parser.add_argument('--exp', default='', type=str)

parser.add_argument('--dropout', default=0, type=int)
parser.add_argument('--nit', type=int, default=80000,
                    help='Number of training iterations')
parser.add_argument('--batch_size', type=int, default=16,
                    help='Batch size')
parser.add_argument('--fc6_8', type=int, default=0,
                    help='If true, train only the final classifier')
parser.add_argument('--train_batchnorm', type=int, default=0,
                    help='If true, train batch-norm layer parameters')
parser.add_argument('--eval_random_crops', type=int, default=1,
                    help='If true, eval on 10 random crops, otherwise eval on 10 fixed crops')
parser.add_argument('--stepsize', type=int, default=5000, help='Decay step')
parser.add_argument('--lr', type=float, required=False,
                    default=0.0003, help='learning rate')
parser.add_argument('--wd', type=float, required=False,
                    default=1e-6, help='weight decay')
parser.add_argument('--seed', type=int, default=31, help='random seed')

# raise the open-file limit for this process; an os.system('ulimit -n ...')
# call would only affect a child shell, not this process
try:
    import resource
    _soft, _hard = resource.getrlimit(resource.RLIMIT_NOFILE)
    resource.setrlimit(resource.RLIMIT_NOFILE, (min(10000, _hard), _hard))
except (ImportError, ValueError):
    pass


def main():
    args = parser.parse_args()
    print(args)

    # fix random seeds
    torch.manual_seed(args.seed)
    torch.cuda.manual_seed_all(args.seed)
    np.random.seed(args.seed)

    # create model and move it to gpu
    if args.model == 'resnet50':
        model = resnet50()
        model = nn.DataParallel(model)
        classifier = nn.Linear(2048, 20).cuda()
    elif args.model == 'resnet18':
        model = resnet18()
        model = nn.DataParallel(model)
        classifier = nn.Linear(512, 20).cuda()
    else:
        raise ValueError('Unsupported model: {}'.format(args.model))

    ckpt = torch.load(args.pretrain_path)
    model.load_state_dict(ckpt['state_dict'])

    model.eval()

    cudnn.benchmark = True

    # what partition of the data to use
    if args.split == 'train':
        args.test = 'val'
    elif args.split == 'trainval':
        args.test = 'test'

    # data loader
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])
    dataset = VOC2007_dataset(args.vocdir, split=args.split, transform=transforms.Compose([
        transforms.RandomHorizontalFlip(),
        transforms.RandomResizedCrop(224, scale=(0.2, 1.0)),
        transforms.ToTensor(),
        normalize,
    ]))

    loader = torch.utils.data.DataLoader(dataset, batch_size=args.batch_size,
                                         shuffle=False, num_workers=24, pin_memory=False)

    print('PASCAL VOC 2007 ' + args.split + ' dataset loaded')

    if args.fc6_8:
        # freeze the backbone, train only the final classifier
        for param in model.parameters():
            param.requires_grad = False
        # unfreeze batchnorm scaling
        if args.train_batchnorm:
            for layer in model.modules():
                if isinstance(layer, torch.nn.BatchNorm2d):
                    for param in layer.parameters():
                        param.requires_grad = True

    device = torch.device('cuda:0')
    model.to(device)

    # set optimizers
    optimizer = torch.optim.SGD(
        filter(lambda x: x.requires_grad, model.parameters()),
        lr=args.lr,
        momentum=0.9,
        weight_decay=args.wd,
    )
    cls_optimizer = torch.optim.SGD(
        filter(lambda x: x.requires_grad, classifier.parameters()),
        lr=args.lr,
        momentum=0.9,
        weight_decay=args.wd,
    )
    criterion = nn.BCEWithLogitsLoss(reduction='none')

    if args.eval_random_crops:
        transform_eval = [
            transforms.RandomHorizontalFlip(),
            transforms.RandomResizedCrop(224, scale=(0.2, 1.0)),
            transforms.ToTensor(),
            normalize,
        ]
    else:
        transform_eval = [
            transforms.Resize(256),
            transforms.TenCrop(224),
            transforms.Lambda(lambda crops: torch.stack(
                [normalize(transforms.ToTensor()(crop)) for crop in crops]))
        ]

    print('Test set')
    test_dataset = VOC2007_dataset(
        args.vocdir, split=args.test, transform=transforms.Compose(transform_eval))
    test_loader = torch.utils.data.DataLoader(
        test_dataset,
        batch_size=1,
        shuffle=False,
        num_workers=1,
        pin_memory=False,
    )

    print('Start training')
    it = 0
    losses = AverageMeter()
    count = 0
    while it < args.nit:
        it = train(
            loader,
            model,
            classifier,
            optimizer,
            cls_optimizer,
            criterion,
            args.fc6_8,
            losses,
            it=it,
            total_iterations=args.nit,
            stepsize=args.stepsize,
            device=device
        )
        count += 1
        if count % 30 == 0:
            evaluate(test_loader, model, classifier, args.eval_random_crops, device)

    print('Evaluation')
    mean_ap = evaluate(test_loader, model, classifier, args.eval_random_crops, device)
    print('Final mAP: {0:.4f}'.format(mean_ap))


def evaluate(loader, model, classifier, eval_random_crops, device):
    model.eval()
    classifier.eval()
    gts = []
    scr = []
    for crop in range(9 * eval_random_crops + 1):
        for i, (input, target) in enumerate(loader):
            # move input to gpu and optionally reshape it
            if len(input.size()) == 5:
                bs, ncrops, c, h, w = input.size()
                input = input.view(-1, c, h, w)
            input = input.to(device)

            # forward pass without grad computation
            with torch.no_grad():
                output = model(input, 6)
                output = output.to(device)
                output = classifier(output)
            if crop < 1:
                scr.append(torch.sum(output, 0, keepdim=True).cpu().numpy())
                gts.append(target)
            else:
                scr[i] += output.cpu().numpy()
    gts = np.concatenate(gts, axis=0).T
    scr = np.concatenate(scr, axis=0).T
    aps = []
    for i in range(20):
        # subtract eps from score to make AP work for tied scores
        ap = metrics.average_precision_score(
            gts[i][gts[i] <= 1], scr[i][gts[i] <= 1] - 1e-5 * gts[i][gts[i] <= 1])
        aps.append(ap)
    print(np.mean(aps), ' ', ' '.join(['%0.2f' % a for a in aps]))
    return np.mean(aps)


def train(loader, model, classifier, optimizer, cls_optimizer, criterion, fc6_8,
          losses, it=0, total_iterations=None, stepsize=None, verbose=True, device=None):
    # meters for logging
    batch_time = AverageMeter()
    data_time = AverageMeter()
    end = time.time()

    current_iteration = it

    # use dropout for the MLP
    model.eval()
    # in the batch norms always use global statistics

    for (input, target) in loader:
        # measure data loading time
        data_time.update(time.time() - end)

        # adjust learning rate
        if current_iteration != 0 and current_iteration % stepsize == 0:
            for param_group in optimizer.param_groups:
                param_group['lr'] = param_group['lr'] * 0.5
                print('iter {0} learning rate is {1}'.format(
                    current_iteration, param_group['lr']))

        # move input to gpu
        input = input.to(device)
        target = target.float().to(device)

        # forward pass
        output = model(input, 6)
        output = output.to(device)
        output = classifier(output)

        # mask out "ignore" targets (encoded as 255) before averaging the loss
        mask = (target == 255)
        loss = torch.sum(criterion(output, target).masked_fill_(
            mask, 0)) / target.size(0)

        # backward pass and weights update
        optimizer.zero_grad()
        cls_optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        cls_optimizer.step()

        # record loss
        losses.update(loss.item(), input.size(0))

        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()
        if verbose and current_iteration % 25 == 0:
            print('Iteration[{0}]\t'
                  'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                  'Data {data_time.val:.3f} ({data_time.avg:.3f})\t'
                  'Loss {loss.val:.4f} ({loss.avg:.4f})\t'.format(
                      current_iteration, batch_time=batch_time,
                      data_time=data_time, loss=losses))
        current_iteration = current_iteration + 1
        if total_iterations is not None and current_iteration == total_iterations:
            break
    return current_iteration


class VOC2007_dataset(torch.utils.data.Dataset):
    def __init__(self, voc_dir, split='train', transform=None):
        # find the image sets
        image_set_dir = os.path.join(voc_dir, 'ImageSets', 'Main')
        image_sets = glob.glob(os.path.join(
            image_set_dir, '*_' + split + '.txt'))
        assert len(image_sets) == 20
        # read the labels
        self.n_labels = len(image_sets)
        images = defaultdict(lambda: -np.ones(self.n_labels, dtype=np.uint8))
        for k, s in enumerate(sorted(image_sets)):
            with open(s, 'r') as fh:
                for line in fh:
                    name, lbl = line.strip().split()
                    lbl = int(lbl)
                    # switch the ignore label and 0 label (in VOC -1: not present, 0: ignore)
                    if lbl < 0:
                        lbl = 0
                    elif lbl == 0:
                        lbl = 255
                    images[os.path.join(voc_dir, 'JPEGImages',
                                        name + '.jpg')][k] = lbl
        self.images = [(k, images[k]) for k in images.keys()]
        np.random.shuffle(self.images)
        self.transform = transform

    def __len__(self):
        return len(self.images)

    def __getitem__(self, i):
        img = pil_loader(self.images[i][0])
        if self.transform is not None:
            img = self.transform(img)
        return img, self.images[i][1]


if __name__ == '__main__':
    main()
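
# --- Hedged sketch (not part of the original script above) ---
# Minimal illustration of the masked multi-label loss used in train():
# VOC2007_dataset encodes targets as 1 = positive, 0 = negative, 255 = ignore,
# and the ignore entries are zeroed out of the per-element BCE before
# averaging over the batch. Tensor shapes and values are invented.
import torch
import torch.nn as nn

criterion = nn.BCEWithLogitsLoss(reduction='none')

output = torch.randn(2, 20)                    # logits: 2 images, 20 classes
target = torch.randint(0, 2, (2, 20)).float()  # random 0/1 labels
target[0, 3] = 255                             # mark one entry as "ignore"

mask = (target == 255)
loss = torch.sum(criterion(output, target).masked_fill_(mask, 0)) / target.size(0)
print(loss.item())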