s0290_word_pattern.go
/*
https://leetcode.com/problems/word-pattern/

Given a pattern and a string s, find if s follows the same pattern.
Here follow means a full match, such that there is a bijection between
a letter in pattern and a non-empty word in s.
*/
package solutions

import (
	"strings"
)

func wordPattern(pattern, s string) bool {
	words := strings.Split(s, " ")
	wordsLen := len(words)
	if len(pattern) != wordsLen {
		return false
	}

	// Map each pattern letter and each word to the index of its first
	// occurrence; the pattern matches only if both maps agree at every
	// position, which is exactly the required bijection.
	seenPattern := make(map[string]int, wordsLen)
	seenWords := make(map[string]int, wordsLen)
	for i := 0; i < wordsLen; i++ {
		ch := string(pattern[i])
		w := words[i]
		if _, ok := seenPattern[ch]; !ok {
			seenPattern[ch] = i
		}
		if _, ok := seenWords[w]; !ok {
			seenWords[w] = i
		}
		if seenPattern[ch] != seenWords[w] {
			return false
		}
	}
	return true
}
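A minimal table-driven test sketch for the function above; the cases are illustrative, not taken from the original repository:

package solutions

import "testing"

func TestWordPattern(t *testing.T) {
	cases := []struct {
		pattern string
		s       string
		want    bool
	}{
		{"abba", "dog cat cat dog", true},
		{"abba", "dog cat cat fish", false},
		{"aaaa", "dog cat cat dog", false},
		// The mapping must be injective in both directions.
		{"abba", "dog dog dog dog", false},
	}
	for _, c := range cases {
		if got := wordPattern(c.pattern, c.s); got != c.want {
			t.Errorf("wordPattern(%q, %q) = %v, want %v", c.pattern, c.s, got, c.want)
		}
	}
}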
percy-command.ts
import { Command } from '@oclif/command'
import * as winston from 'winston'
import { Configuration } from '../configuration/configuration'
import { AgentService } from '../services/agent-service'
import ProcessService from '../services/process-service'
import logger from '../utils/logger'

export default class PercyCommand extends Command {
  static hidden = true

  agentService: AgentService
  processService: ProcessService
  logger: winston.Logger
  percyToken: string

  // helps prevent exiting before the agent service has stopped
  private exiting = false

  constructor(argv: string[], config: any) {
    super(argv, config)
    this.agentService = new AgentService()
    this.processService = new ProcessService()
    this.logger = logger
    this.percyToken = process.env.PERCY_TOKEN || ''
  }

  async run() {
    if (this.percyEnabled() && !this.percyTokenPresent()) {
      this.warn('Skipping visual tests. PERCY_TOKEN was not provided.')
    }
  }

  percyEnabled(): boolean {
    return process.env.PERCY_ENABLE !== '0'
  }

  percyWillRun(): boolean {
    return (this.percyEnabled() && this.percyTokenPresent())
  }

  percyTokenPresent(): boolean {
    return this.percyToken.trim() !== ''
  }

  logStart() {
    this.logger.info('percy has started.')
  }

  async start(configuration: Configuration) {
    if (this.percyWillRun()) {
      await this.agentService.start(configuration)
      this.logStart()

      // Receiving any of these events should stop the agent and exit
      process.on('SIGHUP', () => this.stop(0, true))
      process.on('SIGINT', () => this.stop(0, true))
      process.on('SIGTERM', () => this.stop(0, true))
    }
  }

  async stop(exitCode?: number | null, stopProcess?: boolean) {
    if (this.exiting) { return }
    this.exiting = true

    if (this.percyWillRun()) {
      await this.agentService.stop()
    }

    if (stopProcess) {
      process.exit(exitCode || 0)
    } else {
      this.exit(exitCode || 0)
    }
  }
}
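A hypothetical sketch of how a concrete command might drive the lifecycle above; the Start class and the way a Configuration is obtained are assumptions for illustration, not part of the original package:

import { Configuration } from '../configuration/configuration'
import PercyCommand from './percy-command'

// Hypothetical subclass; not part of the original source.
export default class Start extends PercyCommand {
  static hidden = false

  async run() {
    await super.run() // base class warns when PERCY_TOKEN is missing

    if (this.percyWillRun()) {
      // Building a real Configuration is out of scope for this sketch.
      const configuration = {} as Configuration
      await this.start(configuration) // also registers SIGHUP/SIGINT/SIGTERM handlers
    }
  }
}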
models.go
// +build go1.9 // Copyright 2020 Microsoft Corporation // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // This code was auto-generated by: // github.com/Azure/azure-sdk-for-go/tools/profileBuilder package datamigration import ( "context" original "github.com/Azure/azure-sdk-for-go/services/datamigration/mgmt/2018-04-19/datamigration" ) const ( DefaultBaseURI = original.DefaultBaseURI ) type AuthenticationType = original.AuthenticationType const ( ActiveDirectoryIntegrated AuthenticationType = original.ActiveDirectoryIntegrated ActiveDirectoryPassword AuthenticationType = original.ActiveDirectoryPassword None AuthenticationType = original.None SQLAuthentication AuthenticationType = original.SQLAuthentication WindowsAuthentication AuthenticationType = original.WindowsAuthentication ) type BackupFileStatus = original.BackupFileStatus const ( Arrived BackupFileStatus = original.Arrived Cancelled BackupFileStatus = original.Cancelled Queued BackupFileStatus = original.Queued Restored BackupFileStatus = original.Restored Restoring BackupFileStatus = original.Restoring Uploaded BackupFileStatus = original.Uploaded Uploading BackupFileStatus = original.Uploading ) type BackupMode = original.BackupMode const ( CreateBackup BackupMode = original.CreateBackup ExistingBackup BackupMode = original.ExistingBackup ) type BackupType = original.BackupType const ( BackupTypeDatabase BackupType = original.BackupTypeDatabase BackupTypeDifferentialDatabase BackupType = original.BackupTypeDifferentialDatabase BackupTypeDifferentialFile BackupType = original.BackupTypeDifferentialFile BackupTypeDifferentialPartial BackupType = original.BackupTypeDifferentialPartial BackupTypeFile BackupType = original.BackupTypeFile BackupTypePartial BackupType = original.BackupTypePartial BackupTypeTransactionLog BackupType = original.BackupTypeTransactionLog ) type CommandState = original.CommandState const ( Accepted CommandState = original.Accepted Failed CommandState = original.Failed Running CommandState = original.Running Succeeded CommandState = original.Succeeded Unknown CommandState = original.Unknown ) type CommandType = original.CommandType const ( CommandTypeCommandProperties CommandType = original.CommandTypeCommandProperties CommandTypeMigrateSQLServerAzureDbSQLMiComplete CommandType = original.CommandTypeMigrateSQLServerAzureDbSQLMiComplete CommandTypeMigrateSyncCompleteDatabase CommandType = original.CommandTypeMigrateSyncCompleteDatabase ) type DatabaseCompatLevel = original.DatabaseCompatLevel const ( CompatLevel100 DatabaseCompatLevel = original.CompatLevel100 CompatLevel110 DatabaseCompatLevel = original.CompatLevel110 CompatLevel120 DatabaseCompatLevel = original.CompatLevel120 CompatLevel130 DatabaseCompatLevel = original.CompatLevel130 CompatLevel140 DatabaseCompatLevel = original.CompatLevel140 CompatLevel80 DatabaseCompatLevel = original.CompatLevel80 CompatLevel90 DatabaseCompatLevel = original.CompatLevel90 ) type DatabaseFileType = original.DatabaseFileType const ( Filestream DatabaseFileType = 
original.Filestream Fulltext DatabaseFileType = original.Fulltext Log DatabaseFileType = original.Log NotSupported DatabaseFileType = original.NotSupported Rows DatabaseFileType = original.Rows ) type DatabaseMigrationStage = original.DatabaseMigrationStage const ( DatabaseMigrationStageBackup DatabaseMigrationStage = original.DatabaseMigrationStageBackup DatabaseMigrationStageCompleted DatabaseMigrationStage = original.DatabaseMigrationStageCompleted DatabaseMigrationStageFileCopy DatabaseMigrationStage = original.DatabaseMigrationStageFileCopy DatabaseMigrationStageInitialize DatabaseMigrationStage = original.DatabaseMigrationStageInitialize DatabaseMigrationStageNone DatabaseMigrationStage = original.DatabaseMigrationStageNone DatabaseMigrationStageRestore DatabaseMigrationStage = original.DatabaseMigrationStageRestore ) type DatabaseMigrationState = original.DatabaseMigrationState const ( CANCELLED DatabaseMigrationState = original.CANCELLED COMPLETED DatabaseMigrationState = original.COMPLETED CUTOVERSTART DatabaseMigrationState = original.CUTOVERSTART FAILED DatabaseMigrationState = original.FAILED FULLBACKUPUPLOADSTART DatabaseMigrationState = original.FULLBACKUPUPLOADSTART INITIAL DatabaseMigrationState = original.INITIAL LOGSHIPPINGSTART DatabaseMigrationState = original.LOGSHIPPINGSTART POSTCUTOVERCOMPLETE DatabaseMigrationState = original.POSTCUTOVERCOMPLETE UNDEFINED DatabaseMigrationState = original.UNDEFINED UPLOADLOGFILESSTART DatabaseMigrationState = original.UPLOADLOGFILESSTART ) type DatabaseState = original.DatabaseState const ( DatabaseStateCopying DatabaseState = original.DatabaseStateCopying DatabaseStateEmergency DatabaseState = original.DatabaseStateEmergency DatabaseStateOffline DatabaseState = original.DatabaseStateOffline DatabaseStateOfflineSecondary DatabaseState = original.DatabaseStateOfflineSecondary DatabaseStateOnline DatabaseState = original.DatabaseStateOnline DatabaseStateRecovering DatabaseState = original.DatabaseStateRecovering DatabaseStateRecoveryPending DatabaseState = original.DatabaseStateRecoveryPending DatabaseStateRestoring DatabaseState = original.DatabaseStateRestoring DatabaseStateSuspect DatabaseState = original.DatabaseStateSuspect ) type ErrorType = original.ErrorType const ( ErrorTypeDefault ErrorType = original.ErrorTypeDefault ErrorTypeError ErrorType = original.ErrorTypeError ErrorTypeWarning ErrorType = original.ErrorTypeWarning ) type LoginMigrationStage = original.LoginMigrationStage const ( LoginMigrationStageAssignRoleMembership LoginMigrationStage = original.LoginMigrationStageAssignRoleMembership LoginMigrationStageAssignRoleOwnership LoginMigrationStage = original.LoginMigrationStageAssignRoleOwnership LoginMigrationStageCompleted LoginMigrationStage = original.LoginMigrationStageCompleted LoginMigrationStageEstablishObjectPermissions LoginMigrationStage = original.LoginMigrationStageEstablishObjectPermissions LoginMigrationStageEstablishServerPermissions LoginMigrationStage = original.LoginMigrationStageEstablishServerPermissions LoginMigrationStageEstablishUserMapping LoginMigrationStage = original.LoginMigrationStageEstablishUserMapping LoginMigrationStageInitialize LoginMigrationStage = original.LoginMigrationStageInitialize LoginMigrationStageLoginMigration LoginMigrationStage = original.LoginMigrationStageLoginMigration LoginMigrationStageNone LoginMigrationStage = original.LoginMigrationStageNone ) type LoginType = original.LoginType const ( AsymmetricKey LoginType = original.AsymmetricKey Certificate LoginType = 
original.Certificate ExternalGroup LoginType = original.ExternalGroup ExternalUser LoginType = original.ExternalUser SQLLogin LoginType = original.SQLLogin WindowsGroup LoginType = original.WindowsGroup WindowsUser LoginType = original.WindowsUser ) type MigrationState = original.MigrationState const ( MigrationStateCompleted MigrationState = original.MigrationStateCompleted MigrationStateFailed MigrationState = original.MigrationStateFailed MigrationStateInProgress MigrationState = original.MigrationStateInProgress MigrationStateNone MigrationState = original.MigrationStateNone MigrationStateSkipped MigrationState = original.MigrationStateSkipped MigrationStateStopped MigrationState = original.MigrationStateStopped MigrationStateWarning MigrationState = original.MigrationStateWarning ) type MigrationStatus = original.MigrationStatus const ( MigrationStatusCompleted MigrationStatus = original.MigrationStatusCompleted MigrationStatusCompletedWithWarnings MigrationStatus = original.MigrationStatusCompletedWithWarnings MigrationStatusConfigured MigrationStatus = original.MigrationStatusConfigured MigrationStatusConnecting MigrationStatus = original.MigrationStatusConnecting MigrationStatusDefault MigrationStatus = original.MigrationStatusDefault MigrationStatusError MigrationStatus = original.MigrationStatusError MigrationStatusRunning MigrationStatus = original.MigrationStatusRunning MigrationStatusSelectLogins MigrationStatus = original.MigrationStatusSelectLogins MigrationStatusSourceAndTargetSelected MigrationStatus = original.MigrationStatusSourceAndTargetSelected MigrationStatusStopped MigrationStatus = original.MigrationStatusStopped ) type MySQLTargetPlatformType = original.MySQLTargetPlatformType const ( AzureDbForMySQL MySQLTargetPlatformType = original.AzureDbForMySQL SQLServer MySQLTargetPlatformType = original.SQLServer ) type NameCheckFailureReason = original.NameCheckFailureReason const ( AlreadyExists NameCheckFailureReason = original.AlreadyExists Invalid NameCheckFailureReason = original.Invalid ) type ObjectType = original.ObjectType const ( Function ObjectType = original.Function StoredProcedures ObjectType = original.StoredProcedures Table ObjectType = original.Table User ObjectType = original.User View ObjectType = original.View ) type ProjectProvisioningState = original.ProjectProvisioningState const ( ProjectProvisioningStateDeleting ProjectProvisioningState = original.ProjectProvisioningStateDeleting ProjectProvisioningStateSucceeded ProjectProvisioningState = original.ProjectProvisioningStateSucceeded ) type ProjectSourcePlatform = original.ProjectSourcePlatform const ( ProjectSourcePlatformSQL ProjectSourcePlatform = original.ProjectSourcePlatformSQL ProjectSourcePlatformUnknown ProjectSourcePlatform = original.ProjectSourcePlatformUnknown ) type ProjectTargetPlatform = original.ProjectTargetPlatform const ( ProjectTargetPlatformSQLDB ProjectTargetPlatform = original.ProjectTargetPlatformSQLDB ProjectTargetPlatformUnknown ProjectTargetPlatform = original.ProjectTargetPlatformUnknown ) type ResourceSkuCapacityScaleType = original.ResourceSkuCapacityScaleType const ( ResourceSkuCapacityScaleTypeAutomatic ResourceSkuCapacityScaleType = original.ResourceSkuCapacityScaleTypeAutomatic ResourceSkuCapacityScaleTypeManual ResourceSkuCapacityScaleType = original.ResourceSkuCapacityScaleTypeManual ResourceSkuCapacityScaleTypeNone ResourceSkuCapacityScaleType = original.ResourceSkuCapacityScaleTypeNone ) type ResourceSkuRestrictionsReasonCode = 
original.ResourceSkuRestrictionsReasonCode const ( NotAvailableForSubscription ResourceSkuRestrictionsReasonCode = original.NotAvailableForSubscription QuotaID ResourceSkuRestrictionsReasonCode = original.QuotaID ) type ResourceSkuRestrictionsType = original.ResourceSkuRestrictionsType const ( Location ResourceSkuRestrictionsType = original.Location ) type ResultCode = original.ResultCode const ( Completed ResultCode = original.Completed FatalError ResultCode = original.FatalError Initial ResultCode = original.Initial ObjectNotExistsInSource ResultCode = original.ObjectNotExistsInSource ObjectNotExistsInTarget ResultCode = original.ObjectNotExistsInTarget TargetObjectIsInaccessible ResultCode = original.TargetObjectIsInaccessible ) type ResultType = original.ResultType const ( ResultTypeDatabaseLevelErrorOutput ResultType = original.ResultTypeDatabaseLevelErrorOutput ResultTypeDatabaseLevelOutput ResultType = original.ResultTypeDatabaseLevelOutput ResultTypeErrorOutput ResultType = original.ResultTypeErrorOutput ResultTypeMigratePostgreSQLAzureDbForPostgreSQLSyncTaskOutput ResultType = original.ResultTypeMigratePostgreSQLAzureDbForPostgreSQLSyncTaskOutput ResultTypeMigrationLevelOutput ResultType = original.ResultTypeMigrationLevelOutput ResultTypeTableLevelOutput ResultType = original.ResultTypeTableLevelOutput ) type ResultTypeBasicConnectToSourceSQLServerTaskOutput = original.ResultTypeBasicConnectToSourceSQLServerTaskOutput const ( ResultTypeBasicConnectToSourceSQLServerTaskOutputResultTypeAgentJobLevelOutput ResultTypeBasicConnectToSourceSQLServerTaskOutput = original.ResultTypeBasicConnectToSourceSQLServerTaskOutputResultTypeAgentJobLevelOutput ResultTypeBasicConnectToSourceSQLServerTaskOutputResultTypeConnectToSourceSQLServerTaskOutput ResultTypeBasicConnectToSourceSQLServerTaskOutput = original.ResultTypeBasicConnectToSourceSQLServerTaskOutputResultTypeConnectToSourceSQLServerTaskOutput ResultTypeBasicConnectToSourceSQLServerTaskOutputResultTypeDatabaseLevelOutput ResultTypeBasicConnectToSourceSQLServerTaskOutput = original.ResultTypeBasicConnectToSourceSQLServerTaskOutputResultTypeDatabaseLevelOutput ResultTypeBasicConnectToSourceSQLServerTaskOutputResultTypeLoginLevelOutput ResultTypeBasicConnectToSourceSQLServerTaskOutput = original.ResultTypeBasicConnectToSourceSQLServerTaskOutputResultTypeLoginLevelOutput ResultTypeBasicConnectToSourceSQLServerTaskOutputResultTypeTaskLevelOutput ResultTypeBasicConnectToSourceSQLServerTaskOutput = original.ResultTypeBasicConnectToSourceSQLServerTaskOutputResultTypeTaskLevelOutput ) type ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutput = original.ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutput const ( ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutputResultTypeDatabaseLevelErrorOutput ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutput = original.ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutputResultTypeDatabaseLevelErrorOutput ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutputResultTypeDatabaseLevelOutput ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutput = original.ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutputResultTypeDatabaseLevelOutput ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutputResultTypeErrorOutput ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutput = original.ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutputResultTypeErrorOutput ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutputResultTypeMigrateMySQLAzureDbForMySQLSyncTaskOutput 
ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutput = original.ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutputResultTypeMigrateMySQLAzureDbForMySQLSyncTaskOutput ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutputResultTypeMigrationLevelOutput ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutput = original.ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutputResultTypeMigrationLevelOutput ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutputResultTypeTableLevelOutput ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutput = original.ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutputResultTypeTableLevelOutput ) type ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutput = original.ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutput const ( ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutputResultTypeDatabaseLevelErrorOutput ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutput = original.ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutputResultTypeDatabaseLevelErrorOutput ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutputResultTypeDatabaseLevelOutput ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutput = original.ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutputResultTypeDatabaseLevelOutput ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutputResultTypeErrorOutput ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutput = original.ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutputResultTypeErrorOutput ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutputResultTypeMigrateSQLServerSQLDbSyncTaskOutput ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutput = original.ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutputResultTypeMigrateSQLServerSQLDbSyncTaskOutput ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutputResultTypeMigrationLevelOutput ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutput = original.ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutputResultTypeMigrationLevelOutput ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutputResultTypeTableLevelOutput ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutput = original.ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutputResultTypeTableLevelOutput ) type ResultTypeBasicMigrateSQLServerSQLDbTaskOutput = original.ResultTypeBasicMigrateSQLServerSQLDbTaskOutput const ( ResultTypeBasicMigrateSQLServerSQLDbTaskOutputResultTypeDatabaseLevelOutput ResultTypeBasicMigrateSQLServerSQLDbTaskOutput = original.ResultTypeBasicMigrateSQLServerSQLDbTaskOutputResultTypeDatabaseLevelOutput ResultTypeBasicMigrateSQLServerSQLDbTaskOutputResultTypeErrorOutput ResultTypeBasicMigrateSQLServerSQLDbTaskOutput = original.ResultTypeBasicMigrateSQLServerSQLDbTaskOutputResultTypeErrorOutput ResultTypeBasicMigrateSQLServerSQLDbTaskOutputResultTypeMigrateSQLServerSQLDbTaskOutput ResultTypeBasicMigrateSQLServerSQLDbTaskOutput = original.ResultTypeBasicMigrateSQLServerSQLDbTaskOutputResultTypeMigrateSQLServerSQLDbTaskOutput ResultTypeBasicMigrateSQLServerSQLDbTaskOutputResultTypeMigrationDatabaseLevelValidationOutput ResultTypeBasicMigrateSQLServerSQLDbTaskOutput = original.ResultTypeBasicMigrateSQLServerSQLDbTaskOutputResultTypeMigrationDatabaseLevelValidationOutput ResultTypeBasicMigrateSQLServerSQLDbTaskOutputResultTypeMigrationLevelOutput ResultTypeBasicMigrateSQLServerSQLDbTaskOutput = original.ResultTypeBasicMigrateSQLServerSQLDbTaskOutputResultTypeMigrationLevelOutput ResultTypeBasicMigrateSQLServerSQLDbTaskOutputResultTypeMigrationValidationOutput ResultTypeBasicMigrateSQLServerSQLDbTaskOutput = 
original.ResultTypeBasicMigrateSQLServerSQLDbTaskOutputResultTypeMigrationValidationOutput ResultTypeBasicMigrateSQLServerSQLDbTaskOutputResultTypeTableLevelOutput ResultTypeBasicMigrateSQLServerSQLDbTaskOutput = original.ResultTypeBasicMigrateSQLServerSQLDbTaskOutputResultTypeTableLevelOutput ) type ResultTypeBasicMigrateSQLServerSQLMISyncTaskOutput = original.ResultTypeBasicMigrateSQLServerSQLMISyncTaskOutput const ( ResultTypeBasicMigrateSQLServerSQLMISyncTaskOutputResultTypeDatabaseLevelOutput ResultTypeBasicMigrateSQLServerSQLMISyncTaskOutput = original.ResultTypeBasicMigrateSQLServerSQLMISyncTaskOutputResultTypeDatabaseLevelOutput ResultTypeBasicMigrateSQLServerSQLMISyncTaskOutputResultTypeErrorOutput ResultTypeBasicMigrateSQLServerSQLMISyncTaskOutput = original.ResultTypeBasicMigrateSQLServerSQLMISyncTaskOutputResultTypeErrorOutput ResultTypeBasicMigrateSQLServerSQLMISyncTaskOutputResultTypeMigrateSQLServerSQLMISyncTaskOutput ResultTypeBasicMigrateSQLServerSQLMISyncTaskOutput = original.ResultTypeBasicMigrateSQLServerSQLMISyncTaskOutputResultTypeMigrateSQLServerSQLMISyncTaskOutput ResultTypeBasicMigrateSQLServerSQLMISyncTaskOutputResultTypeMigrationLevelOutput ResultTypeBasicMigrateSQLServerSQLMISyncTaskOutput = original.ResultTypeBasicMigrateSQLServerSQLMISyncTaskOutputResultTypeMigrationLevelOutput ) type ResultTypeBasicMigrateSQLServerSQLMITaskOutput = original.ResultTypeBasicMigrateSQLServerSQLMITaskOutput const ( ResultTypeBasicMigrateSQLServerSQLMITaskOutputResultTypeAgentJobLevelOutput ResultTypeBasicMigrateSQLServerSQLMITaskOutput = original.ResultTypeBasicMigrateSQLServerSQLMITaskOutputResultTypeAgentJobLevelOutput ResultTypeBasicMigrateSQLServerSQLMITaskOutputResultTypeDatabaseLevelOutput ResultTypeBasicMigrateSQLServerSQLMITaskOutput = original.ResultTypeBasicMigrateSQLServerSQLMITaskOutputResultTypeDatabaseLevelOutput ResultTypeBasicMigrateSQLServerSQLMITaskOutputResultTypeErrorOutput ResultTypeBasicMigrateSQLServerSQLMITaskOutput = original.ResultTypeBasicMigrateSQLServerSQLMITaskOutputResultTypeErrorOutput ResultTypeBasicMigrateSQLServerSQLMITaskOutputResultTypeLoginLevelOutput ResultTypeBasicMigrateSQLServerSQLMITaskOutput = original.ResultTypeBasicMigrateSQLServerSQLMITaskOutputResultTypeLoginLevelOutput ResultTypeBasicMigrateSQLServerSQLMITaskOutputResultTypeMigrateSQLServerSQLMITaskOutput ResultTypeBasicMigrateSQLServerSQLMITaskOutput = original.ResultTypeBasicMigrateSQLServerSQLMITaskOutputResultTypeMigrateSQLServerSQLMITaskOutput ResultTypeBasicMigrateSQLServerSQLMITaskOutputResultTypeMigrationLevelOutput ResultTypeBasicMigrateSQLServerSQLMITaskOutput = original.ResultTypeBasicMigrateSQLServerSQLMITaskOutputResultTypeMigrationLevelOutput ) type SQLSourcePlatform = original.SQLSourcePlatform const ( SQLOnPrem SQLSourcePlatform = original.SQLOnPrem ) type SchemaMigrationStage = original.SchemaMigrationStage const ( SchemaMigrationStageCollectingObjects SchemaMigrationStage = original.SchemaMigrationStageCollectingObjects SchemaMigrationStageCompleted SchemaMigrationStage = original.SchemaMigrationStageCompleted SchemaMigrationStageCompletedWithWarnings SchemaMigrationStage = original.SchemaMigrationStageCompletedWithWarnings SchemaMigrationStageDeployingSchema SchemaMigrationStage = original.SchemaMigrationStageDeployingSchema SchemaMigrationStageDownloadingScript SchemaMigrationStage = original.SchemaMigrationStageDownloadingScript SchemaMigrationStageFailed SchemaMigrationStage = original.SchemaMigrationStageFailed SchemaMigrationStageGeneratingScript 
SchemaMigrationStage = original.SchemaMigrationStageGeneratingScript SchemaMigrationStageNotStarted SchemaMigrationStage = original.SchemaMigrationStageNotStarted SchemaMigrationStageUploadingScript SchemaMigrationStage = original.SchemaMigrationStageUploadingScript SchemaMigrationStageValidatingInputs SchemaMigrationStage = original.SchemaMigrationStageValidatingInputs ) type ServerLevelPermissionsGroup = original.ServerLevelPermissionsGroup const ( Default ServerLevelPermissionsGroup = original.Default MigrationFromMySQLToAzureDBForMySQL ServerLevelPermissionsGroup = original.MigrationFromMySQLToAzureDBForMySQL MigrationFromSQLServerToAzureDB ServerLevelPermissionsGroup = original.MigrationFromSQLServerToAzureDB MigrationFromSQLServerToAzureMI ServerLevelPermissionsGroup = original.MigrationFromSQLServerToAzureMI ) type ServiceProvisioningState = original.ServiceProvisioningState const ( ServiceProvisioningStateAccepted ServiceProvisioningState = original.ServiceProvisioningStateAccepted ServiceProvisioningStateDeleting ServiceProvisioningState = original.ServiceProvisioningStateDeleting ServiceProvisioningStateDeploying ServiceProvisioningState = original.ServiceProvisioningStateDeploying ServiceProvisioningStateFailed ServiceProvisioningState = original.ServiceProvisioningStateFailed ServiceProvisioningStateFailedToStart ServiceProvisioningState = original.ServiceProvisioningStateFailedToStart ServiceProvisioningStateFailedToStop ServiceProvisioningState = original.ServiceProvisioningStateFailedToStop ServiceProvisioningStateStarting ServiceProvisioningState = original.ServiceProvisioningStateStarting ServiceProvisioningStateStopped ServiceProvisioningState = original.ServiceProvisioningStateStopped ServiceProvisioningStateStopping ServiceProvisioningState = original.ServiceProvisioningStateStopping ServiceProvisioningStateSucceeded ServiceProvisioningState = original.ServiceProvisioningStateSucceeded ) type ServiceScalability = original.ServiceScalability const ( ServiceScalabilityAutomatic ServiceScalability = original.ServiceScalabilityAutomatic ServiceScalabilityManual ServiceScalability = original.ServiceScalabilityManual ServiceScalabilityNone ServiceScalability = original.ServiceScalabilityNone ) type Severity = original.Severity const ( SeverityError Severity = original.SeverityError SeverityMessage Severity = original.SeverityMessage SeverityWarning Severity = original.SeverityWarning ) type SyncDatabaseMigrationReportingState = original.SyncDatabaseMigrationReportingState const ( SyncDatabaseMigrationReportingStateCANCELLED SyncDatabaseMigrationReportingState = original.SyncDatabaseMigrationReportingStateCANCELLED SyncDatabaseMigrationReportingStateCANCELLING SyncDatabaseMigrationReportingState = original.SyncDatabaseMigrationReportingStateCANCELLING SyncDatabaseMigrationReportingStateCOMPLETE SyncDatabaseMigrationReportingState = original.SyncDatabaseMigrationReportingStateCOMPLETE SyncDatabaseMigrationReportingStateCOMPLETING SyncDatabaseMigrationReportingState = original.SyncDatabaseMigrationReportingStateCOMPLETING SyncDatabaseMigrationReportingStateCONFIGURING SyncDatabaseMigrationReportingState = original.SyncDatabaseMigrationReportingStateCONFIGURING SyncDatabaseMigrationReportingStateFAILED SyncDatabaseMigrationReportingState = original.SyncDatabaseMigrationReportingStateFAILED SyncDatabaseMigrationReportingStateINITIALIAZING SyncDatabaseMigrationReportingState = original.SyncDatabaseMigrationReportingStateINITIALIAZING 
SyncDatabaseMigrationReportingStateREADYTOCOMPLETE SyncDatabaseMigrationReportingState = original.SyncDatabaseMigrationReportingStateREADYTOCOMPLETE SyncDatabaseMigrationReportingStateRUNNING SyncDatabaseMigrationReportingState = original.SyncDatabaseMigrationReportingStateRUNNING SyncDatabaseMigrationReportingStateSTARTING SyncDatabaseMigrationReportingState = original.SyncDatabaseMigrationReportingStateSTARTING SyncDatabaseMigrationReportingStateUNDEFINED SyncDatabaseMigrationReportingState = original.SyncDatabaseMigrationReportingStateUNDEFINED ) type SyncTableMigrationState = original.SyncTableMigrationState const ( SyncTableMigrationStateBEFORELOAD SyncTableMigrationState = original.SyncTableMigrationStateBEFORELOAD SyncTableMigrationStateCANCELED SyncTableMigrationState = original.SyncTableMigrationStateCANCELED SyncTableMigrationStateCOMPLETED SyncTableMigrationState = original.SyncTableMigrationStateCOMPLETED SyncTableMigrationStateERROR SyncTableMigrationState = original.SyncTableMigrationStateERROR SyncTableMigrationStateFAILED SyncTableMigrationState = original.SyncTableMigrationStateFAILED SyncTableMigrationStateFULLLOAD SyncTableMigrationState = original.SyncTableMigrationStateFULLLOAD ) type TaskState = original.TaskState const ( TaskStateCanceled TaskState = original.TaskStateCanceled TaskStateFailed TaskState = original.TaskStateFailed TaskStateFailedInputValidation TaskState = original.TaskStateFailedInputValidation TaskStateFaulted TaskState = original.TaskStateFaulted TaskStateQueued TaskState = original.TaskStateQueued TaskStateRunning TaskState = original.TaskStateRunning TaskStateSucceeded TaskState = original.TaskStateSucceeded TaskStateUnknown TaskState = original.TaskStateUnknown ) type TaskType = original.TaskType const ( TaskTypeConnectToSourceMySQL TaskType = original.TaskTypeConnectToSourceMySQL TaskTypeConnectToSourcePostgreSQLSync TaskType = original.TaskTypeConnectToSourcePostgreSQLSync TaskTypeConnectToSourceSQLServer TaskType = original.TaskTypeConnectToSourceSQLServer TaskTypeConnectToSourceSQLServerSync TaskType = original.TaskTypeConnectToSourceSQLServerSync TaskTypeConnectToTargetAzureDbForMySQL TaskType = original.TaskTypeConnectToTargetAzureDbForMySQL TaskTypeConnectToTargetAzureDbForPostgreSQLSync TaskType = original.TaskTypeConnectToTargetAzureDbForPostgreSQLSync TaskTypeConnectToTargetAzureSQLDbMI TaskType = original.TaskTypeConnectToTargetAzureSQLDbMI TaskTypeConnectToTargetAzureSQLDbMISyncLRS TaskType = original.TaskTypeConnectToTargetAzureSQLDbMISyncLRS TaskTypeConnectToTargetSQLDb TaskType = original.TaskTypeConnectToTargetSQLDb TaskTypeConnectToTargetSQLDbSync TaskType = original.TaskTypeConnectToTargetSQLDbSync TaskTypeGetTDECertificatesSQL TaskType = original.TaskTypeGetTDECertificatesSQL TaskTypeGetUserTablesAzureSQLDbSync TaskType = original.TaskTypeGetUserTablesAzureSQLDbSync TaskTypeGetUserTablesSQL TaskType = original.TaskTypeGetUserTablesSQL TaskTypeMigrateMySQLAzureDbForMySQLSync TaskType = original.TaskTypeMigrateMySQLAzureDbForMySQLSync TaskTypeMigratePostgreSQLAzureDbForPostgreSQLSync TaskType = original.TaskTypeMigratePostgreSQLAzureDbForPostgreSQLSync TaskTypeMigrateSQLServerAzureSQLDbMI TaskType = original.TaskTypeMigrateSQLServerAzureSQLDbMI TaskTypeMigrateSQLServerAzureSQLDbMISyncLRS TaskType = original.TaskTypeMigrateSQLServerAzureSQLDbMISyncLRS TaskTypeMigrateSQLServerAzureSQLDbSync TaskType = original.TaskTypeMigrateSQLServerAzureSQLDbSync TaskTypeMigrateSQLServerSQLDb TaskType = original.TaskTypeMigrateSQLServerSQLDb 
TaskTypeProjectTaskProperties TaskType = original.TaskTypeProjectTaskProperties TaskTypeValidateMigrationInputSQLServerAzureSQLDbMI TaskType = original.TaskTypeValidateMigrationInputSQLServerAzureSQLDbMI TaskTypeValidateMigrationInputSQLServerAzureSQLDbMISyncLRS TaskType = original.TaskTypeValidateMigrationInputSQLServerAzureSQLDbMISyncLRS TaskTypeValidateMigrationInputSQLServerSQLDbSync TaskType = original.TaskTypeValidateMigrationInputSQLServerSQLDbSync ) type Type = original.Type const ( TypeConnectionInfo Type = original.TypeConnectionInfo TypeMiSQLConnectionInfo Type = original.TypeMiSQLConnectionInfo TypeMySQLConnectionInfo Type = original.TypeMySQLConnectionInfo TypePostgreSQLConnectionInfo Type = original.TypePostgreSQLConnectionInfo TypeSQLConnectionInfo Type = original.TypeSQLConnectionInfo ) type UpdateActionType = original.UpdateActionType const ( AddedOnTarget UpdateActionType = original.AddedOnTarget ChangedOnTarget UpdateActionType = original.ChangedOnTarget DeletedOnTarget UpdateActionType = original.DeletedOnTarget ) type ValidationStatus = original.ValidationStatus const ( ValidationStatusCompleted ValidationStatus = original.ValidationStatusCompleted ValidationStatusCompletedWithIssues ValidationStatus = original.ValidationStatusCompletedWithIssues ValidationStatusDefault ValidationStatus = original.ValidationStatusDefault ValidationStatusFailed ValidationStatus = original.ValidationStatusFailed ValidationStatusInitialized ValidationStatus = original.ValidationStatusInitialized ValidationStatusInProgress ValidationStatus = original.ValidationStatusInProgress ValidationStatusNotStarted ValidationStatus = original.ValidationStatusNotStarted ValidationStatusStopped ValidationStatus = original.ValidationStatusStopped ) type APIError = original.APIError type AvailableServiceSku = original.AvailableServiceSku type AvailableServiceSkuCapacity = original.AvailableServiceSkuCapacity type AvailableServiceSkuSku = original.AvailableServiceSkuSku type AzureActiveDirectoryApp = original.AzureActiveDirectoryApp type BackupFileInfo = original.BackupFileInfo type BackupSetInfo = original.BackupSetInfo type BaseClient = original.BaseClient type BasicCommandProperties = original.BasicCommandProperties type BasicConnectToSourceSQLServerTaskOutput = original.BasicConnectToSourceSQLServerTaskOutput type BasicConnectionInfo = original.BasicConnectionInfo type BasicMigrateMySQLAzureDbForMySQLSyncTaskOutput = original.BasicMigrateMySQLAzureDbForMySQLSyncTaskOutput type BasicMigratePostgreSQLAzureDbForPostgreSQLSyncTaskOutput = original.BasicMigratePostgreSQLAzureDbForPostgreSQLSyncTaskOutput type BasicMigrateSQLServerSQLDbSyncTaskOutput = original.BasicMigrateSQLServerSQLDbSyncTaskOutput type BasicMigrateSQLServerSQLDbTaskOutput = original.BasicMigrateSQLServerSQLDbTaskOutput type BasicMigrateSQLServerSQLMISyncTaskOutput = original.BasicMigrateSQLServerSQLMISyncTaskOutput type BasicMigrateSQLServerSQLMITaskOutput = original.BasicMigrateSQLServerSQLMITaskOutput type BasicProjectTaskProperties = original.BasicProjectTaskProperties type BlobShare = original.BlobShare type CommandProperties = original.CommandProperties type ConnectToSourceMySQLTaskInput = original.ConnectToSourceMySQLTaskInput type ConnectToSourceMySQLTaskProperties = original.ConnectToSourceMySQLTaskProperties type ConnectToSourceNonSQLTaskOutput = original.ConnectToSourceNonSQLTaskOutput type ConnectToSourcePostgreSQLSyncTaskInput = original.ConnectToSourcePostgreSQLSyncTaskInput type ConnectToSourcePostgreSQLSyncTaskOutput = 
original.ConnectToSourcePostgreSQLSyncTaskOutput type ConnectToSourcePostgreSQLSyncTaskProperties = original.ConnectToSourcePostgreSQLSyncTaskProperties type ConnectToSourceSQLServerSyncTaskProperties = original.ConnectToSourceSQLServerSyncTaskProperties type ConnectToSourceSQLServerTaskInput = original.ConnectToSourceSQLServerTaskInput type ConnectToSourceSQLServerTaskOutput = original.ConnectToSourceSQLServerTaskOutput type ConnectToSourceSQLServerTaskOutputAgentJobLevel = original.ConnectToSourceSQLServerTaskOutputAgentJobLevel type ConnectToSourceSQLServerTaskOutputDatabaseLevel = original.ConnectToSourceSQLServerTaskOutputDatabaseLevel type ConnectToSourceSQLServerTaskOutputLoginLevel = original.ConnectToSourceSQLServerTaskOutputLoginLevel type ConnectToSourceSQLServerTaskOutputTaskLevel = original.ConnectToSourceSQLServerTaskOutputTaskLevel type ConnectToSourceSQLServerTaskProperties = original.ConnectToSourceSQLServerTaskProperties type ConnectToTargetAzureDbForMySQLTaskInput = original.ConnectToTargetAzureDbForMySQLTaskInput type ConnectToTargetAzureDbForMySQLTaskOutput = original.ConnectToTargetAzureDbForMySQLTaskOutput type ConnectToTargetAzureDbForMySQLTaskProperties = original.ConnectToTargetAzureDbForMySQLTaskProperties type ConnectToTargetAzureDbForPostgreSQLSyncTaskInput = original.ConnectToTargetAzureDbForPostgreSQLSyncTaskInput type ConnectToTargetAzureDbForPostgreSQLSyncTaskOutput = original.ConnectToTargetAzureDbForPostgreSQLSyncTaskOutput type ConnectToTargetAzureDbForPostgreSQLSyncTaskProperties = original.ConnectToTargetAzureDbForPostgreSQLSyncTaskProperties type ConnectToTargetSQLDbTaskInput = original.ConnectToTargetSQLDbTaskInput type ConnectToTargetSQLDbTaskOutput = original.ConnectToTargetSQLDbTaskOutput type ConnectToTargetSQLDbTaskProperties = original.ConnectToTargetSQLDbTaskProperties type ConnectToTargetSQLMISyncTaskInput = original.ConnectToTargetSQLMISyncTaskInput type ConnectToTargetSQLMISyncTaskOutput = original.ConnectToTargetSQLMISyncTaskOutput type ConnectToTargetSQLMISyncTaskProperties = original.ConnectToTargetSQLMISyncTaskProperties type ConnectToTargetSQLMITaskInput = original.ConnectToTargetSQLMITaskInput type ConnectToTargetSQLMITaskOutput = original.ConnectToTargetSQLMITaskOutput type ConnectToTargetSQLMITaskProperties = original.ConnectToTargetSQLMITaskProperties type ConnectToTargetSQLSQLDbSyncTaskInput = original.ConnectToTargetSQLSQLDbSyncTaskInput type ConnectToTargetSQLSQLDbSyncTaskProperties = original.ConnectToTargetSQLSQLDbSyncTaskProperties type ConnectionInfo = original.ConnectionInfo type DataIntegrityValidationResult = original.DataIntegrityValidationResult type DataItemMigrationSummaryResult = original.DataItemMigrationSummaryResult type Database = original.Database type DatabaseBackupInfo = original.DatabaseBackupInfo type DatabaseFileInfo = original.DatabaseFileInfo type DatabaseFileInput = original.DatabaseFileInput type DatabaseInfo = original.DatabaseInfo type DatabaseObjectName = original.DatabaseObjectName type DatabaseSummaryResult = original.DatabaseSummaryResult type DatabaseTable = original.DatabaseTable type Error = original.Error type ExecutionStatistics = original.ExecutionStatistics type FileShare = original.FileShare type GetProjectDetailsNonSQLTaskInput = original.GetProjectDetailsNonSQLTaskInput type GetTdeCertificatesSQLTaskInput = original.GetTdeCertificatesSQLTaskInput type GetTdeCertificatesSQLTaskOutput = original.GetTdeCertificatesSQLTaskOutput type GetTdeCertificatesSQLTaskProperties = 
original.GetTdeCertificatesSQLTaskProperties type GetUserTablesSQLSyncTaskInput = original.GetUserTablesSQLSyncTaskInput type GetUserTablesSQLSyncTaskOutput = original.GetUserTablesSQLSyncTaskOutput type GetUserTablesSQLSyncTaskProperties = original.GetUserTablesSQLSyncTaskProperties type GetUserTablesSQLTaskInput = original.GetUserTablesSQLTaskInput type GetUserTablesSQLTaskOutput = original.GetUserTablesSQLTaskOutput type GetUserTablesSQLTaskProperties = original.GetUserTablesSQLTaskProperties type MiSQLConnectionInfo = original.MiSQLConnectionInfo type MigrateMISyncCompleteCommandInput = original.MigrateMISyncCompleteCommandInput type MigrateMISyncCompleteCommandOutput = original.MigrateMISyncCompleteCommandOutput type MigrateMISyncCompleteCommandProperties = original.MigrateMISyncCompleteCommandProperties type MigrateMySQLAzureDbForMySQLSyncDatabaseInput = original.MigrateMySQLAzureDbForMySQLSyncDatabaseInput type MigrateMySQLAzureDbForMySQLSyncTaskInput = original.MigrateMySQLAzureDbForMySQLSyncTaskInput type MigrateMySQLAzureDbForMySQLSyncTaskOutput = original.MigrateMySQLAzureDbForMySQLSyncTaskOutput type MigrateMySQLAzureDbForMySQLSyncTaskOutputDatabaseError = original.MigrateMySQLAzureDbForMySQLSyncTaskOutputDatabaseError type MigrateMySQLAzureDbForMySQLSyncTaskOutputDatabaseLevel = original.MigrateMySQLAzureDbForMySQLSyncTaskOutputDatabaseLevel type MigrateMySQLAzureDbForMySQLSyncTaskOutputError = original.MigrateMySQLAzureDbForMySQLSyncTaskOutputError type MigrateMySQLAzureDbForMySQLSyncTaskOutputMigrationLevel = original.MigrateMySQLAzureDbForMySQLSyncTaskOutputMigrationLevel type MigrateMySQLAzureDbForMySQLSyncTaskOutputTableLevel = original.MigrateMySQLAzureDbForMySQLSyncTaskOutputTableLevel type MigrateMySQLAzureDbForMySQLSyncTaskProperties = original.MigrateMySQLAzureDbForMySQLSyncTaskProperties type MigratePostgreSQLAzureDbForPostgreSQLSyncDatabaseInput = original.MigratePostgreSQLAzureDbForPostgreSQLSyncDatabaseInput type MigratePostgreSQLAzureDbForPostgreSQLSyncTaskInput = original.MigratePostgreSQLAzureDbForPostgreSQLSyncTaskInput type MigratePostgreSQLAzureDbForPostgreSQLSyncTaskOutput = original.MigratePostgreSQLAzureDbForPostgreSQLSyncTaskOutput type MigratePostgreSQLAzureDbForPostgreSQLSyncTaskOutputDatabaseError = original.MigratePostgreSQLAzureDbForPostgreSQLSyncTaskOutputDatabaseError type MigratePostgreSQLAzureDbForPostgreSQLSyncTaskOutputDatabaseLevel = original.MigratePostgreSQLAzureDbForPostgreSQLSyncTaskOutputDatabaseLevel type MigratePostgreSQLAzureDbForPostgreSQLSyncTaskOutputError = original.MigratePostgreSQLAzureDbForPostgreSQLSyncTaskOutputError type MigratePostgreSQLAzureDbForPostgreSQLSyncTaskOutputMigrationLevel = original.MigratePostgreSQLAzureDbForPostgreSQLSyncTaskOutputMigrationLevel type MigratePostgreSQLAzureDbForPostgreSQLSyncTaskOutputTableLevel = original.MigratePostgreSQLAzureDbForPostgreSQLSyncTaskOutputTableLevel type MigratePostgreSQLAzureDbForPostgreSQLSyncTaskProperties = original.MigratePostgreSQLAzureDbForPostgreSQLSyncTaskProperties type MigrateSQLServerSQLDbDatabaseInput = original.MigrateSQLServerSQLDbDatabaseInput type MigrateSQLServerSQLDbSyncDatabaseInput = original.MigrateSQLServerSQLDbSyncDatabaseInput type MigrateSQLServerSQLDbSyncTaskInput = original.MigrateSQLServerSQLDbSyncTaskInput type MigrateSQLServerSQLDbSyncTaskOutput = original.MigrateSQLServerSQLDbSyncTaskOutput type MigrateSQLServerSQLDbSyncTaskOutputDatabaseError = original.MigrateSQLServerSQLDbSyncTaskOutputDatabaseError type 
MigrateSQLServerSQLDbSyncTaskOutputDatabaseLevel = original.MigrateSQLServerSQLDbSyncTaskOutputDatabaseLevel type MigrateSQLServerSQLDbSyncTaskOutputError = original.MigrateSQLServerSQLDbSyncTaskOutputError type MigrateSQLServerSQLDbSyncTaskOutputMigrationLevel = original.MigrateSQLServerSQLDbSyncTaskOutputMigrationLevel type MigrateSQLServerSQLDbSyncTaskOutputTableLevel = original.MigrateSQLServerSQLDbSyncTaskOutputTableLevel type MigrateSQLServerSQLDbSyncTaskProperties = original.MigrateSQLServerSQLDbSyncTaskProperties type MigrateSQLServerSQLDbTaskInput = original.MigrateSQLServerSQLDbTaskInput type MigrateSQLServerSQLDbTaskOutput = original.MigrateSQLServerSQLDbTaskOutput type MigrateSQLServerSQLDbTaskOutputDatabaseLevel = original.MigrateSQLServerSQLDbTaskOutputDatabaseLevel type MigrateSQLServerSQLDbTaskOutputDatabaseLevelValidationResult = original.MigrateSQLServerSQLDbTaskOutputDatabaseLevelValidationResult type MigrateSQLServerSQLDbTaskOutputError = original.MigrateSQLServerSQLDbTaskOutputError type MigrateSQLServerSQLDbTaskOutputMigrationLevel = original.MigrateSQLServerSQLDbTaskOutputMigrationLevel type MigrateSQLServerSQLDbTaskOutputTableLevel = original.MigrateSQLServerSQLDbTaskOutputTableLevel type MigrateSQLServerSQLDbTaskOutputValidationResult = original.MigrateSQLServerSQLDbTaskOutputValidationResult type MigrateSQLServerSQLDbTaskProperties = original.MigrateSQLServerSQLDbTaskProperties type MigrateSQLServerSQLMIDatabaseInput = original.MigrateSQLServerSQLMIDatabaseInput type MigrateSQLServerSQLMISyncTaskInput = original.MigrateSQLServerSQLMISyncTaskInput type MigrateSQLServerSQLMISyncTaskOutput = original.MigrateSQLServerSQLMISyncTaskOutput type MigrateSQLServerSQLMISyncTaskOutputDatabaseLevel = original.MigrateSQLServerSQLMISyncTaskOutputDatabaseLevel type MigrateSQLServerSQLMISyncTaskOutputError = original.MigrateSQLServerSQLMISyncTaskOutputError type MigrateSQLServerSQLMISyncTaskOutputMigrationLevel = original.MigrateSQLServerSQLMISyncTaskOutputMigrationLevel type MigrateSQLServerSQLMISyncTaskProperties = original.MigrateSQLServerSQLMISyncTaskProperties type MigrateSQLServerSQLMITaskInput = original.MigrateSQLServerSQLMITaskInput type MigrateSQLServerSQLMITaskOutput = original.MigrateSQLServerSQLMITaskOutput type MigrateSQLServerSQLMITaskOutputAgentJobLevel = original.MigrateSQLServerSQLMITaskOutputAgentJobLevel type MigrateSQLServerSQLMITaskOutputDatabaseLevel = original.MigrateSQLServerSQLMITaskOutputDatabaseLevel type MigrateSQLServerSQLMITaskOutputError = original.MigrateSQLServerSQLMITaskOutputError type MigrateSQLServerSQLMITaskOutputLoginLevel = original.MigrateSQLServerSQLMITaskOutputLoginLevel type MigrateSQLServerSQLMITaskOutputMigrationLevel = original.MigrateSQLServerSQLMITaskOutputMigrationLevel type MigrateSQLServerSQLMITaskProperties = original.MigrateSQLServerSQLMITaskProperties type MigrateSQLServerSQLServerDatabaseInput = original.MigrateSQLServerSQLServerDatabaseInput type MigrateSyncCompleteCommandInput = original.MigrateSyncCompleteCommandInput type MigrateSyncCompleteCommandOutput = original.MigrateSyncCompleteCommandOutput type MigrateSyncCompleteCommandProperties = original.MigrateSyncCompleteCommandProperties type MigrationEligibilityInfo = original.MigrationEligibilityInfo type MigrationReportResult = original.MigrationReportResult type MigrationTableMetadata = original.MigrationTableMetadata type MigrationValidationDatabaseSummaryResult = original.MigrationValidationDatabaseSummaryResult type MigrationValidationOptions = 
original.MigrationValidationOptions type MySQLConnectionInfo = original.MySQLConnectionInfo type NameAvailabilityRequest = original.NameAvailabilityRequest type NameAvailabilityResponse = original.NameAvailabilityResponse type NonSQLDataMigrationTable = original.NonSQLDataMigrationTable type NonSQLDataMigrationTableResult = original.NonSQLDataMigrationTableResult type NonSQLMigrationTaskInput = original.NonSQLMigrationTaskInput type NonSQLMigrationTaskOutput = original.NonSQLMigrationTaskOutput type ODataError = original.ODataError type OperationsClient = original.OperationsClient type OrphanedUserInfo = original.OrphanedUserInfo type PostgreSQLConnectionInfo = original.PostgreSQLConnectionInfo type Project = original.Project type ProjectList = original.ProjectList type ProjectListIterator = original.ProjectListIterator type ProjectListPage = original.ProjectListPage type ProjectMetadata = original.ProjectMetadata type ProjectProperties = original.ProjectProperties type ProjectTask = original.ProjectTask type ProjectTaskProperties = original.ProjectTaskProperties type ProjectsClient = original.ProjectsClient type QueryAnalysisValidationResult = original.QueryAnalysisValidationResult type QueryExecutionResult = original.QueryExecutionResult type Quota = original.Quota type QuotaList = original.QuotaList type QuotaListIterator = original.QuotaListIterator type QuotaListPage = original.QuotaListPage type QuotaName = original.QuotaName type ReportableException = original.ReportableException type Resource = original.Resource type ResourceSku = original.ResourceSku type ResourceSkuCapabilities = original.ResourceSkuCapabilities type ResourceSkuCapacity = original.ResourceSkuCapacity type ResourceSkuCosts = original.ResourceSkuCosts type ResourceSkuRestrictions = original.ResourceSkuRestrictions type ResourceSkusClient = original.ResourceSkusClient type ResourceSkusResult = original.ResourceSkusResult type ResourceSkusResultIterator = original.ResourceSkusResultIterator type ResourceSkusResultPage = original.ResourceSkusResultPage type SQLConnectionInfo = original.SQLConnectionInfo type SQLMigrationTaskInput = original.SQLMigrationTaskInput type SQLServerSQLMISyncTaskInput = original.SQLServerSQLMISyncTaskInput type SchemaComparisonValidationResult = original.SchemaComparisonValidationResult type SchemaComparisonValidationResultType = original.SchemaComparisonValidationResultType type SelectedCertificateInput = original.SelectedCertificateInput type ServerProperties = original.ServerProperties type Service = original.Service type ServiceList = original.ServiceList type ServiceListIterator = original.ServiceListIterator type ServiceListPage = original.ServiceListPage type ServiceOperation = original.ServiceOperation type ServiceOperationDisplay = original.ServiceOperationDisplay type ServiceOperationList = original.ServiceOperationList type ServiceOperationListIterator = original.ServiceOperationListIterator type ServiceOperationListPage = original.ServiceOperationListPage type ServiceProperties = original.ServiceProperties type ServiceSku = original.ServiceSku type ServiceSkuList = original.ServiceSkuList type ServiceSkuListIterator = original.ServiceSkuListIterator type ServiceSkuListPage = original.ServiceSkuListPage type ServiceStatusResponse = original.ServiceStatusResponse type ServicesClient = original.ServicesClient type ServicesCreateOrUpdateFuture = original.ServicesCreateOrUpdateFuture type ServicesDeleteFuture = original.ServicesDeleteFuture type ServicesStartFuture = 
original.ServicesStartFuture type ServicesStopFuture = original.ServicesStopFuture type ServicesUpdateFuture = original.ServicesUpdateFuture type StartMigrationScenarioServerRoleResult = original.StartMigrationScenarioServerRoleResult type SyncMigrationDatabaseErrorEvent = original.SyncMigrationDatabaseErrorEvent type TaskList = original.TaskList type TaskListIterator = original.TaskListIterator type TaskListPage = original.TaskListPage type TasksClient = original.TasksClient type TrackedResource = original.TrackedResource type UsagesClient = original.UsagesClient type ValidateMigrationInputSQLServerSQLDbSyncTaskProperties = original.ValidateMigrationInputSQLServerSQLDbSyncTaskProperties type ValidateMigrationInputSQLServerSQLMISyncTaskInput = original.ValidateMigrationInputSQLServerSQLMISyncTaskInput type ValidateMigrationInputSQLServerSQLMISyncTaskOutput = original.ValidateMigrationInputSQLServerSQLMISyncTaskOutput type ValidateMigrationInputSQLServerSQLMISyncTaskProperties = original.ValidateMigrationInputSQLServerSQLMISyncTaskProperties type ValidateMigrationInputSQLServerSQLMITaskInput = original.ValidateMigrationInputSQLServerSQLMITaskInput type ValidateMigrationInputSQLServerSQLMITaskOutput = original.ValidateMigrationInputSQLServerSQLMITaskOutput type ValidateMigrationInputSQLServerSQLMITaskProperties = original.ValidateMigrationInputSQLServerSQLMITaskProperties type ValidateSyncMigrationInputSQLServerTaskInput = original.ValidateSyncMigrationInputSQLServerTaskInput type ValidateSyncMigrationInputSQLServerTaskOutput = original.ValidateSyncMigrationInputSQLServerTaskOutput type ValidationError = original.ValidationError type WaitStatistics = original.WaitStatistics func New(subscriptionID string) BaseClient { return original.New(subscriptionID) } func NewOperationsClient(subscriptionID string) OperationsClient { return original.NewOperationsClient(subscriptionID) } func NewOperationsClientWithBaseURI(baseURI string, subscriptionID string) OperationsClient { return original.NewOperationsClientWithBaseURI(baseURI, subscriptionID) } func NewProjectListIterator(page ProjectListPage) ProjectListIterator { return original.NewProjectListIterator(page) } func NewProjectListPage(getNextPage func(context.Context, ProjectList) (ProjectList, error)) ProjectListPage { return original.NewProjectListPage(getNextPage) } func NewProjectsClient(subscriptionID string) ProjectsClient { return original.NewProjectsClient(subscriptionID) } func NewProjectsClientWithBaseURI(baseURI string, subscriptionID string) ProjectsClient { return original.NewProjectsClientWithBaseURI(baseURI, subscriptionID) } func NewQuotaListIterator(page QuotaListPage) QuotaListIterator { return original.NewQuotaListIterator(page) } func NewQuotaListPage(getNextPage func(context.Context, QuotaList) (QuotaList, error)) QuotaListPage { return original.NewQuotaListPage(getNextPage) } func NewResourceSkusClient(subscriptionID string) ResourceSkusClient { return original.NewResourceSkusClient(subscriptionID) } func NewResourceSkusClientWithBaseURI(baseURI string, subscriptionID string) ResourceSkusClient { return original.NewResourceSkusClientWithBaseURI(baseURI, subscriptionID) } func NewResourceSkusResultIterator(page ResourceSkusResultPage) ResourceSkusResultIterator { return original.NewResourceSkusResultIterator(page) } func NewResourceSkusResultPage(getNextPage func(context.Context, ResourceSkusResult) (ResourceSkusResult, error)) ResourceSkusResultPage { return original.NewResourceSkusResultPage(getNextPage) } func 
NewServiceListIterator(page ServiceListPage) ServiceListIterator { return original.NewServiceListIterator(page) } func NewServiceListPage(getNextPage func(context.Context, ServiceList) (ServiceList, error)) ServiceListPage { return original.NewServiceListPage(getNextPage) } func NewServiceOperationListIterator(page ServiceOperationListPage) ServiceOperationListIterator { return original.NewServiceOperationListIterator(page) } func NewServiceOperationListPage(getNextPage func(context.Context, ServiceOperationList) (ServiceOperationList, error)) ServiceOperationListPage { return original.NewServiceOperationListPage(getNextPage) } func NewServiceSkuListIterator(page ServiceSkuListPage) ServiceSkuListIterator { return original.NewServiceSkuListIterator(page) } func NewServiceSkuListPage(getNextPage func(context.Context, ServiceSkuList) (ServiceSkuList, error)) ServiceSkuListPage { return original.NewServiceSkuListPage(getNextPage) } func NewServicesClient(subscriptionID string) ServicesClient { return original.NewServicesClient(subscriptionID) } func NewServicesClientWithBaseURI(baseURI string, subscriptionID string) ServicesClient { return original.NewServicesClientWithBaseURI(baseURI, subscriptionID) } func NewTaskListIterator(page TaskListPage) TaskListIterator { return original.NewTaskListIterator(page) } func NewTaskListPage(getNextPage func(context.Context, TaskList) (TaskList, error)) TaskListPage { return original.NewTaskListPage(getNextPage) } func NewTasksClient(subscriptionID string) TasksClient { return original.NewTasksClient(subscriptionID) } func NewTasksClientWithBaseURI(baseURI string, subscriptionID string) TasksClient { return original.NewTasksClientWithBaseURI(baseURI, subscriptionID) } func NewUsagesClient(subscriptionID string) UsagesClient { return original.NewUsagesClient(subscriptionID) } func NewUsagesClientWithBaseURI(baseURI string, subscriptionID string) UsagesClient { return original.NewUsagesClientWithBaseURI(baseURI, subscriptionID) } func NewWithBaseURI(baseURI string, subscriptionID string) BaseClient { return original.NewWithBaseURI(baseURI, subscriptionID) } func PossibleAuthenticationTypeValues() []AuthenticationType { return original.PossibleAuthenticationTypeValues() } func PossibleBackupFileStatusValues() []BackupFileStatus { return original.PossibleBackupFileStatusValues() } func PossibleBackupModeValues() []BackupMode { return original.PossibleBackupModeValues() } func PossibleBackupTypeValues() []BackupType { return original.PossibleBackupTypeValues() } func PossibleCommandStateValues() []CommandState { return original.PossibleCommandStateValues() } func PossibleCommandTypeValues() []CommandType { return original.PossibleCommandTypeValues() } func PossibleDatabaseCompatLevelValues() []DatabaseCompatLevel { return original.PossibleDatabaseCompatLevelValues() } func PossibleDatabaseFileTypeValues() []DatabaseFileType { return original.PossibleDatabaseFileTypeValues() } func PossibleDatabaseMigrationStageValues() []DatabaseMigrationStage { return original.PossibleDatabaseMigrationStageValues() } func PossibleDatabaseMigrationStateValues() []DatabaseMigrationState { return original.PossibleDatabaseMigrationStateValues() } func PossibleDatabaseStateValues() []DatabaseState { return original.PossibleDatabaseStateValues() } func PossibleErrorTypeValues() []ErrorType { return original.PossibleErrorTypeValues() } func PossibleLoginMigrationStageValues() []LoginMigrationStage { return original.PossibleLoginMigrationStageValues() } func PossibleLoginTypeValues() 
[]LoginType { return original.PossibleLoginTypeValues() } func PossibleMigrationStateValues() []MigrationState { return original.PossibleMigrationStateValues() } func PossibleMigrationStatusValues() []MigrationStatus { return original.PossibleMigrationStatusValues() } func PossibleMySQLTargetPlatformTypeValues() []MySQLTargetPlatformType { return original.PossibleMySQLTargetPlatformTypeValues() } func PossibleNameCheckFailureReasonValues() []NameCheckFailureReason { return original.PossibleNameCheckFailureReasonValues() } func PossibleObjectTypeValues() []ObjectType { return original.PossibleObjectTypeValues() } func PossibleProjectProvisioningStateValues() []ProjectProvisioningState { return original.PossibleProjectProvisioningStateValues() } func PossibleProjectSourcePlatformValues() []ProjectSourcePlatform { return original.PossibleProjectSourcePlatformValues() } func PossibleProjectTargetPlatformValues() []ProjectTargetPlatform { return original.PossibleProjectTargetPlatformValues() } func PossibleResourceSkuCapacityScaleTypeValues() []ResourceSkuCapacityScaleType { return original.PossibleResourceSkuCapacityScaleTypeValues() } func PossibleResourceSkuRestrictionsReasonCodeValues() []ResourceSkuRestrictionsReasonCode { return original.PossibleResourceSkuRestrictionsReasonCodeValues() } func PossibleResourceSkuRestrictionsTypeValues() []ResourceSkuRestrictionsType { return original.PossibleResourceSkuRestrictionsTypeValues() } func PossibleResultCodeValues() []ResultCode { return original.PossibleResultCodeValues() } func PossibleResultTypeBasicConnectToSourceSQLServerTaskOutputValues() []ResultTypeBasicConnectToSourceSQLServerTaskOutput { return original.PossibleResultTypeBasicConnectToSourceSQLServerTaskOutputValues() } func PossibleResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutputValues() []ResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutput { return original.PossibleResultTypeBasicMigrateMySQLAzureDbForMySQLSyncTaskOutputValues() } func PossibleResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutputValues() []ResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutput { return original.PossibleResultTypeBasicMigrateSQLServerSQLDbSyncTaskOutputValues() } func PossibleResultTypeBasicMigrateSQLServerSQLDbTaskOutputValues() []ResultTypeBasicMigrateSQLServerSQLDbTaskOutput { return original.PossibleResultTypeBasicMigrateSQLServerSQLDbTaskOutputValues() } func PossibleResultTypeBasicMigrateSQLServerSQLMISyncTaskOutputValues() []ResultTypeBasicMigrateSQLServerSQLMISyncTaskOutput { return original.PossibleResultTypeBasicMigrateSQLServerSQLMISyncTaskOutputValues() } func PossibleResultTypeBasicMigrateSQLServerSQLMITaskOutputValues() []ResultTypeBasicMigrateSQLServerSQLMITaskOutput { return original.PossibleResultTypeBasicMigrateSQLServerSQLMITaskOutputValues() } func PossibleResultTypeValues() []ResultType { return original.PossibleResultTypeValues() } func
() []SQLSourcePlatform { return original.PossibleSQLSourcePlatformValues() } func PossibleSchemaMigrationStageValues() []SchemaMigrationStage { return original.PossibleSchemaMigrationStageValues() } func PossibleServerLevelPermissionsGroupValues() []ServerLevelPermissionsGroup { return original.PossibleServerLevelPermissionsGroupValues() } func PossibleServiceProvisioningStateValues() []ServiceProvisioningState { return original.PossibleServiceProvisioningStateValues() } func PossibleServiceScalabilityValues() []ServiceScalability { return original.PossibleServiceScalabilityValues() } func PossibleSeverityValues() []Severity { return original.PossibleSeverityValues() } func PossibleSyncDatabaseMigrationReportingStateValues() []SyncDatabaseMigrationReportingState { return original.PossibleSyncDatabaseMigrationReportingStateValues() } func PossibleSyncTableMigrationStateValues() []SyncTableMigrationState { return original.PossibleSyncTableMigrationStateValues() } func PossibleTaskStateValues() []TaskState { return original.PossibleTaskStateValues() } func PossibleTaskTypeValues() []TaskType { return original.PossibleTaskTypeValues() } func PossibleTypeValues() []Type { return original.PossibleTypeValues() } func PossibleUpdateActionTypeValues() []UpdateActionType { return original.PossibleUpdateActionTypeValues() } func PossibleValidationStatusValues() []ValidationStatus { return original.PossibleValidationStatusValues() } func UserAgent() string { return original.UserAgent() + " profiles/latest" } func Version() string { return original.Version() }
PossibleSQLSourcePlatformValues
oauth2-gen.go
// Copyright 2021 Google LLC. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // Code generated file. DO NOT EDIT. // Package oauth2 provides access to the Google OAuth2 API. // // For product documentation, see: https://developers.google.com/identity/protocols/oauth2/ // // Creating a client // // Usage example: // // import "github.com/danielchristian-tokped/google-api-go-client/oauth2/v2" // ... // ctx := context.Background() // oauth2Service, err := oauth2.NewService(ctx) // // In this example, Google Application Default Credentials are used for authentication. // // For information on how to create and obtain Application Default Credentials, see https://developers.google.com/identity/protocols/application-default-credentials. // // Other authentication options // // By default, all available scopes (see "Constants") are used to authenticate. To restrict scopes, use option.WithScopes: // // oauth2Service, err := oauth2.NewService(ctx, option.WithScopes(oauth2.OpenIDScope)) // // To use an API key for authentication (note: some APIs do not support API keys), use option.WithAPIKey: // // oauth2Service, err := oauth2.NewService(ctx, option.WithAPIKey("AIza...")) // // To use an OAuth token (e.g., a user token obtained via a three-legged OAuth flow), use option.WithTokenSource: // // config := &oauth2.Config{...} // // ... // token, err := config.Exchange(ctx, ...) // oauth2Service, err := oauth2.NewService(ctx, option.WithTokenSource(config.TokenSource(ctx, token))) // // See https://godoc.org/github.com/danielchristian-tokped/google-api-go-client/option/ for details on options. package oauth2 // import "github.com/danielchristian-tokped/google-api-go-client/oauth2/v2" import ( "bytes" "context" "encoding/json" "errors" "fmt" "io" "net/http" "net/url" "strconv" "strings" googleapi "github.com/danielchristian-tokped/google-api-go-client/googleapi" gensupport "github.com/danielchristian-tokped/google-api-go-client/internal/gensupport" option "github.com/danielchristian-tokped/google-api-go-client/option" internaloption "github.com/danielchristian-tokped/google-api-go-client/option/internaloption" htransport "github.com/danielchristian-tokped/google-api-go-client/transport/http" ) // Always reference these packages, just in case the auto-generated code // below doesn't. var _ = bytes.NewBuffer var _ = strconv.Itoa var _ = fmt.Sprintf var _ = json.NewDecoder var _ = io.Copy var _ = url.Parse var _ = gensupport.MarshalJSON var _ = googleapi.Version var _ = errors.New var _ = strings.Replace var _ = context.Canceled var _ = internaloption.WithDefaultEndpoint const apiId = "oauth2:v2" const apiName = "oauth2" const apiVersion = "v2" const basePath = "https://www.googleapis.com/" // OAuth2 scopes used by this API. const ( // View your email address UserinfoEmailScope = "https://www.googleapis.com/auth/userinfo.email" // See your personal info, including any personal info you've made // publicly available UserinfoProfileScope = "https://www.googleapis.com/auth/userinfo.profile" // Associate you with your personal info on Google OpenIDScope = "openid" ) // NewService creates a new Service. func NewService(ctx context.Context, opts ...option.ClientOption) (*Service, error) { scopesOption := option.WithScopes( "https://www.googleapis.com/auth/userinfo.email", "https://www.googleapis.com/auth/userinfo.profile", "openid", ) // NOTE: prepend, so we don't override user-specified scopes. opts = append([]option.ClientOption{scopesOption}, opts...) 
opts = append(opts, internaloption.WithDefaultEndpoint(basePath)) client, endpoint, err := htransport.NewClient(ctx, opts...) if err != nil { return nil, err } s, err := New(client) if err != nil { return nil, err } if endpoint != "" { s.BasePath = endpoint } return s, nil } // New creates a new Service. It uses the provided http.Client for requests. // // Deprecated: please use NewService instead. // To provide a custom HTTP client, use option.WithHTTPClient. // If you are using github.com/danielchristian-tokped/google-api-go-client/googleapis/transport.APIKey, use option.WithAPIKey with NewService instead. func New(client *http.Client) (*Service, error) { if client == nil { return nil, errors.New("client is nil") } s := &Service{client: client, BasePath: basePath} s.Userinfo = NewUserinfoService(s) return s, nil } type Service struct { client *http.Client BasePath string // API endpoint base URL UserAgent string // optional additional User-Agent fragment Userinfo *UserinfoService } func (s *Service) userAgent() string { if s.UserAgent == "" { return googleapi.UserAgent } return googleapi.UserAgent + " " + s.UserAgent } func NewUserinfoService(s *Service) *UserinfoService { rs := &UserinfoService{s: s} rs.V2 = NewUserinfoV2Service(s) return rs } type UserinfoService struct { s *Service V2 *UserinfoV2Service } func NewUserinfoV2Service(s *Service) *UserinfoV2Service
type UserinfoV2Service struct { s *Service Me *UserinfoV2MeService } func NewUserinfoV2MeService(s *Service) *UserinfoV2MeService { rs := &UserinfoV2MeService{s: s} return rs } type UserinfoV2MeService struct { s *Service } type Tokeninfo struct { // Audience: Who is the intended audience for this token. In general the // same as issued_to. Audience string `json:"audience,omitempty"` // Email: The email address of the user. Present only if the email scope // is present in the request. Email string `json:"email,omitempty"` // ExpiresIn: The expiry time of the token, as number of seconds left // until expiry. ExpiresIn int64 `json:"expires_in,omitempty"` // IssuedTo: To whom was the token issued to. In general the same as // audience. IssuedTo string `json:"issued_to,omitempty"` // Scope: The space separated list of scopes granted to this token. Scope string `json:"scope,omitempty"` // UserId: The obfuscated user id. UserId string `json:"user_id,omitempty"` // VerifiedEmail: Boolean flag which is true if the email address is // verified. Present only if the email scope is present in the request. VerifiedEmail bool `json:"verified_email,omitempty"` // ServerResponse contains the HTTP response code and headers from the // server. googleapi.ServerResponse `json:"-"` // ForceSendFields is a list of field names (e.g. "Audience") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Audience") to include in // API requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *Tokeninfo) MarshalJSON() ([]byte, error) { type NoMethod Tokeninfo raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } type Userinfo struct { // Email: The user's email address. Email string `json:"email,omitempty"` // FamilyName: The user's last name. FamilyName string `json:"family_name,omitempty"` // Gender: The user's gender. Gender string `json:"gender,omitempty"` // GivenName: The user's first name. GivenName string `json:"given_name,omitempty"` // Hd: The hosted domain e.g. example.com if the user is Google apps // user. Hd string `json:"hd,omitempty"` // Id: The obfuscated ID of the user. Id string `json:"id,omitempty"` // Link: URL of the profile page. Link string `json:"link,omitempty"` // Locale: The user's preferred locale. Locale string `json:"locale,omitempty"` // Name: The user's full name. Name string `json:"name,omitempty"` // Picture: URL of the user's picture image. Picture string `json:"picture,omitempty"` // VerifiedEmail: Boolean flag which is true if the email address is // verified. Always verified because we only return the user's primary // email address. // // Default: true VerifiedEmail *bool `json:"verified_email,omitempty"` // ServerResponse contains the HTTP response code and headers from the // server. googleapi.ServerResponse `json:"-"` // ForceSendFields is a list of field names (e.g. 
"Email") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Email") to include in API // requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *Userinfo) MarshalJSON() ([]byte, error) { type NoMethod Userinfo raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // method id "oauth2.tokeninfo": type TokeninfoCall struct { s *Service urlParams_ gensupport.URLParams ctx_ context.Context header_ http.Header } // Tokeninfo: func (s *Service) Tokeninfo() *TokeninfoCall { c := &TokeninfoCall{s: s, urlParams_: make(gensupport.URLParams)} return c } // AccessToken sets the optional parameter "access_token": func (c *TokeninfoCall) AccessToken(accessToken string) *TokeninfoCall { c.urlParams_.Set("access_token", accessToken) return c } // IdToken sets the optional parameter "id_token": func (c *TokeninfoCall) IdToken(idToken string) *TokeninfoCall { c.urlParams_.Set("id_token", idToken) return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *TokeninfoCall) Fields(s ...googleapi.Field) *TokeninfoCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *TokeninfoCall) Context(ctx context.Context) *TokeninfoCall { c.ctx_ = ctx return c } // Header returns an http.Header that can be modified by the caller to // add HTTP headers to the request. func (c *TokeninfoCall) Header() http.Header { if c.header_ == nil { c.header_ = make(http.Header) } return c.header_ } func (c *TokeninfoCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20210727") for k, v := range c.header_ { reqHeaders[k] = v } reqHeaders.Set("User-Agent", c.s.userAgent()) var body io.Reader = nil c.urlParams_.Set("alt", alt) c.urlParams_.Set("prettyPrint", "false") urls := googleapi.ResolveRelative(c.s.BasePath, "oauth2/v2/tokeninfo") urls += "?" + c.urlParams_.Encode() req, err := http.NewRequest("POST", urls, body) if err != nil { return nil, err } req.Header = reqHeaders return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "oauth2.tokeninfo" call. // Exactly one of *Tokeninfo or error will be non-nil. Any non-2xx // status code is an error. Response headers are in either // *Tokeninfo.ServerResponse.Header or (if a response was returned at // all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified // to check whether the returned error was because // http.StatusNotModified was returned. 
func (c *TokeninfoCall) Do(opts ...googleapi.CallOption) (*Tokeninfo, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &Tokeninfo{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := gensupport.DecodeResponse(target, res); err != nil { return nil, err } return ret, nil // { // "httpMethod": "POST", // "id": "oauth2.tokeninfo", // "parameters": { // "access_token": { // "location": "query", // "type": "string" // }, // "id_token": { // "location": "query", // "type": "string" // } // }, // "path": "oauth2/v2/tokeninfo", // "response": { // "$ref": "Tokeninfo" // } // } } // method id "oauth2.userinfo.get": type UserinfoGetCall struct { s *Service urlParams_ gensupport.URLParams ifNoneMatch_ string ctx_ context.Context header_ http.Header } // Get: func (r *UserinfoService) Get() *UserinfoGetCall { c := &UserinfoGetCall{s: r.s, urlParams_: make(gensupport.URLParams)} return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *UserinfoGetCall) Fields(s ...googleapi.Field) *UserinfoGetCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // IfNoneMatch sets the optional parameter which makes the operation // fail if the object's ETag matches the given value. This is useful for // getting updates only after the object has changed since the last // request. Use googleapi.IsNotModified to check whether the response // error from Do is the result of In-None-Match. func (c *UserinfoGetCall) IfNoneMatch(entityTag string) *UserinfoGetCall { c.ifNoneMatch_ = entityTag return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *UserinfoGetCall) Context(ctx context.Context) *UserinfoGetCall { c.ctx_ = ctx return c } // Header returns an http.Header that can be modified by the caller to // add HTTP headers to the request. func (c *UserinfoGetCall) Header() http.Header { if c.header_ == nil { c.header_ = make(http.Header) } return c.header_ } func (c *UserinfoGetCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20210727") for k, v := range c.header_ { reqHeaders[k] = v } reqHeaders.Set("User-Agent", c.s.userAgent()) if c.ifNoneMatch_ != "" { reqHeaders.Set("If-None-Match", c.ifNoneMatch_) } var body io.Reader = nil c.urlParams_.Set("alt", alt) c.urlParams_.Set("prettyPrint", "false") urls := googleapi.ResolveRelative(c.s.BasePath, "oauth2/v2/userinfo") urls += "?" + c.urlParams_.Encode() req, err := http.NewRequest("GET", urls, body) if err != nil { return nil, err } req.Header = reqHeaders return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "oauth2.userinfo.get" call. // Exactly one of *Userinfo or error will be non-nil. Any non-2xx status // code is an error. Response headers are in either // *Userinfo.ServerResponse.Header or (if a response was returned at // all) in error.(*googleapi.Error).Header. 
Use googleapi.IsNotModified // to check whether the returned error was because // http.StatusNotModified was returned. func (c *UserinfoGetCall) Do(opts ...googleapi.CallOption) (*Userinfo, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &Userinfo{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := gensupport.DecodeResponse(target, res); err != nil { return nil, err } return ret, nil // { // "httpMethod": "GET", // "id": "oauth2.userinfo.get", // "path": "oauth2/v2/userinfo", // "response": { // "$ref": "Userinfo" // }, // "scopes": [ // "openid", // "https://www.googleapis.com/auth/userinfo.email", // "https://www.googleapis.com/auth/userinfo.profile" // ] // } } // method id "oauth2.userinfo.v2.me.get": type UserinfoV2MeGetCall struct { s *Service urlParams_ gensupport.URLParams ifNoneMatch_ string ctx_ context.Context header_ http.Header } // Get: func (r *UserinfoV2MeService) Get() *UserinfoV2MeGetCall { c := &UserinfoV2MeGetCall{s: r.s, urlParams_: make(gensupport.URLParams)} return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *UserinfoV2MeGetCall) Fields(s ...googleapi.Field) *UserinfoV2MeGetCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // IfNoneMatch sets the optional parameter which makes the operation // fail if the object's ETag matches the given value. This is useful for // getting updates only after the object has changed since the last // request. Use googleapi.IsNotModified to check whether the response // error from Do is the result of In-None-Match. func (c *UserinfoV2MeGetCall) IfNoneMatch(entityTag string) *UserinfoV2MeGetCall { c.ifNoneMatch_ = entityTag return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *UserinfoV2MeGetCall) Context(ctx context.Context) *UserinfoV2MeGetCall { c.ctx_ = ctx return c } // Header returns an http.Header that can be modified by the caller to // add HTTP headers to the request. func (c *UserinfoV2MeGetCall) Header() http.Header { if c.header_ == nil { c.header_ = make(http.Header) } return c.header_ } func (c *UserinfoV2MeGetCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) reqHeaders.Set("x-goog-api-client", "gl-go/"+gensupport.GoVersion()+" gdcl/20210727") for k, v := range c.header_ { reqHeaders[k] = v } reqHeaders.Set("User-Agent", c.s.userAgent()) if c.ifNoneMatch_ != "" { reqHeaders.Set("If-None-Match", c.ifNoneMatch_) } var body io.Reader = nil c.urlParams_.Set("alt", alt) c.urlParams_.Set("prettyPrint", "false") urls := googleapi.ResolveRelative(c.s.BasePath, "userinfo/v2/me") urls += "?" + c.urlParams_.Encode() req, err := http.NewRequest("GET", urls, body) if err != nil { return nil, err } req.Header = reqHeaders return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "oauth2.userinfo.v2.me.get" call. // Exactly one of *Userinfo or error will be non-nil. Any non-2xx status // code is an error. 
Response headers are in either // *Userinfo.ServerResponse.Header or (if a response was returned at // all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified // to check whether the returned error was because // http.StatusNotModified was returned. func (c *UserinfoV2MeGetCall) Do(opts ...googleapi.CallOption) (*Userinfo, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &Userinfo{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := gensupport.DecodeResponse(target, res); err != nil { return nil, err } return ret, nil // { // "httpMethod": "GET", // "id": "oauth2.userinfo.v2.me.get", // "path": "userinfo/v2/me", // "response": { // "$ref": "Userinfo" // }, // "scopes": [ // "openid", // "https://www.googleapis.com/auth/userinfo.email", // "https://www.googleapis.com/auth/userinfo.profile" // ] // } }
{ rs := &UserinfoV2Service{s: s} rs.Me = NewUserinfoV2MeService(s) return rs }
display.rs
use std::collections::HashMap; use std::io::{stdout, Error}; use termion::{event::Key, input::MouseTerminal, raw::IntoRawMode, screen::AlternateScreen}; use tokio::sync::mpsc::error::TryRecvError; use tui::backend::TermionBackend; use tui::layout::{Constraint, Direction, Layout}; use tui::style::{Color, Modifier, Style}; use tui::widgets::{Block, Borders, Gauge, List, Text}; use tui::Terminal;
use crate::RequestResult; pub struct Monitor { pub report_receiver: tokio::sync::mpsc::Receiver<anyhow::Result<RequestResult>>, pub start: std::time::Instant, pub fps: usize, } impl Monitor { pub async fn monitor(mut self) -> Result<Vec<anyhow::Result<RequestResult>>, Error> { let stdout = stdout().into_raw_mode()?; let stdout = MouseTerminal::from(stdout); let stdout = AlternateScreen::from(stdout); let backend = TermionBackend::new(stdout); let mut terminal = Terminal::new(backend)?; terminal.hide_cursor()?; let mut all: Vec<anyhow::Result<RequestResult>> = Vec::new(); let mut status_dist: HashMap<hyper::StatusCode, usize> = HashMap::new(); 'outer: loop { loop { match self.report_receiver.try_recv() { Ok(report) => { if let Ok(report) = report.as_ref() { *status_dist.entry(report.status).or_default() += 1; } all.push(report); } Err(TryRecvError::Empty) => { break; } Err(TryRecvError::Closed) => { break 'outer; } } } terminal .draw(|mut f| { let chunks = Layout::default() .direction(Direction::Vertical) .margin(2) .constraints([Constraint::Percentage(100)].as_ref()) .split(f.size()); let tasks = status_dist .iter() .map(|(status, count)| Text::raw(format!("{} - {}", status, count))); let mut task_list = List::new(tasks) .block(Block::default().borders(Borders::ALL).title("List")); f.render(&mut task_list, chunks[0]); }) .unwrap(); // maybe just keep looping until Event::Input matches Key::Char('q') } Ok(all) } }
hold_position.rs
// this file is auto-generated by hap-codegen use async_trait::async_trait; use serde::Serialize; use serde_json::json; use crate::{ characteristic::{ AsyncCharacteristicCallbacks, Characteristic, CharacteristicCallbacks, Format, HapCharacteristic, HapCharacteristicSetup, HapType, OnReadFn, OnReadFuture, OnUpdateFn, OnUpdateFuture, Perm, Unit, }, pointer, Error, Result, }; /// Hold Position Characteristic. #[derive(Debug, Default, Serialize)] pub struct HoldPositionCharacteristic(Characteristic<bool>); impl HoldPositionCharacteristic { /// Creates a new Hold Position Characteristic. pub fn new(id: u64, accessory_id: u64) -> Self { Self(Characteristic::<bool> { id, accessory_id, hap_type: HapType::HoldPosition, format: Format::Bool, perms: vec![ Perm::PairedWrite, ], ..Default::default() }) } } #[async_trait] impl HapCharacteristic for HoldPositionCharacteristic { fn get_id(&self) -> u64 { self.0.get_id() } fn get_type(&self) -> HapType { self.0.get_type() } fn get_format(&self) -> Format { self.0.get_format() } fn get_perms(&self) -> Vec<Perm> { self.0.get_perms() } fn get_event_notifications(&self) -> Option<bool> { self.0.get_event_notifications() } fn set_event_notifications(&mut self, event_notifications: Option<bool>) { self.0.set_event_notifications(event_notifications) } async fn get_value(&mut self) -> Result<serde_json::Value> { let value = self.0.get_value().await?; Ok(json!(value)) } async fn set_value(&mut self, value: serde_json::Value) -> Result<()> { let v; // for whatever reason, the controller is setting boolean values either as a boolean or as an integer if self.0.format == Format::Bool && value.is_number() { let num_v: u8 = serde_json::from_value(value)?; if num_v == 0 { v = serde_json::from_value(json!(false))?; } else if num_v == 1 { v = serde_json::from_value(json!(true))?; } else { return Err(Error::InvalidValue(self.get_format())); } } else { v = serde_json::from_value(value).map_err(|_| Error::InvalidValue(self.get_format()))?; } self.0.set_value(v).await
fn get_unit(&self) -> Option<Unit> { self.0.get_unit() } fn get_max_value(&self) -> Option<serde_json::Value> { self.0.get_max_value().map(|v| json!(v)) } fn get_min_value(&self) -> Option<serde_json::Value> { self.0.get_min_value().map(|v| json!(v)) } fn get_step_value(&self) -> Option<serde_json::Value> { self.0.get_step_value().map(|v| json!(v)) } fn get_max_len(&self) -> Option<u16> { self.0.get_max_len() } } impl HapCharacteristicSetup for HoldPositionCharacteristic { fn set_event_emitter(&mut self, event_emitter: Option<pointer::EventEmitter>) { self.0.set_event_emitter(event_emitter) } } impl CharacteristicCallbacks<bool> for HoldPositionCharacteristic { fn on_read(&mut self, f: Option<impl OnReadFn<bool>>) { self.0.on_read(f) } fn on_update(&mut self, f: Option<impl OnUpdateFn<bool>>) { self.0.on_update(f) } } impl AsyncCharacteristicCallbacks<bool> for HoldPositionCharacteristic { fn on_read_async(&mut self, f: Option<impl OnReadFuture<bool>>) { self.0.on_read_async(f) } fn on_update_async(&mut self, f: Option<impl OnUpdateFuture<bool>>) { self.0.on_update_async(f) } }
}
instr_kshiftlb.rs
use instruction_def::*; use test::run_test; use Operand::*; use Reg::*; use RegScale::*; use RegType::*; use {BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode}; #[test] fn
() { run_test( &Instruction { mnemonic: Mnemonic::KSHIFTLB, operand1: Some(Direct(K3)), operand2: Some(Direct(K5)), operand3: Some(Literal8(16)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[196, 227, 121, 50, 221, 16], OperandSize::Dword, ) } #[test] fn kshiftlb_2() { run_test( &Instruction { mnemonic: Mnemonic::KSHIFTLB, operand1: Some(Direct(K1)), operand2: Some(Direct(K7)), operand3: Some(Literal8(58)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[196, 227, 121, 50, 207, 58], OperandSize::Qword, ) }
kshiftlb_1
rqworker.py
from distutils.version import LooseVersion import os import importlib import logging import sys from django.core.management.base import BaseCommand from django.utils.version import get_version from django_rq.queues import get_queues from django_rq.workers import get_exception_handlers from redis.exceptions import ConnectionError from rq import use_connection from rq.utils import ColorizingStreamHandler # Setup logging for RQWorker if not already configured logger = logging.getLogger('rq.worker') if not logger.handlers: logger.setLevel(logging.DEBUG) formatter = logging.Formatter(fmt='%(asctime)s %(message)s', datefmt='%H:%M:%S') handler = ColorizingStreamHandler() handler.setFormatter(formatter) logger.addHandler(handler) # Copied from rq.utils def import_attribute(name): """Return an attribute from a dotted path name (e.g. "path.to.func").""" module_name, attribute = name.rsplit('.', 1) module = importlib.import_module(module_name) return getattr(module, attribute) class Command(BaseCommand): """ Runs RQ workers on specified queues. Note that all queues passed into a single rqworker command must share the same connection. Example usage: python manage.py rqworker high medium low """ args = '<queue queue ...>' def add_arguments(self, parser): parser.add_argument('--worker-class', action='store', dest='worker_class', default='rq.Worker', help='RQ Worker class to use') parser.add_argument('--pid', action='store', dest='pid', default=None, help='PID file to write the worker`s pid into') parser.add_argument('--burst', action='store_true', dest='burst', default=False, help='Run worker in burst mode') parser.add_argument('--name', action='store', dest='name', default=None, help='Name of the worker') parser.add_argument('--queue-class', action='store', dest='queue_class', default='django_rq.queues.DjangoRQ', help='Queues class to use') parser.add_argument('--worker-ttl', action='store', type=int, dest='worker_ttl', default=420, help='Default worker timeout to be used') if LooseVersion(get_version()) >= LooseVersion('1.10'): parser.add_argument('args', nargs='*', type=str, help='The queues to work on, separated by space') def handle(self, *args, **options): pid = options.get('pid') if pid: with open(os.path.expanduser(pid), "w") as fp: fp.write(str(os.getpid())) try: # Instantiate a worker worker_class = import_attribute(options['worker_class'])
queues = get_queues(*args, queue_class=import_attribute(options['queue_class'])) w = worker_class( queues, connection=queues[0].connection, name=options['name'], exception_handlers=get_exception_handlers() or None, default_worker_ttl=options['worker_ttl'] ) # Call use_connection to push the redis connection into LocalStack # without this, jobs using RQ's get_current_job() will fail use_connection(w.connection) w.work(burst=options.get('burst', False)) except ConnectionError as e: print(e) sys.exit(1)
image_transfer.go
/* Copyright © 2020 esakat <[email protected]> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package pkg import ( "bufio" "context" "encoding/base64" "encoding/json" "fmt" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/awserr" "github.com/aws/aws-sdk-go/aws/session" "github.com/aws/aws-sdk-go/service/ecr" distreference "github.com/docker/distribution/reference" "github.com/docker/docker/api/types" "github.com/docker/docker/api/types/filters" "github.com/docker/docker/client" "github.com/docker/docker/pkg/jsonmessage" "github.com/vbauerster/mpb" "io/ioutil" "strings" "sync" "time" ) // convert image path func ConvertImagePathForECR(imageName, region, accountId string) string { return fmt.Sprintf("%s.dkr.ecr.%s.amazonaws.com/%s", accountId, region, imageName) } // main func of transfer func ImageTransfer(pullImageName, region, accountId string, wg *sync.WaitGroup, bar *mpb.Bar, resultMsg chan<- string) { defer wg.Done() // Step1. Pull Docker image from external registry. cl, err := client.NewEnvClient() if err != nil { resultMsg <- fmt.Sprintf("%s failed to transfer. error message: %v", pullImageName, err) return } opts := types.ImagePullOptions{} ctx := context.Background() image, err := SeparateImageName(pullImageName) if err != nil { resultMsg <- fmt.Sprintf("%s failed to transfer. error message: %v", pullImageName, err) return } resp, err := cl.ImagePull(ctx, pullImageName, opts) if err != nil { if err == distreference.ErrNameNotCanonical { imageNameLen := strings.Split(image.RepositoryName, "/") var pullingImagePrefix string switch len(imageNameLen) { case 1: pullingImagePrefix = "docker.io/library/" default: pullingImagePrefix = "docker.io/" } resp, err = cl.ImagePull(ctx, pullingImagePrefix+image.RepositoryName+":"+image.Tag, opts) if err != nil { resultMsg <- fmt.Sprintf("%s failed to transfer. error message: %v", pullImageName, err)
return } } defer resp.Close() jsonmessage.DisplayJSONMessagesStream(resp, ioutil.Discard, 0, false, nil) scanner := bufio.NewScanner(resp) for scanner.Scan() { } bar.Increment() // Step2. Create repository in ECR ecrSvc := ecr.New(session.New(&aws.Config{Region: aws.String(region)})) repositoryInfo := ecr.CreateRepositoryInput{ RepositoryName: aws.String(image.RepositoryName), } _, err = ecrSvc.CreateRepository(&repositoryInfo) if err != nil { if awsErr, ok := err.(awserr.Error); ok { if awsErr.Code() != ecr.ErrCodeRepositoryAlreadyExistsException { resultMsg <- fmt.Sprintf("%s failed to transfer. error message: %v", pullImageName, err) return } } } bar.Increment() // Step3. Get authorization for ECR loginAuth, err := ecrSvc.GetAuthorizationToken(&ecr.GetAuthorizationTokenInput{}) if err != nil { resultMsg <- fmt.Sprintf("%s failed to transfer. error message: %v", pullImageName, err) return } decodedData, _ := base64.StdEncoding.DecodeString(*loginAuth.AuthorizationData[0].AuthorizationToken) decodedString := string(decodedData) // AuthorizationToken format is "user:password" authList := strings.Split(decodedString, ":") if len(authList) != 2 { resultMsg <- fmt.Sprintf("%s failed to transfer. error message: %v", pullImageName, "cannot get registry login token") return } username := authList[0] password := authList[1] serverAddress := *loginAuth.AuthorizationData[0].ProxyEndpoint auth := types.AuthConfig{ Username: username, Password: password, ServerAddress: serverAddress, } _, err = cl.RegistryLogin(context.Background(), auth) if err != nil { resultMsg <- fmt.Sprintf("%s failed to transfer. error message: %v", pullImageName, err) return } bar.Increment() // Step4. Tag image as ECR filtMap := map[string][]string{"reference": {image.RepositoryName + ":" + image.Tag}} filtBytes, _ := json.Marshal(filtMap) filt, err := filters.FromParam(string(filtBytes)) if err != nil { resultMsg <- fmt.Sprintf("%s failed to transfer. error message: %v", pullImageName, err) return } listOptions := types.ImageListOptions{ All: false, Filters: filt, } img, err := cl.ImageList(ctx, listOptions) if err != nil { resultMsg <- fmt.Sprintf("%s failed to transfer. error message: %v", pullImageName, err) return } newImageTag := ConvertImagePathForECR(pullImageName, region, accountId) err = cl.ImageTag(ctx, img[0].ID, newImageTag) if err != nil { resultMsg <- fmt.Sprintf("%s failed to transfer. error message: %v", pullImageName, err) return } bar.Increment() // Step5. Push image into ECR authJson := struct { Username string Password string }{ Username: username, Password: password, } authBytes, _ := json.Marshal(authJson) authTokenBase64 := base64.StdEncoding.EncodeToString(authBytes) pushOpts := types.ImagePushOptions{ RegistryAuth: authTokenBase64, } resp, err = cl.ImagePush(ctx, newImageTag, pushOpts) if err != nil { resultMsg <- fmt.Sprintf("%s failed to transfer. error message: %v", pullImageName, err) return } scanner = bufio.NewScanner(resp) for scanner.Scan() { } bar.Increment() resultMsg <- fmt.Sprintf("%s transfer to %s", pullImageName, newImageTag) // wait a few time, to display progress 100% time.Sleep(1 * time.Second) }
return } } else { resultMsg <- fmt.Sprintf("%s failed to transfer. error message: %v", pullImageName, err)
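// Usage sketch (not part of the original file): one way a caller might fan out
// several transfers, assuming the surrounding package's imports. The image
// names, region, and account ID below are placeholders.
func transferAll(images []string, region, accountId string) {
	var wg sync.WaitGroup
	progress := mpb.New()
	resultMsg := make(chan string, len(images))
	for _, name := range images {
		wg.Add(1)
		// ImageTransfer calls bar.Increment() once per step (five steps)
		bar := progress.AddBar(5)
		go ImageTransfer(name, region, accountId, &wg, bar, resultMsg)
	}
	wg.Wait()
	close(resultMsg)
	for msg := range resultMsg {
		fmt.Println(msg)
	}
}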
orchestrator.rs
//! Opinionated orchestrator for services which communicate via IPC and are not expected to exit
//!
//! It allows starting and controlling processes, handling all the necessary boilerplate:
//! - Runs within an async runtime
//! - Uses tokio::process::Command with predefined params
//! to execute commands
//! - Uses log with info+ levels for process output
//! - Uses ipc-channel to establish communication from and to processes
//!
//! # Panicking
//!
//! Orchestrator will panic if .connect() is called outside of an async runtime, as it spawns handlers.
//!
//! # Example
//!
//! This example shows how Orchestrator allows adding custom handlers for process stdout:
//!
//! ```
//! use tokio::process::{Command, ChildStdout};
//! use ipc_orchestrator::Orchestrator;
//!
//! use std::sync::atomic::{AtomicBool, Ordering};
//! static CALLED: AtomicBool = AtomicBool::new(false);
//!
//! use tokio::io::{AsyncBufReadExt, BufReader};
//! async fn mock_log_handler(reader: ChildStdout, name: String) -> anyhow::Result<()> {
//!     let mut reader = BufReader::new(reader).lines();
//!     assert_eq!(reader.next_line().await?.unwrap(), "testbed");
//!     CALLED.store(true, Ordering::Relaxed);
//!     Ok(())
//! }
//!
//! // from within an async runtime:
//! # tokio::runtime::Runtime::new().unwrap().block_on(async {
//! let mut orchestrator = Orchestrator::from_handlers(mock_log_handler).ipc(false);
//! let mut cmd = Command::new("echo");
//! cmd.arg("testbed");
//! orchestrator.start("start", &mut cmd);
//! let orchestra = orchestrator.connect().await.unwrap();
//! // it assumes never-exiting processes,
//! // hence it will return an error when any process exits or its stdout is closed
//! orchestra.run().await.unwrap_err();
//! assert!(CALLED.load(Ordering::Relaxed));
//! # });
//! ```
use crate::connected::ConnectedOrchestrator;
use crate::logger::default_log_handler;
use crate::should_not_complete;
use crate::{Bridge, Channel, Process};
use anyhow::{anyhow, Context};
use futures::future::{try_join_all, Fuse, Future, FutureExt, TryFuture, TryJoinAll};
use futures::{pin_mut, select};
use ipc_channel::ipc::IpcOneShotServer;
use log::{debug, error, info, warn};
use std::collections::HashMap;
use std::pin::Pin;
use std::process::Stdio;
use tokio::process::ChildStdout;
use tokio::process::Command;

type BFR<R> = Pin<Box<dyn Future<Output = anyhow::Result<R>>>>;

/// Create the default orchestrator
///
/// The default orchestrator comes with `default_log_handler`
///
/// The default log handler will read lines from process stdout
/// and log them at info level, tagging them with the process name
pub fn orchestrator() -> Orchestrator<impl Future<Output = anyhow::Result<()>>> {
    Orchestrator::from_handlers(default_log_handler)
}

/// Orchestrator which is in the process of starting up
pub struct Orchestrator<LF: TryFuture> {
    pub processes: HashMap<String, Process>,
    loggers: Vec<LF>,
    bridges: Vec<BFR<Bridge>>,
    ipc: bool,
    rust_backtrace: bool,
    logger: fn(ChildStdout, String) -> LF,
}

impl<LF: TryFuture> Orchestrator<LF> {
    /// Create an orchestrator with the provided log handler
    ///
    /// A log handler is a function: `fn(ChildStdout, String) -> impl TryFuture`
    /// The provided future should process ChildStdout until EOF,
    /// returning anyhow::Result<()>
    pub fn from_handlers(logger: fn(ChildStdout, String) -> LF) -> Self {
        Self {
            processes: HashMap::new(),
            loggers: Vec::new(),
            bridges: Vec::new(),
            ipc: false,
            rust_backtrace: false,
            logger,
        }
    }
}

impl<LF> Orchestrator<LF>
where
    LF: Future<Output = anyhow::Result<()>>,
{
    /// Start the provided command with a communication channel
    /// As an opinionated executor for all the processes, Orchestrator provides the following setup:
    /// 1. Start an IpcOneShotServer and pass the server name to the process via
    /// the `IPC_SERVER` environment variable
    /// 2. cmd.kill_on_drop(true) - process will exit if orchestrator's handle is dropped
    /// 3. cmd.stdout(Stdio::piped()) - stdout will be logged as info!(target: &name, ...)
    pub fn start(&mut self, name: &str, cmd: &mut Command) -> anyhow::Result<()> {
        if self.processes.contains_key(name) {
            return Err(anyhow::anyhow!("process named `{}` already started", name));
        }
        let (server, server_name) =
            IpcOneShotServer::new().context("Failed to start IpcOneShotServer")?;
        cmd.kill_on_drop(true).stdout(Stdio::piped());
        if self.ipc {
            cmd.env("IPC_SERVER", server_name);
        }
        if self.rust_backtrace {
            cmd.env("RUST_BACKTRACE", "1");
        }
        debug!(target: "orchestrator", "Starting {} {:?}", name, cmd);
        let mut child = cmd.spawn()?;
        // Redirect command output to stdout - quick and dirty logging
        let stdout = child
            .stdout
            .take()
            .ok_or_else(|| anyhow!("child did not provide a handle to stdout"))?;
        self.loggers.push((self.logger)(stdout, name.to_owned()));
        self.processes.insert(
            name.to_owned(),
            Process {
                name: name.to_owned(),
                child,
            },
        );
        // Spawn the IPC server to accept the incoming channel from the child process
        if self.ipc {
            self.bridges
                .push(Box::pin(ipc_handler(server, name.to_owned())));
        }
        Ok(())
    }

    /// Connect to the processes' IPC channels
    /// The resulting ConnectedOrchestrator can be used to further set up handlers
    /// over process bridges
    pub async fn connect(self) -> anyhow::Result<ConnectedOrchestrator<Fuse<TryJoinAll<LF>>>> {
        let Orchestrator {
            mut processes,
            bridges,
            loggers,
            ..
        } = self;
        let processes: Vec<BFR<()>> = processes
            .drain()
            .map(|(_k, v)| v)
            .map(may_exit_process_handler)
            .collect();

        // Main future executor; hand-rolled due to the customized pipeline
        // Wait for all bridges to connect to the server and pass ipc handles
        let bridges = try_join_all(bridges).fuse();
        // Wait for all logs to complete or any of them to fail
        let mut loggers = Box::pin(try_join_all(loggers).fuse());
        // Wait for all processes to complete or any of them to fail
        let mut processes = Box::pin(try_join_all(processes).fuse());
        pin_mut!(bridges);
        let res = select!(
            res = bridges => match res {
                Ok(channels) => { Ok(channels) },
                Err(err) => {
                    error!("failed to establish connection: {}", err);
                    Err(err.into())
                }
            },
            res = processes => should_not_complete!("processes", res),
            res = loggers => should_not_complete!("logs", res),
        );
        match res {
            Ok(channels) => Ok(ConnectedOrchestrator::new(channels, processes, loggers)),
            Err(err) => {
                error!(target: "orchestrator", "{}", &err);
                Err(err)
            }
        }
    }
}

impl<LF: TryFuture> Orchestrator<LF> {
    /// Enable or disable the IPC channel
    /// Will pass the IpcOneShotServer name via the `IPC_SERVER` environment variable
    pub fn ipc(mut self, ipc: bool) -> Self
/// Start child process with RUST_BACKTRACE=1 env option pub fn rust_backtrace(mut self, backtrace: bool) -> Self { self.rust_backtrace = backtrace; self } } async fn ipc_handler(server: IpcOneShotServer<Channel>, name: String) -> anyhow::Result<Bridge> { let name1 = name.clone(); let server = tokio::task::spawn_blocking(move || { server .accept() .unwrap_or_else(|err| todo!("failed to establish connection from {}: {}", name1, err)) }); let name = name.clone(); server .map(|res| match res { Ok((_, channel)) => Ok(Bridge { channel, name }), Err(err) => Err(err.into()), }) .await } #[allow(dead_code)] fn never_exit_process_handler(p: Process) -> BFR<()> { let Process { child, name } = p; let name1 = name.clone(); Box::pin( child .inspect(move |status| warn!(target: &name1, "exiting {:?}", status)) .map(move |status| match status { Ok(n) => Err(anyhow!( "process `{}` finish with {}, closing pipeline", name, n )), Err(err) => Err(err.into()), }), ) } fn may_exit_process_handler(p: Process) -> BFR<()> { let Process { child, name } = p; let name1 = name.clone(); Box::pin( child .inspect(move |status| warn!(target: &name1, "exiting {:?}", status)) .map(move |status| match status { Ok(n) if n.success() => Ok(()), Ok(n) => Err(anyhow!( "process `{}` finish with {}, closing pipeline", name, n )), Err(err) => Err(err.into()), }), ) }
{ self.ipc = ipc; self }
models.py
from django.db import models

# Create your models here.
class Users(models.Model):
class Item(models.Model): name = models.CharField(max_length=30) pic = models.ImageField(upload_to="market/images") description = models.CharField(max_length=200) price = models.FloatField() wanted = models.IntegerField() seller = models.ForeignKey(to=Users, on_delete=models.CASCADE)
username = models.CharField(max_length=50) phone = models.CharField(max_length=32,null=True,blank=True) avatar = models.CharField(max_length=20) address = models.CharField(max_length=32,null=True,blank=True) password = models.CharField(max_length=32) wantlist = models.CharField(max_length=100,null=True,blank=True)
mode.go
package mode

import (
	"bssh/app/mode/etcd"
	"bssh/app/mode/local"
	"bssh/conf"
	"fmt"
)

// IModeConf is the interface every configuration source implements
type IModeConf interface {
// GetConfig loads the configuration according to the startup flags
func GetConfig(cFlag conf.CFlags) (confBase *conf.Base) {
	conf.MainLogger.Info(fmt.Sprintf("%s mode", cFlag.Mode))
	var mode IModeConf
	// choose the configuration source based on the startup mode
	switch cFlag.Mode {
	case "etcd":
		mode = etcd.NewConf(cFlag.RemoteConf)
	case "local":
		mode = local.NewConf(cFlag.LocalConf)
	default:
		panic("err mode")
	}
	_confBase, err := mode.GetConfig()
	if err != nil {
		panic("err")
	}
	confBase = &_confBase
	// start the watcher that applies remote configuration changes
	mode.Watch(confBase)
	return
}
GetConfig() (conf.Base, error) Watch(*conf.Base) }
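// Sketch (not part of the original file): a minimal IModeConf implementation,
// illustrating what a new configuration source has to provide. The type name
// is hypothetical.
type staticConf struct{ base conf.Base }

// GetConfig returns the fixed configuration
func (s staticConf) GetConfig() (conf.Base, error) { return s.base, nil }

// Watch is a no-op here; a real source would push updates into confBase
func (s staticConf) Watch(confBase *conf.Base) {}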
crateresolve1-3.rs
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // compile-flags:-C extra-filename=-3 #![crate_name = "crateresolve1"] #![crate_type = "lib"] pub fn
() -> int { 30 }
f
types.ts
import { TransactionInstruction } from "@solana/web3.js"; export const PROGRAM_IDS: string[] = [ "6RWe1TGwvojnbAynyWrHzm3GgHf7AmX7kLQTJG7vHCfb", // mainnet / testnet / devnet "2n2dsFSgmPcZ8jkmBZLGUM2nzuFqcBGQ3JEEj6RJJcEg", // testnet - legacy "9tdctNJuFsYZ6VrKfKEuwwbPp4SFdFw3jYBZU8QUtzeX", // testnet - legacy "CrRvVBS4Hmj47TPU3cMukurpmCUYUrdHYxTQBxncBGqw", // testnet - legacy "BSfTAcBdqmvX5iE2PW88WFNNp2DHhLUaBKk5WrnxVkcJ", // devnet - legacy "H1E1G7eD5Rrcy43xvDxXCsjkRggz7MWNMLGJ8YNzJ8PM", // devnet - legacy "CMoteLxSPVPoc7Drcggf3QPg3ue8WPpxYyZTg77UGqHo", // devnet - legacy "EEuPz4iZA5reBUeZj6x1VzoiHfYeHMppSCnHZasRFhYo", // devnet - legacy "5rdpyt5iGfr68qt28hkefcFyF4WtyhTwqKDmHSBG8GZx", // localnet ]; const INSTRUCTION_LOOKUP: { [key: number]: string } = { 0: "Initialize Swap", 1: "Swap",
4: "Deposit Single Token Type Exact Amount In", 5: "Withdraw Single Token Type Exact Amount Out", }; export function isTokenSwapInstruction( instruction: TransactionInstruction ): boolean { return PROGRAM_IDS.includes(instruction.programId.toBase58()); } export function parseTokenSwapInstructionTitle( instruction: TransactionInstruction ): string { const code = instruction.data[0]; if (!(code in INSTRUCTION_LOOKUP)) { throw new Error(`Unrecognized Token Swap instruction code: ${code}`); } return INSTRUCTION_LOOKUP[code]; }
2: "Deposit All Token Types", 3: "Withdraw All Token Types",
mixins.py
import os import fcntl from string import Template from django.core.urlresolvers import NoReverseMatch, reverse from django.db.models.loading import get_model from django.forms import ModelChoiceField, HiddenInput from django import forms from cyder.base.utils import filter_by_ctnr class DisplayMixin(object): # Knobs justs = { 'pk_just': 10, 'rhs_just': 1, 'ttl_just': 6, 'rdtype_just': 7, 'rdclass_just': 3, 'prio_just': 2, 'lhs_just': 61, 'extra_just': 1 } def bind_render_record(self, pk=False, custom=None): kwargs = vars(self) if custom: for key, value in custom.items(): kwargs[key] = value template = Template(self.template).substitute(**self.justs) bind_name = self.fqdn + "." if not self.ttl: self.ttl = 3600 return template.format(bind_name=bind_name, rdtype=self.rdtype, rdclass='IN', **kwargs) class ObjectUrlMixin(object): """ This is a mixin that adds important url methods to a model. This class uses the ``_meta.db_table`` instance variable of an object to calculate URLs. Because of this, you must use the app label of your class when declaring urls in your urls.py. """ @classmethod def get_list_url(cls): """ Return the 'list' url of an object. Class method since don't need specific instance of object. """ return reverse(cls._meta.db_table) @classmethod def get_create_url(cls): """Return the create url of the type of object (to be posted to).""" return cls.get_list_url() def get_update_url(self): """Return the update url of an object.""" return reverse(self._meta.db_table + '-update', args=[self.pk]) def get_delete_url(self): """Return the delete url of an object.""" return reverse('delete') def get_detail_url(self): """Return the detail url of an object.""" try: return reverse(self._meta.db_table + '-detail', args=[self.pk]) except NoReverseMatch: return '' def get_table_update_url(self): """Return the editableGrid update url of an object.""" try: return reverse(self._meta.db_table + '-table-update', args=[self.pk]) except NoReverseMatch: return '' def details(self): """ Return base details with generic postback URL for editable tables. 
""" return {'url': self.get_table_update_url()} class UsabilityFormMixin(object): def append_required_all(self): for fieldname, field in self.fields.items(): if self.fields[fieldname].required is True: if self.fields[fieldname].label is None: fname = fieldname.replace('_', ' ') self.fields[fieldname].label = fname.capitalize() + '*' else: self.fields[fieldname].label += '*' def alphabetize_all(self): for fieldname, field in self.fields.items(): if hasattr(field, 'queryset'): self.fields[fieldname].queryset = field.queryset.order_by( *field.queryset.model.sort_fields) def filter_by_ctnr_all(self, request): from cyder.core.ctnr.models import Ctnr from cyder.cydns.domain.models import Domain ctnr = request.session['ctnr'] for fieldname, field in self.fields.items(): if not hasattr(field, 'queryset'): continue queryset = self.fields[fieldname].queryset if queryset.model is Ctnr: ctnrs = set(c.pk for c in request.session['ctnrs']) for pk in [1, 2]: if pk in ctnrs: ctnrs.remove(pk) if self.fields[fieldname].initial: ctnrs.add(self.fields[fieldname].initial.pk) queryset = queryset.filter(pk__in=ctnrs) else: queryset = filter_by_ctnr(ctnr=ctnr, objects=field.queryset).distinct() if queryset.count() == 1: self.fields[fieldname].initial = queryset[0] self.fields[fieldname].queryset = queryset def autoselect_system(self): System = get_model('cyder', 'system') if 'system' in self.initial: self.fields['system'] = ModelChoiceField( widget=HiddenInput(), empty_label='', queryset=System.objects.filter(pk=int(self.initial['system']))) elif 'system' in self.fields: del(self.fields['system']) def autoselect_ctnr(self, request): if 'ctnr' not in self.fields: return ctnr = request.session['ctnr'] if ctnr.name != "global": if 'ctnr' not in self.initial: self.fields['ctnr'].initial = request.session['ctnr'] self.fields['ctnr'].widget = HiddenInput() def make_usable(self, request): self.autoselect_system() self.autoselect_ctnr(request) if 'ctnr' in request.session:
self.append_required_all() class MutexMixin(object): def __enter__(self): self.lock() return self def __exit__(self, exc_type, exc_value, traceback): self.unlock() def lock(self): if not os.path.exists(os.path.dirname(self.lock_file)): os.makedirs(os.path.dirname(self.lock_file)) self.log_debug("Attempting to lock {0}..." .format(self.lock_file)) self.lock_fd = open(self.lock_file, 'w') try: fcntl.flock(self.lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB) except IOError as exc_value: self.lock_fd.close() # IOError: [Errno 11] Resource temporarily unavailable if exc_value[0] == 11: with open(self.pid_file, 'r') as pid_fd: self._lock_failure(pid_fd.read()) else: raise self.log_debug("Lock acquired") try: with open(self.pid_file, 'w') as pid_fd: pid_fd.write(unicode(os.getpid())) except IOError as exc_value: # IOError: [Errno 2] No such file or directory if exc_value[0] == 2: self.error( "Failed to acquire lock on {0}, but the process that has " "it hasn't written the PID file ({1}) yet.".format( self.lock_file, self.pid_file)) else: raise def unlock(self): if not self.lock_fd: return False self.log_debug("Releasing lock ({0})...".format(self.lock_file)) fcntl.flock(self.lock_fd, fcntl.LOCK_UN) self.lock_fd.close() os.remove(self.pid_file) os.remove(self.lock_file) self.log_debug("Unlock complete") return True def _lock_failure(self, pid): self.error('Failed to acquire lock on {0}. Process {1} currently ' 'has it.'.format(self.lock_file, pid))
self.filter_by_ctnr_all(request) self.alphabetize_all()
main.rs
struct AlwaysEqual; fn main()
{ let subject = AlwaysEqual; }
translate-test.ts
it('should translate', () => { expect(t('Hello')).toBe('Hello'); }); it('should translate with string interpolation', () => { expect(t('Hello {name}', { name: 'World' })).toBe('Hello World'); expect(t('Hello {name}', { name: 123 })).toBe('Hello 123'); expect(t('Hello {name}', { name: true })).toBe('Hello true'); });
it('should not substitute empty values', () => { expect(t('Hello {name}', { name: null })).toBe('Hello {name}'); expect(t('Hello {name}', { name: undefined })).toBe('Hello {name}'); expect(t('Hello {name}', {})).toBe('Hello {name}'); });
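// A minimal sketch of a `t` implementation consistent with the tests above
// (an assumption -- the real implementation is not shown in this file): it
// substitutes `{name}`-style placeholders only when the value is neither
// null nor undefined.
function t(message: string, values: Record<string, unknown> = {}): string {
  return message.replace(/\{(\w+)\}/g, (placeholder, key) => {
    const value = values[key];
    return value === null || value === undefined ? placeholder : String(value);
  });
}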
298.js
var x = BigInt ( - 1n ) ;
error.rs
use bytemuck::Contiguous; use solana_program::program_error::ProgramError; use num_enum::IntoPrimitive; use thiserror::Error; pub type LyraeResult<T = ()> = Result<T, LyraeError>; #[repr(u8)] #[derive(Debug, Clone, Eq, PartialEq, Copy)] pub enum SourceFileId { Processor = 0, State = 1, Critbit = 2, Queue = 3, Matching = 4, Oracle = 5, } impl std::fmt::Display for SourceFileId { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { SourceFileId::Processor => write!(f, "src/processor.rs"), SourceFileId::State => write!(f, "src/state.rs"), SourceFileId::Critbit => write!(f, "src/critbit"), SourceFileId::Queue => write!(f, "src/queue.rs"), SourceFileId::Matching => write!(f, "src/matching.rs"), SourceFileId::Oracle => write!(f, "src/oracle.rs"), } } } #[derive(Error, Debug, PartialEq, Eq)] pub enum LyraeError { #[error(transparent)] ProgramError(#[from] ProgramError), #[error("{lyrae_error_code}; {source_file_id}:{line}")] LyraeErrorCode { lyrae_error_code: LyraeErrorCode, line: u32, source_file_id: SourceFileId }, } #[derive(Debug, Error, Clone, Copy, PartialEq, Eq, IntoPrimitive)] #[repr(u32)] pub enum LyraeErrorCode { #[error("LyraeErrorCode::InvalidCache")] // 0 InvalidCache, #[error("LyraeErrorCode::InvalidOwner")] InvalidOwner, #[error("LyraeErrorCode::InvalidGroupOwner")] InvalidGroupOwner, #[error("LyraeErrorCode::InvalidSignerKey")] InvalidSignerKey, #[error("LyraeErrorCode::InvalidAdminKey")] InvalidAdminKey, #[error("LyraeErrorCode::InvalidVault")] InvalidVault, #[error("LyraeErrorCode::MathError")] MathError, #[error("LyraeErrorCode::InsufficientFunds")] InsufficientFunds, #[error("LyraeErrorCode::InvalidToken")] InvalidToken, #[error("LyraeErrorCode::InvalidMarket")] InvalidMarket, #[error("LyraeErrorCode::InvalidProgramId")] // 10 InvalidProgramId, #[error("LyraeErrorCode::GroupNotRentExempt")] GroupNotRentExempt, #[error("LyraeErrorCode::OutOfSpace")] OutOfSpace, #[error("LyraeErrorCode::TooManyOpenOrders Reached the maximum number of open orders for this market")] TooManyOpenOrders, #[error("LyraeErrorCode::AccountNotRentExempt")] AccountNotRentExempt, #[error("LyraeErrorCode::ClientIdNotFound")] ClientIdNotFound, #[error("LyraeErrorCode::InvalidNodeBank")] InvalidNodeBank, #[error("LyraeErrorCode::InvalidRootBank")] InvalidRootBank, #[error("LyraeErrorCode::MarginBasketFull")] MarginBasketFull, #[error("LyraeErrorCode::NotLiquidatable")] NotLiquidatable, #[error("LyraeErrorCode::Unimplemented")] // 20 Unimplemented, #[error("LyraeErrorCode::PostOnly")] PostOnly, #[error("LyraeErrorCode::Bankrupt Invalid instruction for bankrupt account")] Bankrupt, #[error("LyraeErrorCode::InsufficientHealth")] InsufficientHealth, #[error("LyraeErrorCode::InvalidParam")] InvalidParam, #[error("LyraeErrorCode::InvalidAccount")] InvalidAccount, #[error("LyraeErrorCode::InvalidAccountState")] InvalidAccountState, #[error("LyraeErrorCode::SignerNecessary")] SignerNecessary, #[error("LyraeErrorCode::InsufficientLiquidity Not enough deposits in this node bank")] InsufficientLiquidity, #[error("LyraeErrorCode::InvalidOrderId")] InvalidOrderId, #[error("LyraeErrorCode::InvalidOpenOrdersAccount")] // 30 InvalidOpenOrdersAccount, #[error("LyraeErrorCode::BeingLiquidated Invalid instruction while being liquidated")] BeingLiquidated, #[error("LyraeErrorCode::InvalidRootBankCache Cache the root bank to resolve")] InvalidRootBankCache, #[error("LyraeErrorCode::InvalidPriceCache Cache the oracle price to resolve")] InvalidPriceCache, #[error("LyraeErrorCode::InvalidPerpMarketCache 
Cache the perp market to resolve")] InvalidPerpMarketCache, #[error("LyraeErrorCode::TriggerConditionFalse The trigger condition for this TriggerOrder is not met")] TriggerConditionFalse, #[error("LyraeErrorCode::InvalidSeeds Invalid seeds. Unable to create PDA")] InvalidSeeds, #[error("LyraeErrorCode::InvalidOracleType The oracle account was not recognized")] InvalidOracleType, #[error("LyraeErrorCode::InvalidOraclePrice")] InvalidOraclePrice, #[error("LyraeErrorCode::MaxAccountsReached The maximum number of accounts for this group has been reached")] MaxAccountsReached, #[error("LyraeErrorCode::Default Check the source code for more info")] // 40 Default = u32::MAX_VALUE, } impl From<LyraeError> for ProgramError { fn from(e: LyraeError) -> ProgramError { match e { LyraeError::ProgramError(pe) => pe, LyraeError::LyraeErrorCode { lyrae_error_code, line: _, source_file_id: _ } => { ProgramError::Custom(lyrae_error_code.into()) } } } } impl From<serum_dex::error::DexError> for LyraeError { fn from(de: serum_dex::error::DexError) -> Self { let pe: ProgramError = de.into(); pe.into() } } #[inline] pub fn check_assert( cond: bool, lyrae_error_code: LyraeErrorCode, line: u32, source_file_id: SourceFileId, ) -> LyraeResult<()>
#[macro_export] macro_rules! declare_check_assert_macros { ($source_file_id:expr) => { #[allow(unused_macros)] macro_rules! check { ($cond:expr, $err:expr) => { check_assert($cond, $err, line!(), $source_file_id) }; } #[allow(unused_macros)] macro_rules! check_eq { ($x:expr, $y:expr, $err:expr) => { check_assert($x == $y, $err, line!(), $source_file_id) }; } #[allow(unused_macros)] macro_rules! throw { () => { LyraeError::LyraeErrorCode { lyrae_error_code: LyraeErrorCode::Default, line: line!(), source_file_id: $source_file_id, } }; } #[allow(unused_macros)] macro_rules! throw_err { ($err:expr) => { LyraeError::LyraeErrorCode { lyrae_error_code: $err, line: line!(), source_file_id: $source_file_id, } }; } #[allow(unused_macros)] macro_rules! math_err { () => { LyraeError::LyraeErrorCode { lyrae_error_code: LyraeErrorCode::MathError, line: line!(), source_file_id: $source_file_id, } }; } }; }
{ if cond { Ok(()) } else { Err(LyraeError::LyraeErrorCode { lyrae_error_code, line, source_file_id }) } }
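A minimal usage sketch (hypothetical, not part of the source): once a module invokes declare_check_assert_macros! with its SourceFileId, the generated check! macro wraps check_assert so a failed condition carries the error code, line, and source file.

// Hypothetical usage sketch of the error machinery above.
declare_check_assert_macros!(SourceFileId::Processor);

fn assert_deposit_is_positive(amount: u64) -> LyraeResult<()> {
    // Expands to check_assert(amount > 0, LyraeErrorCode::InvalidParam, line!(), SourceFileId::Processor)
    check!(amount > 0, LyraeErrorCode::InvalidParam)?;
    Ok(())
}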
face.py
import torch import matplotlib.image as img import cv2 import dlib from imutils.face_utils import * import numpy as np # image = img.imread("extra//test.jpg") # image = cv2.cvtColor(np.array(image), cv2.COLOR_RGB2BGR) # opencvImage dlib_path = 'extra//shape_predictor_68_face_landmarks.dat' def
(img): global detector, landmark_predictor # Declare the face detector and load the pretrained facial landmark model detector = dlib.get_frontal_face_detector() landmark_predictor = dlib.shape_predictor(dlib_path) # Run face detection face_rects = detector(img, 1) for i, d in enumerate(face_rects): # Read the bounding box's top-left and bottom-right coordinates x1 = d.left() y1 = d.top() x2 = d.right() y2 = d.bottom() # Extract the facial landmarks within this coordinate range shape = landmark_predictor(img, d) # Convert the landmarks to numpy shape = shape_to_np(shape) # (68,2) # Crop the face using dlib's box (simply slice the image at the box coordinates), then resize it to 512*512 crop_img = img[y1:y2, x1:x2, :] try: resize_img = cv2.resize(crop_img, (512, 512)) # cv2.imshow("OpenCV",resize_img) # cv2.waitKey() return resize_img except: return np.array([0]) return np.array([0]) def predict_image(logger, image, model): try: face = get_face(image) # predict target face = torch.tensor(face, dtype=torch.float32)/255 # normalize face = face.permute(2, 0, 1).unsqueeze(0).cuda() # model = torch.load('run\SCUT\pre_googlenet\experiment_6\pre_googlenet.pkl') # model.load_state_dict(torch.load('run\SCUT\pre_googlenet\experiment_6\checkpoint.pth.tar')['state_dict']) outputs = model(face) # [bsz, c, h, w] _, predicted = torch.max(outputs.data, 1) score = int(predicted.item()) * 20 # logger.info("Predict Score : {}".format(score)) return score except Exception as e: # print(e) return 0
get_face
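A minimal usage sketch under stated assumptions: the image path and the model file are placeholders, and a CUDA device is assumed because predict_image moves the tensor to the GPU.

# Hypothetical usage of get_face/predict_image; paths and model are assumptions.
import cv2
import torch

image = cv2.imread("extra//test.jpg")     # BGR image, as the dlib/OpenCV code above expects
model = torch.load("model.pkl")           # assumed: a saved classifier whose class index * 20 is the score
model.eval()
score = predict_image(None, image, model) # logger is unused on the happy path
print("Predict Score:", score)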
slick.js
// slick.js // General configuration and initiation of the 'slick' image carousel.
$('.single-item').slick({ autoplay: false, autoplaySpeed: 5000, slidesToShow: 1, slidesToScroll: 1, dots: true, }); })
$(document).ready(function() {
main.js
// Check the container's viewport to tell whether it is tablet- or phone-sized function
() { if ($(".responsive-tablet").css("float") == "none" ){ // you're on media query tablet resized viewport } if ($(".responsive-phone").css("float") == "none" ){ // you're on media query phone resized viewport } }; $(document).ready(function() { checkSize(); $(window).resize(function(event) { checkSize(); }); // End of window.resize }); // End of document.ready
checkSize
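For comparison (a sketch, not from the source): window.matchMedia reports the same breakpoints directly, without probing CSS float values; the breakpoint widths below are assumptions.

// Hypothetical matchMedia-based variant of checkSize; breakpoint widths are assumptions.
const tabletQuery = window.matchMedia("(max-width: 768px)");
const phoneQuery = window.matchMedia("(max-width: 480px)");

function checkSizeViaMatchMedia() {
  if (phoneQuery.matches) {
    // phone-sized viewport
  } else if (tabletQuery.matches) {
    // tablet-sized viewport
  }
}

// MediaQueryList supports addEventListener in modern browsers (older Safari used addListener).
tabletQuery.addEventListener("change", checkSizeViaMatchMedia);
phoneQuery.addEventListener("change", checkSizeViaMatchMedia);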
DeckOfCards.py
#author: Christoffer Norell #contact: [email protected] #This is a simple simulator of a deck of cards I made for fun. #The values in the dictionaries are there for better comparison during games. import random
#Using dictionaries to represent values. #The color-values were taken from bridge-order: #http://pokerterms.com/bridge-order.html colors = [{'Hearts': 0 },{'Diamonds': 1},{'Clubs': 2},{'Spades':3}] values = [{'Two':2},{'Three': 3},{'Four':4},{'Five':5},{'Six': 6},{'Seven': 7}, {'Eight': 8}, {'Nine': 9} , {'Ten': 10},{'Jack': 11} , {'Queen':12}, {'King':13} , {'Ace':14}] class Card(): def __init__(self,value,color): self.color = color self.value = value def show(self): return (self.color, self.value) class Deck(): def __init__(self): self.deck = [] for x in range(len(colors)): for y in range(len(values)): # Pass value first, then color, to match Card.__init__(self, value, color) self.deck.append(Card(values[y],colors[x])) def shuffle(self): random.shuffle(self.deck) def hand_card(self): card = self.deck.pop() return card def hand_cards(self, amount): tmp = [] if amount <= len(self.deck): for x in range(amount): tmp.append(self.hand_card()) return tmp else: print("out of cards") return None
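A short usage example of the classes above (the hand size is illustrative):

# Illustrative usage: shuffle a deck and deal a five-card hand.
deck = Deck()
deck.shuffle()
hand = deck.hand_cards(5)
for card in hand:
    color, value = card.show()
    # color and value are single-entry dicts, e.g. {'Spades': 3} and {'Ace': 14}
    print(list(color.keys())[0], list(value.keys())[0])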
thrift_proxy.pb.go
// Code generated by protoc-gen-go. DO NOT EDIT. // source: envoy/config/filter/network/thrift_proxy/v2alpha1/thrift_proxy.proto package v2 import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" import _ "github.com/lyft/protoc-gen-validate/validate" import _ "github.com/gogo/protobuf/gogoproto" // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf type ThriftProxy_TransportType int32 const ( // For every new connection, the Thrift proxy will determine which transport to use. ThriftProxy_AUTO_TRANSPORT ThriftProxy_TransportType = 0 // The Thrift proxy will assume the client is using the Thrift framed transport. ThriftProxy_FRAMED ThriftProxy_TransportType = 1 // The Thrift proxy will assume the client is using the Thrift unframed transport. ThriftProxy_UNFRAMED ThriftProxy_TransportType = 2 ) var ThriftProxy_TransportType_name = map[int32]string{ 0: "AUTO_TRANSPORT", 1: "FRAMED", 2: "UNFRAMED", } var ThriftProxy_TransportType_value = map[string]int32{ "AUTO_TRANSPORT": 0, "FRAMED": 1, "UNFRAMED": 2, } func (x ThriftProxy_TransportType) String() string { return proto.EnumName(ThriftProxy_TransportType_name, int32(x)) } func (ThriftProxy_TransportType) EnumDescriptor() ([]byte, []int) { return fileDescriptor1, []int{0, 0} } type ThriftProxy_ProtocolType int32 const ( // For every new connection, the Thrift proxy will determine which protocol to use. // N.B. The older, non-strict binary protocol is not included in automatic protocol // detection. ThriftProxy_AUTO_PROTOCOL ThriftProxy_ProtocolType = 0 // The Thrift proxy will assume the client is using the Thrift binary protocol. ThriftProxy_BINARY ThriftProxy_ProtocolType = 1 // The Thrift proxy will assume the client is using the Thrift non-strict binary protocol. ThriftProxy_LAX_BINARY ThriftProxy_ProtocolType = 2 // The Thrift proxy will assume the client is using the Thrift compact protocol. ThriftProxy_COMPACT ThriftProxy_ProtocolType = 3 ) var ThriftProxy_ProtocolType_name = map[int32]string{ 0: "AUTO_PROTOCOL", 1: "BINARY", 2: "LAX_BINARY", 3: "COMPACT", } var ThriftProxy_ProtocolType_value = map[string]int32{ "AUTO_PROTOCOL": 0, "BINARY": 1, "LAX_BINARY": 2, "COMPACT": 3, } func (x ThriftProxy_ProtocolType) String() string { return proto.EnumName(ThriftProxy_ProtocolType_name, int32(x)) } func (ThriftProxy_ProtocolType) EnumDescriptor() ([]byte, []int) { return fileDescriptor1, []int{0, 1} } // [#protodoc-title: Extensions Thrift Proxy] // Thrift Proxy filter configuration. // [#comment:next free field: 5] type ThriftProxy struct { // Supplies the type of transport that the Thrift proxy should use. Defaults to `AUTO_TRANSPORT`. Transport ThriftProxy_TransportType `protobuf:"varint,2,opt,name=transport,enum=envoy.config.filter.network.thrift_proxy.v2alpha1.ThriftProxy_TransportType" json:"transport,omitempty"` // Supplies the type of protocol that the Thrift proxy should use. Defaults to `AUTO_PROTOCOL`. Protocol ThriftProxy_ProtocolType `protobuf:"varint,3,opt,name=protocol,enum=envoy.config.filter.network.thrift_proxy.v2alpha1.ThriftProxy_ProtocolType" json:"protocol,omitempty"` // The human readable prefix to use when emitting statistics. StatPrefix string `protobuf:"bytes,1,opt,name=stat_prefix,json=statPrefix" json:"stat_prefix,omitempty"` // The route table for the connection manager is static and is specified in this property. 
RouteConfig *RouteConfiguration `protobuf:"bytes,4,opt,name=route_config,json=routeConfig" json:"route_config,omitempty"` } func (m *ThriftProxy) Reset() { *m = ThriftProxy{} } func (m *ThriftProxy) String() string { return proto.CompactTextString(m) } func (*ThriftProxy) ProtoMessage() {} func (*ThriftProxy) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{0} } func (m *ThriftProxy) GetTransport() ThriftProxy_TransportType { if m != nil { return m.Transport } return ThriftProxy_AUTO_TRANSPORT } func (m *ThriftProxy) GetProtocol() ThriftProxy_ProtocolType { if m != nil { return m.Protocol } return ThriftProxy_AUTO_PROTOCOL } func (m *ThriftProxy) GetStatPrefix() string { if m != nil { return m.StatPrefix } return "" } func (m *ThriftProxy) GetRouteConfig() *RouteConfiguration { if m != nil { return m.RouteConfig } return nil } func init()
func init() { proto.RegisterFile("envoy/config/filter/network/thrift_proxy/v2alpha1/thrift_proxy.proto", fileDescriptor1) } var fileDescriptor1 = []byte{ // 407 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x90, 0x41, 0x8f, 0x93, 0x40, 0x14, 0xc7, 0x77, 0x28, 0xae, 0xdb, 0x47, 0xb7, 0xc1, 0x89, 0x89, 0xa4, 0x07, 0x43, 0x7a, 0x6a, 0x3c, 0x0c, 0x59, 0x3c, 0x7b, 0x00, 0x76, 0x4d, 0x8c, 0xdd, 0x42, 0xc6, 0x69, 0xa2, 0x5e, 0x08, 0x56, 0x68, 0x89, 0x84, 0x21, 0xd3, 0x29, 0xb6, 0x57, 0x4f, 0x7e, 0x0f, 0x0f, 0x7e, 0x07, 0x4f, 0x7e, 0x1d, 0xbf, 0x85, 0x61, 0xa0, 0xb6, 0x8d, 0xa7, 0xea, 0xed, 0xcd, 0x7b, 0x8f, 0xdf, 0xef, 0xf1, 0x87, 0xdb, 0xb4, 0xac, 0xf9, 0xce, 0x59, 0xf0, 0x32, 0xcb, 0x97, 0x4e, 0x96, 0x17, 0x32, 0x15, 0x4e, 0x99, 0xca, 0xcf, 0x5c, 0x7c, 0x72, 0xe4, 0x4a, 0xe4, 0x99, 0x8c, 0x2b, 0xc1, 0xb7, 0x3b, 0xa7, 0x76, 0x93, 0xa2, 0x5a, 0x25, 0x37, 0x27, 0x5d, 0x52, 0x09, 0x2e, 0x39, 0xbe, 0x51, 0x14, 0xd2, 0x52, 0x48, 0x4b, 0x21, 0x1d, 0x85, 0x9c, 0xec, 0xef, 0x29, 0xa3, 0x17, 0xe7, 0x8b, 0x05, 0xdf, 0xc8, 0xb4, 0x35, 0x8e, 0x9e, 0xd4, 0x49, 0x91, 0x7f, 0x4c, 0x64, 0xea, 0xec, 0x8b, 0x6e, 0xf0, 0x78, 0xc9, 0x97, 0x5c, 0x95, 0x4e, 0x53, 0xb5, 0xdd, 0xf1, 0x77, 0x1d, 0x0c, 0xa6, 0xa0, 0x51, 0xc3, 0xc4, 0x35, 0xf4, 0xa5, 0x48, 0xca, 0x75, 0xc5, 0x85, 0xb4, 0x34, 0x1b, 0x4d, 0x86, 0xee, 0x94, 0x9c, 0xfd, 0x13, 0xe4, 0x08, 0x49, 0xd8, 0x9e, 0xc7, 0x76, 0x55, 0xea, 0xc3, 0x8f, 0x5f, 0x3f, 0x7b, 0x0f, 0xbe, 0x20, 0xcd, 0x44, 0xf4, 0xa0, 0xc2, 0x6b, 0xb8, 0x52, 0x07, 0x2d, 0x78, 0x61, 0xf5, 0x94, 0xf6, 0xf5, 0x7f, 0x6a, 0xa3, 0x0e, 0xf7, 0x97, 0xf5, 0x8f, 0x08, 0x3f, 0x03, 0x63, 0x2d, 0x93, 0x86, 0x92, 0x66, 0xf9, 0xd6, 0x42, 0x36, 0x9a, 0xf4, 0xfd, 0x7e, 0xb3, 0xaa, 0x0b, 0xcd, 0x46, 0x14, 0x9a, 0x69, 0xa4, 0x86, 0x78, 0x05, 0x03, 0x15, 0x73, 0xdc, 0xde, 0x63, 0xe9, 0x36, 0x9a, 0x18, 0xee, 0xdd, 0x3f, 0x1c, 0x49, 0x1b, 0x4c, 0xa0, 0x3e, 0xd8, 0x88, 0x44, 0xe6, 0xbc, 0xa4, 0x86, 0x38, 0xf4, 0xc6, 0x01, 0x5c, 0x9f, 0x44, 0x86, 0x31, 0x0c, 0xbd, 0x39, 0x0b, 0x63, 0x46, 0xbd, 0xd9, 0x9b, 0x28, 0xa4, 0xcc, 0xbc, 0xc0, 0x00, 0x97, 0x2f, 0xa9, 0x77, 0x7f, 0x77, 0x6b, 0x22, 0x3c, 0x80, 0xab, 0xf9, 0xac, 0x7b, 0x69, 0x23, 0xfd, 0xeb, 0xb7, 0xa7, 0x17, 0xe3, 0x08, 0x06, 0xc7, 0x01, 0xe0, 0x47, 0x70, 0xad, 0x18, 0x11, 0x0d, 0x59, 0x18, 0x84, 0xd3, 0x16, 0xe1, 0xbf, 0x9a, 0x79, 0xf4, 0x9d, 0x89, 0xf0, 0x10, 0x60, 0xea, 0xbd, 0x8d, 0xbb, 0xb7, 0x86, 0x0d, 0x78, 0x18, 0x84, 0xf7, 0x91, 0x17, 0x30, 0xb3, 0xd7, 0x12, 0x7d, 0xfd, 0xbd, 0x56, 0xbb, 0x1f, 0x2e, 0x55, 0x78, 0xcf, 0x7f, 0x07, 0x00, 0x00, 0xff, 0xff, 0x04, 0xac, 0xa1, 0xb9, 0x1f, 0x03, 0x00, 0x00, }
{ proto.RegisterType((*ThriftProxy)(nil), "envoy.config.filter.network.thrift_proxy.v2alpha1.ThriftProxy") proto.RegisterEnum("envoy.config.filter.network.thrift_proxy.v2alpha1.ThriftProxy_TransportType", ThriftProxy_TransportType_name, ThriftProxy_TransportType_value) proto.RegisterEnum("envoy.config.filter.network.thrift_proxy.v2alpha1.ThriftProxy_ProtocolType", ThriftProxy_ProtocolType_name, ThriftProxy_ProtocolType_value) }
boundaryconds.py
#------------------------------------------------------------------------------- # This file contains functions that: # (1) define the boundaries (ice-air,ice-water,ice-bed) of the mesh, AND... # (2) mark the boundaries of the mesh #------------------------------------------------------------------------------- from params import tol,Lngth,Hght from geometry import bed import numpy as np from dolfin import * #------------------------------------------------------------------------------- # Define SubDomains for ice-water boundary, ice-bed boundary, inflow (x=0) and # outflow (x=Length of domain). The parameter 'tol' is a minimal water depth # used to distinguish the ice-water and ice-bed surfaces. class WaterBoundary(SubDomain): # Ice-water boundary. # This boundary is marked first and all of the irrelevant portions are # overwritten by the other boundary markers. def inside(self, x, on_boundary): return (on_boundary and (x[1]<0.5*Hght)) class BedBoundary(SubDomain): # Ice-bed boundary away from the lake; the portions near the lake are overwritten # by BasinBoundary. # Lifting of ice from the bed *is not* allowed on this boundary. def inside(self, x, on_boundary): return (on_boundary and ((x[1]-bed(x[0]))<=tol)) class LeftBoundary(SubDomain): # Left boundary def inside(self, x, on_boundary): return (on_boundary and np.abs(x[0])<tol) class RightBoundary(SubDomain): # Right boundary def inside(self, x, on_boundary): return (on_boundary and np.abs(x[0]-Lngth)<tol) #------------------------------------------------------------------------------- def mark_boundary(mesh): # Assign markers to each boundary segment (except the upper surface). # This is used at each time step to update the markers. # # Boundary marker numbering convention: # 1 - Left boundary # 2 - Right boundary # 3 - Ice-bed boundary # 4 - Ice-water boundary # # This function returns these markers, which are used to define the # boundary integrals and dirichlet conditions. boundary_markers = MeshFunction('size_t', mesh,dim=1)
bdryWater.mark(boundary_markers, 4) # Mark ice-bed boundary away from lake bdryBed = BedBoundary() bdryBed.mark(boundary_markers, 3) # Mark inflow boundary bdryLeft = LeftBoundary() bdryLeft.mark(boundary_markers, 1) # Mark outflow boundary bdryRight = RightBoundary() bdryRight.mark(boundary_markers, 2) return boundary_markers
boundary_markers.set_all(0) # Mark ice-water boundary bdryWater = WaterBoundary()
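A sketch of how these markers are typically consumed downstream (variable names are assumptions): passing them to a boundary measure lets integrals target individual segments by marker number.

# Hypothetical downstream use of mark_boundary with a FEniCS boundary measure;
# assumes `mesh` is already built.
boundary_markers = mark_boundary(mesh)
ds = Measure('ds', domain=mesh, subdomain_data=boundary_markers)
# Integrate a traction term only over the ice-water boundary (marker 4), e.g.:
# F += dot(traction, v) * ds(4)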
main.rs
use commons::measure; use commons::time; const INPUT: &str = include_str!("../input/input.txt"); fn main() { println!("Part One: {}", measure!(part_one())); // println!("Part Two: {}", measure!(part_two())); // println!("Part One: {}", time!(part_one())); println!("Part Two: {}", time!(part_two())); } macro_rules! wrapping_sub { ($expr:expr, $max:expr) => {{ let result = $expr; if result == 1 { $max } else { result - 1 } }}; } fn setup_input(length: usize) -> (usize, Vec<usize>)
fn play_game(length: usize, rounds: usize) -> Vec<usize> { let (start, mut list) = setup_input(length); let mut current = start; for _ in 0..rounds { let a = list[current]; let b = list[a]; let c = list[b]; let mut dest = wrapping_sub!(current, length); while dest == a || dest == b || dest == c { dest = wrapping_sub!(dest, length) } list[current] = list[c]; list[c] = list[dest]; list[dest] = a; current = list[current]; } list } fn part_one() -> impl std::fmt::Display { let list = play_game(9, 100); let mut answer = 0; let mut current = 1; // Collect exactly the eight cups clockwise of cup 1, whatever their labels are. for _ in 0..8 { let value = list[current]; answer = (answer * 10) + value; current = value; } answer } fn part_two() -> impl std::fmt::Display { let list = play_game(1_000_000, 10_000_000); list[1] * list[list[1]] }
{ let input: Box<[usize]> = INPUT.bytes() .map(|c| (c - b'0') as usize) .collect(); let mut data = vec![0; length + 1]; let start = input[0]; let mut current = start; for i in 1..length { let next = if i < input.len() { input[i] } else { i + 1 }; data[current] = next; current = next; } data[current] = start; (start, data) }
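For clarity (a hypothetical check, not part of the source): setup_input encodes the circle as a successor array where list[c] is the cup clockwise of cup c, which is what lets play_game splice three cups out and back in with O(1) work per move.

// Hypothetical sanity check of the successor-array encoding; assumes INPUT holds
// at least nine digit characters (e.g. "389125467").
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn successor_array_closes_the_circle() {
        let (start, list) = setup_input(9);
        // Following `list` from the starting cup walks the circle clockwise.
        let mut cup = start;
        let mut seen = Vec::new();
        for _ in 0..9 {
            seen.push(cup);
            cup = list[cup];
        }
        assert_eq!(cup, start); // the circle closes after nine steps
        assert_eq!(seen.len(), 9);
    }
}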
index.js
var loaderUtils = require('loader-utils'); var coffee = require("coffeescript"); var esprima = require('esprima'); var CoverageInstrumentor = require('coffee-coverage/lib/coffeeCoverage').CoverageInstrumentor; var instrumentor = new CoverageInstrumentor({ instrumentor: 'istanbul' }); module.exports = function(source) { if (this.cacheable) { this.cacheable(); } var coffeeRequest = loaderUtils.interpolateName(this, '[path][name].[ext]', {}); var coffeeScriptOptions = { filename: coffeeRequest, bare: true, sourceMap: true, sourceRoot: "", sourceFiles: [ coffeeRequest ], inlineMap: true }; var esprimaOptions = { loc: true, tokens: true, jsx: true }; var instrumentedJS = instrumentor.instrumentCoffee(coffeeRequest, source); instrumentedJS = instrumentedJS.init + '\n\n' + instrumentedJS.js; var instrumentedProgram = esprima.parse(instrumentedJS, esprimaOptions); var compiledJS = coffee.compile(source, coffeeScriptOptions); var compiledProgram = esprima.parse(compiledJS.js, esprimaOptions); var smLines = compiledJS.sourceMap.lines; var instrumentedTokens = instrumentedProgram.tokens; var compiledTokens = compiledProgram.tokens; var compiledToken, instrumentedToken, smLineDef, smColumns, smCol, i, j, k, l; lineLoop: for (k = 0; k < smLines.length; k++) { smLineDef = smLines[k]; smColumns = smLineDef && smLineDef.columns; if (!smColumns) { smLines.splice(k, 1); k--; continue lineLoop; } columnLoop:
for (i = 0; i < smColumns.length; i++) { smCol = smColumns[i]; if (!smCol) { continue columnLoop; } originalTokenLookup: for (j = 0, l = compiledTokens.length; j < l; j++) { compiledToken = compiledTokens[j]; if (compiledToken.loc.start.line - 1 === smCol.line && compiledToken.loc.start.column === smCol.column) { break originalTokenLookup; } if (j + 1 === l) { smColumns[i] = null; continue columnLoop; } } instrumentedTokenLookup: while (instrumentedTokens.length) { instrumentedToken = instrumentedTokens.shift(); if (instrumentedToken.type === compiledToken.type && instrumentedToken.value === compiledToken.value) { smCol.line = smLineDef.line = instrumentedToken.loc.start.line - 1; smCol.column = instrumentedToken.loc.start.column; continue columnLoop; } } } emptyColumnCleanup: while (smColumns.length) { if (!smColumns[smColumns.length - 1]) { smColumns.pop(); } else { break emptyColumnCleanup; } } } var sourceMap = compiledJS.sourceMap.generate(coffeeScriptOptions, source); this.callback(null, instrumentedJS, sourceMap); };
addDomainRecord.go
package openapi import ( "Ali-DDNS-Server/server/openapi/defs" alidns20150109 "github.com/alibabacloud-go/alidns-20150109/v2/client" "github.com/alibabacloud-go/tea/tea" ) func
(domainName, rr, _type, value string) (*defs.Resp, error) { client, _err := CreateClient() if _err != nil { return nil, _err } addDomainRecordRequest := &alidns20150109.AddDomainRecordRequest{ DomainName: tea.String(domainName), RR: tea.String(rr), Type: tea.String(_type), Value: tea.String(value), } // If you copy and run this code, print the API return value yourself as needed result, _err := client.AddDomainRecord(addDomainRecordRequest) if _err != nil { return nil, _err } return defs.ResponseBody{AddDomainRecordResponseBody: result.Body}.Format(), nil }
AddDomainRecord
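A minimal caller sketch: the domain and record values are placeholders, and it assumes CreateClient picks up credentials from its usual configuration. The import path is inferred from the package's own imports.

// Hypothetical usage of AddDomainRecord; values are placeholders.
package main

import (
	"log"

	"Ali-DDNS-Server/server/openapi"
)

func main() {
	resp, err := openapi.AddDomainRecord("example.com", "www", "A", "203.0.113.10")
	if err != nil {
		log.Fatalf("add record failed: %v", err)
	}
	log.Printf("record created: %+v", resp)
}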
bulk_test.go
// Simple tests. package gsql_test import ( "testing" "github.com/giant-stone/go/gutil" ) func TestGSql_BulkCreateOrUpdate(t *testing.T) { mgr := newAccountProxy() db, err := mgr.OpenDB() gutil.ExitOnErr(err) defer db.Close() tearDown(db) setUp(db) // insertSamples changes := []interface{}{ account{Id: 1, Mobileno: "12345", Password: "12345"}, account{Id: 2, Mobileno: "12345", Password: "12345"}, account{Id: 3, Mobileno: "12345", Password: "12345"}, } _, err = mgr.BulkCreateOrUpdate(db, changes, 2) gutil.ExitOnErr(err) // update samples obj1 := account{Id: 1, Mobileno: "1111", Password: "111"} obj2 := account{Id: 2, Mobileno: "222", Password: "2222"} changes = []interface{}{obj1, obj2} _, err = mgr.BulkCreateOrUpdate(db, changes, 5) if err != nil { t.Errorf("want BulkCreateOrUpdate err=nil, got %v", err) } // query samples var objsGot []account columns := mgr.GetColumns(&account{}) limit := 10000 where := []map[string]interface{}{ map[string]interface{}{"key": "id", "op": "in", "value": []interface{}{"1", "2"}}, } err = mgr.GetsWhere(db, &objsGot, &columns, &where, limit) if err != nil { t.Errorf("want GetsWhere err=nil, got %v", err) } objsMapGot := map[int]account{} for _, obj := range objsGot { objsMapGot[obj.Id] = obj } // compare samples total := 2 if total != len(objsGot) { t.Errorf("want total=%d, got %d", total, len(objsGot)) }
} if obj1Got.Mobileno != obj1.Mobileno { t.Errorf("want Mobileno=%v got %v", obj1.Mobileno, obj1Got.Mobileno) } if obj1Got.Password != obj1.Password { t.Errorf("want Password=%v got %v", obj1.Password, obj1Got.Password) } obj2Got, ok := objsMapGot[2] if !ok { t.Error("id=2 not found") } if obj2Got.Mobileno != obj2.Mobileno { t.Errorf("want Mobileno=%v got %v", obj2.Mobileno, obj2Got.Mobileno) } if obj2Got.Password != obj2.Password { t.Errorf("want Password=%v got %v", obj2.Password, obj2Got.Password) } tearDown(db) }
obj1Got, ok := objsMapGot[1] if !ok { t.Error("id=1 not found")
script.js
// Game variables let computerSequence = []; let playerSequence = []; let flashCounter; let flashInterval = 1500; let intervalRef; let maxFlashes = 25; let turn; let isComputerTurn; let isPlayerTurn = false; let isMuted = false; let hasSequenceMatched; let hasPlayerWon; let highestScoreCounter = 0; // Audio files // Thanks to https://howlerjs.com/ for their library to help smooth out audio at high game speeds. const tealAudio = new Howl({ src: ["assets/audio/ashort.webm", "assets/audio/ashort.mp3"], }); const whiteAudio = new Howl({ src: ["assets/audio/eshort.webm", "assets/audio/eshort.mp3"], }); const purpleAudio = new Howl({ src: ["assets/audio/dsharpshort.webm", "assets/audio/dsharpshort.mp3"], }); const greyAudio = new Howl({ src: ["assets/audio/bshort.webm", "assets/audio/bshort.mp3"], }); // Collection of html elements const startButton = document.querySelector("#startBtn"); const resetButton = document.querySelector("#resetBtn"); const turnCounter = document.querySelector("#turnsTaken"); const gameLevel = document.querySelector("#level"); const endTurn = document.querySelector("#endTurn"); const highScore = document.querySelector("#highScore"); // Stores new highest scores const updateHighScore = localStorage.getItem("newHighScore"); // Toggle speaker icons on and off whilst muting audio $("#toggleToMute").click(function () { $("#toggleToMute").addClass("hide-content"); $("#toggleToAudio").removeClass("hide-content"); tealAudio.mute(true); purpleAudio.mute(true); whiteAudio.mute(true); greyAudio.mute(true); }); $("#toggleToAudio").click(function () { $("#toggleToAudio").addClass("hide-content"); $("#toggleToMute").removeClass("hide-content"); tealAudio.mute(false); purpleAudio.mute(false); whiteAudio.mute(false); greyAudio.mute(false); }); //Event listening for click on start button startButton.addEventListener("click", startGame); //Event listening for click on reset button resetButton.addEventListener("click", resetGame); // Starts the game when start button is pressed. // This changes the game button color, // Alters the text on Turn and Level text // And hides the start button/shows the reset button on click function startGame() { startButton.classList.add("hide-content"); resetButton.classList.remove("hide-content"); $("#turnsTaken").text("0"); originalColor(); prepareGame(); } // Resets the game when reset button is pressed. // This clears the current sequence, interval and turns. // The reset button is hidden and start button appears prompting player. function resetGame() { computerSequence = []; playerSequence = []; turn = 0; isMuted = false; clearInterval(intervalRef); clearTimeout(); $("#turnsTaken").text("-"); $("#level").text("Easy"); resetButton.classList.add("hide-content"); startButton.classList.remove("hide-content"); } // Prepares the game once start button is pressed. // This sets the variables, creates the sequence to be copied and starts the first turn. function prepareGame() { hasPlayerWon = false; isMuted = false; computerSequence = []; playerSequence = []; flashCounter = 0; intervalRef = 0; turn = 1; turnCounter.innerHTML = 1; highestScoreCounter = 1; hasSequenceMatched = true; for (let i = 0; i < maxFlashes; i++) { computerSequence.push(Math.floor(Math.random() * 4) + 1); } isComputerTurn = true; intervalRef = setInterval(gamePlay, flashInterval); // Runs gamePlay function every 800ms. Light will flash every 800ms } // Increments the speed of the flashes after the turn counter reaches specific break points. 
function checkForLevelIncrement(turn) { if (turn <= 4) { flashInterval = 1500; $("#level").text(" Easy"); } else if (turn >= 5 && turn <= 7) { flashInterval = 1200; $("#level").text(" Medium"); } else if (turn >= 8 && turn <= 10) { flashInterval = 1000; $("#level").text(" Hard"); } else if (turn >= 11 && turn <= 15) { flashInterval = 800; $("#level").text(" Fiendish"); } else if (turn >= 16 && turn < maxFlashes) { flashInterval = 400; $("#level").text(" Ghastly"); } } // Checks whether it's the player's turn or the computer's turn function gamePlay() { isPlayerTurn = false; $(".btn-lg").css("cursor", "pointer"); // When the number of flashes equals the number of turns, the interval is cleared, the computer's turn is over, and the game is handed to the player if (flashCounter === turn) { clearInterval(intervalRef); isComputerTurn = false; originalColor(); isPlayerTurn = true; } // If it is the computer's turn, a timer is set for the flashes if (isComputerTurn) { isPlayerTurn = false; $(".btn-lg").css("cursor", "not-allowed"); originalColor(); setTimeout(() => { // Links the sequence numbers to the buttons, answering functions below if (computerSequence[flashCounter] === 1) { playColorAudio(flashTeal, tealAudio); } if (computerSequence[flashCounter] === 2) { playColorAudio(flashWhite, whiteAudio); } if (computerSequence[flashCounter] === 3) { playColorAudio(flashPurple, purpleAudio); } if (computerSequence[flashCounter] === 4) { playColorAudio(flashGrey, greyAudio); } flashCounter++; }, 200); } } // Plays the combined audio-visual cues when the sound is not muted. function playColorAudio(colorFunction, varAudio) { colorFunction(); if (!isMuted) { varAudio.play(); } } // Event listeners for the player clicking buttons during gameplay. Checks it is the player's turn before allowing clicks, // pushes the click onto the player sequence, checks whether the player was correct, and then calls that button's function. // If the game is not won, the color returns to the original color after a short delay. function playerBtnClick(playerSeqPushNumber, btnFlashColor, btnFlashAudio) { if (isPlayerTurn) { playerSequence.push(playerSeqPushNumber); checkAnswer(); playColorAudio(btnFlashColor, btnFlashAudio); } if (!hasPlayerWon) { setTimeout(() => { originalColor(); }, 300); } } $(".btn-teal").click(function () { playerBtnClick(1, flashTeal, tealAudio); }); $(".btn-white").click(function () { playerBtnClick(2, flashWhite, whiteAudio); }); $(".btn-purple").click(function () { playerBtnClick(3, flashPurple, purpleAudio); }); $(".btn-grey").click(function () { playerBtnClick(4, flashGrey, greyAudio); }); // Checks the player's answer during gameplay function checkAnswer() { // If the player sequence and computer sequence do not match, hasSequenceMatched is set to false if (playerSequence[playerSequence.length - 1] !== computerSequence[playerSequence.length - 1]) hasSequenceMatched = false; // Checks if the player sequence has met the win criteria and calls the win function if (playerSequence.length == maxFlashes && hasSequenceMatched) { winGame(); } // If the player sequence does not match the computer sequence if (!hasSequenceMatched) { flashAll(); turnCounter.innerHTML = "GHASTLY!"; setTimeout(() => { turnCounter.innerHTML = turn; originalColor(); }, flashInterval); isMuted = true; // If the game is lost the highest score modal is triggered - stored in modal.js highScoreModalTrigger(); }
turn++; checkForLevelIncrement(turn); highestScoreCounter++; playerSequence = []; isComputerTurn = true; flashCounter = 0; turnCounter.innerHTML = turn; intervalRef = setInterval(gamePlay, flashInterval); } // Checks to see if a new highest score is reached checkForNewHighScore(); } // Instructions for if the player reaches maxFlashes function winGame() { flashAll(); turnCounter.innerHTML = "Win"; isPlayerTurn = false; hasPlayerWon = true; winModalTrigger(); } // Local storage of the game's highest scores // Checks to see if a new highest score is reached; compare as numbers, since innerHTML is a string function checkForNewHighScore() { if (turn > Number(highScore.innerHTML)) { highScore.innerHTML = highestScoreCounter; localStorage.setItem("newHighScore", turn.toString()); return true; } } // Updates the highest score if necessary function setNewHighScore() { if (turn > Number(highScore.innerHTML)) { highScore.innerHTML = updateHighScore; } } // Gameplay Button Flashes // Makes all buttons opacity change function flashTeal() { $(".btn-teal").css("background-color", "rgba(78, 160, 174, 0.5)"); } function flashWhite() { $(".btn-white").css("background-color", "rgba(237, 239, 251, 0.5)"); } function flashPurple() { $(".btn-purple").css("background-color", "rgba(108, 83, 164, 0.5)"); } function flashGrey() { $(".btn-grey").css("background-color", "rgba(4, 0, 0, 0.5)"); } function flashAll() { flashTeal(); flashWhite(); flashPurple(); flashGrey(); } // Makes all buttons return to full opacity function originalColor() { $(".btn-teal").css("background-color", "#4ea0ae"); $(".btn-white").css("background-color", "#edeffb"); $(".btn-purple").css("background-color", "#6c53a4"); $(".btn-grey").css("background-color", "#040000"); }
// If the player is correct in their sequence but has not met the win criteria if (turn == playerSequence.length && hasSequenceMatched && !hasPlayerWon) {
lib.rs
use skulpin_renderer::ash; pub use sdl2; pub use ash::version::{DeviceV1_0, EntryV1_0, InstanceV1_0}; use ash::vk; use skulpin_renderer::PhysicalSize; use skulpin_renderer::LogicalSize; use skulpin_renderer::Window; use std::ffi::CStr; use ash::prelude::VkResult; #[cfg(target_os = "windows")] const DEFAULT_DPI: f32 = 96.0; pub struct Sdl2Window<'a> { window: &'a sdl2::video::Window, } impl<'a> Sdl2Window<'a> { pub fn new(window: &'a sdl2::video::Window) -> Self { Sdl2Window { window } } #[cfg(target_os = "windows")] fn compute_scale_factor(&self) -> Option<f64> { let display_index = self.window.display_index().ok()?; let system = self.window.subsystem(); let (_, dpi, _) = system.display_dpi(display_index).ok()?; Some((DEFAULT_DPI / dpi).into()) } } impl<'a> Window for Sdl2Window<'a> { fn
(&self) -> PhysicalSize { let physical_size = self.window.vulkan_drawable_size(); PhysicalSize::new(physical_size.0, physical_size.1) } #[cfg(target_os = "windows")] fn logical_size(&self) -> LogicalSize { let physical_size = self.physical_size(); physical_size.to_logical(self.scale_factor()) } #[cfg(not(target_os = "windows"))] fn logical_size(&self) -> LogicalSize { let logical_size = self.window.size(); LogicalSize::new(logical_size.0, logical_size.1) } #[cfg(target_os = "windows")] fn scale_factor(&self) -> f64 { self.compute_scale_factor().unwrap_or(1.0) } #[cfg(not(target_os = "windows"))] fn scale_factor(&self) -> f64 { let logical_size = self.window.size(); let drawable_size = self.window.drawable_size(); logical_size.0 as f64 / drawable_size.0 as f64 } unsafe fn create_vulkan_surface( &self, entry: &ash::Entry, instance: &ash::Instance, ) -> VkResult<vk::SurfaceKHR> { ash_window::create_surface(entry, instance, self.window, None) } fn extension_names(&self) -> VkResult<Vec<&'static CStr>> { ash_window::enumerate_required_extensions(self.window) } }
physical_size
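A construction sketch (title and size are placeholders): the adapter borrows an sdl2 window that was created with Vulkan support, which is what create_vulkan_surface and extension_names rely on.

// Hypothetical construction of the adapter; title and size are placeholders.
// Outside this crate, `use skulpin_renderer::Window;` is needed for the trait methods.
fn make_window() -> Result<(), String> {
    let sdl = sdl2::init()?;
    let video = sdl.video()?;
    let window = video
        .window("skulpin", 900, 600)
        .vulkan() // required for surface creation and extension queries
        .resizable()
        .build()
        .map_err(|e| e.to_string())?;
    let adapter = Sdl2Window::new(&window);
    let _size = adapter.physical_size();
    Ok(())
}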
example2.go
// Copyright 2014 Ardan Studios // // All material is licensed under the Apache License Version 2.0, January 2004 // http://www.apache.org/licenses/LICENSE-2.0 // Sample program to show how to use the WithCancel function. package main import ( "context" "fmt" "time" ) func main() {
// Create a context that is cancellable only manually. // The cancel function must be called regardless of the outcome. ctx, cancel := context.WithCancel(context.Background()) defer cancel() // Ask the goroutine to do some work for us. go func() { // Wait for the work to finish. If it takes too long move on. select { case <-time.After(100 * time.Millisecond): fmt.Println("moving on") case <-ctx.Done(): fmt.Println("work complete") } }() // Simulate work. time.Sleep(50 * time.Millisecond) // Report the work is done. cancel() // Just hold the program to see the output. time.Sleep(time.Second) }
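For comparison (a sketch, not from the source): context.WithTimeout delivers the same Done signal automatically after a deadline, instead of requiring a manual cancel call. The cancel function must still be deferred to release resources.

// Hypothetical variant using WithTimeout instead of manual cancellation.
package main

import (
	"context"
	"fmt"
	"time"
)

func main() {
	// The context cancels itself 50ms from now.
	ctx, cancel := context.WithTimeout(context.Background(), 50*time.Millisecond)
	defer cancel()

	select {
	case <-time.After(100 * time.Millisecond):
		fmt.Println("moving on")
	case <-ctx.Done():
		fmt.Println("work complete")
	}
}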
rest_api.py
import json from django.conf import settings from django.http import Http404, HttpResponseRedirect, HttpResponse from django.conf.urls import url, include from rest_framework import routers, serializers, viewsets, generics from rest_framework import status from rest_framework.decorators import api_view, authentication_classes, permission_classes from rest_framework.response import Response from rest_framework.parsers import JSONParser from globaly.models import GlobalyTags from django.contrib.auth.models import User from user.rest_authentication import IsAuthenticated from django.db.models import Q from decimal import Decimal as D from django.db.models import Max from django.utils.translation import ugettext_lazy as _ from django.dispatch import receiver from django.db.models.signals import post_save from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ObjectDoesNotExist class
(serializers.HyperlinkedModelSerializer): class Meta: model = GlobalyTags fields = ( 'id', 'name', 'slug', 'meta_title', 'meta_description', 'publish', 'created', 'modified', ) @api_view(['GET']) @permission_classes((IsAuthenticated,)) def tag_list(request): if request.method == 'GET': tags = GlobalyTags.objects.filter(autor=request.user) serializer = GlobalyTagsSerializer( tags, many=True, context={'request': request} ) return Response(serializer.data) @api_view(['POST']) @permission_classes((IsAuthenticated,)) def tag_details(request): if request.method == 'POST': try: pk = request.data.get('id') tag = GlobalyTags.objects.get( pk=pk ) if tag.autor != request.user: return Response( status=status.HTTP_404_NOT_FOUND ) except GlobalyTags.DoesNotExist: return Response( status=status.HTTP_404_NOT_FOUND ) serializer = GlobalyTagsSerializer( tag, context={'request': request} ) return Response(serializer.data) return Response( status=status.HTTP_204_NO_CONTENT ) @api_view(['PUT','POST','DELETE']) @permission_classes((IsAuthenticated,)) def tag(request): if request.method == 'POST': serializer = GlobalyTagsSerializer( data=request.data, context={'request': request} ) if serializer.is_valid(): serializer.save(autor=request.user) return Response(serializer.data) return Response( serializer.errors, status=status.HTTP_400_BAD_REQUEST ) if request.method == 'PUT' or request.method == 'DELETE': try: pk = request.data.get('id') tag = GlobalyTags.objects.get( pk=int(pk) ) except GlobalyTags.DoesNotExist: return Response( status=status.HTTP_404_NOT_FOUND ) if request.method == 'PUT': serializer = GlobalyTagsSerializer( tag, data=request.data, context={'request': request} ) if serializer.is_valid(): serializer.save() return Response(serializer.data) if request.method == 'DELETE': tag.delete() return Response( status=status.HTTP_204_NO_CONTENT ) return Response( serializer.errors, status=status.HTTP_400_BAD_REQUEST )
GlobalyTagsSerializer
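A wiring sketch (route patterns and names are assumptions; the module path is inferred from the imports): the three views above map onto URL routes like so.

# Hypothetical urls.py wiring for the views above; routes and names are assumptions.
from django.conf.urls import url
from globaly import rest_api

urlpatterns = [
    url(r'^tags/$', rest_api.tag_list, name='tag-list'),
    url(r'^tags/details/$', rest_api.tag_details, name='tag-details'),
    url(r'^tags/manage/$', rest_api.tag, name='tag-manage'),
]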
test_collector.py
# Copyright (c) Microsoft Corporation # All rights reserved. # # MIT License # # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated # documentation files (the "Software"), to deal in the Software without restriction, including without limitation # the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and # to permit persons to whom the Software is furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING # BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. import os import sys import unittest import datetime import time import logging import base sys.path.append(os.path.abspath("../src/")) import collector import nvidia import docker_inspect from collector import ContainerCollector from collector import GpuCollector logger = logging.getLogger(__name__) class TestContainerCollector(base.TestBase): """ Test ContainerCollector in collecotr.py """ def test_parse_from_labels(self): inspect_result = docker_inspect.InspectResult( "openmindstudio", "trialslot_nnimain_d65bc5ac", "tuner", "0", "this_is_pod_name_val", "0,1,", 12345, "[email protected]", "platform", False, ) gpu_ids, labels = ContainerCollector.parse_from_labels( inspect_result, None) self.assertEqual(["0", "1"], gpu_ids) target_labels = { "username": "openmindstudio", "job_name": "trialslot_nnimain_d65bc5ac", "role_name": "tuner", "task_index": "0", "pod_name": "this_is_pod_name_val", "user_email": "[email protected]", "vc_name": "platform", } self.assertEqual(target_labels, labels) def test_infer_service_name(self): self.assertIsNone( ContainerCollector.infer_service_name( "k8s_POD_alertmanager-7884c59f78-66r86_default_0a32e30a-f6ae-11e8" )) self.assertEqual( "alertmanager", ContainerCollector.infer_service_name( "k8s_alertmanager_alertmanager-7884c59f78-66r86_default_0a32e30a-f6ae-11e8-a62d-000d3ab25bb6_2" )) self.assertIsNone( ContainerCollector.infer_service_name( "k8s_kube-scheduler_kube-scheduler-10.151.40.4_kube-system_f1164d931979939cf0601155df9c748a_6" )) class TestDockerCollector(base.TestBase): """ Test DockerCollector in collector.py """ def assert_metrics(self, metrics): self.assertEqual(1, len(metrics)) self.assertEqual(1, len(metrics[0].samples)) sample = metrics[0].samples[0] self.assertEqual(1, len(sample[1])) # label keys self.assertEqual(1, sample[2]) # sample value def test_impl(self): _, c = collector.instantiate_collector("test_docker_collector1", 0.5, datetime.timedelta(seconds=1), collector.DockerCollector) self.assert_metrics(c.collect_impl()) def test_base_collector(self): """ actually setup DockerCollector thread, and test, since this is multi-thread test case, maybe sensitive to the system load """ ref = collector.make_collector("test_docker_collector2", 0.5, datetime.timedelta(seconds=10), collector.DockerCollector) metrics = None for i in range(20): metrics = ref.get(datetime.datetime.now()) if metrics is not None: break time.sleep(0.1) 
self.assert_metrics(metrics) class TestZombieCollector(base.TestBase): """ Test ZombieCollector in collector.py """ def setUp(self): # Because prometheus forbid same metric name, and we generate metric # in from name, we need to differentiate name using time. t = str(time.time()).replace(".", "_") decay_time = datetime.timedelta(seconds=1) _, self.collector = collector.instantiate_collector( "test_zombie_collector" + t, 0.5, decay_time, collector.ZombieCollector, collector.AtomicRef(decay_time), collector.AtomicRef(decay_time)) def test_update_zombie_count_type1(self): start = datetime.datetime.now() one_sec = datetime.timedelta(seconds=1) type1_recorder = self.collector.type1_zombies self.assertEqual( set(), self.collector.update_zombie_count_type1({"a", "b"}, start)) self.assertEqual(2, len(type1_recorder)) self.assertEqual( set(), self.collector.update_zombie_count_type1( {"a", "b"}, start + type1_recorder.decay_time - one_sec)) self.assertEqual(2, len(type1_recorder)) self.assertEqual({"a", "b"}, self.collector.update_zombie_count_type1( {"a", "b"}, start + type1_recorder.decay_time + one_sec)) self.assertEqual(2, len(type1_recorder)) self.assertEqual({"a"}, self.collector.update_zombie_count_type1( {"a"}, start + type1_recorder.decay_time + 2 * one_sec)) self.assertEqual(1, len(type1_recorder)) self.assertEqual( set(), self.collector.update_zombie_count_type1( {}, start + type1_recorder.decay_time + 3 * one_sec)) self.assertEqual(0, len(type1_recorder)) def test_update_zombie_count_type2(self): start = datetime.datetime.now()
"43ffe701d883": { "name": "core-caffe2_resnet50_20181012040921.586-container_e03_1539312078880_0780_01_000002", "id": "43ffe701d883" }, "8de2f53e64cb": { "name": "container_e03_1539312078880_0780_01_000002", "id": "8de2f53e64cb" } } type2_recorder = self.collector.type2_zombies self.assertEqual(set(), self.collector.update_zombie_count_type2(stats, start)) stats.pop("8de2f53e64cb") self.assertEqual( set(), self.collector.update_zombie_count_type2(stats, start + one_sec)) self.assertEqual( set(), self.collector.update_zombie_count_type2( stats, start + type2_recorder.decay_time)) self.assertEqual({"43ffe701d883"}, self.collector.update_zombie_count_type2( stats, start + type2_recorder.decay_time + 2 * one_sec)) stats.pop("43ffe701d883") self.assertEqual( set(), self.collector.update_zombie_count_type2( stats, start + type2_recorder.decay_time + 3 * one_sec)) class TestGpuCollector(base.TestBase): """ Test GpuCollector in collecotr.py """ def make_pid_to_cid_fn(self, mapping): def fn(pid): if pid in mapping: return True, mapping[pid] return False, "" return fn def test_convert_to_metrics(self): # sample may not ordered, and can not assertEqual directly, so tear them apart gpu_info = nvidia.construct_gpu_info([ nvidia.NvidiaGpuStatus(20, 21, [22, 33, 44], nvidia.EccError(), "0", "GPU-uuid0", 37.0) ]) zombie_info = {"abc", "def"} pid_to_cid_mapping = {33: "def", 22: "ghi"} # only 33 is zombie metrics = GpuCollector.convert_to_metrics( gpu_info, zombie_info, self.make_pid_to_cid_fn(pid_to_cid_mapping), 20 * 1024) core_utils, mem_utils, ecc_errors, mem_leak, external_process, zombie_container, gpu_temp, gpu_retired = metrics target_core_utils = collector.gen_gpu_util_gauge() target_core_utils.add_metric(["0", "GPU-uuid0"], 20) self.assertEqual(target_core_utils, core_utils) target_mem_utils = collector.gen_gpu_mem_util_gauge() target_mem_utils.add_metric(["0", "GPU-uuid0"], 21) self.assertEqual(target_mem_utils, mem_utils) target_ecc_errors = collector.gen_gpu_ecc_counter() target_ecc_errors.add_metric(["0", "GPU-uuid0", "volatile_single"], 0) target_ecc_errors.add_metric(["0", "GPU-uuid0", "volatile_double"], 0) target_ecc_errors.add_metric(["0", "GPU-uuid0", "aggregated_single"], 0) target_ecc_errors.add_metric(["0", "GPU-uuid0", "aggregated_double"], 0) self.assertEqual(target_ecc_errors, ecc_errors) target_mem_leak = collector.gen_gpu_memory_leak_counter() self.assertEqual(target_mem_leak, mem_leak) target_external_process = collector.gen_gpu_used_by_external_process_counter( ) target_external_process.add_metric(["0", "44"], 1) self.assertEqual(target_external_process, external_process) target_zombie_container = collector.gen_gpu_used_by_zombie_container_counter( ) target_zombie_container.add_metric(["0", "def"], 1) self.assertEqual(target_zombie_container, zombie_container) target_gpu_temp = collector.gen_gpu_temperature_gauge() target_gpu_temp.add_metric(["0", "GPU-uuid0"], 37.0) self.assertEqual(target_gpu_temp, gpu_temp) # test minor 1 gpu_info = nvidia.construct_gpu_info([ nvidia.NvidiaGpuStatus( 30, 31, [55, 123], nvidia.EccError(volatile_single=2, volatile_double=3, aggregated_single=4, aggregated_double=5), "1", "GPU-uuid1", 24.0) ]) metrics = GpuCollector.convert_to_metrics( gpu_info, zombie_info, self.make_pid_to_cid_fn(pid_to_cid_mapping), 20 * 1024) core_utils, mem_utils, ecc_errors, mem_leak, external_process, zombie_container, gpu_temp, gpu_retired = metrics target_core_utils = collector.gen_gpu_util_gauge() target_core_utils.add_metric(["1", "GPU-uuid1"], 30) 
self.assertEqual(target_core_utils, core_utils) target_mem_utils = collector.gen_gpu_mem_util_gauge() target_mem_utils.add_metric(["1", "GPU-uuid1"], 31) self.assertEqual(target_mem_utils, mem_utils) target_ecc_errors = collector.gen_gpu_ecc_counter() target_ecc_errors.add_metric(["1", "GPU-uuid1", "volatile_single"], 2) target_ecc_errors.add_metric(["1", "GPU-uuid1", "volatile_double"], 3) target_ecc_errors.add_metric(["1", "GPU-uuid1", "aggregated_single"], 4) target_ecc_errors.add_metric(["1", "GPU-uuid1", "aggregated_double"], 5) self.assertEqual(target_ecc_errors, ecc_errors) target_mem_leak = collector.gen_gpu_memory_leak_counter() self.assertEqual(target_mem_leak, mem_leak) target_external_process = collector.gen_gpu_used_by_external_process_counter( ) target_external_process.add_metric(["1", "55"], 1) target_external_process.add_metric(["1", "123"], 1) self.assertEqual(target_external_process, external_process) target_zombie_container = collector.gen_gpu_used_by_zombie_container_counter( ) self.assertEqual(target_zombie_container, zombie_container) target_gpu_temp = collector.gen_gpu_temperature_gauge() target_gpu_temp.add_metric(["1", "GPU-uuid1"], 24.0) self.assertEqual(target_gpu_temp, gpu_temp) # test minor 2 gpu_info = nvidia.construct_gpu_info([ nvidia.NvidiaGpuStatus(40, 20 * 1024 * 1024, [], nvidia.EccError(), "2", "GPU-uuid2", 30.0) ]) metrics = GpuCollector.convert_to_metrics( gpu_info, zombie_info, self.make_pid_to_cid_fn(pid_to_cid_mapping), 20 * 1024 * 1024) core_utils, mem_utils, ecc_errors, mem_leak, external_process, zombie_container, gpu_temp, gpu_retired = metrics target_core_utils = collector.gen_gpu_util_gauge() target_core_utils.add_metric(["2", "GPU-uuid2"], 40) self.assertEqual(target_core_utils, core_utils) target_mem_utils = collector.gen_gpu_mem_util_gauge() target_mem_utils.add_metric(["2", "GPU-uuid2"], 20 * 1024 * 1024) self.assertEqual(target_mem_utils, mem_utils) target_ecc_errors = collector.gen_gpu_ecc_counter() target_ecc_errors.add_metric(["2", "GPU-uuid2", "volatile_single"], 0) target_ecc_errors.add_metric(["2", "GPU-uuid2", "volatile_double"], 0) target_ecc_errors.add_metric(["2", "GPU-uuid2", "aggregated_single"], 0) target_ecc_errors.add_metric(["2", "GPU-uuid2", "aggregated_double"], 0) self.assertEqual(target_ecc_errors, ecc_errors) target_mem_leak = collector.gen_gpu_memory_leak_counter() self.assertEqual(target_mem_leak, mem_leak) target_external_process = collector.gen_gpu_used_by_external_process_counter( ) self.assertEqual(target_external_process, external_process) target_zombie_container = collector.gen_gpu_used_by_zombie_container_counter( ) self.assertEqual(target_zombie_container, zombie_container) target_gpu_temp = collector.gen_gpu_temperature_gauge() target_gpu_temp.add_metric(["2", "GPU-uuid2"], 30.0) self.assertEqual(target_gpu_temp, gpu_temp) # test memory leak gpu_info = nvidia.construct_gpu_info([ nvidia.NvidiaGpuStatus(40, 20 * 1024 * 1024 + 1, [], nvidia.EccError(), "3", "GPU-uuid3", 30.0) ]) metrics = GpuCollector.convert_to_metrics( gpu_info, zombie_info, self.make_pid_to_cid_fn(pid_to_cid_mapping), 20 * 1024) core_utils, mem_utils, ecc_errors, mem_leak, external_process, zombie_container, gpu_temp, gpu_retired = metrics target_mem_leak = collector.gen_gpu_memory_leak_counter() target_mem_leak.add_metric(["3", "GPU-uuid3"], 1) self.assertEqual(target_mem_leak, mem_leak) def test_convert_to_metrics_with_no_zombie_info_BUGFIX(self): gpu_info = nvidia.construct_gpu_info([ nvidia.NvidiaGpuStatus(20, 21, [22, 33, 44], 
nvidia.EccError(), "0", "GPU-uuid0", 40.0) ]) # zombie_info is empty should also have external process metric zombie_info = [] pid_to_cid_mapping = { 33: "def", 22: "ghi" } # only 44 is external process metrics = GpuCollector.convert_to_metrics( gpu_info, zombie_info, self.make_pid_to_cid_fn(pid_to_cid_mapping), 20 * 1024) core_utils, mem_utils, ecc_errors, mem_leak, external_process, zombie_container, gpu_temp, gpu_retired = metrics self.assertEqual(0, len(zombie_container.samples)) self.assertEqual(1, len(external_process.samples)) self.assertEqual("0", external_process.samples[0].labels["minor_number"]) self.assertEqual("44", external_process.samples[0].labels["pid"]) # zombie_info is None should also have external process metric zombie_info = None metrics = GpuCollector.convert_to_metrics( gpu_info, zombie_info, self.make_pid_to_cid_fn(pid_to_cid_mapping), 20 * 1024) core_utils, mem_utils, ecc_errors, mem_leak, external_process, zombie_container, gpu_temp, gpu_retired = metrics self.assertEqual(0, len(zombie_container.samples)) self.assertEqual(1, len(external_process.samples)) self.assertEqual("0", external_process.samples[0].labels["minor_number"]) self.assertEqual("44", external_process.samples[0].labels["pid"]) def test_convert_to_metrics_with_real_id_BUGFIX(self): gpu_info = nvidia.construct_gpu_info([ nvidia.NvidiaGpuStatus(20, 21, [22], nvidia.EccError(), "0", "GPU-uuid0", 50.0) ]) # zombie_info is empty should also have external process metric zombie_info = {"ce5de12d6275"} pid_to_cid_mapping = { 22: "ce5de12d6275dc05c9ec5b7f58484f075f4775d8f54f6a4be3dc1439344df356" } metrics = GpuCollector.convert_to_metrics( gpu_info, zombie_info, self.make_pid_to_cid_fn(pid_to_cid_mapping), 20 * 1024) core_utils, mem_utils, ecc_errors, mem_leak, external_process, zombie_container, gpu_temp, gpu_retired = metrics self.assertEqual(1, len(zombie_container.samples)) self.assertEqual("0", zombie_container.samples[0].labels["minor_number"]) self.assertEqual("ce5de12d6275", zombie_container.samples[0].labels["container_id"]) class TestAtomicRef(base.TestBase): """ Test AtomicRef in collecotr.py """ def test_expiration(self): ref = collector.AtomicRef(datetime.timedelta(seconds=10)) now = datetime.datetime.now() delta = datetime.timedelta(seconds=1) ref.set(1, now) self.assertEquals(1, ref.get(now)) self.assertEquals(1, ref.get(now - delta)) self.assertEquals(1, ref.get(now + delta)) self.assertEquals(1, ref.get(now + delta * 10)) self.assertEquals(None, ref.get(now + delta * 11)) self.assertEquals(1, ref.get(now + delta * 10)) ref.set(2, now + delta) self.assertEquals(2, ref.get(now)) self.assertEquals(2, ref.get(now + delta * 10)) self.assertEquals(2, ref.get(now + delta * 11)) self.assertEquals(None, ref.get(now + delta * 12)) if __name__ == '__main__': unittest.main()
one_sec = datetime.timedelta(seconds=1) stats = {
sp_config.py
""" Sponsored project configuration classes USAGE: git clone https://github.com/cBioPortal/cbioportal.git python runSP.py AKT1 ../cbioportal/ --staging """ import os import random import string import pandas as pd import synapseclient from . import new_redcap_export_mapping from . import sp_redcap_export_mapping class Akt1(sp_redcap_export_mapping.SponsoredProjectRunner): """ AKT1 PROCESSES - ONE TIMELINE FILE - CLINICAL FILE OS_MONTHS = death_date_int - mets_disease_date_int OS_MONTHS_PRIMARY = death_date_int - primary_dx_date_int All dates are converted from days to months (days/30.4) Add headers REMOVE PATIENTS/SAMPLES THAT DON'T HAVE GENIE SAMPLE IDS """ _SPONSORED_PROJECT = "AKT1" _DATES = ["death_date_int","follow_up_date_int","primary_dx_date_int","lrr_date_int","mets_disease_date_int","sample_date_int_1", "sequence_report_date_int_1","sequence_report_date_int_1_static","sample_date_int_2","sample_date_int_2_static", "sequence_report_date_int_2","sequence_report_date_int_2_static","sequence_report_date_int_3_static", "OS_MONTHS","OS_MONTHS_PRIMARY"] _CASE_LIST_MAF_SAMPLES_TEMPLATE = "cancer_study_identifier: genie_akt1\nstable_id: genie_akt1_sequenced\ncase_list_category: all_cases_with_mutation_data\ncase_list_name: Sequenced Tumors\ncase_list_description: All sequenced samples (%s samples)\ncase_list_ids: %s" _CASE_LIST_PATH = os.path.join(_SPONSORED_PROJECT,'case_lists') _UNMAPPED_SYN_ID = "syn11066652" _MAPPED_SYN_ID = "syn8404878" _CASE_LIST_SYN_ID = "syn10145838" _SP_SYN_ID = "syn8363325" _REDCAP_TO_CBIOMAPPING_SYNID = "syn8220815" _SP_REDCAP_EXPORTS_SYNID = "syn8404875" #Storage of not found samples _NUM_SAMPLE_COLS = 3 def addOSMonths(self, sponsoredProject_mapped_df): #Must add new date fields to the DATE variable along with add to the mapping table: syn8220815 sponsoredProject_mapped_df['OS_MONTHS'] = sponsoredProject_mapped_df['death_date_int'] - sponsoredProject_mapped_df['mets_disease_date_int'] sponsoredProject_mapped_df['OS_MONTHS_PRIMARY'] = sponsoredProject_mapped_df['death_date_int'] - sponsoredProject_mapped_df['primary_dx_date_int'] return(sponsoredProject_mapped_df) def createTemporaryGenieId(self, x, tempIdMapping): uniqId = x['record_id'] + x['redcap_data_access_group'] tempIdMap = tempIdMapping['patientId'][tempIdMapping['uniqueId'] == uniqId] tempId = 'GENIE-%s-%s' % (x['redcap_data_access_group'],''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10))) if len(tempIdMap) == 0: return(tempId) else: return(tempIdMap.values[0]) # if sum(tempIdMapping['uniqueId'] == uniqId) == 0: # #syn.store(synapseclient.Table(syn.get("syn10164044"),[[uniqId, tempId, SPONSORED_PROJECT]])) # return(tempId) # elif pd.np.isnan(temp['tempPatientId'][tempIdMapping['uniqueId'] == uniqId].values[0]): # else: # return(tempIdMapping[tempIdMapping['uniqueId'] == uniqId]['tempPatientId'].values[0]) def createNullPatients(self, sponsoredProject_mapped_df, tempIdMappingDf): print("RENAMING %s NULL PATIENTS" % sum(sponsoredProject_mapped_df['genie_patient_id'].isnull())) #Create temp patient Id allNullPatients = sponsoredProject_mapped_df[['record_id','redcap_data_access_group','genie_patient_id']][sponsoredProject_mapped_df['genie_patient_id'].isnull()] temporaryIds = allNullPatients.apply(lambda x: self.createTemporaryGenieId(x, tempIdMappingDf), axis =1) if sponsoredProject_mapped_df['genie_patient_id'].isnull().any(): sponsoredProject_mapped_df['genie_patient_id'][sponsoredProject_mapped_df['genie_patient_id'].isnull()] = temporaryIds assert 
sum(sponsoredProject_mapped_df['genie_patient_id'].isnull()) ==0, "Make sure there are no null genie patient Ids" sponsoredProject_mapped_df['genie_patient_id'] = sponsoredProject_mapped_df.apply(lambda x: self.checkGenieId(x, 'redcap_data_access_group','genie_patient_id'), axis=1) sponsoredProject_mapped_df.reset_index(inplace=True,drop=True) return(sponsoredProject_mapped_df, temporaryIds) def makeTimeLineDf(self, redCapExportDf, therapyRange = 18): START_DATE = [] STOP_DATE = [] TREATMENT_TYPE = [] SUBTYPE = [] AGENT = [] THERAPY_DRUG_CLINTRIAL = [] THERAPY_DRUG_AZD5363 = [] THERAPY_DRUG_OTHER = [] THERAPY_DRUG_DISCONTINUE = [] THERAPY_DRUG_REASON = [] THERAPY_COMBO_YN = [] THERAPY_COMBO_NUM = [] #THERAPY NUMBER for therapyNumber in range(1,therapyRange): therapyCols = [i for i in redCapExportDf if "therapy%d_" % therapyNumber in i] START_DATE.extend([i for i in therapyCols if "start_int" in i]) STOP_DATE.extend([i for i in therapyCols if "end_int" in i]) AGENT.extend([i for i in therapyCols if len(i.split("_")) == 2]) THERAPY_DRUG_CLINTRIAL.extend([i for i in therapyCols if "clintrial" in i]) THERAPY_DRUG_AZD5363.extend([i for i in therapyCols if "azd" in i]) THERAPY_DRUG_OTHER.extend([i for i in therapyCols if "other" in i]) THERAPY_DRUG_DISCONTINUE.extend([i for i in therapyCols if "discontinue" in i]) THERAPY_DRUG_REASON.extend([i for i in therapyCols if "reason" in i]) THERAPY_COMBO_YN.extend([i for i in therapyCols if "combo_yn" in i] * len([i for i in therapyCols if "start_int" in i])) THERAPY_COMBO_NUM.extend([i for i in therapyCols if "combo_num" in i]* len([i for i in therapyCols if "start_int" in i])) TREATMENT_TYPE.extend(["Medical Therapy %d" % therapyNumber]* len([i for i in therapyCols if "start_int" in i])) SUBTYPE.extend(["Chemo/Target/Immuno etc."] * len([i for i in therapyCols if "start_int" in i])) #OVARIAN ovarian = [i for i in redCapExportDf if "ovariansup" in i] ovarian_len = len([i for i in ovarian if "start_int" in i]) START_DATE.extend([i for i in ovarian if "start_int" in i]) STOP_DATE.extend([i for i in ovarian if "end_int" in i]) TREATMENT_TYPE.extend(["Ovarian Suppression At Primary"] * ovarian_len) SUBTYPE.extend(["Ovarian Suppression"] * ovarian_len) AGENT.extend(['']*ovarian_len) THERAPY_DRUG_CLINTRIAL.extend(['']*ovarian_len) THERAPY_DRUG_AZD5363.extend(['']*ovarian_len) THERAPY_DRUG_OTHER.extend(['']*ovarian_len) THERAPY_DRUG_DISCONTINUE.extend(['']*ovarian_len) THERAPY_DRUG_REASON.extend(['']*ovarian_len) THERAPY_COMBO_YN.extend(['']*ovarian_len) THERAPY_COMBO_NUM.extend(['']*ovarian_len) #HORMONE hormo = [i for i in redCapExportDf if "hormo" in i] hormo_len = len([i for i in hormo if "start_int" in i]) START_DATE.extend([i for i in hormo if "start_int" in i]) STOP_DATE.extend([i for i in hormo if "end_int" in i]) THERAPY_DRUG_CLINTRIAL.extend([i for i in hormo if "clintrial" in i]) THERAPY_DRUG_AZD5363.extend(['']*hormo_len) THERAPY_DRUG_OTHER.extend([i for i in hormo if "other" in i]) THERAPY_DRUG_DISCONTINUE.extend([i for i in hormo if "discon" in i]) THERAPY_DRUG_REASON.extend([i for i in hormo if "reason" in i]) AGENT.extend([i for i in hormo if "reason" not in i and "discon" not in i and "other" not in i and "clintrial" not in i and "start_int" not in i and "end_int" not in i and "therapy" not in i]) THERAPY_COMBO_YN.extend(['']*hormo_len) THERAPY_COMBO_NUM.extend(['']*hormo_len) SUBTYPE.extend(["Hormone Therapy"] * hormo_len) TREATMENT_TYPE.extend(["Medical Therapy 1"] * hormo_len) EVENT_TYPE = ["TREATMENT"]*len(AGENT) #METASTATIC DIAGNOSIS 
metaDiagnosis = pd.DataFrame() metaDiagnosis['PATIENT_ID'] = redCapExportDf['genie_patient_id'] #MET DISEASE IS TIMEPOINT 0 metaDiagnosis['START_DATE'] = 0 #metaDiagnosis['START_DATE'] = redCapExportDf['mets_disease_date_int'] metaDiagnosis['EVENT_TYPE'] = 'STATUS' metaDiagnosis['STATUS'] = 'Metastatic Diagnosis' metaDiagnosis = metaDiagnosis[~metaDiagnosis['START_DATE'].isnull()] removeCols = START_DATE+STOP_DATE+AGENT+THERAPY_DRUG_CLINTRIAL+THERAPY_DRUG_AZD5363+THERAPY_DRUG_OTHER+THERAPY_DRUG_DISCONTINUE+THERAPY_DRUG_REASON+THERAPY_COMBO_YN+THERAPY_COMBO_NUM lengths = set([ len(START_DATE), len(STOP_DATE), len(TREATMENT_TYPE), len(SUBTYPE), len(AGENT), len(THERAPY_DRUG_CLINTRIAL), len(THERAPY_DRUG_AZD5363), len(THERAPY_DRUG_OTHER), len(THERAPY_DRUG_DISCONTINUE), len(THERAPY_DRUG_REASON), len(THERAPY_COMBO_YN), len(THERAPY_COMBO_NUM), len(EVENT_TYPE)]) assert len(lengths) == 1,"Lengths must all be the same" total = pd.DataFrame() for i in range(len(redCapExportDf)): timelineDF = pd.DataFrame() timelineDF['PATIENT_ID'] = [redCapExportDf['genie_patient_id'][i]]*len(START_DATE) #timelineDF['START_DATE'] = redCapExportDf.ix[i][START_DATE].reset_index(drop=True) - redCapExportDf.ix[i]['primary_dx_date_int'] #timelineDF['STOP_DATE'] = redCapExportDf.ix[i][STOP_DATE].reset_index(drop=True) - redCapExportDf.ix[i]['primary_dx_date_int'] #MET DISEASE IS TIMEPOINT 0 timelineDF['START_DATE'] = redCapExportDf.iloc[i][START_DATE].reset_index(drop=True) - redCapExportDf.iloc[i]['mets_disease_date_int'] timelineDF['STOP_DATE'] = redCapExportDf.iloc[i][STOP_DATE].reset_index(drop=True) - redCapExportDf.iloc[i]['mets_disease_date_int'] timelineDF['EVENT_TYPE'] = EVENT_TYPE timelineDF['TREATMENT_TYPE'] = TREATMENT_TYPE timelineDF['SUBTYPE'] = SUBTYPE timelineDF['AGENT'] = redCapExportDf.iloc[i][AGENT].reset_index(drop=True) timelineDF['THERAPY_DRUG_CLINTRIAL'] = redCapExportDf.iloc[i][THERAPY_DRUG_CLINTRIAL].reset_index(drop=True) timelineDF['THERAPY_DRUG_AZD5363'] = redCapExportDf.iloc[i][THERAPY_DRUG_AZD5363].reset_index(drop=True) timelineDF['THERAPY_DRUG_OTHER'] = redCapExportDf.iloc[i][THERAPY_DRUG_OTHER].reset_index(drop=True) timelineDF['THERAPY_DRUG_DISCONTINUE'] = redCapExportDf.iloc[i][THERAPY_DRUG_DISCONTINUE].reset_index(drop=True) timelineDF['THERAPY_DRUG_REASON'] = redCapExportDf.iloc[i][THERAPY_DRUG_REASON].reset_index(drop=True) timelineDF['THERAPY_COMBO_YN'] = redCapExportDf.iloc[i][THERAPY_COMBO_YN].reset_index(drop=True) timelineDF['THERAPY_COMBO_NUM'] = redCapExportDf.iloc[i][THERAPY_COMBO_NUM].reset_index(drop=True) total = total.append(timelineDF) total['STATUS'] = '' ordering = total.columns total = total.append(metaDiagnosis) total = total[ordering] return(total,removeCols) def getSpecimen(self, getTimelineSpecimen): specimen = pd.DataFrame() specimen['PATIENT_ID'] = getTimelineSpecimen['PATIENT_ID'] specimen['START_DATE'] = getTimelineSpecimen.SEQUENCE_REPORT_DATE_INT_STATIC - getTimelineSpecimen.METS_DISEASE_DATE_INT specimen['EVENT_TYPE'] = 'SPECIMEN' specimen['SAMPLE_ID'] = getTimelineSpecimen['SAMPLE_ID'] specimen['SAMPLE_NOTES'] = getTimelineSpecimen.SEQUENCE_REPORT_DATE_INT_STATIC specimen = specimen[~specimen['START_DATE'].isnull()] return(specimen) class Erbb2(sp_redcap_export_mapping.SponsoredProjectRunner): _SPONSORED_PROJECT = "ERBB2" _DATES = ['follow_up_date_int','date_death_int','primary_dx_date_int','lrr_date_int','date_first_met_int', 'sample_date_int_1','seq_report_date_int_1','sample_date_int_2','seq_report_date_int_2','sample_date_int_3', 
'sequence_report_date_int_3','sample_date_int_4','sequence_report_date_int_4','sample_date_int_5','sequence_report_date_int_5', 'sample_date_int_6','seq_report_date_int_6','sample_date_int_7','seq_report_date_int_7','sample_date_int_8', 'sequence_report_date_int_8','sample_date_int_9','sequence_report_date_int_9','sample_date_int_10', 'sequence_report_date_int_10','date_bso_int','OS_MONTHS','OS_MONTHS_PRIMARY'] _CASE_LIST_MAF_SAMPLES_TEMPLATE = "cancer_study_identifier: genie_erbb2\nstable_id: genie_erbb2_sequenced\ncase_list_category: all_cases_with_mutation_data\ncase_list_name: Sequenced Tumors\ncase_list_description: All sequenced samples (%s samples)\ncase_list_ids: %s" _CASE_LIST_PATH = os.path.join(_SPONSORED_PROJECT,'case_lists') _UNMAPPED_SYN_ID = "syn8356977" _MAPPED_SYN_ID = "syn8367692" _CASE_LIST_SYN_ID = "syn10145925" _SP_SYN_ID = "syn8363326" _REDCAP_TO_CBIOMAPPING_SYNID = "syn8363731" _SP_REDCAP_EXPORTS_SYNID = "syn8322425" #Storage of not found samples _NUM_SAMPLE_COLS = 10 def addOSMonths(self, sponsoredProject_mapped_df): #Must add new date fields to the DATE variable along with add to the mapping table: syn8220815 sponsoredProject_mapped_df['OS_MONTHS'] = sponsoredProject_mapped_df['date_death_int'] - sponsoredProject_mapped_df['date_first_met_int'] sponsoredProject_mapped_df['OS_MONTHS_PRIMARY'] = sponsoredProject_mapped_df['date_death_int'] - sponsoredProject_mapped_df['primary_dx_date_int'] return(sponsoredProject_mapped_df) def createTemporaryGenieId(self, x, tempIdMapping, patientIdCol): """ Create temporary genie id for those that don't have """ uniqId = x['record_id_patient_id'] + x['redcap_data_access_group'] if sum(tempIdMapping['uniqueId'] == uniqId) == 0: tempId = 'GENIE-%s-%s' % (x['redcap_data_access_group'],''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10))) self.syn.store(synapseclient.Table(self.syn.get("syn10164044"),[[uniqId, tempId]])) return(tempId) else: return(tempIdMapping[tempIdMapping['uniqueId'] == uniqId]['temporaryId'].values[0]) def createNullPatients(self, sponsoredProject_mapped_df, tempIdMappingDf): #### TIMELINE FILE sponsoredProject_mapped_df['redcap_data_access_group'] = [i.upper() for i in sponsoredProject_mapped_df['redcap_data_access_group']] allNullPatients = sponsoredProject_mapped_df[['record_id_patient_id','redcap_data_access_group']][sponsoredProject_mapped_df['record_id_patient_id'].isnull()] temporaryIds = allNullPatients.apply(lambda x: self.createTemporaryGenieId(x, tempIdMappingDf, 'record_id_patient_id'), axis =1) if not temporaryIds.empty: sponsoredProject_mapped_df['record_id_patient_id'][sponsoredProject_mapped_df['record_id_patient_id'].isnull()] = temporaryIds assert sum(sponsoredProject_mapped_df['record_id_patient_id'].isnull()) == 0, "Make sure there are no null genie patient Ids" sponsoredProject_mapped_df['record_id_patient_id'] = sponsoredProject_mapped_df.apply(lambda x: self.checkGenieId(x, 'redcap_data_access_group','record_id_patient_id'), axis=1) return(sponsoredProject_mapped_df, temporaryIds) def makeTimeLineDf(self, redCapExportDf, therapyRange = 16): START_DATE = [] STOP_DATE = [] TREATMENT_TYPE = [] SUBTYPE = [] AGENT = [] THERAPY_RESPONSE = [] THERAPY_DRUG_OTHER = [] THERAPY_DRUG_DISCONTINUE = [] THERAPY_DRUG_REASON = [] THERAPY_COMBO_YN = [] THERAPY_COMBO_NUM = [] ADD_TREATMENT = [] TREATMENT_SETTING = [] for therapyNumber in range(1,therapyRange): therapyCols = [i for i in redCapExportDf if ("therapy%d_" % therapyNumber in i or "combo_therapy_yn_%d" %therapyNumber == 
i or "add_treatment_%d" % therapyNumber == i or "treatment_setting_%d" % therapyNumber == i)] START_DATE.extend([i for i in therapyCols if "start_int" in i]) STOP_DATE.extend([i for i in therapyCols if "end_int" in i]) AGENT.extend([i for i in therapyCols if len(i.split("_")) == 2 and "response" not in i and "ctdrug" not in i]) THERAPY_DRUG_OTHER.extend([i for i in therapyCols if "other" in i]) THERAPY_DRUG_DISCONTINUE.extend([i for i in therapyCols if "discon" in i]) THERAPY_DRUG_REASON.extend([i for i in therapyCols if "reason" in i]) THERAPY_COMBO_YN.extend([i for i in therapyCols if "combo_therapy_yn" in i] * len([i for i in therapyCols if "start_int" in i])) THERAPY_COMBO_NUM.extend([i for i in therapyCols if "combo_num" in i]* len([i for i in therapyCols if "start_int" in i])) TREATMENT_TYPE.extend(["Medical Therapy %d" % therapyNumber]* len([i for i in therapyCols if "start_int" in i])) SUBTYPE.extend(["Chemo/Target/Immuno etc."] * len([i for i in therapyCols if "start_int" in i])) THERAPY_RESPONSE.extend([i for i in therapyCols if "response" in i] *len([i for i in therapyCols if "start_int" in i])) ADD_TREATMENT.extend([i for i in therapyCols if "add_treatment" in i] * len([i for i in therapyCols if "start_int" in i])) TREATMENT_SETTING.extend([i for i in therapyCols if "treatment_setting" in i] * len([i for i in therapyCols if "start_int" in i])) EVENT_TYPE = ["TREATMENT"]*len(AGENT) ADD_TREATMENT.extend(['']*4) #METASTATIC DIAGNOSIS metaDiagnosis = pd.DataFrame() #MET DISEASE IS TIMEPOINT 0 metaDiagnosis['PATIENT_ID'] = redCapExportDf['record_id_patient_id'] metaDiagnosis['START_DATE'] = 0 #metaDiagnosis['START_DATE'] = redCapExportDf['date_first_met_int'] metaDiagnosis['EVENT_TYPE'] = 'STATUS' metaDiagnosis['STATUS'] = 'Metastatic Diagnosis' metaDiagnosis = metaDiagnosis[~metaDiagnosis['START_DATE'].isnull()] removeCols = START_DATE+STOP_DATE+AGENT+THERAPY_DRUG_OTHER+THERAPY_RESPONSE+THERAPY_DRUG_DISCONTINUE+THERAPY_DRUG_REASON+THERAPY_COMBO_YN+THERAPY_COMBO_NUM+ADD_TREATMENT + TREATMENT_SETTING lengths = set([ len(START_DATE), len(STOP_DATE), len(TREATMENT_TYPE), len(SUBTYPE), len(AGENT), len(THERAPY_RESPONSE), len(THERAPY_DRUG_OTHER), len(TREATMENT_SETTING), len(ADD_TREATMENT), len(THERAPY_DRUG_DISCONTINUE), len(THERAPY_DRUG_REASON), len(THERAPY_COMBO_YN), len(THERAPY_COMBO_NUM), len(EVENT_TYPE)]) assert len(lengths) == 1,"Lengths must all be the same" total = pd.DataFrame() for i in range(len(redCapExportDf)): timelineDF = pd.DataFrame() timelineDF['PATIENT_ID'] = [redCapExportDf['record_id_patient_id'][i]]*len(START_DATE) if not pd.isnull(redCapExportDf.iloc[i]['date_first_met_int']): timelineDF['START_DATE'] = [start if pd.isnull(start) else int(start) - int(redCapExportDf.iloc[i]['date_first_met_int']) for start in redCapExportDf.iloc[i][START_DATE].reset_index(drop=True)] timelineDF['STOP_DATE'] = [end if pd.isnull(end) else int(end) - int(redCapExportDf.iloc[i]['date_first_met_int']) for end in redCapExportDf.iloc[i][STOP_DATE].reset_index(drop=True)] else: timelineDF['START_DATE'] = pd.np.nan timelineDF['STOP_DATE'] = pd.np.nan timelineDF['EVENT_TYPE'] = EVENT_TYPE timelineDF['TREATMENT_TYPE'] = TREATMENT_TYPE timelineDF['SUBTYPE'] = SUBTYPE timelineDF['AGENT'] = redCapExportDf.iloc[i][AGENT].reset_index(drop=True) timelineDF['THERAPY_DRUG_OTHER'] = redCapExportDf.iloc[i][THERAPY_DRUG_OTHER].reset_index(drop=True) timelineDF['THERAPY_DRUG_DISCONTINUE'] = redCapExportDf.iloc[i][THERAPY_DRUG_DISCONTINUE].reset_index(drop=True) timelineDF['THERAPY_DRUG_REASON'] = 
redCapExportDf.iloc[i][THERAPY_DRUG_REASON].reset_index(drop=True) timelineDF['THERAPY_COMBO_YN'] = redCapExportDf.iloc[i][THERAPY_COMBO_YN].reset_index(drop=True) timelineDF['THERAPY_COMBO_NUM'] = redCapExportDf.iloc[i][THERAPY_COMBO_NUM].reset_index(drop=True) total = total.append(timelineDF) total['STATUS'] = '' ordering = total.columns total = total.append(metaDiagnosis) total = total[ordering] return(total, removeCols) def getSpecimen(self, getTimelineSpecimen): specimen = pd.DataFrame() specimen['PATIENT_ID'] = getTimelineSpecimen['PATIENT_ID'] getTimelineSpecimen = getTimelineSpecimen[~getTimelineSpecimen.SEQUENCE_REPORT_DATE_INT_STATIC.isnull()] getTimelineSpecimen = getTimelineSpecimen[~getTimelineSpecimen.METS_DISEASE_DATE_INT.isnull()] specimen['START_DATE'] = getTimelineSpecimen.SEQUENCE_REPORT_DATE_INT_STATIC.astype(int) - getTimelineSpecimen.METS_DISEASE_DATE_INT.astype(int) specimen['EVENT_TYPE'] = 'SPECIMEN' specimen['SAMPLE_ID'] = getTimelineSpecimen['SAMPLE_ID'] specimen['SAMPLE_NOTES'] = getTimelineSpecimen.SEQUENCE_REPORT_DATE_INT_STATIC specimen = specimen[~specimen['START_DATE'].isnull()] return(specimen) class Fgfr4(new_redcap_export_mapping.SponsoredProjectRunner): _DATA_ELEMENT_SYN_ID = "syn12032922" _SPONSORED_PROJECT = 'FGFR4' # No need to define in class _CASE_LIST_PATH = os.path.join(_SPONSORED_PROJECT, 'case_lists') _NUM_COUNTS = 4 _REDCAP_TO_CBIOMAPPING_SYNID = "syn15572052" _UNLABELLED_SYN_ID = "syn15341849" _LABELLED_SYN_ID = "syn15341838" # Storage of not found samples _SP_REDCAP_EXPORTS_SYNID = "syn11812526" _SP_SYN_ID = "syn14721789" _CASE_LIST_MAF_SAMPLES_TEMPLATE = ( "cancer_study_identifier: genie_fgfr4\n" "stable_id: genie_fgfr4_sequenced\n" "case_list_category: all_cases_with_mutation_data\n" "case_list_name: Sequenced Tumors\n" "case_list_description: All sequenced samples " "(%s samples)\ncase_list_ids: %s") _CASE_LIST_SYN_ID = "syn14721794" # def addOSMonths(self, sponsoredProject_mapped_df): # ''' # Must add new date fields to the DATE variable along with add # to the mapping table: syn8220815 # ''' # sponsoredProject_mapped_df['OS_MONTHS'] = \ # sponsoredProject_mapped_df['death_date_int'] - \ # sponsoredProject_mapped_df['date_first_met_int'] # sponsoredProject_mapped_df['OS_MONTHS_PRIMARY'] = \ # sponsoredProject_mapped_df['death_date_int'] - \ # sponsoredProject_mapped_df['primary_dx_date_int'] # return(sponsoredProject_mapped_df) def
( self, treatmentDf, finalPatientDf, therapyRange=5): # These variables are capitalized to match with the column headers START_DATE = [] STOP_DATE = [] TREATMENT_TYPE = [] SUBTYPE = [] AGENT = [] THERAPY_RESPONSE = [] # Name of Chemotherapeutic Agent or Hormone Therapy - Experimental or # OTHER (NCIT ID) THERAPY_DRUG_OTHER = [] THERAPY_DRUG_DISCONTINUE = [] THERAPY_DRUG_REASON = [] TREATMENT_SETTING = [] RXNORM_ID = [] # Name of Chemotherapeutic Agent or Hormone Therapy - Experimental or # OTHER THERAPY_DRUG_START_ESTIMATED = [] THERAPY_DRUG_OTHER_NAME = [] THERAPY_DRUG_END_ESTIMATED = [] for therapyNumber in range(1, therapyRange): therapyCols = [ i for i in treatmentDf if "therapy_drug%d" % therapyNumber in i] startCols = [i for i in therapyCols if "start_int" in i] START_DATE.extend(startCols) STOP_DATE.extend([i for i in therapyCols if "end_int" in i]) AGENT.extend([ i for i in therapyCols if "name" in i and "other" not in i]) RXNORM_ID.extend([ i for i in therapyCols if i == "therapy_drug%d" % therapyNumber]) THERAPY_DRUG_OTHER.extend([ i for i in therapyCols if "other" in i and 'name' not in i]) THERAPY_DRUG_DISCONTINUE.extend([ i for i in therapyCols if "discon" in i]) THERAPY_DRUG_REASON.extend([ i for i in therapyCols if "reason" in i]) THERAPY_DRUG_OTHER_NAME.extend([ i for i in therapyCols if "other_name" in i]) THERAPY_DRUG_START_ESTIMATED.extend([ i for i in therapyCols if "start_estimated" in i]) THERAPY_DRUG_END_ESTIMATED.extend([ i for i in therapyCols if "end_estimated" in i]) # Value TREATMENT_TYPE.extend([ "Medical Therapy %d" % therapyNumber] * len(startCols)) # Value SUBTYPE = ["Chemo/Target/Immuno etc."] * len(AGENT) TREATMENT_SETTING = ['treatment_setting'] * len(AGENT) THERAPY_RESPONSE = ['therapy_response'] * len(AGENT) # Value EVENT_TYPE = ["TREATMENT"]*len(AGENT) LINE_START = ['line_start_int'] * len(AGENT) REGIMEN_NAME = ['regimen_name'] * len(AGENT) CLINICAL_TRIAL = ['clinical_trial'] * len(AGENT) CENTER = ['redcap_data_access_group'] * len(AGENT) lengths = [ len(START_DATE), len(STOP_DATE), len(TREATMENT_TYPE), len(AGENT), len(THERAPY_DRUG_OTHER), len(THERAPY_DRUG_DISCONTINUE), len(THERAPY_DRUG_REASON), len(RXNORM_ID), len(THERAPY_DRUG_OTHER_NAME), len(THERAPY_DRUG_START_ESTIMATED), len(THERAPY_DRUG_END_ESTIMATED), len(TREATMENT_TYPE)] assert len(set(lengths)) == 1, "Lengths must all be the same" total = pd.DataFrame() for i in range(len(treatmentDf)): timelineDF = pd.DataFrame() timelineDF['PATIENT_ID'] = \ [treatmentDf['patient_id'].iloc[i]]*len(START_DATE) timelineDF['START_DATE'] = \ treatmentDf.iloc[i][START_DATE].reset_index(drop=True) timelineDF['STOP_DATE'] = \ treatmentDf.iloc[i][STOP_DATE].reset_index(drop=True) timelineDF['EVENT_TYPE'] = EVENT_TYPE # has to be in this order of PATIENT_ID, START, STOP and EVENT_TYPE timelineDF['TREATMENT_TYPE'] = TREATMENT_TYPE timelineDF['SUBTYPE'] = SUBTYPE timelineDF['AGENT'] = \ treatmentDf.iloc[i][AGENT].reset_index(drop=True) timelineDF['RXNORM_ID'] = \ treatmentDf.iloc[i][RXNORM_ID].reset_index(drop=True) timelineDF['THERAPY_DRUG_OTHER'] = \ treatmentDf.iloc[i][THERAPY_DRUG_OTHER].reset_index(drop=True) timelineDF['THERAPY_DRUG_DISCONTINUE'] = treatmentDf.iloc[i][ THERAPY_DRUG_DISCONTINUE].reset_index(drop=True) timelineDF['THERAPY_DRUG_REASON'] = \ treatmentDf.iloc[i][THERAPY_DRUG_REASON].reset_index(drop=True) timelineDF['THERAPY_DRUG_OTHER_NAME'] = treatmentDf.iloc[i][ THERAPY_DRUG_OTHER_NAME].reset_index(drop=True) timelineDF['THERAPY_DRUG_START_ESTIMATED'] = treatmentDf.iloc[i][ 
THERAPY_DRUG_START_ESTIMATED].reset_index(drop=True) timelineDF['THERAPY_DRUG_END_ESTIMATED'] = treatmentDf.iloc[i][ THERAPY_DRUG_END_ESTIMATED].reset_index(drop=True) timelineDF['TREATMENT_SETTING'] = \ treatmentDf.iloc[i][TREATMENT_SETTING].reset_index(drop=True) timelineDF['THERAPY_RESPONSE'] = \ treatmentDf.iloc[i][THERAPY_RESPONSE].reset_index(drop=True) timelineDF['LINE_START'] = \ treatmentDf.iloc[i][LINE_START].reset_index(drop=True) timelineDF['REGIMEN_NAME'] = \ treatmentDf.iloc[i][REGIMEN_NAME].reset_index(drop=True) timelineDF['CLINICAL_TRIAL'] = \ treatmentDf.iloc[i][CLINICAL_TRIAL].reset_index(drop=True) timelineDF['CENTER'] = \ treatmentDf.iloc[i][CENTER].reset_index(drop=True) total = total.append(timelineDF, sort=False) # remove all without START dates total = total[~total['START_DATE'].isnull()] total['SP'] = self._SPONSORED_PROJECT total['STATUS'] = '' total['START_DATE'] = total['START_DATE'].astype('float') total['STOP_DATE'] = total['STOP_DATE'].astype('float') total['RXNORM_ID'] = total['RXNORM_ID'].astype('float') total['LINE_START'] = total['LINE_START'].astype('float') total.drop_duplicates(inplace=True) # Anchor point is MET_DX_DATE_INT date_met_int = [ float(finalPatientDf['MET_DX_DATE_INT'][ finalPatientDf['PATIENT_ID'] == patient].values[0]) for patient in total['PATIENT_ID']] total['START_DATE'] = total['START_DATE'] - date_met_int total['STOP_DATE'] = total['STOP_DATE'] - date_met_int total['LINE_START'] = total['LINE_START'] - date_met_int return(total) def createSpecimenDf(self, sampleDf, patientDf): clinicalDf = sampleDf.merge(patientDf, on="PATIENT_ID", how="outer") clinicalDf = clinicalDf[~clinicalDf.AGE_AT_SEQ_REPORT.isnull()] clinicalDf = \ clinicalDf[~clinicalDf.DATE_FIRST_DISTANT_MET_INT.isnull()] specimen = pd.DataFrame() specimen['PATIENT_ID'] = clinicalDf['PATIENT_ID'] specimen['SAMPLE_ID'] = clinicalDf['SAMPLE_ID'] specimen['START_DATE'] = \ clinicalDf.AGE_AT_SEQ_REPORT.astype(int) - \ clinicalDf.DATE_FIRST_DISTANT_MET_INT.astype(int) specimen['EVENT_TYPE'] = 'SPECIMEN' specimen['SAMPLE_NOTES'] = clinicalDf.AGE_AT_SEQ_REPORT specimen = specimen[~specimen['START_DATE'].isnull()] return(specimen)
makeTimeLineDf
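# --- Illustrative sketch (not part of the original module) ---
# The AKT1 docstring above derives OS_MONTHS from day-offset fields and
# notes that days are converted to months as days / 30.4. A minimal,
# hedged example of that arithmetic on a toy frame; the values below are
# made up, only the column names mirror the real fields.
def _os_months_sketch():
    toy = pd.DataFrame({'death_date_int': [3040.0],
                        'mets_disease_date_int': [1520.0]})
    # subtract the day offsets, then convert days to months
    toy['OS_MONTHS'] = (toy['death_date_int'] - toy['mets_disease_date_int']) / 30.4
    return toy['OS_MONTHS']  # -> 50.0 months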
index.js
var btn = document.getElementById("btn"); var menu = document.getElementById("menu"); var closeBtn = document.getElementById("close-btn"); var container = document.getElementById("container"); // OPEN OVERLAY btn.addEventListener("click", function() { menu.classList.remove("fade-out"); menu.classList.add("fade-in"); container.classList.add("animation-overlay"); setTimeout(function(){container.classList.remove("animation-overlay");}, 500);
closeBtn.addEventListener('click', function() { menu.classList.remove("fade-in"); menu.classList.add("fade-out"); });
}); // CLOSE OVERLAY
exclude.rs
use std::io; use anyhow::{bail, Context}; use git_repository as git; use git_repository::prelude::FindExt; use crate::OutputFormat; pub mod query { use std::ffi::OsString; use crate::OutputFormat; pub struct Options { pub format: OutputFormat, pub overrides: Vec<OsString>, pub show_ignore_patterns: bool, } } pub fn query( repo: git::Repository, pathspecs: impl Iterator<Item = git::path::Spec>, mut out: impl io::Write, query::Options { overrides, format, show_ignore_patterns, }: query::Options, ) -> anyhow::Result<()>
{ if format != OutputFormat::Human { bail!("JSON output isn't implemented yet"); } let worktree = repo .worktree() .with_context(|| "Cannot check excludes without a current worktree")?; let index = worktree.open_index()?; let mut cache = worktree.excludes( &index.state, Some(git::attrs::MatchGroup::<git::attrs::Ignore>::from_overrides(overrides)), )?; let prefix = repo.prefix().expect("worktree - we have an index by now")?; for mut spec in pathspecs { for path in spec.apply_prefix(&prefix).items() { // TODO: what about paths that end in /? Pathspec might handle it, it's definitely something git considers // even if the directory doesn't exist. Seems to work as long as these are kept in the spec. let is_dir = git::path::from_bstr(path).metadata().ok().map(|m| m.is_dir()); let entry = cache.at_entry(path, is_dir, |oid, buf| repo.objects.find_blob(oid, buf))?; let match_ = entry .matching_exclude_pattern() .and_then(|m| (show_ignore_patterns || !m.pattern.is_negative()).then(|| m)); match match_ { Some(m) => writeln!( out, "{}:{}:{}\t{}", m.source.map(|p| p.to_string_lossy()).unwrap_or_default(), m.sequence_number, m.pattern, path )?, None => writeln!(out, "::\t{}", path)?, } } } Ok(()) }
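// Illustrative note (not from the original source): with a `.gitignore`
// whose first line is `*.o`, querying the path `build/main.o` would print
// a line in the `source:sequence:pattern<TAB>path` shape produced above,
// e.g. `.gitignore:1:*.o	build/main.o`, while a path matched by no
// exclude pattern prints the empty form `::	build/main.c`.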
populate_uuid.py
#!/usr/bin/env python """ Populates blank uuid fields in datasets with randomly generated values Going forward, these ids will be generated for all new datasets. This script fixes datasets that were generated before the change. """ import sys, os, ConfigParser import galaxy.app from galaxy.util.bunch import Bunch import galaxy.datatypes.tabular from galaxy.model.orm.scripts import get_config from galaxy import eggs from galaxy.model import mapping import uuid eggs.require( "SQLAlchemy" ) from sqlalchemy import * assert sys.version_info[:2] >= ( 2, 4 ) def main(): ini_file = sys.argv.pop(1) config = get_config(ini_file) model = mapping.init( ini_file, config['db_url'], create_tables = False ) for row in model.context.query( model.Dataset ): if row.uuid is None: row.uuid = uuid.uuid4() print "Setting dataset:", row.id, " UUID to ", row.uuid model.context.flush() for row in model.context.query( model.Workflow ): if row.uuid is None:
model.context.flush() if __name__ == "__main__": main()
row.uuid = uuid.uuid4() print "Setting Workflow:", row.id, " UUID to ", row.uuid
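# Illustrative sketch (not part of the original script): the same
# backfill pattern written against plain SQLAlchemy, bypassing Galaxy's
# model layer. The connection URL and table/column names here are
# hypothetical placeholders, kept commented out on purpose.
#
#   from sqlalchemy import create_engine, text
#   engine = create_engine("sqlite:///galaxy.sqlite")  # assumed URL
#   with engine.begin() as conn:
#       rows = conn.execute(text("SELECT id FROM dataset WHERE uuid IS NULL"))
#       for (dataset_id,) in rows:
#           conn.execute(text("UPDATE dataset SET uuid = :u WHERE id = :i"),
#                        {"u": uuid.uuid4().hex, "i": dataset_id})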
context.rs
//! The context for running a contract actor

use crate::error::KelkError;
use crate::params::*;
use alloc::vec::Vec;

/// `ContextAPI` provides the necessary APIs to interact with the Tanour.
/// It can't be copied or cloned since it doesn't have Copy and Clone traits.
pub trait ContextAPI {
    /// Writes `data` into the storage file at the given offset
    fn write_storage(&self, offset: u32, data: &[u8]) -> Result<(), KelkError>;

    /// Reads `data` from the storage file at the given offset and length
    fn read_storage(&self, offset: u32, length: u32) -> Result<Vec<u8>, KelkError>;

    /// Gets parameters
    fn get_param(&self, param_id: i32) -> Result<ParamType, KelkError>;
}

/// `OwnedContext` owns the `ContextAPI` instance. It allows dependency injection at runtime.
/// This cannot be copied or cloned since `api` doesn't implement Copy and Clone traits.
/// It can be easily mocked for the testing environment.
pub struct OwnedContext<C: ContextAPI> {
    /// The instance of ContextAPI
    pub api: C,
}

/// `Context` owns the `ContextAPI` reference.
pub struct Context<'a> {
    /// The instance of ContextAPI
    pub api: &'a dyn ContextAPI,
}

impl<C: ContextAPI> OwnedContext<C> {
    /// returns the context as reference
    pub fn as_ref(&'_ self) -> Context<'_>
}
{ Context { api: &self.api } }
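// Illustrative mock (not part of the original crate): the docs above say
// `OwnedContext` "can be easily mocked for the testing environment"; a
// minimal in-memory `ContextAPI` could look like the commented sketch
// below. Error construction and the `ParamType` variant are elided
// because their definitions are not shown in this file.
//
//   use core::cell::RefCell;
//
//   struct MockContext {
//       storage: RefCell<Vec<u8>>,
//   }
//
//   impl ContextAPI for MockContext {
//       fn write_storage(&self, offset: u32, data: &[u8]) -> Result<(), KelkError> {
//           let mut s = self.storage.borrow_mut();
//           let end = offset as usize + data.len();
//           if s.len() < end {
//               s.resize(end, 0);
//           }
//           s[offset as usize..end].copy_from_slice(data);
//           Ok(())
//       }
//       fn read_storage(&self, offset: u32, length: u32) -> Result<Vec<u8>, KelkError> {
//           // Note: panics on out-of-range reads; a real mock would return an error.
//           Ok(self.storage.borrow()[offset as usize..(offset + length) as usize].to_vec())
//       }
//       fn get_param(&self, _param_id: i32) -> Result<ParamType, KelkError> {
//           unimplemented!("return a ParamType suited to the test")
//       }
//   }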
duration.go
// Go support for Protocol Buffers - Google's data interchange format
//
// Copyright 2016 The Go Authors.  All rights reserved.
// https://github.com/sgtsquiggs/protobuf
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package ptypes

// This file implements conversions between google.protobuf.Duration
// and time.Duration.

import (
	"errors"
	"fmt"
	"time"

	durpb "github.com/sgtsquiggs/protobuf/ptypes/duration"
)

const (
	// Range of a durpb.Duration in seconds, as specified in
	// google/protobuf/duration.proto. This is about 10,000 years in seconds.
	maxSeconds = int64(10000 * 365.25 * 24 * 60 * 60)
	minSeconds = -maxSeconds
)

// validateDuration determines whether the durpb.Duration is valid according to the
// definition in google/protobuf/duration.proto. A valid durpb.Duration
// may still be too large to fit into a time.Duration (the range of durpb.Duration
// is about 10,000 years, and the range of time.Duration is about 290 years).
func validateDuration(d *durpb.Duration) error {
	if d == nil {
		return errors.New("duration: nil Duration")
	}
	if d.Seconds < minSeconds || d.Seconds > maxSeconds {
		return fmt.Errorf("duration: %v: seconds out of range", d)
	}
	if d.Nanos <= -1e9 || d.Nanos >= 1e9 {
		return fmt.Errorf("duration: %v: nanos out of range", d)
	}
	// Seconds and Nanos must have the same sign, unless d.Nanos is zero.
	if (d.Seconds < 0 && d.Nanos > 0) || (d.Seconds > 0 && d.Nanos < 0) {
		return fmt.Errorf("duration: %v: seconds and nanos have different signs", d)
	}
	return nil
}

// Duration converts a durpb.Duration to a time.Duration. Duration
// returns an error if the durpb.Duration is invalid or is too large to be
// represented in a time.Duration.
func Duration(p *durpb.Duration) (time.Duration, error)
// DurationProto converts a time.Duration to a durpb.Duration. func DurationProto(d time.Duration) *durpb.Duration { nanos := d.Nanoseconds() secs := nanos / 1e9 nanos -= secs * 1e9 return &durpb.Duration{ Seconds: secs, Nanos: int32(nanos), } }
{ if err := validateDuration(p); err != nil { return 0, err } d := time.Duration(p.Seconds) * time.Second if int64(d/time.Second) != p.Seconds { return 0, fmt.Errorf("duration: %v is out of range for time.Duration", p) } if p.Nanos != 0 { d += time.Duration(p.Nanos) * time.Nanosecond if (d < 0) != (p.Nanos < 0) { return 0, fmt.Errorf("duration: %v is out of range for time.Duration", p) } } return d, nil }
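// Illustrative round trip (not part of the original file):
//
//	d := 90*time.Minute + 30*time.Nanosecond
//	p := DurationProto(d)    // &durpb.Duration{Seconds: 5400, Nanos: 30}
//	back, err := Duration(p) // back == d, err == nil
//
// A durpb.Duration outside roughly +/-290 years passes validateDuration
// but trips the overflow checks in Duration and returns an error instead.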
socks5.rs
use crate::proxy::{Address, Destination}; use log::trace; use std::io::{self, ErrorKind}; use std::net::IpAddr; use tokio::{ io::{AsyncReadExt, AsyncWriteExt}, net::TcpStream, }; use super::UserPassAuthCredential; pub async fn handshake<T>( stream: &mut TcpStream, addr: &Destination, data: Option<T>, fake_handshaking: bool, user_pass_auth: &Option<UserPassAuthCredential>, ) -> io::Result<()> where T: AsRef<[u8]>, { if fake_handshaking && user_pass_auth.is_none() { trace!("socks: do FAKE handshake w/ {:?}", addr); fake_handshake(stream, addr, data).await } else { trace!("socks: do FULL handshake w/ {:?}", addr); full_handshake(stream, addr, data, user_pass_auth).await } } pub async fn fake_handshake<T>( stream: &mut TcpStream, addr: &Destination, data: Option<T>, ) -> io::Result<()> where T: AsRef<[u8]>, { let mut buf = Vec::with_capacity(16); buf.extend_from_slice(&[5, 1, 0]); build_request(&mut buf, addr); stream.write_all(&buf).await?; if let Some(data) = data { stream.write_all(data.as_ref()).await?; } buf.resize(12, 0); stream.read_exact(&mut buf).await?; Ok(()) } macro_rules! err { ($msg:expr) => { return Err(io::Error::new(ErrorKind::Other, $msg)) }; } pub async fn full_handshake<T>( stream: &mut TcpStream, addr: &Destination, data: Option<T>, user_pass_auth: &Option<UserPassAuthCredential>, ) -> io::Result<()> where T: AsRef<[u8]>,
fn build_request(buffer: &mut Vec<u8>, addr: &Destination) { buffer.extend_from_slice(&[5, 1, 0]); match addr.host { Address::Ip(ip) => match ip { IpAddr::V4(ip) => { buffer.push(0x01); buffer.extend_from_slice(&ip.octets()); } IpAddr::V6(ip) => { buffer.push(0x04); buffer.extend_from_slice(&ip.octets()); } }, Address::Domain(ref host) => { buffer.push(0x03); buffer.push(host.len() as u8); buffer.extend_from_slice(host.as_bytes()); } }; buffer.push((addr.port >> 8) as u8); buffer.push(addr.port as u8); }
{
    let mut buf = vec![];
    if user_pass_auth.is_none() {
        // Send request w/ auth method 0x00 (no auth)
        buf.extend(&[0x05, 0x01, 0x00])
    } else {
        // Or, include 0x02 (username/password auth)
        buf.extend(&[0x05, 0x02, 0x00, 0x02])
    };
    trace!("socks: write {:?}", buf);
    stream.write_all(&buf).await?;

    // Server selects the auth method
    let mut buf = vec![0; 2];
    stream.read_exact(&mut buf).await?;
    trace!("socks: read {:?}", buf);
    match buf[..2] {
        // 0xff: no acceptable method
        [0x05, 0xff] => err!("auth required by socks server"),
        // 0x00: no auth required
        [0x05, 0x00] => (),
        // 0x02: username/password method
        [0x05, 0x02] => {
            if let Some(auth) = user_pass_auth {
                if auth.username.len() > 255 || auth.password.len() > 255 {
                    panic!("SOCKSv5 username/password exceeds 255 bytes");
                }
                buf.clear();
                buf.push(0x01); // version
                buf.push(auth.username.len() as u8);
                buf.extend(auth.username.as_bytes());
                buf.push(auth.password.len() as u8);
                buf.extend(auth.password.as_bytes());
                trace!("socks: write auth {:?}", buf);
                stream.write_all(&buf).await?;
                // Parse response
                buf.resize(2, 0);
                stream.read_exact(&mut buf).await?;
                trace!("socks: read {:?}", buf);
                if buf != [0x01, 0x00] {
                    err!("auth rejected by SOCKSv5 server")
                }
            } else {
                err!("missing username/password required by socks server");
            }
        }
        _ => err!("unrecognized reply from socks server"),
    }

    // Write the actual request
    buf.clear();
    build_request(&mut buf, addr);
    trace!("socks: write request {:?}", buf);
    stream.write_all(&buf).await?;

    // Check server's reply
    buf.resize(10, 0);
    stream.read_exact(&mut buf).await?;
    trace!("socks: read reply {:?}", buf);
    if !buf.starts_with(&[0x05, 0x00]) {
        err!("socks server reply error");
    }
    if buf[3] == 4 {
        // Consume the truncated IPv6 address (the remaining 12 bytes)
        buf.resize(16 - 4, 0);
        stream.read_exact(&mut buf).await?;
    }

    // Write out the payload if it exists
    if let Some(data) = data {
        trace!("socks: write payload {:?}", data.as_ref());
        stream.write_all(data.as_ref()).await?;
    }
    Ok(())
}
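// Illustrative byte layout (the destination values are hypothetical,
// derived from `build_request` above rather than taken from the original
// comments): for the destination "example.com", port 443, the CONNECT
// request is
//
//   [0x05, 0x01, 0x00,        // VER, CMD=CONNECT, RSV (RFC 1928)
//    0x03, 0x0b,              // ATYP=DOMAIN, name length 11
//    b'e', ..., b'm',         // "example.com"
//    0x01, 0xbb]              // port 443 in network byte order
//
// and is what both the fake and the full handshake ultimately send.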
types.go
// Copyright Fuzamei Corp. 2018 All Rights Reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package types import ( "github.com/33cn/chain33/cmd/tools/gencode/base" "github.com/33cn/chain33/cmd/tools/types" ) func
() {
	base.RegisterCodeFile(typesCode{})
}

type typesCode struct {
	base.DappCodeFile
}

func (c typesCode) GetDirName() string {
	return "types"
}

func (c typesCode) GetFiles() map[string]string {
	return map[string]string{
		typesName: typesContent,
	}
}

func (c typesCode) GetDirReplaceTags() []string {
	return []string{types.TagExecName}
}

func (c typesCode) GetFileReplaceTags() []string {
	return []string{types.TagExecName, types.TagExecObject, types.TagClassName, types.TagActionIDText, types.TagTyLogActionType, types.TagLogMapText, types.TagTypeMapText}
}

var (
	typesName = "${EXECNAME}.go"

	typesContent = `package types

import (
	"encoding/json"

	log "github.com/33cn/chain33/common/log/log15"
	"github.com/33cn/chain33/types"
)

/*
 * Transaction-related type definitions.
 * A transaction action usually has a corresponding log structure,
 * which is used to record transaction receipt logs.
 * Each action and log must be distinguished by an id value and a name.
 */

// action type ids and names; these constants can be customized
${ACTIONIDTEXT}

// log type id values
${TYLOGACTIONTYPE}

var (
	//${CLASSNAME}X defines the executor name
	${CLASSNAME}X = "${EXECNAME}"
	//defines the actionMap
	actionMap = ${TYPEMAPTEXT}
	//defines log ids with their concrete log types and names; fill in custom log types
	logMap = ${LOGMAPTEXT}
	tlog = log.New("module", "${EXECNAME}.types")
)

// init defines a register function
func init() {
	types.AllowUserExec = append(types.AllowUserExec, []byte(${CLASSNAME}X))
	// register the contract's fork (activation) height
	types.RegFork(${CLASSNAME}X, InitFork)
	types.RegExec(${CLASSNAME}X, InitExecutor)
}

// InitFork defines register fork
func InitFork(cfg *types.Chain33Config) {
	cfg.RegisterDappFork(${CLASSNAME}X, "Enable", 0)
}

// InitExecutor defines register executor
func InitExecutor(cfg *types.Chain33Config) {
	types.RegistorExecutor(${CLASSNAME}X, NewType(cfg))
}

type ${EXECNAME}Type struct {
	types.ExecTypeBase
}

func NewType(cfg *types.Chain33Config) *${EXECNAME}Type {
	c := &${EXECNAME}Type{}
	c.SetChild(c)
	c.SetConfig(cfg)
	return c
}

// GetPayload returns the contract action structure
func (${EXEC_OBJECT} *${EXECNAME}Type) GetPayload() types.Message {
	return &${CLASSNAME}Action{}
}

// GetTypeMap returns the id and name info of the contract actions
func (${EXEC_OBJECT} *${EXECNAME}Type) GetTypeMap() map[string]int32 {
	return actionMap
}

// GetLogMap returns the contract log information
func (${EXEC_OBJECT} *${EXECNAME}Type) GetLogMap() map[int64]*types.LogInfo {
	return logMap
}
`
)
init
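// Illustrative sketch (an assumption, not shown in this package): the
// ${...} placeholders in typesContent presumably get filled by plain
// string replacement keyed on the tags from GetFileReplaceTags, e.g.
//
//	out := strings.ReplaceAll(typesContent, "${EXECNAME}", "demo")
//	out = strings.ReplaceAll(out, "${CLASSNAME}", "Demo")
//	// ...one replacement per tag returned by GetFileReplaceTags()
//
// The exact substitution mechanics live in the surrounding generator.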
get_dataset_colormap.py
# Lint as: python2, python3 # Copyright 2018 The TensorFlow Authors All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Visualizes the segmentation results via specified color map. Visualizes the semantic segmentation results by the color map defined by the different datasets. Supported colormaps are: * ADE20K (http://groups.csail.mit.edu/vision/datasets/ADE20K/). * Cityscapes dataset (https://www.cityscapes-dataset.com). * Mapillary Vistas (https://research.mapillary.com). * PASCAL VOC 2012 (http://host.robots.ox.ac.uk/pascal/VOC/). """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from six.moves import range # Dataset names. _ADE20K = 'ade20k' _CITYSCAPES = 'cityscapes' _MAPILLARY_VISTAS = 'mapillary_vistas' _PASCAL = 'pascal' _PC1 = 'PC1' # Max number of entries in the colormap for each dataset. _DATASET_MAX_ENTRIES = { _ADE20K: 151, _CITYSCAPES: 256, _MAPILLARY_VISTAS: 66, _PASCAL: 512, _PC1: 256, } def create_pc1_label_colormap(): """Creates a label colormap used in PC1 segmentation benchmark. Returns: A colormap for visualizing segmentation results. """ colormap = np.zeros((256, 3), dtype=np.uint8) colormap[0] = [128, 64, 128] colormap[1] = [244, 35, 232] colormap[2] = [70, 70, 70] colormap[3] = [102, 102, 156] return colormap def create_ade20k_label_colormap(): """Creates a label colormap used in ADE20K segmentation benchmark. Returns: A colormap for visualizing segmentation results. 
""" return np.asarray([ [0, 0, 0], [120, 120, 120], [180, 120, 120], [6, 230, 230], [80, 50, 50], [4, 200, 3], [120, 120, 80], [140, 140, 140], [204, 5, 255], [230, 230, 230], [4, 250, 7], [224, 5, 255], [235, 255, 7], [150, 5, 61], [120, 120, 70], [8, 255, 51], [255, 6, 82], [143, 255, 140], [204, 255, 4], [255, 51, 7], [204, 70, 3], [0, 102, 200], [61, 230, 250], [255, 6, 51], [11, 102, 255], [255, 7, 71], [255, 9, 224], [9, 7, 230], [220, 220, 220], [255, 9, 92], [112, 9, 255], [8, 255, 214], [7, 255, 224], [255, 184, 6], [10, 255, 71], [255, 41, 10], [7, 255, 255], [224, 255, 8], [102, 8, 255], [255, 61, 6], [255, 194, 7], [255, 122, 8], [0, 255, 20], [255, 8, 41], [255, 5, 153], [6, 51, 255], [235, 12, 255], [160, 150, 20], [0, 163, 255], [140, 140, 140], [250, 10, 15], [20, 255, 0], [31, 255, 0], [255, 31, 0], [255, 224, 0], [153, 255, 0], [0, 0, 255], [255, 71, 0], [0, 235, 255], [0, 173, 255], [31, 0, 255], [11, 200, 200], [255, 82, 0], [0, 255, 245], [0, 61, 255], [0, 255, 112], [0, 255, 133], [255, 0, 0], [255, 163, 0], [255, 102, 0], [194, 255, 0], [0, 143, 255], [51, 255, 0], [0, 82, 255], [0, 255, 41], [0, 255, 173], [10, 0, 255], [173, 255, 0], [0, 255, 153], [255, 92, 0], [255, 0, 255], [255, 0, 245], [255, 0, 102], [255, 173, 0], [255, 0, 20], [255, 184, 184], [0, 31, 255], [0, 255, 61], [0, 71, 255], [255, 0, 204], [0, 255, 194], [0, 255, 82], [0, 10, 255], [0, 112, 255], [51, 0, 255], [0, 194, 255], [0, 122, 255], [0, 255, 163], [255, 153, 0], [0, 255, 10], [255, 112, 0], [143, 255, 0], [82, 0, 255], [163, 255, 0], [255, 235, 0], [8, 184, 170], [133, 0, 255], [0, 255, 92], [184, 0, 255], [255, 0, 31], [0, 184, 255], [0, 214, 255], [255, 0, 112], [92, 255, 0], [0, 224, 255], [112, 224, 255], [70, 184, 160], [163, 0, 255], [153, 0, 255], [71, 255, 0], [255, 0, 163], [255, 204, 0], [255, 0, 143], [0, 255, 235], [133, 255, 0], [255, 0, 235], [245, 0, 255], [255, 0, 122], [255, 245, 0], [10, 190, 212], [214, 255, 0], [0, 204, 255], [20, 0, 255], [255, 255, 0], [0, 153, 255], [0, 41, 255], [0, 255, 204], [41, 0, 255], [41, 255, 0], [173, 0, 255], [0, 245, 255], [71, 0, 255], [122, 0, 255], [0, 255, 184], [0, 92, 255], [184, 255, 0], [0, 133, 255], [255, 214, 0], [25, 194, 194], [102, 255, 0], [92, 0, 255], ]) def create_cityscapes_label_colormap(): """Creates a label colormap used in CITYSCAPES segmentation benchmark. Returns: A colormap for visualizing segmentation results. """ colormap = np.zeros((256, 3), dtype=np.uint8) colormap[0] = [128, 64, 128] colormap[1] = [244, 35, 232] colormap[2] = [70, 70, 70] colormap[3] = [102, 102, 156] colormap[4] = [190, 153, 153] colormap[5] = [153, 153, 153] colormap[6] = [250, 170, 30] colormap[7] = [220, 220, 0] colormap[8] = [107, 142, 35] colormap[9] = [152, 251, 152] colormap[10] = [70, 130, 180] colormap[11] = [220, 20, 60] colormap[12] = [255, 0, 0] colormap[13] = [0, 0, 142] colormap[14] = [0, 0, 70] colormap[15] = [0, 60, 100] colormap[16] = [0, 80, 100] colormap[17] = [0, 0, 230] colormap[18] = [119, 11, 32] return colormap def create_mapillary_vistas_label_colormap(): """Creates a label colormap used in Mapillary Vistas segmentation benchmark. Returns: A colormap for visualizing segmentation results. 
""" return np.asarray([ [165, 42, 42], [0, 192, 0], [196, 196, 196], [190, 153, 153], [180, 165, 180], [102, 102, 156], [102, 102, 156], [128, 64, 255], [140, 140, 200], [170, 170, 170], [250, 170, 160], [96, 96, 96], [230, 150, 140], [128, 64, 128], [110, 110, 110], [244, 35, 232], [150, 100, 100], [70, 70, 70], [150, 120, 90], [220, 20, 60], [255, 0, 0], [255, 0, 0], [255, 0, 0], [200, 128, 128], [255, 255, 255], [64, 170, 64], [128, 64, 64], [70, 130, 180], [255, 255, 255], [152, 251, 152], [107, 142, 35], [0, 170, 30], [255, 255, 128], [250, 0, 30], [0, 0, 0], [220, 220, 220], [170, 170, 170], [222, 40, 40], [100, 170, 30], [40, 40, 40], [33, 33, 33], [170, 170, 170], [0, 0, 142], [170, 170, 170], [210, 170, 100], [153, 153, 153], [128, 128, 128], [0, 0, 142], [250, 170, 30], [192, 192, 192], [220, 220, 0], [180, 165, 180], [119, 11, 32], [0, 0, 142], [0, 60, 100], [0, 0, 142], [0, 0, 90], [0, 0, 230], [0, 80, 100], [128, 64, 64], [0, 0, 110], [0, 0, 70], [0, 0, 192], [32, 32, 32], [0, 0, 0], [0, 0, 0], ]) def create_pascal_label_colormap(): """Creates a label colormap used in PASCAL VOC segmentation benchmark. Returns: A colormap for visualizing segmentation results. """ colormap = np.zeros((_DATASET_MAX_ENTRIES[_PASCAL], 3), dtype=int) ind = np.arange(_DATASET_MAX_ENTRIES[_PASCAL], dtype=int) for shift in reversed(list(range(8))): for channel in range(3): colormap[:, channel] |= bit_get(ind, channel) << shift ind >>= 3 return colormap def get_ade20k_name(): return _ADE20K def get_cityscapes_name(): return _CITYSCAPES def get_mapillary_vistas_name(): return _MAPILLARY_VISTAS def get_pascal_name(): return _PASCAL def get_pc1_name(): return _PC1 def bit_get(val, idx): """Gets the bit value. Args: val: Input value, int or numpy int array. idx: Which bit of the input val. Returns: The "idx"-th bit of input val. """ return (val >> idx) & 1 def
(dataset=_PC1): """Creates a label colormap for the specified dataset. Args: dataset: The colormap used in the dataset. Returns: A numpy array of the dataset colormap. Raises: ValueError: If the dataset is not supported. """ if dataset == _ADE20K: return create_ade20k_label_colormap() elif dataset == _CITYSCAPES: return create_cityscapes_label_colormap() elif dataset == _MAPILLARY_VISTAS: return create_mapillary_vistas_label_colormap() elif dataset == _PASCAL: return create_pascal_label_colormap() elif dataset == _PC1: return create_pc1_label_colormap() else: raise ValueError('Unsupported dataset.') def label_to_color_image(label, dataset=_PC1): """Adds color defined by the dataset colormap to the label. Args: label: A 2D array with integer type, storing the segmentation label. dataset: The colormap used in the dataset. Returns: result: A 2D array with floating type. The element of the array is the color indexed by the corresponding element in the input label to the dataset color map. Raises: ValueError: If label is not of rank 2 or its value is larger than color map maximum entry. """ if label.ndim != 2: raise ValueError('Expect 2-D input label. Got {}'.format(label.shape)) if np.max(label) >= _DATASET_MAX_ENTRIES[dataset]: raise ValueError( 'label value too large: {} >= {}.'.format( np.max(label), _DATASET_MAX_ENTRIES[dataset])) colormap = create_label_colormap(dataset) return colormap[label] def get_dataset_colormap_max_entries(dataset): return _DATASET_MAX_ENTRIES[dataset]
create_label_colormap
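# Illustrative usage (a hypothetical helper, not part of the original
# module): color a tiny 2x2 label image with the Cityscapes colormap
# defined above.
def _colorize_demo():
    label = np.array([[0, 1], [2, 3]])
    colored = label_to_color_image(label, dataset=_CITYSCAPES)
    return colored.shape  # -> (2, 2, 3), one RGB triple per pixel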
endorsement.py
#!/usr/bin/python

# The MIT License (MIT)
#
# Copyright (c) 2017 Massimiliano Patacchiola
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

# ATTENTION: to work, this script requires launching the iCub world first:
# yarpserver
# ./iCub_SIM
# ./iKinGazeCtrl --from configSim.ini
# yarpdev --device opencv_grabber
# yarp connect /grabber /icubSim/texture/screen
#
# For the cartesian controller of the left arm:
# ./simCartesianControl
# ./iKinCartesianSolver --context simCartesianControl --part left_arm

# Valid PocketSphinx commands (the prefix [iCub] or [hey] is optional):
# learn <object name>
# this is a <object name>
# forget <object name>
# what is this
# find the <object name>
# stop detection
# look at me

from speech_recognition import SpeechRecognizer
from icub import iCub
import cv2
import random
import time
import os
import sys


def initialise():
    # Initialise the speech recognition engine and the iCub controller
def speech_to_action(speech_string):
    """
    Take the sentence from the speech recognition and plan an action

    <action> = (learn new object | watch | inspect | find | search | look | what | start | stop);
    <target> = (ball | cup | book | dog | chair | table | at me | is this | movement detection);
    @param speech_string:
    @return:
    """
    if speech_string.find('learn') > -1 or speech_string.find('this is a') > -1:
        response_list = ['I like to learn! This is a ', 'Ok, this is a ', 'I learned a new object, ', '']
        object_name = speech_string.rsplit(None, 1)[-1]
        response_string = response_list[random.randint(0, len(response_list)-1)] + object_name
        state = 'learn'
    elif speech_string.find('what is this') > -1:
        response_string = ""
        state = 'what'
    elif speech_string.find('find the') > -1 or speech_string.find('search the') > -1:
        object_name = speech_string.rsplit(None, 1)[-1]
        object_path = "./objects/" + str(object_name) + ".png"
        if not os.path.isfile(object_path):
            print("[SPEECH-TO-ACTION][WARNING] " + "this file does not exist: " + str(object_path) + "\n")
            response_string = "Sorry I do not know this object!"
            state = 'key'
        else:
            response_list = ["Ok, now I'm looking for a ", 'Ok I will track the ', 'Ready to track the ']
            response_string = response_list[random.randint(0, len(response_list)-1)] + object_name
            state = 'movedetect on'
    elif speech_string.find('stop detection') > -1:
        response_list = ["Ok, no more movements", 'Ok I will stop it', "I'm gonna stop it!"]
        response_string = response_list[random.randint(0, len(response_list)-1)]
        state = 'movedetect off'
    elif speech_string.find('look at me') > -1:
        response_list = ["Ok!", 'Sure!']
        response_string = response_list[random.randint(0, len(response_list)-1)]
        state = 'look'
    else:
        response_list = ["Sorry I did not understand.", 'Sorry, can you repeat?', 'Repeat again please.']
        response_string = response_list[random.randint(0, len(response_list)-1)]
        state = 'key'
    return response_string, state


def main():
    inputfile = ''
    outputfile = ''
    informant_name = ''
    if len(sys.argv) == 1 or len(sys.argv) > 4:
        print("python familiarization.py <inputfile> <outputfilename> <informant_name>")
    elif len(sys.argv) == 4:
        inputfile = sys.argv[1]
        outputfile = sys.argv[2]
        informant_name = sys.argv[3]
        print("Input file: " + str(inputfile))
        print("Output file: " + str(outputfile))
        print("Informant Name: " + str(informant_name))

    STATE = 'show'
    speech_string = ""
    fovea_offset = 40  # side of the fovea square
    my_speech, my_icub = initialise()
    is_connected = my_icub.check_connection()
    if is_connected:
        print("[STATE Init] internet connection present.")
    else:
        print("[STATE Init][ERROR] internet connection not present!!!")
    my_icub.say_something(text="I'm ready!")
    cv2.namedWindow('main')

    while True:
        if STATE == 'record':
            #image = my_icub.return_left_camera_image(mode='BGR')
            my_speech.record_audio("/tmp/audio.wav", seconds=3, extension='wav', harddev='3,0')
            raw_file_path = my_speech.convert_to_raw(file_name="/tmp/audio.wav", file_name_raw="/tmp/audio.raw", extension='wav')
            speech_string = my_speech.return_text_from_audio("/tmp/audio.raw")
            print("[STATE " + str(STATE) + "] " + "Speech recognised: " + speech_string)
            STATE = 'understand'

        elif STATE == 'understand':
            response_string, local_state = speech_to_action(speech_string)
            print("[STATE " + str(STATE) + "] " + "Speech recognised: " + speech_string)
            print("[STATE " + str(STATE) + "] " + "Next state: " + local_state)
            my_icub.say_something(text=response_string)
            STATE = local_state

        elif STATE == 'show':
            left_image = 
my_icub.return_left_camera_image(mode='BGR') img_cx = int(left_image.shape[1] / 2) img_cy = int(left_image.shape[0] / 2) cv2.rectangle(left_image, (img_cx-fovea_offset, img_cy-fovea_offset), (img_cx+fovea_offset, img_cy+fovea_offset), (0, 255, 0), 1) cv2.imshow('main', left_image) STATE = 'key' elif STATE == 'movedetect on': object_name = response_string.rsplit(None, 1)[-1] print("[STATE " + str(STATE) + "] " + "start tracking of: " + str(object_name) + "\n") object_path = "./objects/" + str(object_name) + ".png" if my_icub.is_movement_detection(): my_icub.stop_movement_detection() time.sleep(0.5) my_icub.start_movement_detection(template_path=object_path, delay=1.0) else: my_icub.start_movement_detection(template_path=object_path, delay=1.0) STATE = 'key' elif STATE == 'movedetect off': print("[STATE " + str(STATE) + "] " + "stop movement tracking" + "\n") my_icub.stop_movement_detection() time.sleep(0.5) my_icub.reset_head_pose() STATE = 'key' elif STATE == 'look': print("[STATE " + str(STATE) + "] " + "gaze reset" + "\n") my_icub.reset_head_pose() STATE = 'key' elif STATE == 'learn': object_name = response_string.rsplit(None, 1)[-1] print("[STATE " + str(STATE) + "] " + "Learning new object: " + object_name + "\n") left_image = my_icub.return_left_camera_image(mode='BGR') #left_image = image img_cx = int(left_image.shape[1] / 2) img_cy = int(left_image.shape[0] / 2) left_image = left_image[img_cy-fovea_offset:img_cy+fovea_offset, img_cx-fovea_offset:img_cx+fovea_offset] my_icub.learn_object_from_histogram(left_image, object_name) print("[STATE " + str(STATE) + "] " + "Writing new template in ./objects/" + object_name + ".png" + "\n") cv2.imwrite('./objects/' + str(object_name) + '.png', left_image) STATE = 'key' elif STATE == 'what': print("[STATE " + str(STATE) + "] " + "Recalling object from memory..." + "\n") left_image = my_icub.return_left_camera_image(mode='BGR') #left_image = image img_cx = int(left_image.shape[1] / 2) img_cy = int(left_image.shape[0] / 2) left_image = left_image[img_cy-25:img_cy+25, img_cx-25:img_cx+25] object_name = my_icub.recall_object_from_histogram(left_image) if object_name is None: my_icub.say_something("My memory is empty. Teach me something!") else: print("[STATE " + str(STATE) + "] " + "Name returned: " + str(object_name) + "\n") response_list = ["Let me see. I think this is a ", "Let me think. It's a ", "Just a second. It may be a ", "It should be a "] response_string = response_list[random.randint(0, len(response_list) - 1)] my_icub.say_something(response_string + str(object_name)) STATE = 'key' elif STATE == 'key': key_pressed = cv2.waitKey(10) # delay in millisecond if key_pressed==113: #q=QUIT print("[STATE " + str(STATE) + "] " + "Button (q)uit pressed..." + "\n") STATE = "close" elif key_pressed==110: #n= print("[STATE " + str(STATE) + "] " + "Button (n) pressed..." + "\n") elif key_pressed==102: #f= print("[STATE " + str(STATE) + "] " + "Button (f) pressed..." + "\n") elif key_pressed == 114: # r=RECORD print("[STATE " + str(STATE) + "] " + "Button (r)ecord pressed..." + "\n") STATE = "record" else: STATE = 'show' elif STATE == 'close': my_icub.say_something(text="See you soon, bye bye!") my_icub.stop_movement_detection() my_icub.close() cv2.destroyAllWindows() break if __name__ == "__main__": main()
my_speech = SpeechRecognizer( hmm_path="/home/massimiliano/pyERA/examples/ex_icub_trust_cognitive_architecture/sphinx/model/en-us/en-us", language_model_path="/home/massimiliano/pyERA/examples/ex_icub_trust_cognitive_architecture/sphinx/model/en-us/en-us.lm.bin", dictionary_path="/home/massimiliano/pyERA/examples/ex_icub_trust_cognitive_architecture/sphinx/data/icub.dic", grammar_path="/home/massimiliano/pyERA/examples/ex_icub_trust_cognitive_architecture/sphinx/data/icub.gram", rule_name='icub.basicCmd', fsg_name="icub") # iCub initialization my_icub = iCub(icub_root='/icubSim') # Load acapela configuration from file my_icub.set_acapela_credential("./acapela_config.csv") account_login, application_login, application_password, service_url = my_icub.get_acapela_credential() print("[ACAPELA]Acapela configuration parameters:") print("Account Login: " + str(account_login)) print("Application Login: " + str(application_login)) print("Account Password: " + str(application_password)) print("Service URL: " + str(service_url)) print("") # Return the objects return my_speech, my_icub
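# Illustrative behavior of speech_to_action above (the response strings
# are drawn at random from the lists, so only the returned state is stable):
#   speech_to_action("this is a cup")  -> ("... cup", 'learn')
#   speech_to_action("find the ball")  -> (..., 'movedetect on')  # if ./objects/ball.png exists
#   speech_to_action("stop detection") -> (..., 'movedetect off')
#   speech_to_action("look at me")     -> (..., 'look')
#   anything unrecognised              -> ("Sorry ...", 'key')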
value_test.go
package types_test import ( "testing" "time" "github.com/genjidb/genji/document" "github.com/genjidb/genji/internal/testutil/assert" "github.com/genjidb/genji/types" "github.com/stretchr/testify/require" ) func
(t *testing.T) { type myBytes []byte type myString string type myUint uint type myUint16 uint16 type myUint32 uint32 type myUint64 uint64 type myInt int type myInt8 int8 type myInt16 int16 type myInt64 int64 type myFloat64 float64 now := time.Now() tests := []struct { name string value, expected interface{} }{ {"bytes", []byte("bar"), []byte("bar")}, {"string", "bar", "bar"}, {"bool", true, true}, {"uint", uint(10), int64(10)}, {"uint8", uint8(10), int64(10)}, {"uint16", uint16(10), int64(10)}, {"uint32", uint32(10), int64(10)}, {"uint64", uint64(10), int64(10)}, {"int", int(10), int64(10)}, {"int8", int8(10), int64(10)}, {"int16", int16(10), int64(10)}, {"int32", int32(10), int64(10)}, {"int64", int64(10), int64(10)}, {"float64", 10.1, float64(10.1)}, {"null", nil, nil}, {"document", document.NewFieldBuffer().Add("a", types.NewIntegerValue(10)), document.NewFieldBuffer().Add("a", types.NewIntegerValue(10))}, {"array", document.NewValueBuffer(types.NewIntegerValue(10)), document.NewValueBuffer(types.NewIntegerValue(10))}, {"time", now, now.Format(time.RFC3339Nano)}, {"bytes", myBytes("bar"), []byte("bar")}, {"string", myString("bar"), "bar"}, {"myUint", myUint(10), int64(10)}, {"myUint16", myUint16(500), int64(500)}, {"myUint32", myUint32(90000), int64(90000)}, {"myUint64", myUint64(100), int64(100)}, {"myInt", myInt(7), int64(7)}, {"myInt8", myInt8(3), int64(3)}, {"myInt16", myInt16(500), int64(500)}, {"myInt64", myInt64(10), int64(10)}, {"myFloat64", myFloat64(10.1), float64(10.1)}, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { v, err := document.NewValue(test.value) assert.NoError(t, err) require.Equal(t, test.expected, v.V()) }) } }
TestNewValue
gay.py
import disnake as discord import random from disnake.ext import commands
class Gay(commands.Cog): def __init__(self, client): self.client = client @commands.command() @block.block() async def gay(self, ctx, user: discord.Member = None): if not user: user = ctx.author embed = discord.Embed(color = 9579219) embed.add_field(name = main.get_lang(ctx.guild, 'GAY_FIELD_TITLE'), value = main.get_lang(ctx.guild, 'GAY_FIELD_VALUE').format(user.mention, random.randint(1,100))) await ctx.reply(embed = embed) def setup(client): client.add_cog(Gay(client))
from api.check import utils, block from api.server import base, main
problem33_SR.py
__author__ = 'patras' from domain_searchAndRescue import * from timer import DURATION from state import state def GetCostOfMove(r, l1, l2, dist):
DURATION.TIME = { 'giveSupportToPerson': 15, 'clearLocation': 5, 'inspectPerson': 20, 'moveEuclidean': GetCostOfMove, 'moveCurved': GetCostOfMove, 'moveManhattan': GetCostOfMove, 'fly': 15, 'inspectLocation': 5, 'transfer': 2, 'replenishSupplies': 4, 'captureImage': 2, 'changeAltitude': 3, 'deadEnd': 1, } DURATION.COUNTER = { 'giveSupportToPerson': 15, 'clearLocation': 5, 'inspectPerson': 20, 'moveEuclidean': GetCostOfMove, 'moveCurved': GetCostOfMove, 'moveManhattan': GetCostOfMove, 'fly': 15, 'inspectLocation': 5, 'transfer': 2, 'replenishSupplies': 4, 'captureImage': 2, 'changeAltitude': 3, 'deadEnd': 1, } rv.WHEELEDROBOTS = ['w1', 'w2'] rv.DRONES = ['a1'] rv.OBSTACLES = { (24, 21)} def ResetState(): state.loc = {'w1': (24,19), 'w2': (23,29), 'p1': (12,21), 'a1': (24,10)} state.hasMedicine = {'a1': 0, 'w1': 0, 'w2': 0} state.robotType = {'w1': 'wheeled', 'a1': 'uav', 'w2': 'wheeled'} state.status = {'w1': 'free', 'w2': 'free', 'a1': UNK, 'p1': UNK, (12,21): UNK} state.altitude = {'a1': 'high'} state.currentImage = {'a1': None} state.realStatus = {'w1': 'OK', 'p1': 'OK', 'w2': 'OK', 'a1': OK, (12, 21): 'hasDebri'} state.realPerson = {(12,21): 'p1'} state.newRobot = {1: None} state.weather = {(12,21): "rainy"} tasks = { 2: [['survey', 'a1', (12,21)]] } eventsEnv = { }
return dist
main.go
// Copyright 2017 The Wuffs Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // wuffs is a tool for managing Wuffs source code. package main import ( "bytes" "flag" "fmt" "io/ioutil"
"os" "path/filepath" "sort" "strings" "github.com/google/wuffs/lang/wuffsroot" ) var commands = []struct { name string do func(wuffsRoot string, args []string) error }{ {"bench", doBench}, {"gen", doGen}, {"genlib", doGenlib}, {"test", doTest}, } func usage() { fmt.Fprintf(os.Stderr, `Wuffs is a tool for managing Wuffs source code. Usage: wuffs command [arguments] The commands are: bench benchmark packages gen generate code for packages and dependencies genlib generate software libraries test test packages `) } func main() { if err := main1(); err != nil { os.Stderr.WriteString(err.Error() + "\n") os.Exit(1) } } func main1() error { flag.Usage = usage flag.Parse() wuffsRoot, err := wuffsroot.Value() if err != nil { return err } if args := flag.Args(); len(args) > 0 { for _, c := range commands { if args[0] == c.name { return c.do(wuffsRoot, args[1:]) } } } usage() os.Exit(1) return nil } func findFiles(qualDirname string, suffix string) (filenames []string, err error) { filenames, err = findFiles1(nil, qualDirname, suffix) if err != nil { return nil, err } sort.Strings(filenames) return filenames, nil } func findFiles1(dstQF []string, qualDirname string, suffix string) (qualFilenames []string, err error) { dstQF, relDirnames, err := appendDir(dstQF, qualDirname, suffix, true) if err != nil { return nil, err } for _, d := range relDirnames { dstQF, err = findFiles1(dstQF, filepath.Join(qualDirname, d), suffix) if err != nil { return nil, err } } return dstQF, nil } func listDir(qualDirname string, suffix string, returnSubdirs bool) (qualFilenames []string, relDirnames []string, err error) { qualFilenames, relDirnames, err = appendDir(nil, qualDirname, suffix, returnSubdirs) if err != nil { return nil, nil, err } sort.Strings(qualFilenames) sort.Strings(relDirnames) return qualFilenames, relDirnames, nil } func appendDir(dstQF []string, qualDirname string, suffix string, returnSubdirs bool) (qualFilenames []string, relDirnames []string, err error) { f, err := os.Open(qualDirname) if err != nil { return nil, nil, err } defer f.Close() infos, err := f.Readdir(-1) if err != nil { return nil, nil, err } for _, o := range infos { name := o.Name() if o.IsDir() { if returnSubdirs { relDirnames = append(relDirnames, name) } } else if strings.HasSuffix(name, suffix) { dstQF = append(dstQF, filepath.Join(qualDirname, name)) } } return dstQF, relDirnames, nil } func writeFile(filename string, contents []byte) error { if existing, err := ioutil.ReadFile(filename); err == nil && bytes.Equal(existing, contents) { fmt.Println("gen unchanged: ", filename) return nil } if err := os.MkdirAll(filepath.Dir(filename), 0755); err != nil { return err } if err := ioutil.WriteFile(filename, contents, 0644); err != nil { return err } fmt.Println("gen wrote: ", filename) return nil } const ( langsDefault = "c" langsUsage = `comma-separated list of target languages (file extensions), e.g. 
"c,go,rs"` skipgenDefault = false skipgenUsage = `whether to skip automatically generating code when testing` skipgendepsDefault = false skipgendepsUsage = `whether to skip automatically generating packages' dependencies` ) func parseLangs(commaSeparated string) ([]string, error) { ret := []string(nil) for _, s := range strings.Split(commaSeparated, ",") { if !validName(s) { return nil, fmt.Errorf(`invalid lang %q, not in [a-z0-9]+`, s) } ret = append(ret, s) } return ret, nil } func validName(s string) bool { if len(s) == 0 { return false } for _, c := range s { if (c < '0' || '9' < c) && (c < 'a' || 'z' < c) { return false } } return true }
peer_info.rs
use super::client::Client;
use super::score::Score;
use super::PeerSyncStatus;
use crate::rpc::MetaData;
use crate::Multiaddr;
use serde::{
    ser::{SerializeStructVariant, Serializer},
    Serialize,
};
use std::time::Instant;
use types::{EthSpec, SubnetId};
use PeerConnectionStatus::*;

/// Information about a given connected peer.
#[derive(Clone, Debug, Serialize)]
#[serde(bound = "T: EthSpec")]
pub struct PeerInfo<T: EthSpec> {
    /// The health status of the peer.
    _status: PeerStatus,
    /// The peer's reputation.
    pub score: Score,
    /// Client managing this peer
    pub client: Client,
    /// Connection status of this peer
    pub connection_status: PeerConnectionStatus,
    /// The known listening addresses of this peer.
    pub listening_addresses: Vec<Multiaddr>,
    /// The current syncing state of the peer. The state may be determined after its initial
    /// connection.
    pub sync_status: PeerSyncStatus,
    /// The ENR subnet bitfield of the peer. This may be determined after its initial
    /// connection.
    pub meta_data: Option<MetaData<T>>,
    /// The time we would like to retain this peer. After this time, the peer is no longer
    /// necessary.
    #[serde(skip)]
    pub min_ttl: Option<Instant>,
}

impl<TSpec: EthSpec> Default for PeerInfo<TSpec> {
    fn default() -> PeerInfo<TSpec> {
        PeerInfo {
            _status: Default::default(),
            score: Score::default(),
            client: Client::default(),
            connection_status: Default::default(),
            listening_addresses: vec![],
            sync_status: PeerSyncStatus::Unknown,
            meta_data: None,
            min_ttl: None,
        }
    }
}

impl<T: EthSpec> PeerInfo<T> {
    /// Returns if the peer is subscribed to a given `SubnetId`
    pub fn on_subnet(&self, subnet_id: SubnetId) -> bool {
        if let Some(meta_data) = &self.meta_data {
            return meta_data
                .attnets
                .get(*subnet_id as usize)
                .unwrap_or_else(|_| false);
        }
        false
    }
}

#[derive(Clone, Debug, Serialize)]
/// The current health status of the peer.
pub enum PeerStatus {
    /// The peer is healthy.
    Healthy,
    /// The peer is clogged. It has not been responding to requests on time.
    _Clogged,
}

impl Default for PeerStatus {
    fn default() -> Self
} /// Connection Status of the peer. #[derive(Debug, Clone)] pub enum PeerConnectionStatus { /// The peer is connected. Connected { /// number of ingoing connections. n_in: u8, /// number of outgoing connections. n_out: u8, }, /// The peer has disconnected. Disconnected { /// last time the peer was connected or discovered. since: Instant, }, /// The peer has been banned and is disconnected. Banned { /// moment when the peer was banned. since: Instant, }, /// We are currently dialing this peer. Dialing { /// time since we last communicated with the peer. since: Instant, }, /// The connection status has not been specified. Unknown, } /// Serialization for http requests. impl Serialize for PeerConnectionStatus { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { match self { Connected { n_in, n_out } => { let mut s = serializer.serialize_struct_variant("", 0, "Connected", 2)?; s.serialize_field("in", n_in)?; s.serialize_field("out", n_out)?; s.end() } Disconnected { since } => { let mut s = serializer.serialize_struct_variant("", 1, "Disconnected", 1)?; s.serialize_field("since", &since.elapsed().as_secs())?; s.end() } Banned { since } => { let mut s = serializer.serialize_struct_variant("", 2, "Banned", 1)?; s.serialize_field("since", &since.elapsed().as_secs())?; s.end() } Dialing { since } => { let mut s = serializer.serialize_struct_variant("", 3, "Dialing", 1)?; s.serialize_field("since", &since.elapsed().as_secs())?; s.end() } Unknown => serializer.serialize_unit_variant("", 4, "Unknown"), } } } impl Default for PeerConnectionStatus { fn default() -> Self { PeerConnectionStatus::Unknown } } impl PeerConnectionStatus { /// Checks if the status is connected. pub fn is_connected(&self) -> bool { match self { PeerConnectionStatus::Connected { .. } => true, _ => false, } } /// Checks if the status is connected. pub fn is_dialing(&self) -> bool { match self { PeerConnectionStatus::Dialing { .. } => true, _ => false, } } /// The peer is either connected or in the process of being dialed. pub fn is_connected_or_dialing(&self) -> bool { self.is_connected() || self.is_dialing() } /// Checks if the status is banned. pub fn is_banned(&self) -> bool { match self { PeerConnectionStatus::Banned { .. } => true, _ => false, } } /// Checks if the status is disconnected. pub fn is_disconnected(&self) -> bool { match self { Disconnected { .. } => true, _ => false, } } /// Modifies the status to Connected and increases the number of ingoing /// connections by one pub fn connect_ingoing(&mut self) { match self { Connected { n_in, .. } => *n_in += 1, Disconnected { .. } | Banned { .. } | Dialing { .. } | Unknown => { *self = Connected { n_in: 1, n_out: 0 } } } } /// Modifies the status to Connected and increases the number of outgoing /// connections by one pub fn connect_outgoing(&mut self) { match self { Connected { n_out, .. } => *n_out += 1, Disconnected { .. } | Banned { .. } | Dialing { .. } | Unknown => { *self = Connected { n_in: 0, n_out: 1 } } } } /// Modifies the status to Disconnected and sets the last seen instant to now pub fn disconnect(&mut self) { *self = Disconnected { since: Instant::now(), }; } /// Modifies the status to Banned pub fn ban(&mut self) { *self = Banned { since: Instant::now(), }; } /// The score system has unbanned the peer. 
Update the connection status pub fn unban(&mut self) { if let PeerConnectionStatus::Banned { since } = self { *self = PeerConnectionStatus::Disconnected { since: *since } } } pub fn connections(&self) -> (u8, u8) { match self { Connected { n_in, n_out } => (*n_in, *n_out), _ => (0, 0), } } }
{ PeerStatus::Healthy }
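// A minimal Go sketch of the connection-count bookkeeping that
// connect_ingoing/connect_outgoing perform above: any non-connected state is
// replaced by a fresh Connected state before counting. The struct shape is a
// simplification for illustration, not the real lighthouse type.
package main

import "fmt"

type connStatus struct {
	connected bool
	nIn, nOut uint8
}

func (s *connStatus) connectIngoing() {
	if !s.connected {
		*s = connStatus{connected: true} // reset counts, like the Rust match arms
	}
	s.nIn++
}

func (s *connStatus) connectOutgoing() {
	if !s.connected {
		*s = connStatus{connected: true}
	}
	s.nOut++
}

func main() {
	var s connStatus
	s.connectIngoing()
	s.connectOutgoing()
	s.connectOutgoing()
	fmt.Printf("in=%d out=%d\n", s.nIn, s.nOut) // in=1 out=2
}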
test_settings.py
#!/usr/bin/env python
from .utils import *


# This struct's endianness follows the "target" endianness setting,
class TargetStruct(Struct):
    a = u16(0xAABB)


# while this struct's endianness is always big.
class SpecificStruct(Struct):
    a = u16_be(0xAABB)


class SettingsTests(HydrasTestCase):
    def
(self): s = SpecificStruct() h = TargetStruct() # 1. Global - Make sure that the serialized struct reacts to the global settings. HydraSettings.target_endian = Endianness.LITTLE self.assertEqual(h.serialize(), b'\xBB\xAA') HydraSettings.target_endian = Endianness.BIG self.assertEqual(h.serialize(), b'\xAA\xBB') # 2. Serialization-settings - Make sure that the struct uses the overriden endianness HydraSettings.target_endian = Endianness.LITTLE self.assertEqual(h.serialize(HydraSettings(target_endian=Endianness.BIG)), b'\xAA\xBB') self.assertEqual(h, TargetStruct.deserialize(b'\xAA\xBB', HydraSettings(target_endian=Endianness.BIG))) HydraSettings.target_endian = Endianness.BIG self.assertEqual(h, TargetStruct.deserialize(b'\xBB\xAA', HydraSettings(target_endian=Endianness.LITTLE))) # 3. Field-settings - Make sure that the BE fields ignore any settings HydraSettings.target_endian = Endianness.LITTLE self.assertEqual(s.serialize(), b'\xAA\xBB') HydraSettings.target_endian = Endianness.BIG self.assertEqual(s.serialize(), b'\xAA\xBB') self.assertEqual(s.serialize(HydraSettings(target_endian=Endianness.BIG)), b'\xAA\xBB') self.assertEqual(s.serialize(HydraSettings(target_endian=Endianness.LITTLE)), b'\xAA\xBB') self.assertEqual(SpecificStruct.deserialize(b'\xAA\xBB', HydraSettings(target_endian=Endianness.BIG)), s) self.assertEqual(SpecificStruct.deserialize(b'\xAA\xBB', HydraSettings(target_endian=Endianness.LITTLE)), s) if __name__ == '__main__': unittest.main()
test_priority
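// A Go sketch of the endianness distinction the Hydras tests above exercise,
// using encoding/binary from the standard library; 0xAABB mirrors the test
// value, everything else is illustrative.
package main

import (
	"encoding/binary"
	"fmt"
)

func main() {
	le := make([]byte, 2)
	be := make([]byte, 2)
	binary.LittleEndian.PutUint16(le, 0xAABB)
	binary.BigEndian.PutUint16(be, 0xAABB)
	fmt.Printf("little-endian: % X\n", le) // BB AA
	fmt.Printf("big-endian:    % X\n", be) // AA BB
}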
table_iter.go
package sql

import (
	"context"
	"io"
)

// TableRowIter is an iterator over the rows in a table's partitions.
type TableRowIter struct {
	ctx        *Context
	table      Table
	partitions PartitionIter
	partition  Partition
	rows       RowIter
}

// NewTableRowIter returns a new iterator over the rows in the partitions of the table given.
func
(ctx *Context, table Table, partitions PartitionIter) *TableRowIter { return &TableRowIter{ctx: ctx, table: table, partitions: partitions} } func (i *TableRowIter) Next() (Row, error) { select { case <-i.ctx.Done(): return nil, context.Canceled default: } if i.partition == nil { partition, err := i.partitions.Next() if err != nil { if err == io.EOF { if e := i.partitions.Close(); e != nil { return nil, e } } return nil, err } i.partition = partition } if i.rows == nil { rows, err := i.table.PartitionRows(i.ctx, i.partition) if err != nil { return nil, err } i.rows = rows } row, err := i.rows.Next() if err != nil && err == io.EOF { if err = i.rows.Close(); err != nil { return nil, err } i.partition = nil i.rows = nil return i.Next() } return row, err } func (i *TableRowIter) Close() error { if i.rows != nil { if err := i.rows.Close(); err != nil { _ = i.partitions.Close() return err } } return i.partitions.Close() }
NewTableRowIter
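// A self-contained sketch of the iteration pattern TableRowIter implements
// above: drain rows from the current partition and, on io.EOF, advance to the
// next partition until the partition iterator is itself exhausted. The slice
// types are stand-ins for the real go-mysql-server interfaces.
package main

import (
	"fmt"
	"io"
)

type partition struct {
	rows []string
	pos  int
}

func (p *partition) next() (string, error) {
	if p.pos >= len(p.rows) {
		return "", io.EOF
	}
	r := p.rows[p.pos]
	p.pos++
	return r, nil
}

func main() {
	parts := []*partition{
		{rows: []string{"a", "b"}},
		{rows: nil}, // empty partitions are skipped transparently
		{rows: []string{"c"}},
	}
	i := 0
	for i < len(parts) {
		row, err := parts[i].next()
		if err == io.EOF {
			i++ // move on to the next partition, as TableRowIter.Next does
			continue
		}
		fmt.Println(row) // a, b, c
	}
}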
mapper.js
'use strict' const reduce = require('lodash.reduce') const normalize = require('./normalize') function
(item) { const normalizedItem = normalize(item) const {name, image, link} = normalizedItem const price = normalizedItem.newPrice || normalizedItem.price if (!price) return const title = `${name} €${price}` return { title, name, image, link, price } } function getItems (collection) { const addItem = (acc, item) => { const newItem = getItem(item) if (newItem) acc.push(newItem) return acc } return reduce(collection, addItem, []) } module.exports = getItems module.exports.getItems = getItems module.exports.getItem = getItem
getItem
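// A Go sketch of the filter-while-mapping shape getItems has above (a reduce
// with a conditional push): items without a price are dropped rather than
// mapped. The item fields are illustrative.
package main

import "fmt"

type item struct {
	Name  string
	Price float64 // zero stands in for a missing price
}

func getItems(items []item) []string {
	var titles []string
	for _, it := range items {
		if it.Price == 0 {
			continue // mirrors getItem returning undefined
		}
		titles = append(titles, fmt.Sprintf("%s €%.2f", it.Name, it.Price))
	}
	return titles
}

func main() {
	fmt.Println(getItems([]item{{"shirt", 19.9}, {"hat", 0}, {"shoes", 49.5}}))
}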
clusterinput_types.go
/* Copyright 2021. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package v1alpha2 import ( "bytes" "fmt" "reflect" "sort" "github.com/fluent/fluent-operator/apis/fluentbit/v1alpha2/plugins" "github.com/fluent/fluent-operator/apis/fluentbit/v1alpha2/plugins/input" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) // EDIT THIS FILE! THIS IS SCAFFOLDING FOR YOU TO OWN! // NOTE: json tags are required. Any new fields you add must have json tags for the fields to be serialized. // InputSpec defines the desired state of ClusterInput type InputSpec struct { // A user friendly alias name for this input plugin. // Used in metrics for distinction of each configured input. Alias string `json:"alias,omitempty"` // Dummy defines Dummy Input configuration. Dummy *input.Dummy `json:"dummy,omitempty"` // Tail defines Tail Input configuration. Tail *input.Tail `json:"tail,omitempty"` // Systemd defines Systemd Input configuration. Systemd *input.Systemd `json:"systemd,omitempty"` } // +kubebuilder:object:root=true // +kubebuilder:resource:shortName=cfbi,scope=Cluster // +genclient // +genclient:nonNamespaced // ClusterInput is the Schema for the inputs API type ClusterInput struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` Spec InputSpec `json:"spec,omitempty"` } // +kubebuilder:object:generate:=false // InputByName implements sort.Interface for []ClusterInput based on the Name field. type InputByName []ClusterInput func (a InputByName) Len() int { return len(a) } func (a InputByName) Swap(i, j int) { a[i], a[j] = a[j], a[i] } func (a InputByName) Less(i, j int) bool { return a[i].Name < a[j].Name } // +kubebuilder:object:root=true // ClusterInputList contains a list of ClusterInput type ClusterInputList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []ClusterInput `json:"items"` } func (list ClusterInputList) Load(sl plugins.SecretLoader) (string, error) { var buf bytes.Buffer sort.Sort(InputByName(list.Items)) for _, item := range list.Items { merge := func(p plugins.Plugin) error { if p == nil || reflect.ValueOf(p).IsNil() { return nil } buf.WriteString("[Input]\n") buf.WriteString(fmt.Sprintf(" Name %s\n", p.Name())) if item.Spec.Alias != "" { buf.WriteString(fmt.Sprintf(" Alias %s\n", item.Spec.Alias)) } kvs, err := p.Params(sl) if err != nil { return err } buf.WriteString(kvs.String()) return nil } for i := 0; i < reflect.ValueOf(item.Spec).NumField(); i++ { p, _ := reflect.ValueOf(item.Spec).Field(i).Interface().(plugins.Plugin) if err := merge(p); err != nil { return "", err } } } return buf.String(), nil } func
() { SchemeBuilder.Register(&ClusterInput{}, &ClusterInputList{}) }
init
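// A minimal sketch of the reflection walk ClusterInputList.Load performs
// above: iterate the spec's fields and act only on the non-nil plugin
// pointers. The types here are stand-ins for the fluent-operator ones.
package main

import (
	"fmt"
	"reflect"
)

type dummy struct{ Rate int }
type tail struct{ Path string }

type inputSpec struct {
	Alias string
	Dummy *dummy
	Tail  *tail
}

func main() {
	spec := inputSpec{Alias: "in0", Dummy: &dummy{Rate: 5}}
	v := reflect.ValueOf(spec)
	for i := 0; i < v.NumField(); i++ {
		f := v.Field(i)
		if f.Kind() == reflect.Ptr && !f.IsNil() {
			fmt.Printf("active plugin: %s\n", v.Type().Field(i).Name) // Dummy
		}
	}
}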
baseapp.py
# encoding: utf-8 """ The Base Application class for ipyparallel apps """ import os import logging import re import sys from traitlets.config.application import catch_config_error, LevelFormatter from IPython.core import release from IPython.core.crashhandler import CrashHandler from IPython.core.application import ( BaseIPythonApplication, base_aliases as base_ip_aliases, base_flags as base_ip_flags ) from IPython.utils.path import expand_path from IPython.utils.process import check_pid from ipython_genutils import py3compat from ipython_genutils.py3compat import unicode_type from .._version import __version__ from traitlets import Unicode, Bool, Instance, Dict, observe #----------------------------------------------------------------------------- # Module errors #----------------------------------------------------------------------------- class PIDFileError(Exception): pass #----------------------------------------------------------------------------- # Crash handler for this application #----------------------------------------------------------------------------- class ParallelCrashHandler(CrashHandler): """sys.excepthook for IPython itself, leaves a detailed report on disk.""" def __init__(self, app): contact_name = release.authors['Min'][0] contact_email = release.author_email bug_tracker = 'https://github.com/ipython/ipython/issues' super(ParallelCrashHandler,self).__init__( app, contact_name, contact_email, bug_tracker ) #----------------------------------------------------------------------------- # Main application #----------------------------------------------------------------------------- base_aliases = {} base_aliases.update(base_ip_aliases) base_aliases.update({ 'work-dir' : 'BaseParallelApplication.work_dir', 'log-to-file' : 'BaseParallelApplication.log_to_file', 'clean-logs' : 'BaseParallelApplication.clean_logs', 'log-url' : 'BaseParallelApplication.log_url', 'cluster-id' : 'BaseParallelApplication.cluster_id', }) base_flags = { 'log-to-file' : ( {'BaseParallelApplication' : {'log_to_file' : True}}, "send log output to a file" ) } base_flags.update(base_ip_flags) class BaseParallelApplication(BaseIPythonApplication): """The base Application for ipyparallel apps Primary extensions to BaseIPythonApplication: * work_dir * remote logging via pyzmq * IOLoop instance """ version = __version__ crash_handler_class = ParallelCrashHandler def _log_level_default(self): # temporarily override default_log_level to INFO return logging.INFO def _log_format_default(self): """override default log format to include time""" return u"%(asctime)s.%(msecs).03d [%(name)s]%(highlevel)s %(message)s" work_dir = Unicode(py3compat.getcwd(), config=True, help='Set the working dir for the process.' ) @observe('work_dir') def _work_dir_changed(self, change): self.work_dir = unicode_type(expand_path(change['new'])) log_to_file = Bool(config=True, help="whether to log to a file") clean_logs = Bool(False, config=True, help="whether to cleanup old logfiles before starting") log_url = Unicode('', config=True, help="The ZMQ URL of the iplogger to aggregate logging.") cluster_id = Unicode('', config=True, help="""String id to add to runtime files, to prevent name collisions when using multiple clusters with a single profile simultaneously. When set, files will be named like: 'ipcontroller-<cluster_id>-engine.json' Since this is text inserted into filenames, typical recommendations apply: Simple character strings are ideal, and spaces are not recommended (but should generally work). 
""" ) @observe('cluster_id') def _cluster_id_changed(self, change): if change['new']: self.name += '{}-{}'.format(self.__class__.name, change['new']) else: self.name = self.__class__.name def _config_files_default(self): return ['ipcontroller_config.py', 'ipengine_config.py', 'ipcluster_config.py'] loop = Instance('tornado.ioloop.IOLoop') def _loop_default(self): from ipyparallel.util import ioloop return ioloop.IOLoop.current() aliases = Dict(base_aliases) flags = Dict(base_flags) @catch_config_error def initialize(self, argv=None):
super(BaseParallelApplication, self).initialize(argv) self.to_work_dir() self.reinit_logging() def to_work_dir(self): wd = self.work_dir if unicode_type(wd) != py3compat.getcwd(): os.chdir(wd) self.log.info("Changing to working dir: %s" % wd) # This is the working dir by now. sys.path.insert(0, '') def reinit_logging(self): # Remove old log files log_dir = self.profile_dir.log_dir if self.clean_logs: for f in os.listdir(log_dir): if re.match(r'%s-\d+\.(log|err|out)' % self.name, f): try: os.remove(os.path.join(log_dir, f)) except (OSError, IOError): # probably just conflict from sibling process # already removing it pass if self.log_to_file: # Start logging to the new log file log_filename = self.name + u'-' + str(os.getpid()) + u'.log' logfile = os.path.join(log_dir, log_filename) open_log_file = open(logfile, 'w') else: open_log_file = None if open_log_file is not None: while self.log.handlers: self.log.removeHandler(self.log.handlers[0]) self._log_handler = logging.StreamHandler(open_log_file) self.log.addHandler(self._log_handler) else: self._log_handler = self.log.handlers[0] # Add timestamps to log format: self._log_formatter = LevelFormatter(self.log_format, datefmt=self.log_datefmt) self._log_handler.setFormatter(self._log_formatter) # do not propagate log messages to root logger # ipcluster app will sometimes print duplicate messages during shutdown # if this is 1 (default): self.log.propagate = False def write_pid_file(self, overwrite=False): """Create a .pid file in the pid_dir with my pid. This must be called after pre_construct, which sets `self.pid_dir`. This raises :exc:`PIDFileError` if the pid file exists already. """ pid_file = os.path.join(self.profile_dir.pid_dir, self.name + u'.pid') if os.path.isfile(pid_file): pid = self.get_pid_from_file() if not overwrite: raise PIDFileError( 'The pid file [%s] already exists. \nThis could mean that this ' 'server is already running with [pid=%s].' % (pid_file, pid) ) with open(pid_file, 'w') as f: self.log.info("Creating pid file: %s" % pid_file) f.write(repr(os.getpid())+'\n') def remove_pid_file(self): """Remove the pid file. This should be called at shutdown by registering a callback with :func:`reactor.addSystemEventTrigger`. This needs to return ``None``. """ pid_file = os.path.join(self.profile_dir.pid_dir, self.name + u'.pid') if os.path.isfile(pid_file): try: self.log.info("Removing pid file: %s" % pid_file) os.remove(pid_file) except: self.log.warn("Error removing the pid file: %s" % pid_file) def get_pid_from_file(self): """Get the pid from the pid file. If the pid file doesn't exist a :exc:`PIDFileError` is raised. """ pid_file = os.path.join(self.profile_dir.pid_dir, self.name + u'.pid') if os.path.isfile(pid_file): with open(pid_file, 'r') as f: s = f.read().strip() try: pid = int(s) except: raise PIDFileError("invalid pid file: %s (contents: %r)"%(pid_file, s)) return pid else: raise PIDFileError('pid file not found: %s' % pid_file) def check_pid(self, pid): try: return check_pid(pid) except Exception: self.log.warn( "Could not determine whether pid %i is running. " " Making the likely assumption that it is."%pid ) return True
"""initialize the app"""
Combine_Images.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
-----------------------------------------------------------------------------
  Copyright (C) 2006-2014 University of Dundee. All rights reserved.


  This program is free software; you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation; either version 2 of the License, or
  (at your option) any later version.
  This program is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License along
  with this program; if not, write to the Free Software Foundation, Inc.,
  51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

------------------------------------------------------------------------------

This script takes a number of images (or Z-stacks) and merges them to create
additional C, T, Z dimensions.

@author Will Moore
&nbsp;&nbsp;&nbsp;&nbsp;
<a href="mailto:[email protected]">[email protected]</a>
@version 3.0
<small>
(<b>Internal version:</b> $Revision: $Date: $)
</small>
@since 3.0-Beta4.2
"""

import re
from numpy import zeros

import omero
import omero.scripts as scripts
from omero.gateway import BlitzGateway
import omero.constants
from omero.rtypes import rstring, rlong, robject
import omero.util.script_utils as script_utils

COLOURS = script_utils.COLOURS

DEFAULT_T_REGEX = "_T"
DEFAULT_Z_REGEX = "_Z"
DEFAULT_C_REGEX = "_C"

channel_regexes = {
    DEFAULT_C_REGEX: r'_C(?P<C>.+?)(_|$)',
    "C": r'C(?P<C>\w+?)',
    "_c": r'_c(?P<C>\w+?)',
    "_w": r'_w(?P<C>\w+?)',
    "None (single channel)": False}

z_regexes = {
    DEFAULT_Z_REGEX: r'_Z(?P<Z>\d+)',
    "Z": r'Z(?P<Z>\d+)',
    "_z": r'_z(?P<Z>\d+)',
    "None (single z section)": False}

time_regexes = {
    DEFAULT_T_REGEX: r'_T(?P<T>\d+)',
    "T": r'T(?P<T>\d+)',
    "_t": r'_t(?P<T>\d+)',
    "None (single time point)": False}


def get_plane(raw_pixel_store, pixels, the_z, the_c, the_t):
    """
    This method downloads the specified plane of the OMERO image and returns
    it as a numpy array.

    @param raw_pixel_store    The OMERO RawPixelsStore to download from
    @param pixels             The pixels object, with pixelsType
    @param the_z              The Z-section index of the plane
    @param the_c              The channel index of the plane
    @param the_t              The time-point index of the plane
""" # get the plane pixels_id = pixels.getId().getValue() raw_pixel_store.setPixelsId(pixels_id, True) return script_utils.download_plane( raw_pixel_store, pixels, the_z, the_c, the_t) def manually_assign_images(parameter_map, image_ids, source_z): size_z = source_z size_c = 1 size_t = 1 dims = [] dim_sizes = [1, 1, 1] # at least 1 in each dimension dim_map = {"C": "Size_C", "Z": "Size_Z", "T": "Size_T"} dimension_params = ["Dimension_1", "Dimension_2", "Dimension_3"] for i, d in enumerate(dimension_params): if d in parameter_map and len(parameter_map[d]) > 0: # First letter of 'Channel' or 'Time' or 'Z' dim = parameter_map[d][0] dims.append(dim) if dim == "Z" and source_z > 1: continue size_param = dim_map[dim] if size_param in parameter_map: dim_sizes[i] = parameter_map[size_param] else: dim_sizes[i] = len(image_ids) // \ (dim_sizes[0] * dim_sizes[1] * dim_sizes[2]) index = 0 image_map = {} # map of (z,c,t) : imageId for dim3 in range(dim_sizes[2]): for dim2 in range(dim_sizes[1]): for dim1 in range(dim_sizes[0]): if index >= len(image_ids): break z, c, t = (0, 0, 0) ddd = (dim1, dim2, dim3) # bit of a hack, but this somehow does my head in!! for i, d in enumerate(dims): if d == "C": c = ddd[i] size_c = max(size_c, c+1) elif d == "T": t = ddd[i] size_t = max(size_t, t+1) elif d == "Z": z = ddd[i] size_z = max(size_z, z+1) # handle Z stacks... if source_z > 1: for src_z in range(source_z): image_map[(src_z, c, t)] = (image_ids[index], src_z) else: image_map[(z, c, t)] = (image_ids[index], 0) index += 1 return (size_z, size_c, size_t, image_map) def assign_images_by_regex(parameter_map, image_ids, query_service, source_z, id_name_map=None): c = None regex_channel = channel_regexes[parameter_map["Channel_Name_Pattern"]] if regex_channel: c = re.compile(regex_channel) t = None regex_t = time_regexes[parameter_map["Time_Name_Pattern"]] if regex_t: t = re.compile(regex_t) z = None regex_z = z_regexes[parameter_map["Z_Name_Pattern"]] if regex_z: z = re.compile(regex_z) # other parameters we need to determine size_z = source_z size_t = 1 z_start = None # could be 0 or 1 ? t_start = None image_map = {} # map of (z,c,t) : imageId channels = [] if id_name_map is None: id_name_map = get_image_names(query_service, image_ids) # assign each (imageId,zPlane) to combined image (z,c,t) by name. for iid in image_ids: name = id_name_map[iid] if t: t_search = t.search(name) if c: c_search = c.search(name) if t is None or t_search is None: the_t = 0 else: the_t = int(t_search.group('T')) if c is None or c_search is None: c_name = "0" else: c_name = c_search.group('C') if c_name in channels: the_c = channels.index(c_name) else: the_c = len(channels) channels.append(c_name) size_t = max(size_t, the_t+1) if t_start is None: t_start = the_t else: t_start = min(t_start, the_t) # we have T and C now. Need to check if source images are Z stacks if source_z > 1: z_start = 0 for src_z in range(source_z): image_map[(src_z, the_c, the_t)] = (iid, src_z) else: if z: z_search = z.search(name) if z is None or z_search is None: the_z = 0 else: the_z = int(z_search.group('Z')) size_z = max(size_z, the_z+1) if z_start is None: z_start = the_z else: z_start = min(z_start, the_z) # every plane comes from z=0 image_map[(the_z, the_c, the_t)] = (iid, 0) # if indexes were 1-based (or higher), need to shift indexes accordingly. 
if t_start > 0 or z_start > 0: size_t = size_t-t_start size_z = size_z-z_start i_map = {} for key, value in image_map.items(): z, c, t = key i_map[(z-z_start, c, t-t_start)] = value else: i_map = image_map c_names = {} for c, name in enumerate(channels): c_names[c] = name return (size_z, c_names, size_t, i_map) def get_image_names(query_service, image_ids): id_string = ",".join([str(i) for i in image_ids]) query_string = "select i from Image i where i.id in (%s)" % id_string images = query_service.findAllByQuery(query_string, None) id_map = {} for i in images: iid = i.getId().getValue() name = i.getName().getValue() id_map[iid] = name return id_map def pick_pixel_sizes(pixel_sizes): """ Process a list of pixel sizes and pick sizes to set for new image. If we have different sizes from different images, return None """ pix_size = None for px in pixel_sizes: if px is None: continue if pix_size is None: pix_size = px else: # compare - if different, return None if (pix_size.getValue() != px.getValue() or pix_size.getUnit() != px.getUnit()): return None return pix_size def make_single_image(services, parameter_map, image_ids, dataset, colour_map): """ This takes the images specified by image_ids, sorts them in to Z,C,T dimensions according to parameters in the parameter_map, assembles them into a new Image, which is saved in dataset. """ if len(image_ids) == 0: return rendering_engine = services["renderingEngine"] query_service = services["queryService"] pixels_service = services["pixelsService"] raw_pixel_store = services["rawPixelStore"] raw_pixel_store_upload = services["rawPixelStoreUpload"] update_service = services["updateService"] container_service = services["containerService"] # Filter images by name if user has specified filter. id_name_map = None if "Filter_Names" in parameter_map: filter_string = parameter_map["Filter_Names"] if len(filter_string) > 0: id_name_map = get_image_names(query_service, image_ids) image_ids = [i for i in image_ids if id_name_map[i].find(filter_string) > -1] image_id = image_ids[0] # get pixels, with pixelsType, from the first image query_string = "select p from Pixels p join fetch p.image i join "\ "fetch p.pixelsType pt where i.id='%d'" % image_id pixels = query_service.findByQuery(query_string, None) # use the pixels type object we got from the first image. pixels_type = pixels.getPixelsType() # combined image will have same X and Y sizes... size_x = pixels.getSizeX().getValue() size_y = pixels.getSizeY().getValue() # if we have a Z stack, use this in new image (don't combine Z) source_z = pixels.getSizeZ().getValue() # Now we need to find where our planes are coming from. 
# image_map is a map of destination:source, defined as (newZ, newC,
    # newT):(imageId, z)
    if "Manually_Define_Dimensions" in parameter_map and \
            parameter_map["Manually_Define_Dimensions"]:
        size_z, size_c, size_t, image_map = manually_assign_images(
            parameter_map, image_ids, source_z)
        c_names = {}
    else:
        size_z, c_names, size_t, image_map = assign_images_by_regex(
            parameter_map, image_ids, query_service, source_z, id_name_map)
        size_c = len(c_names)

    if "Channel_Names" in parameter_map:
        for c, name in enumerate(parameter_map["Channel_Names"]):
            c_names[c] = name

    image_name = "combinedImage"
    description = "created from image Ids: %s" % image_ids

    channel_list = range(size_c)
    iid = pixels_service.createImage(size_x, size_y, size_z, size_t,
                                     channel_list, pixels_type, image_name,
                                     description)

    image = container_service.getImages("Image", [iid.getValue()], None)[0]

    pixels_id = image.getPrimaryPixels().getId().getValue()
    raw_pixel_store_upload.setPixelsId(pixels_id, True)

    pixel_sizes = {'x': [], 'y': []}
    for the_c in range(size_c):
        min_value = 0
        max_value = 0
        for the_z in range(size_z):
            for the_t in range(size_t):
                if (the_z, the_c, the_t) in image_map:
                    image_id, plane_z = image_map[(the_z, the_c, the_t)]
                    query_string = "select p from Pixels p join fetch "\
                        "p.image i join fetch p.pixelsType pt where "\
                        "i.id='%d'" % image_id
                    pixels = query_service.findByQuery(query_string, None)
                    plane_2d = get_plane(raw_pixel_store, pixels, plane_z,
                                         0, 0)
                    # Note pixel sizes (may be None)
                    pixel_sizes['x'].append(pixels.getPhysicalSizeX())
                    pixel_sizes['y'].append(pixels.getPhysicalSizeY())
                else:
                    plane_2d = zeros((size_y, size_x))
                script_utils.upload_plane(raw_pixel_store_upload, plane_2d,
                                          the_z, the_c, the_t)
                min_value = min(min_value, plane_2d.min())
                max_value = max(max_value, plane_2d.max())
        pixels_service.setChannelGlobalMinMax(pixels_id, the_c,
                                              float(min_value),
                                              float(max_value))
        rgba = COLOURS["White"]
        if the_c in colour_map:
            rgba = colour_map[the_c]
        script_utils.reset_rendering_settings(rendering_engine, pixels_id,
                                              the_c, min_value, max_value,
                                              rgba)

    # rename new channels
    pixels = rendering_engine.getPixels()
    # has channels loaded - (getting Pixels from image doesn't)
    i = 0
    for c in pixels.iterateChannels():
        # c is an instance of omero.model.ChannelI
        if i >= len(c_names):
            break
        lc = c.getLogicalChannel()  # returns omero.model.LogicalChannelI
        lc.setName(rstring(c_names[i]))
        update_service.saveObject(lc)
        i += 1

    # Set pixel sizes if known
    pix_size_x = pick_pixel_sizes(pixel_sizes['x'])
    pix_size_y = pick_pixel_sizes(pixel_sizes['y'])
    if pix_size_x is not None or pix_size_y is not None:
        # reload to avoid OptimisticLockException
        pixels = services["queryService"].get('Pixels',
                                              pixels.getId().getValue())
        if pix_size_x is not None:
            pixels.setPhysicalSizeX(pix_size_x)
        if pix_size_y is not None:
            pixels.setPhysicalSizeY(pix_size_y)
        services["updateService"].saveObject(pixels)

    # put the image in dataset, if specified.
if dataset and dataset.canLink():
        link = omero.model.DatasetImageLinkI()
        link.parent = omero.model.DatasetI(dataset.getId(), False)
        link.child = omero.model.ImageI(image.getId().getValue(), False)
        update_service.saveAndReturnObject(link)
    else:
        link = None

    return image, link


def combine_images(conn, parameter_map):

    # get the services we need
    services = {}
    services["containerService"] = conn.getContainerService()
    services["renderingEngine"] = conn.createRenderingEngine()
    services["queryService"] = conn.getQueryService()
    services["pixelsService"] = conn.getPixelsService()
    services["rawPixelStore"] = conn.c.sf.createRawPixelsStore()
    services["rawPixelStoreUpload"] = conn.c.sf.createRawPixelsStore()
    services["updateService"] = conn.getUpdateService()
    services["rawFileStore"] = conn.createRawFileStore()

    query_service = services["queryService"]

    colour_map = {}
    if "Channel_Colours" in parameter_map:
        for c, colour in enumerate(parameter_map["Channel_Colours"]):
            if colour in COLOURS:
                colour_map[c] = COLOURS[colour]

    # Get images or datasets
    message = ""
    objects, log_message = script_utils.get_objects(conn, parameter_map)
    message += log_message
    if not objects:
        return None, message

    # get the images IDs from list (in order) or dataset (sorted by name)
    output_images = []
    links = []

    data_type = parameter_map["Data_Type"]
    if data_type == "Image":
        dataset = None
        objects.sort(key=lambda x: (x.getName()))    # Sort images by name
        image_ids = [image.id for image in objects]
        # get dataset from first image
        query_string = "select i from Image i join fetch i.datasetLinks idl"\
            " join fetch idl.parent where i.id in (%s)" % image_ids[0]
        image = query_service.findByQuery(query_string, None)
        if image:
            for link in image.iterateDatasetLinks():
                ds = link.parent
                dataset = conn.getObject("Dataset", ds.getId().getValue())
                break    # only use 1st dataset
        new_img, link = make_single_image(services, parameter_map, image_ids,
                                          dataset, colour_map)
        if new_img:
            output_images.append(new_img)
        if link:
            links.append(link)
    else:
        for dataset in objects:
            images = list(dataset.listChildren())
            if not images:
                continue
            images.sort(key=lambda x: (x.getName()))
            image_ids = [i.getId() for i in images]
            new_img, link = make_single_image(services, parameter_map,
                                              image_ids, dataset, colour_map)
            if new_img:
                output_images.append(new_img)
            if link:
                links.append(link)

    # try and close any stateful services
    for s in services.values():
        try:
            s.close()
        except Exception:
            pass

    if output_images:
        if len(output_images) > 1:
            message += "%s new images created" % len(output_images)
        else:
            message += "New image created"
        if not links or not len(links) == len(output_images):
            message += " but could not be attached"
    else:
        message += "No image created"
    message += "."

    return output_images, message


def run_script():
    """
    The main entry point of the script, as called by the client via the
    scripting service, passing the required parameters.
    """
    ckeys = list(COLOURS.keys())
    ckeys.sort()
    c_options = [rstring(col) for col in ckeys]

    data_types = [rstring('Dataset'), rstring('Image')]
    first_dim = [rstring('Time'), rstring('Channel'), rstring('Z')]
    extra_dims = [rstring(''), rstring('Time'), rstring('Channel'),
                  rstring('Z')]
    channel_regs = [rstring(r) for r in channel_regexes.keys()]
    z_regs = [rstring(r) for r in z_regexes.keys()]
    t_regs = [rstring(r) for r in time_regexes.keys()]

    client = scripts.client(
        'Combine_Images.py',
        """Combine several single-plane images (or Z-stacks) into one with \
greater Z, C, T dimensions.
See http://help.openmicroscopy.org/scripts.html""",

        scripts.String(
            "Data_Type", optional=False, grouping="1",
            description="Use all the images in specified 'Datasets' or choose"
            " individual 'Images'.", values=data_types, default="Image"),

        scripts.List(
            "IDs", optional=False, grouping="2",
            description="List of Dataset IDs or Image IDs to "
            "combine.").ofType(rlong(0)),

        scripts.String(
            "Filter_Names", grouping="2.1",
            description="Filter the images by names that contain this value"),

        scripts.Bool(
            "Auto_Define_Dimensions", grouping="3", default=True,
            description="""Choose new dimensions with respect to the order of
            the input images. See URL above."""),

        scripts.String(
            "Channel_Name_Pattern", grouping="3.1", default=DEFAULT_C_REGEX,
            values=channel_regs,
            description="""Auto-pick images by channel in the image name"""),

        scripts.String(
            "Z_Name_Pattern", grouping="3.2", default=DEFAULT_Z_REGEX,
            values=z_regs,
            description="""Auto-pick images by Z-index in the image name"""),
description="""Auto-pick images by T-index in the image name"""), scripts.Bool( "Manually_Define_Dimensions", grouping="4", default=False, description="""Choose new dimensions with respect to the order of" " the input images. See URL above."""), scripts.String( "Dimension_1", grouping="4.1", description="The first Dimension to change", values=first_dim), scripts.String( "Dimension_2", grouping="4.2", values=extra_dims, default="", description="The second Dimension to change. Only specify this if" " combining multiple dimensions."), scripts.String( "Dimension_3", grouping="4.3", values=extra_dims, default="", description="The third Dimension to change. Only specify this if" " combining multiple dimensions."), scripts.Int( "Size_Z", grouping="4.4", description="Number of Z planes in new image", min=1), scripts.Int( "Size_C", grouping="4.5", description="Number of channels in new image", min=1), scripts.Int( "Size_T", grouping="4.6", description="Number of time-points in new image", min=1), scripts.List( "Channel_Colours", grouping="7", description="List of Colors for channels.", default="White", values=c_options).ofType(rstring("")), scripts.List( "Channel_Names", grouping="8", description="List of Names for channels in the new image."), version="4.2.0", authors=["William Moore", "OME Team"], institutions=["University of Dundee"], contact="[email protected]", ) try: parameter_map = client.getInputs(unwrap=True) conn = BlitzGateway(client_obj=client) # create the combined image images, message = combine_images(conn, parameter_map) client.setOutput("Message", rstring(message)) if images: if len(images) == 1: client.setOutput("Combined_Image", robject(images[0])) elif len(images) > 1: client.setOutput("First_Image", robject(images[0])) finally: client.closeSession() if __name__ == "__main__": run_script()
scripts.String( "Time_Name_Pattern", grouping="3.3", default=DEFAULT_T_REGEX, values=t_regs,
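// A Go sketch of the filename matching assign_images_by_regex performs above;
// the patterns mirror the script's default _C/_Z/_T regexes, and the sample
// filename is made up for illustration.
package main

import (
	"fmt"
	"regexp"
)

func main() {
	reC := regexp.MustCompile(`_C(?P<C>.+?)(_|$)`)
	reZ := regexp.MustCompile(`_Z(?P<Z>\d+)`)
	reT := regexp.MustCompile(`_T(?P<T>\d+)`)

	name := "sample_CGFP_Z03_T12.tif"
	fmt.Println("C:", reC.FindStringSubmatch(name)[1]) // GFP
	fmt.Println("Z:", reZ.FindStringSubmatch(name)[1]) // 03
	fmt.Println("T:", reT.FindStringSubmatch(name)[1]) // 12
}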
generator.go
package obfuscator

import (
	"fmt"

	"k8s.io/klog/v2"
)

// generator holds the fields required for the consistent and static obfuscations, plus the count of
// consistent obfuscations generated so far. Its methods return the appropriate replacement for a
// given replacementType.
type generator struct {
	template string
	static   string
	count    int
}

func (g *generator) generateConsistentReplacement() string {
	g.count++
	if g.count > maximumSupportedObfuscations {
		klog.Exitf("maximum number of obfuscations exceeded: %d", maximumSupportedObfuscations)
	}
	r := fmt.Sprintf(g.template, g.count)
	return r
}

func (g *generator) generateStaticReplacement() string {
	return g.static
}

// newGenerator creates a generator object and populates it with the provided arguments
func newGenerator(template, static string) *generator
{ return &generator{template: template, static: static} }
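// A runnable sketch of how the generator above behaves, with the type
// re-declared locally so the example is self-contained (the guard against
// maximumSupportedObfuscations is omitted); the template and static values
// are illustrative.
package main

import "fmt"

type generator struct {
	template string
	static   string
	count    int
}

func (g *generator) generateConsistentReplacement() string {
	g.count++
	return fmt.Sprintf(g.template, g.count)
}

func (g *generator) generateStaticReplacement() string { return g.static }

func main() {
	g := &generator{template: "obfuscated-%04d", static: "x-masked-x"}
	fmt.Println(g.generateConsistentReplacement()) // obfuscated-0001
	fmt.Println(g.generateConsistentReplacement()) // obfuscated-0002
	fmt.Println(g.generateStaticReplacement())     // x-masked-x
}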
libcollections_linked_list_rs_0000.rs
fn main() { use std::collections::LinkedList; let mut a = LinkedList::new(); let mut b = LinkedList::new(); a.push_back(1); a.push_back(2); b.push_back(3); b.push_back(4);
a.append(&mut b); for e in &a { println!("{}", e); // prints 1, then 2, then 3, then 4 } println!("{}", b.len()); // prints 0 }
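// For contrast with LinkedList::append above, a sketch using Go's
// container/list: PushBackList copies the other list's elements instead of
// moving them, so b keeps its length afterwards.
package main

import (
	"container/list"
	"fmt"
)

func main() {
	a, b := list.New(), list.New()
	a.PushBack(1)
	a.PushBack(2)
	b.PushBack(3)
	b.PushBack(4)

	a.PushBackList(b) // inserts a copy of b at the back of a

	for e := a.Front(); e != nil; e = e.Next() {
		fmt.Println(e.Value) // prints 1, then 2, then 3, then 4
	}
	fmt.Println(b.Len()) // prints 4: unlike Rust's append, b is unchanged
}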
selem.py
""" :author: Damian Eads, 2009 :license: modified BSD """ import numpy as np from scipy import ndimage from skimage import draw def square(width, dtype=np.uint8): """Generates a flat, square-shaped structuring element. Every pixel along the perimeter has a chessboard distance no greater than radius (radius=floor(width/2)) pixels. Parameters ---------- width : int The width and height of the square. Other Parameters ---------------- dtype : data-type The data type of the structuring element. Returns ------- selem : ndarray A structuring element consisting only of ones, i.e. every pixel belongs to the neighborhood. """ return np.ones((width, width), dtype=dtype) def rectangle(width, height, dtype=np.uint8): """Generates a flat, rectangular-shaped structuring element. Every pixel in the rectangle generated for a given width and given height belongs to the neighboorhood. Parameters ---------- width : int The width of the rectangle. height : int The height of the rectangle. Other Parameters ---------------- dtype : data-type The data type of the structuring element. Returns ------- selem : ndarray A structuring element consisting only of ones, i.e. every pixel belongs to the neighborhood. """ return np.ones((width, height), dtype=dtype) def
(radius, dtype=np.uint8): """Generates a flat, diamond-shaped structuring element. A pixel is part of the neighborhood (i.e. labeled 1) if the city block/manhattan distance between it and the center of the neighborhood is no greater than radius. Parameters ---------- radius : int The radius of the diamond-shaped structuring element. Other Parameters ---------------- dtype : data-type The data type of the structuring element. Returns ------- selem : ndarray The structuring element where elements of the neighborhood are 1 and 0 otherwise. """ L = np.arange(0, radius * 2 + 1) I, J = np.meshgrid(L, L) return np.array(np.abs(I - radius) + np.abs(J - radius) <= radius, dtype=dtype) def disk(radius, dtype=np.uint8): """Generates a flat, disk-shaped structuring element. A pixel is within the neighborhood if the euclidean distance between it and the origin is no greater than radius. Parameters ---------- radius : int The radius of the disk-shaped structuring element. Other Parameters ---------------- dtype : data-type The data type of the structuring element. Returns ------- selem : ndarray The structuring element where elements of the neighborhood are 1 and 0 otherwise. """ L = np.arange(-radius, radius + 1) X, Y = np.meshgrid(L, L) return np.array((X ** 2 + Y ** 2) <= radius ** 2, dtype=dtype) def ellipse(width, height, dtype=np.uint8): """Generates a flat, ellipse-shaped structuring element. Every pixel along the perimeter of ellipse satisfies the equation ``(x/width+1)**2 + (y/height+1)**2 = 1``. Parameters ---------- width : int The width of the ellipse-shaped structuring element. height : int The height of the ellipse-shaped structuring element. Other Parameters ---------------- dtype : data-type The data type of the structuring element. Returns ------- selem : ndarray The structuring element where elements of the neighborhood are 1 and 0 otherwise. Examples -------- >>> from skimage.morphology import selem >>> selem.ellipse(5, 3) array([[0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0]], dtype=uint8) """ selem = np.zeros((2 * height + 1, 2 * width + 1), dtype=dtype) rows, cols = draw.ellipse(height, width, height + 1, width + 1) selem[rows, cols] = 1 return selem def cube(width, dtype=np.uint8): """ Generates a cube-shaped structuring element. This is the 3D equivalent of a square. Every pixel along the perimeter has a chessboard distance no greater than radius (radius=floor(width/2)) pixels. Parameters ---------- width : int The width, height and depth of the cube. Other Parameters ---------------- dtype : data-type The data type of the structuring element. Returns ------- selem : ndarray A structuring element consisting only of ones, i.e. every pixel belongs to the neighborhood. """ return np.ones((width, width, width), dtype=dtype) def octahedron(radius, dtype=np.uint8): """Generates a octahedron-shaped structuring element. This is the 3D equivalent of a diamond. A pixel is part of the neighborhood (i.e. labeled 1) if the city block/manhattan distance between it and the center of the neighborhood is no greater than radius. Parameters ---------- radius : int The radius of the octahedron-shaped structuring element. Other Parameters ---------------- dtype : data-type The data type of the structuring element. 
Returns ------- selem : ndarray The structuring element where elements of the neighborhood are 1 and 0 otherwise. """ # note that in contrast to diamond(), this method allows non-integer radii n = 2 * radius + 1 Z, Y, X = np.mgrid[-radius:radius:n * 1j, -radius:radius:n * 1j, -radius:radius:n * 1j] s = np.abs(X) + np.abs(Y) + np.abs(Z) return np.array(s <= radius, dtype=dtype) def ball(radius, dtype=np.uint8): """Generates a ball-shaped structuring element. This is the 3D equivalent of a disk. A pixel is within the neighborhood if the euclidean distance between it and the origin is no greater than radius. Parameters ---------- radius : int The radius of the ball-shaped structuring element. Other Parameters ---------------- dtype : data-type The data type of the structuring element. Returns ------- selem : ndarray The structuring element where elements of the neighborhood are 1 and 0 otherwise. """ n = 2 * radius + 1 Z, Y, X = np.mgrid[-radius:radius:n * 1j, -radius:radius:n * 1j, -radius:radius:n * 1j] s = X ** 2 + Y ** 2 + Z ** 2 return np.array(s <= radius * radius, dtype=dtype) def octagon(m, n, dtype=np.uint8): """Generates an octagon shaped structuring element. For a given size of (m) horizontal and vertical sides and a given (n) height or width of slanted sides octagon is generated. The slanted sides are 45 or 135 degrees to the horizontal axis and hence the widths and heights are equal. Parameters ---------- m : int The size of the horizontal and vertical sides. n : int The height or width of the slanted sides. Other Parameters ---------------- dtype : data-type The data type of the structuring element. Returns ------- selem : ndarray The structuring element where elements of the neighborhood are 1 and 0 otherwise. """ from . import convex_hull_image selem = np.zeros((m + 2 * n, m + 2 * n)) selem[0, n] = 1 selem[n, 0] = 1 selem[0, m + n - 1] = 1 selem[m + n - 1, 0] = 1 selem[-1, n] = 1 selem[n, -1] = 1 selem[-1, m + n - 1] = 1 selem[m + n - 1, -1] = 1 selem = convex_hull_image(selem).astype(dtype) return selem def star(a, dtype=np.uint8): """Generates a star shaped structuring element. Start has 8 vertices and is an overlap of square of size `2*a + 1` with its 45 degree rotated version. The slanted sides are 45 or 135 degrees to the horizontal axis. Parameters ---------- a : int Parameter deciding the size of the star structural element. The side of the square array returned is `2*a + 1 + 2*floor(a / 2)`. Other Parameters ---------------- dtype : data-type The data type of the structuring element. Returns ------- selem : ndarray The structuring element where elements of the neighborhood are 1 and 0 otherwise. """ from . import convex_hull_image if a == 1: bfilter = np.zeros((3, 3), dtype) bfilter[:] = 1 return bfilter m = 2 * a + 1 n = a // 2 selem_square = np.zeros((m + 2 * n, m + 2 * n)) selem_square[n: m + n, n: m + n] = 1 c = (m + 2 * n - 1) // 2 selem_rotated = np.zeros((m + 2 * n, m + 2 * n)) selem_rotated[0, c] = selem_rotated[-1, c] = 1 selem_rotated[c, 0] = selem_rotated[c, -1] = 1 selem_rotated = convex_hull_image(selem_rotated).astype(int) selem = selem_square + selem_rotated selem[selem > 0] = 1 return selem.astype(dtype) def _default_selem(ndim): """Generates a cross-shaped structuring element (connectivity=1). This is the default structuring element (selem) if no selem was specified. Parameters ---------- ndim : int Number of dimensions of the image. Returns ------- selem : ndarray The structuring element where elements of the neighborhood are 1 and 0 otherwise. 
""" return ndimage.morphology.generate_binary_structure(ndim, 1)
diamond
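// A language-agnostic sketch of the Manhattan-distance rule diamond() encodes
// above: cell (i, j) belongs to the neighborhood iff
// |i - radius| + |j - radius| <= radius. Translated to Go for illustration.
package main

import "fmt"

func abs(x int) int {
	if x < 0 {
		return -x
	}
	return x
}

func diamond(radius int) [][]int {
	n := 2*radius + 1
	m := make([][]int, n)
	for i := range m {
		m[i] = make([]int, n)
		for j := range m[i] {
			if abs(i-radius)+abs(j-radius) <= radius {
				m[i][j] = 1
			}
		}
	}
	return m
}

func main() {
	for _, row := range diamond(2) {
		fmt.Println(row) // the middle row is all ones; corners are zero
	}
}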
timer_test.go
package s_timer_test import ( "github.com/heransoft/s-timer" "sync/atomic" "testing" "time" ) const precision = 100 func TestTimer_AfterFunc(t *testing.T) { i := int32(0) testCount := int32(2000) testTimer(testCount, func(timer *s_timer.Timer, index int32, finish chan int64) { defer func() { finish <- 0 }() time1 := time.Now().UnixNano() resultChan := make(chan int64, 1) timer.AfterFunc(time.Second, func() { time2 := time.Now().UnixNano() i++ resultChan <- time2 - time1 }) consumeTime := <-resultChan if consumeTime > (time.Second + time.Millisecond*precision).Nanoseconds() { t.Error("timeout", index, consumeTime) } }) if i != testCount { t.Error("no sync!") } } func TestTimer_AfterFuncWithAfterFuncFinishedCallback(t *testing.T) { i := int32(0) testCount := int32(2000) testTimer(testCount, func(timer *s_timer.Timer, index int32, finish chan int64) { defer func() { finish <- 0 }() time1 := time.Now().UnixNano() resultChan := make(chan int64, 1) timer.AfterFuncWithAfterFuncFinishedCallback(time.Second, func() { time2 := time.Now().UnixNano() i++ resultChan <- time2 - time1 }, func(id uint64) { }) consumeTime := <-resultChan if consumeTime > (time.Second + time.Millisecond*precision).Nanoseconds() { t.Error("timeout", index, consumeTime) } }) if i != testCount { t.Error("no sync!") } } func TestTimer_Stop(t *testing.T) { testTimer(2000, func(timer *s_timer.Timer, index int32, finish chan int64) { id := timer.AfterFunc(time.Second, func() { t.Error(index, "execute error") }) time.AfterFunc(time.Second-time.Millisecond*precision, func() { timer.Stop(id) }) time.AfterFunc(time.Second*3, func() { finish <- 0 }) }) } func TestTimer_StopWithStopFinishedCallback(t *testing.T) { testTimer(2000, func(timer *s_timer.Timer, index int32, finish chan int64) { id := timer.AfterFunc(time.Second, func() { t.Error(index, "execute error") }) time.AfterFunc(time.Second-time.Millisecond*precision, func() { timer.StopWithStopFinishedCallback(id, func(success bool) { if success == false { t.Error(index, "stop fail") } finish <- 0 }) }) }) } func
(t *testing.T) { testTimer(2000, func(timer *s_timer.Timer, index int32, finish chan int64) { i := uint64(0) id := timer.AfterFunc(time.Second, func() { if atomic.LoadUint64(&i) == 0 { t.Error(index, "execute error") } }) time.AfterFunc(time.Second-time.Millisecond*precision, func() { atomic.AddUint64(&i, 1) timer.Reset(id, time.Second) }) time.AfterFunc(time.Second*3, func() { finish <- 0 }) }) } func TestTimer_ResetWithResetFinishedCallback(t *testing.T) { executeErrorCount := uint64(0) testTimer(2000, func(timer *s_timer.Timer, index int32, finish chan int64) { i := uint64(0) id := timer.AfterFunc(time.Second, func() { if atomic.LoadUint64(&i) == 0 { t.Error(index, "execute error") atomic.AddUint64(&executeErrorCount, 1) } }) time.AfterFunc(time.Second-time.Millisecond*precision, func() { timer.ResetWithResetFinishedCallback(id, time.Second, func(success bool) { if success == false { t.Error(index, "reset fail") } finish <- 0 }) }) }) if executeErrorCount != 0 { t.Error("execute error count:", executeErrorCount) } } func testTimer(caseCount int32, testCase func(*s_timer.Timer, int32, chan int64)) { timer := s_timer.New() mainThreadExitChan := make(chan int64, 1) mainThreadExitedChan := make(chan int64, 1) go func() { r := int64(0) defer func() { mainThreadExitedChan <- r }() for { select { case result := <-mainThreadExitChan: r = result return case timerMainChanElement := <-timer.GetMainChan(): timer.Deal(timerMainChanElement) } } }() caseThreadExitedChan := make(chan int64, caseCount) for i := int32(0); i < caseCount; i++ { index := i go func() { testCase(timer, index, caseThreadExitedChan) }() } caseThreadExitedCount := int32(0) for { <-caseThreadExitedChan caseThreadExitedCount++ if caseCount == caseThreadExitedCount { mainThreadExitChan <- 0 <-mainThreadExitedChan break } } }
TestTimer_Reset
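// A stdlib-only sketch of the reset-before-fire behavior the tests above
// exercise, using time.Timer rather than s_timer; durations are illustrative,
// and the Stop/drain/Reset dance follows the time package's documentation.
package main

import (
	"fmt"
	"time"
)

func main() {
	start := time.Now()
	t := time.NewTimer(100 * time.Millisecond)

	// Shortly before the timer would fire, push the deadline back.
	time.Sleep(80 * time.Millisecond)
	if !t.Stop() {
		<-t.C // drain the channel if the timer fired while we slept
	}
	t.Reset(100 * time.Millisecond)

	<-t.C
	fmt.Println("fired after", time.Since(start)) // ~180ms, not ~100ms
}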
hash_test.go
package benchmarks

import (
	"fmt"
	"math/rand"
	"runtime"
	"runtime/debug"
	"testing"
)

/*
BenchmarkAccessStructure compares access times across data structures and numbers of items.
*/
func BenchmarkAccessStructure(b *testing.B)
func benchmarkAccessStructure(b *testing.B, size int) { var indexes = make([]int, size, size) var arr = make([]int, size, size) var hash = make(map[int]int) rand.Seed(int64(size % 42)) for i := 0; i < size; i++ { indexes[i] = rand.Intn(size) arr[i] = i hash[i] = i } b.ResetTimer() b.Run(fmt.Sprintf("Array_%d", size), func(b *testing.B) { for i := 0; i < b.N; i++ { indx := indexes[i%size] % size _ = arr[indx] } }) b.Run(fmt.Sprintf("Hash_%d", size), func(b *testing.B) { for i := 0; i < b.N; i++ { indx := indexes[i%size] % size _ = hash[indx] } }) }
{ debug.SetGCPercent(-1) for _, size := range []int{1, 10, 100, 1000, 10000, 100000, 1000000} { benchmarkAccessStructure(b, size) runtime.GC() } }
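To reproduce the comparison, the benchmarks above can be run with `go test -bench=BenchmarkAccessStructure -benchmem`. Disabling the collector via `debug.SetGCPercent(-1)` keeps GC pauses out of the per-access timings, and the explicit `runtime.GC()` between sizes releases the previous iteration's slices and maps before the next ones are allocated.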
sobre.js
((window["webpackJsonp"] = window["webpackJsonp"] || []).push([["static/development/pages/sobre.js"],{ /***/ "./node_modules/@babel/runtime-corejs2/core-js/array/is-array.js": /*!***********************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/core-js/array/is-array.js ***! \***********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__(/*! core-js/library/fn/array/is-array */ "./node_modules/core-js/library/fn/array/is-array.js"); /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/core-js/get-iterator.js": /*!*********************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/core-js/get-iterator.js ***! \*********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__(/*! core-js/library/fn/get-iterator */ "./node_modules/core-js/library/fn/get-iterator.js"); /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/core-js/json/stringify.js": /*!***********************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/core-js/json/stringify.js ***! \***********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__(/*! core-js/library/fn/json/stringify */ "./node_modules/core-js/library/fn/json/stringify.js"); /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/core-js/object/assign.js": /*!**********************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/core-js/object/assign.js ***! \**********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__(/*! core-js/library/fn/object/assign */ "./node_modules/core-js/library/fn/object/assign.js"); /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/core-js/object/create.js": /*!**********************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/core-js/object/create.js ***! \**********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__(/*! core-js/library/fn/object/create */ "./node_modules/core-js/library/fn/object/create.js"); /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/core-js/object/define-property.js": /*!*******************************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/core-js/object/define-property.js ***! \*******************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__(/*! 
core-js/library/fn/object/define-property */ "./node_modules/core-js/library/fn/object/define-property.js"); /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/core-js/object/get-prototype-of.js": /*!********************************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/core-js/object/get-prototype-of.js ***! \********************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__(/*! core-js/library/fn/object/get-prototype-of */ "./node_modules/core-js/library/fn/object/get-prototype-of.js"); /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/core-js/object/keys.js": /*!********************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/core-js/object/keys.js ***! \********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__(/*! core-js/library/fn/object/keys */ "./node_modules/core-js/library/fn/object/keys.js"); /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/core-js/object/set-prototype-of.js": /*!********************************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/core-js/object/set-prototype-of.js ***! \********************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__(/*! core-js/library/fn/object/set-prototype-of */ "./node_modules/core-js/library/fn/object/set-prototype-of.js"); /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/core-js/promise.js": /*!****************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/core-js/promise.js ***! \****************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__(/*! core-js/library/fn/promise */ "./node_modules/core-js/library/fn/promise.js"); /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/core-js/reflect/construct.js": /*!**************************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/core-js/reflect/construct.js ***! \**************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__(/*! core-js/library/fn/reflect/construct */ "./node_modules/core-js/library/fn/reflect/construct.js"); /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/core-js/set.js": /*!************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/core-js/set.js ***! \************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__(/*! core-js/library/fn/set */ "./node_modules/core-js/library/fn/set.js"); /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/core-js/symbol.js": /*!***************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/core-js/symbol.js ***! 
\***************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__(/*! core-js/library/fn/symbol */ "./node_modules/core-js/library/fn/symbol/index.js"); /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/core-js/symbol/iterator.js": /*!************************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/core-js/symbol/iterator.js ***! \************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__(/*! core-js/library/fn/symbol/iterator */ "./node_modules/core-js/library/fn/symbol/iterator.js"); /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/helpers/assertThisInitialized.js": /*!******************************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/helpers/assertThisInitialized.js ***! \******************************************************************************/ /*! no static exports found */ /***/ (function(module, exports) { function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } module.exports = _assertThisInitialized; /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/helpers/classCallCheck.js": /*!***********************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/helpers/classCallCheck.js ***! \***********************************************************************/ /*! no static exports found */ /***/ (function(module, exports) { function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } module.exports = _classCallCheck; /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/helpers/construct.js": /*!******************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/helpers/construct.js ***! \******************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var _Reflect$construct = __webpack_require__(/*! ../core-js/reflect/construct */ "./node_modules/@babel/runtime-corejs2/core-js/reflect/construct.js"); var setPrototypeOf = __webpack_require__(/*! 
./setPrototypeOf */ "./node_modules/@babel/runtime-corejs2/helpers/setPrototypeOf.js"); function isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !_Reflect$construct) return false; if (_Reflect$construct.sham) return false; if (typeof Proxy === "function") return true; try { Date.prototype.toString.call(_Reflect$construct(Date, [], function () {})); return true; } catch (e) { return false; } } function _construct(Parent, args, Class) { if (isNativeReflectConstruct()) { module.exports = _construct = _Reflect$construct; } else { module.exports = _construct = function _construct(Parent, args, Class) { var a = [null]; a.push.apply(a, args); var Constructor = Function.bind.apply(Parent, a); var instance = new Constructor(); if (Class) setPrototypeOf(instance, Class.prototype); return instance; }; } return _construct.apply(null, arguments); } module.exports = _construct; /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/helpers/createClass.js": /*!********************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/helpers/createClass.js ***! \********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var _Object$defineProperty = __webpack_require__(/*! ../core-js/object/define-property */ "./node_modules/@babel/runtime-corejs2/core-js/object/define-property.js"); function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; _Object$defineProperty(target, descriptor.key, descriptor); } } function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } module.exports = _createClass; /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/helpers/esm/arrayWithHoles.js": /*!***************************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/helpers/esm/arrayWithHoles.js ***! \***************************************************************************/ /*! exports provided: default */ /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; __webpack_require__.r(__webpack_exports__); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return _arrayWithHoles; }); /* harmony import */ var _core_js_array_is_array__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../../core-js/array/is-array */ "./node_modules/@babel/runtime-corejs2/core-js/array/is-array.js"); /* harmony import */ var _core_js_array_is_array__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_core_js_array_is_array__WEBPACK_IMPORTED_MODULE_0__);
function _arrayWithHoles(arr) { if (_core_js_array_is_array__WEBPACK_IMPORTED_MODULE_0___default()(arr)) return arr; }
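// _arrayWithHoles is the first step of the _slicedToArray destructuring chain defined below:
// it returns the input unchanged when it is already an array, so a destructuring like
// `var [a, b] = pair` (compiled to `_slicedToArray(pair, 2)`) can skip the iterator path.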
/***/ }), /***/ "./node_modules/@babel/runtime-corejs2/helpers/esm/asyncToGenerator.js": /*!*****************************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/helpers/esm/asyncToGenerator.js ***! \*****************************************************************************/ /*! exports provided: default */ /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; __webpack_require__.r(__webpack_exports__); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return _asyncToGenerator; }); /* harmony import */ var _core_js_promise__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../../core-js/promise */ "./node_modules/@babel/runtime-corejs2/core-js/promise.js"); /* harmony import */ var _core_js_promise__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_core_js_promise__WEBPACK_IMPORTED_MODULE_0__); function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { _core_js_promise__WEBPACK_IMPORTED_MODULE_0___default.a.resolve(value).then(_next, _throw); } } function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new _core_js_promise__WEBPACK_IMPORTED_MODULE_0___default.a(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/helpers/esm/classCallCheck.js": /*!***************************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/helpers/esm/classCallCheck.js ***! \***************************************************************************/ /*! exports provided: default */ /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; __webpack_require__.r(__webpack_exports__); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return _classCallCheck; }); function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/helpers/esm/createClass.js": /*!************************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/helpers/esm/createClass.js ***! \************************************************************************/ /*! exports provided: default */ /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; __webpack_require__.r(__webpack_exports__); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return _createClass; }); /* harmony import */ var _core_js_object_define_property__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! 
../../core-js/object/define-property */ "./node_modules/@babel/runtime-corejs2/core-js/object/define-property.js"); /* harmony import */ var _core_js_object_define_property__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_core_js_object_define_property__WEBPACK_IMPORTED_MODULE_0__); function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; _core_js_object_define_property__WEBPACK_IMPORTED_MODULE_0___default()(target, descriptor.key, descriptor); } } function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/helpers/esm/iterableToArrayLimit.js": /*!*********************************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/helpers/esm/iterableToArrayLimit.js ***! \*********************************************************************************/ /*! exports provided: default */ /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; __webpack_require__.r(__webpack_exports__); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return _iterableToArrayLimit; }); /* harmony import */ var _core_js_get_iterator__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../../core-js/get-iterator */ "./node_modules/@babel/runtime-corejs2/core-js/get-iterator.js"); /* harmony import */ var _core_js_get_iterator__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_core_js_get_iterator__WEBPACK_IMPORTED_MODULE_0__); function _iterableToArrayLimit(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = _core_js_get_iterator__WEBPACK_IMPORTED_MODULE_0___default()(arr), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/helpers/esm/nonIterableRest.js": /*!****************************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/helpers/esm/nonIterableRest.js ***! \****************************************************************************/ /*! exports provided: default */ /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; __webpack_require__.r(__webpack_exports__); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return _nonIterableRest; }); function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/helpers/esm/slicedToArray.js": /*!**************************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/helpers/esm/slicedToArray.js ***! \**************************************************************************/ /*! 
exports provided: default */ /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; __webpack_require__.r(__webpack_exports__); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return _slicedToArray; }); /* harmony import */ var _arrayWithHoles__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./arrayWithHoles */ "./node_modules/@babel/runtime-corejs2/helpers/esm/arrayWithHoles.js"); /* harmony import */ var _iterableToArrayLimit__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./iterableToArrayLimit */ "./node_modules/@babel/runtime-corejs2/helpers/esm/iterableToArrayLimit.js"); /* harmony import */ var _nonIterableRest__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./nonIterableRest */ "./node_modules/@babel/runtime-corejs2/helpers/esm/nonIterableRest.js"); function _slicedToArray(arr, i) { return Object(_arrayWithHoles__WEBPACK_IMPORTED_MODULE_0__["default"])(arr) || Object(_iterableToArrayLimit__WEBPACK_IMPORTED_MODULE_1__["default"])(arr, i) || Object(_nonIterableRest__WEBPACK_IMPORTED_MODULE_2__["default"])(); } /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/helpers/esm/typeof.js": /*!*******************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/helpers/esm/typeof.js ***! \*******************************************************************/ /*! exports provided: default */ /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; __webpack_require__.r(__webpack_exports__); /* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "default", function() { return _typeof; }); /* harmony import */ var _core_js_symbol_iterator__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../../core-js/symbol/iterator */ "./node_modules/@babel/runtime-corejs2/core-js/symbol/iterator.js"); /* harmony import */ var _core_js_symbol_iterator__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_core_js_symbol_iterator__WEBPACK_IMPORTED_MODULE_0__); /* harmony import */ var _core_js_symbol__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ../../core-js/symbol */ "./node_modules/@babel/runtime-corejs2/core-js/symbol.js"); /* harmony import */ var _core_js_symbol__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(_core_js_symbol__WEBPACK_IMPORTED_MODULE_1__); function _typeof2(obj) { if (typeof _core_js_symbol__WEBPACK_IMPORTED_MODULE_1___default.a === "function" && typeof _core_js_symbol_iterator__WEBPACK_IMPORTED_MODULE_0___default.a === "symbol") { _typeof2 = function _typeof2(obj) { return typeof obj; }; } else { _typeof2 = function _typeof2(obj) { return obj && typeof _core_js_symbol__WEBPACK_IMPORTED_MODULE_1___default.a === "function" && obj.constructor === _core_js_symbol__WEBPACK_IMPORTED_MODULE_1___default.a && obj !== _core_js_symbol__WEBPACK_IMPORTED_MODULE_1___default.a.prototype ? 
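// values produced by the core-js Symbol polyfill are plain objects, so this fallback
// detects them by constructor and reports "symbol" instead of "object"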
"symbol" : typeof obj; }; } return _typeof2(obj); } function _typeof(obj) { if (typeof _core_js_symbol__WEBPACK_IMPORTED_MODULE_1___default.a === "function" && _typeof2(_core_js_symbol_iterator__WEBPACK_IMPORTED_MODULE_0___default.a) === "symbol") { _typeof = function _typeof(obj) { return _typeof2(obj); }; } else { _typeof = function _typeof(obj) { return obj && typeof _core_js_symbol__WEBPACK_IMPORTED_MODULE_1___default.a === "function" && obj.constructor === _core_js_symbol__WEBPACK_IMPORTED_MODULE_1___default.a && obj !== _core_js_symbol__WEBPACK_IMPORTED_MODULE_1___default.a.prototype ? "symbol" : _typeof2(obj); }; } return _typeof(obj); } /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/helpers/getPrototypeOf.js": /*!***********************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/helpers/getPrototypeOf.js ***! \***********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var _Object$getPrototypeOf = __webpack_require__(/*! ../core-js/object/get-prototype-of */ "./node_modules/@babel/runtime-corejs2/core-js/object/get-prototype-of.js"); var _Object$setPrototypeOf = __webpack_require__(/*! ../core-js/object/set-prototype-of */ "./node_modules/@babel/runtime-corejs2/core-js/object/set-prototype-of.js"); function _getPrototypeOf(o) { module.exports = _getPrototypeOf = _Object$setPrototypeOf ? _Object$getPrototypeOf : function _getPrototypeOf(o) { return o.__proto__ || _Object$getPrototypeOf(o); }; return _getPrototypeOf(o); } module.exports = _getPrototypeOf; /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/helpers/inherits.js": /*!*****************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/helpers/inherits.js ***! \*****************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var _Object$create = __webpack_require__(/*! ../core-js/object/create */ "./node_modules/@babel/runtime-corejs2/core-js/object/create.js"); var setPrototypeOf = __webpack_require__(/*! ./setPrototypeOf */ "./node_modules/@babel/runtime-corejs2/helpers/setPrototypeOf.js"); function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function"); } subClass.prototype = _Object$create(superClass && superClass.prototype, { constructor: { value: subClass, writable: true, configurable: true } }); if (superClass) setPrototypeOf(subClass, superClass); } module.exports = _inherits; /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/helpers/interopRequireDefault.js": /*!******************************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/helpers/interopRequireDefault.js ***! \******************************************************************************/ /*! no static exports found */ /***/ (function(module, exports) { function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } module.exports = _interopRequireDefault; /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/helpers/possibleConstructorReturn.js": /*!**********************************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/helpers/possibleConstructorReturn.js ***! 
\**********************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var _typeof = __webpack_require__(/*! ../helpers/typeof */ "./node_modules/@babel/runtime-corejs2/helpers/typeof.js"); var assertThisInitialized = __webpack_require__(/*! ./assertThisInitialized */ "./node_modules/@babel/runtime-corejs2/helpers/assertThisInitialized.js"); function _possibleConstructorReturn(self, call) { if (call && (_typeof(call) === "object" || typeof call === "function")) { return call; } return assertThisInitialized(self); } module.exports = _possibleConstructorReturn; /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/helpers/setPrototypeOf.js": /*!***********************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/helpers/setPrototypeOf.js ***! \***********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var _Object$setPrototypeOf = __webpack_require__(/*! ../core-js/object/set-prototype-of */ "./node_modules/@babel/runtime-corejs2/core-js/object/set-prototype-of.js"); function _setPrototypeOf(o, p) { module.exports = _setPrototypeOf = _Object$setPrototypeOf || function _setPrototypeOf(o, p) { o.__proto__ = p; return o; }; return _setPrototypeOf(o, p); } module.exports = _setPrototypeOf; /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/helpers/typeof.js": /*!***************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/helpers/typeof.js ***! \***************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var _Symbol$iterator = __webpack_require__(/*! ../core-js/symbol/iterator */ "./node_modules/@babel/runtime-corejs2/core-js/symbol/iterator.js"); var _Symbol = __webpack_require__(/*! ../core-js/symbol */ "./node_modules/@babel/runtime-corejs2/core-js/symbol.js"); function _typeof2(obj) { if (typeof _Symbol === "function" && typeof _Symbol$iterator === "symbol") { _typeof2 = function _typeof2(obj) { return typeof obj; }; } else { _typeof2 = function _typeof2(obj) { return obj && typeof _Symbol === "function" && obj.constructor === _Symbol && obj !== _Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof2(obj); } function _typeof(obj) { if (typeof _Symbol === "function" && _typeof2(_Symbol$iterator) === "symbol") { module.exports = _typeof = function _typeof(obj) { return _typeof2(obj); }; } else { module.exports = _typeof = function _typeof(obj) { return obj && typeof _Symbol === "function" && obj.constructor === _Symbol && obj !== _Symbol.prototype ? "symbol" : _typeof2(obj); }; } return _typeof(obj); } module.exports = _typeof; /***/ }), /***/ "./node_modules/@babel/runtime-corejs2/regenerator/index.js": /*!******************************************************************!*\ !*** ./node_modules/@babel/runtime-corejs2/regenerator/index.js ***! \******************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__(/*! 
regenerator-runtime */ "./node_modules/regenerator-runtime/runtime-module.js"); /***/ }), /***/ "./node_modules/core-js/library/fn/array/is-array.js": /*!***********************************************************!*\ !*** ./node_modules/core-js/library/fn/array/is-array.js ***! \***********************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { __webpack_require__(/*! ../../modules/es6.array.is-array */ "./node_modules/core-js/library/modules/es6.array.is-array.js"); module.exports = __webpack_require__(/*! ../../modules/_core */ "./node_modules/core-js/library/modules/_core.js").Array.isArray; /***/ }), /***/ "./node_modules/core-js/library/fn/get-iterator.js": /*!*********************************************************!*\ !*** ./node_modules/core-js/library/fn/get-iterator.js ***! \*********************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { __webpack_require__(/*! ../modules/web.dom.iterable */ "./node_modules/core-js/library/modules/web.dom.iterable.js"); __webpack_require__(/*! ../modules/es6.string.iterator */ "./node_modules/core-js/library/modules/es6.string.iterator.js"); module.exports = __webpack_require__(/*! ../modules/core.get-iterator */ "./node_modules/core-js/library/modules/core.get-iterator.js"); /***/ }), /***/ "./node_modules/core-js/library/fn/json/stringify.js": /*!***********************************************************!*\ !*** ./node_modules/core-js/library/fn/json/stringify.js ***! \***********************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var core = __webpack_require__(/*! ../../modules/_core */ "./node_modules/core-js/library/modules/_core.js"); var $JSON = core.JSON || (core.JSON = { stringify: JSON.stringify }); module.exports = function stringify(it) { // eslint-disable-line no-unused-vars return $JSON.stringify.apply($JSON, arguments); }; /***/ }), /***/ "./node_modules/core-js/library/fn/object/assign.js": /*!**********************************************************!*\ !*** ./node_modules/core-js/library/fn/object/assign.js ***! \**********************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { __webpack_require__(/*! ../../modules/es6.object.assign */ "./node_modules/core-js/library/modules/es6.object.assign.js"); module.exports = __webpack_require__(/*! ../../modules/_core */ "./node_modules/core-js/library/modules/_core.js").Object.assign; /***/ }), /***/ "./node_modules/core-js/library/fn/object/create.js": /*!**********************************************************!*\ !*** ./node_modules/core-js/library/fn/object/create.js ***! \**********************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { __webpack_require__(/*! ../../modules/es6.object.create */ "./node_modules/core-js/library/modules/es6.object.create.js"); var $Object = __webpack_require__(/*! ../../modules/_core */ "./node_modules/core-js/library/modules/_core.js").Object; module.exports = function create(P, D) { return $Object.create(P, D); }; /***/ }), /***/ "./node_modules/core-js/library/fn/object/define-property.js": /*!*******************************************************************!*\ !*** ./node_modules/core-js/library/fn/object/define-property.js ***! 
\*******************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { __webpack_require__(/*! ../../modules/es6.object.define-property */ "./node_modules/core-js/library/modules/es6.object.define-property.js"); var $Object = __webpack_require__(/*! ../../modules/_core */ "./node_modules/core-js/library/modules/_core.js").Object; module.exports = function defineProperty(it, key, desc) { return $Object.defineProperty(it, key, desc); }; /***/ }), /***/ "./node_modules/core-js/library/fn/object/get-prototype-of.js": /*!********************************************************************!*\ !*** ./node_modules/core-js/library/fn/object/get-prototype-of.js ***! \********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { __webpack_require__(/*! ../../modules/es6.object.get-prototype-of */ "./node_modules/core-js/library/modules/es6.object.get-prototype-of.js"); module.exports = __webpack_require__(/*! ../../modules/_core */ "./node_modules/core-js/library/modules/_core.js").Object.getPrototypeOf; /***/ }), /***/ "./node_modules/core-js/library/fn/object/keys.js": /*!********************************************************!*\ !*** ./node_modules/core-js/library/fn/object/keys.js ***! \********************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { __webpack_require__(/*! ../../modules/es6.object.keys */ "./node_modules/core-js/library/modules/es6.object.keys.js"); module.exports = __webpack_require__(/*! ../../modules/_core */ "./node_modules/core-js/library/modules/_core.js").Object.keys; /***/ }), /***/ "./node_modules/core-js/library/fn/object/set-prototype-of.js": /*!********************************************************************!*\ !*** ./node_modules/core-js/library/fn/object/set-prototype-of.js ***! \********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { __webpack_require__(/*! ../../modules/es6.object.set-prototype-of */ "./node_modules/core-js/library/modules/es6.object.set-prototype-of.js"); module.exports = __webpack_require__(/*! ../../modules/_core */ "./node_modules/core-js/library/modules/_core.js").Object.setPrototypeOf; /***/ }), /***/ "./node_modules/core-js/library/fn/promise.js": /*!****************************************************!*\ !*** ./node_modules/core-js/library/fn/promise.js ***! \****************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { __webpack_require__(/*! ../modules/es6.object.to-string */ "./node_modules/core-js/library/modules/es6.object.to-string.js"); __webpack_require__(/*! ../modules/es6.string.iterator */ "./node_modules/core-js/library/modules/es6.string.iterator.js"); __webpack_require__(/*! ../modules/web.dom.iterable */ "./node_modules/core-js/library/modules/web.dom.iterable.js"); __webpack_require__(/*! ../modules/es6.promise */ "./node_modules/core-js/library/modules/es6.promise.js"); __webpack_require__(/*! ../modules/es7.promise.finally */ "./node_modules/core-js/library/modules/es7.promise.finally.js"); __webpack_require__(/*! ../modules/es7.promise.try */ "./node_modules/core-js/library/modules/es7.promise.try.js"); module.exports = __webpack_require__(/*! 
../modules/_core */ "./node_modules/core-js/library/modules/_core.js").Promise; /***/ }), /***/ "./node_modules/core-js/library/fn/reflect/construct.js": /*!**************************************************************!*\ !*** ./node_modules/core-js/library/fn/reflect/construct.js ***! \**************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { __webpack_require__(/*! ../../modules/es6.reflect.construct */ "./node_modules/core-js/library/modules/es6.reflect.construct.js"); module.exports = __webpack_require__(/*! ../../modules/_core */ "./node_modules/core-js/library/modules/_core.js").Reflect.construct; /***/ }), /***/ "./node_modules/core-js/library/fn/set.js": /*!************************************************!*\ !*** ./node_modules/core-js/library/fn/set.js ***! \************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { __webpack_require__(/*! ../modules/es6.object.to-string */ "./node_modules/core-js/library/modules/es6.object.to-string.js"); __webpack_require__(/*! ../modules/es6.string.iterator */ "./node_modules/core-js/library/modules/es6.string.iterator.js"); __webpack_require__(/*! ../modules/web.dom.iterable */ "./node_modules/core-js/library/modules/web.dom.iterable.js"); __webpack_require__(/*! ../modules/es6.set */ "./node_modules/core-js/library/modules/es6.set.js"); __webpack_require__(/*! ../modules/es7.set.to-json */ "./node_modules/core-js/library/modules/es7.set.to-json.js"); __webpack_require__(/*! ../modules/es7.set.of */ "./node_modules/core-js/library/modules/es7.set.of.js"); __webpack_require__(/*! ../modules/es7.set.from */ "./node_modules/core-js/library/modules/es7.set.from.js"); module.exports = __webpack_require__(/*! ../modules/_core */ "./node_modules/core-js/library/modules/_core.js").Set; /***/ }), /***/ "./node_modules/core-js/library/fn/symbol/index.js": /*!*********************************************************!*\ !*** ./node_modules/core-js/library/fn/symbol/index.js ***! \*********************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { __webpack_require__(/*! ../../modules/es6.symbol */ "./node_modules/core-js/library/modules/es6.symbol.js"); __webpack_require__(/*! ../../modules/es6.object.to-string */ "./node_modules/core-js/library/modules/es6.object.to-string.js"); __webpack_require__(/*! ../../modules/es7.symbol.async-iterator */ "./node_modules/core-js/library/modules/es7.symbol.async-iterator.js"); __webpack_require__(/*! ../../modules/es7.symbol.observable */ "./node_modules/core-js/library/modules/es7.symbol.observable.js"); module.exports = __webpack_require__(/*! ../../modules/_core */ "./node_modules/core-js/library/modules/_core.js").Symbol; /***/ }), /***/ "./node_modules/core-js/library/fn/symbol/iterator.js": /*!************************************************************!*\ !*** ./node_modules/core-js/library/fn/symbol/iterator.js ***! \************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { __webpack_require__(/*! ../../modules/es6.string.iterator */ "./node_modules/core-js/library/modules/es6.string.iterator.js"); __webpack_require__(/*! ../../modules/web.dom.iterable */ "./node_modules/core-js/library/modules/web.dom.iterable.js"); module.exports = __webpack_require__(/*! 
../../modules/_wks-ext */ "./node_modules/core-js/library/modules/_wks-ext.js").f('iterator'); /***/ }), /***/ "./node_modules/core-js/library/modules/_a-function.js": /*!*************************************************************!*\ !*** ./node_modules/core-js/library/modules/_a-function.js ***! \*************************************************************/ /*! no static exports found */ /***/ (function(module, exports) { module.exports = function (it) { if (typeof it != 'function') throw TypeError(it + ' is not a function!'); return it; }; /***/ }), /***/ "./node_modules/core-js/library/modules/_add-to-unscopables.js": /*!*********************************************************************!*\ !*** ./node_modules/core-js/library/modules/_add-to-unscopables.js ***! \*********************************************************************/ /*! no static exports found */ /***/ (function(module, exports) { module.exports = function () { /* empty */ }; /***/ }), /***/ "./node_modules/core-js/library/modules/_an-instance.js": /*!**************************************************************!*\ !*** ./node_modules/core-js/library/modules/_an-instance.js ***! \**************************************************************/ /*! no static exports found */ /***/ (function(module, exports) { module.exports = function (it, Constructor, name, forbiddenField) { if (!(it instanceof Constructor) || (forbiddenField !== undefined && forbiddenField in it)) { throw TypeError(name + ': incorrect invocation!'); } return it; }; /***/ }), /***/ "./node_modules/core-js/library/modules/_an-object.js": /*!************************************************************!*\ !*** ./node_modules/core-js/library/modules/_an-object.js ***! \************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var isObject = __webpack_require__(/*! ./_is-object */ "./node_modules/core-js/library/modules/_is-object.js"); module.exports = function (it) { if (!isObject(it)) throw TypeError(it + ' is not an object!'); return it; }; /***/ }), /***/ "./node_modules/core-js/library/modules/_array-from-iterable.js": /*!**********************************************************************!*\ !*** ./node_modules/core-js/library/modules/_array-from-iterable.js ***! \**********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var forOf = __webpack_require__(/*! ./_for-of */ "./node_modules/core-js/library/modules/_for-of.js"); module.exports = function (iter, ITERATOR) { var result = []; forOf(iter, false, result.push, result, ITERATOR); return result; }; /***/ }), /***/ "./node_modules/core-js/library/modules/_array-includes.js": /*!*****************************************************************!*\ !*** ./node_modules/core-js/library/modules/_array-includes.js ***! \*****************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { // false -> Array#indexOf // true -> Array#includes var toIObject = __webpack_require__(/*! ./_to-iobject */ "./node_modules/core-js/library/modules/_to-iobject.js"); var toLength = __webpack_require__(/*! ./_to-length */ "./node_modules/core-js/library/modules/_to-length.js"); var toAbsoluteIndex = __webpack_require__(/*! 
./_to-absolute-index */ "./node_modules/core-js/library/modules/_to-absolute-index.js"); module.exports = function (IS_INCLUDES) { return function ($this, el, fromIndex) { var O = toIObject($this); var length = toLength(O.length); var index = toAbsoluteIndex(fromIndex, length); var value; // Array#includes uses SameValueZero equality algorithm // eslint-disable-next-line no-self-compare if (IS_INCLUDES && el != el) while (length > index) { value = O[index++]; // eslint-disable-next-line no-self-compare if (value != value) return true; // Array#indexOf ignores holes, Array#includes - not } else for (;length > index; index++) if (IS_INCLUDES || index in O) { if (O[index] === el) return IS_INCLUDES || index || 0; } return !IS_INCLUDES && -1; }; }; /***/ }), /***/ "./node_modules/core-js/library/modules/_array-methods.js": /*!****************************************************************!*\ !*** ./node_modules/core-js/library/modules/_array-methods.js ***! \****************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { // 0 -> Array#forEach // 1 -> Array#map // 2 -> Array#filter // 3 -> Array#some // 4 -> Array#every // 5 -> Array#find // 6 -> Array#findIndex var ctx = __webpack_require__(/*! ./_ctx */ "./node_modules/core-js/library/modules/_ctx.js"); var IObject = __webpack_require__(/*! ./_iobject */ "./node_modules/core-js/library/modules/_iobject.js"); var toObject = __webpack_require__(/*! ./_to-object */ "./node_modules/core-js/library/modules/_to-object.js"); var toLength = __webpack_require__(/*! ./_to-length */ "./node_modules/core-js/library/modules/_to-length.js"); var asc = __webpack_require__(/*! ./_array-species-create */ "./node_modules/core-js/library/modules/_array-species-create.js"); module.exports = function (TYPE, $create) { var IS_MAP = TYPE == 1; var IS_FILTER = TYPE == 2; var IS_SOME = TYPE == 3; var IS_EVERY = TYPE == 4; var IS_FIND_INDEX = TYPE == 6; var NO_HOLES = TYPE == 5 || IS_FIND_INDEX; var create = $create || asc; return function ($this, callbackfn, that) { var O = toObject($this); var self = IObject(O); var f = ctx(callbackfn, that, 3); var length = toLength(self.length); var index = 0; var result = IS_MAP ? create($this, length) : IS_FILTER ? create($this, 0) : undefined; var val, res; for (;length > index; index++) if (NO_HOLES || index in self) { val = self[index]; res = f(val, index, O); if (TYPE) { if (IS_MAP) result[index] = res; // map else if (res) switch (TYPE) { case 3: return true; // some case 5: return val; // find case 6: return index; // findIndex case 2: result.push(val); // filter } else if (IS_EVERY) return false; // every } } return IS_FIND_INDEX ? -1 : IS_SOME || IS_EVERY ? IS_EVERY : result; }; }; /***/ }), /***/ "./node_modules/core-js/library/modules/_array-species-constructor.js": /*!****************************************************************************!*\ !*** ./node_modules/core-js/library/modules/_array-species-constructor.js ***! \****************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var isObject = __webpack_require__(/*! ./_is-object */ "./node_modules/core-js/library/modules/_is-object.js"); var isArray = __webpack_require__(/*! ./_is-array */ "./node_modules/core-js/library/modules/_is-array.js"); var SPECIES = __webpack_require__(/*! 
./_wks */ "./node_modules/core-js/library/modules/_wks.js")('species'); module.exports = function (original) { var C; if (isArray(original)) { C = original.constructor; // cross-realm fallback if (typeof C == 'function' && (C === Array || isArray(C.prototype))) C = undefined; if (isObject(C)) { C = C[SPECIES]; if (C === null) C = undefined; } } return C === undefined ? Array : C; }; /***/ }), /***/ "./node_modules/core-js/library/modules/_array-species-create.js": /*!***********************************************************************!*\ !*** ./node_modules/core-js/library/modules/_array-species-create.js ***! \***********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { // 9.4.2.3 ArraySpeciesCreate(originalArray, length) var speciesConstructor = __webpack_require__(/*! ./_array-species-constructor */ "./node_modules/core-js/library/modules/_array-species-constructor.js"); module.exports = function (original, length) { return new (speciesConstructor(original))(length); }; /***/ }), /***/ "./node_modules/core-js/library/modules/_bind.js": /*!*******************************************************!*\ !*** ./node_modules/core-js/library/modules/_bind.js ***! \*******************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var aFunction = __webpack_require__(/*! ./_a-function */ "./node_modules/core-js/library/modules/_a-function.js"); var isObject = __webpack_require__(/*! ./_is-object */ "./node_modules/core-js/library/modules/_is-object.js"); var invoke = __webpack_require__(/*! ./_invoke */ "./node_modules/core-js/library/modules/_invoke.js"); var arraySlice = [].slice; var factories = {}; var construct = function (F, len, args) { if (!(len in factories)) { for (var n = [], i = 0; i < len; i++) n[i] = 'a[' + i + ']'; // eslint-disable-next-line no-new-func factories[len] = Function('F,a', 'return new F(' + n.join(',') + ')'); } return factories[len](F, args); }; module.exports = Function.bind || function bind(that /* , ...args */) { var fn = aFunction(this); var partArgs = arraySlice.call(arguments, 1); var bound = function (/* args... */) { var args = partArgs.concat(arraySlice.call(arguments)); return this instanceof bound ? construct(fn, args.length, args) : invoke(fn, args, that); }; if (isObject(fn.prototype)) bound.prototype = fn.prototype; return bound; }; /***/ }), /***/ "./node_modules/core-js/library/modules/_classof.js": /*!**********************************************************!*\ !*** ./node_modules/core-js/library/modules/_classof.js ***! \**********************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { // getting tag from 19.1.3.6 Object.prototype.toString() var cof = __webpack_require__(/*! ./_cof */ "./node_modules/core-js/library/modules/_cof.js"); var TAG = __webpack_require__(/*! ./_wks */ "./node_modules/core-js/library/modules/_wks.js")('toStringTag'); // ES3 wrong here var ARG = cof(function () { return arguments; }()) == 'Arguments'; // fallback for IE11 Script Access Denied error var tryGet = function (it, key) { try { return it[key]; } catch (e) { /* empty */ } }; module.exports = function (it) { var O, T, B; return it === undefined ? 'Undefined' : it === null ? 'Null' // @@toStringTag case : typeof (T = tryGet(O = Object(it), TAG)) == 'string' ? T // builtinTag case : ARG ? 
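// engines whose internal [[Class]] already distinguishes Arguments can rely on cof directly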
cof(O) // ES3 arguments fallback : (B = cof(O)) == 'Object' && typeof O.callee == 'function' ? 'Arguments' : B; }; /***/ }), /***/ "./node_modules/core-js/library/modules/_cof.js": /*!******************************************************!*\ !*** ./node_modules/core-js/library/modules/_cof.js ***! \******************************************************/ /*! no static exports found */ /***/ (function(module, exports) { var toString = {}.toString; module.exports = function (it) { return toString.call(it).slice(8, -1); }; /***/ }), /***/ "./node_modules/core-js/library/modules/_collection-strong.js": /*!********************************************************************!*\ !*** ./node_modules/core-js/library/modules/_collection-strong.js ***! \********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var dP = __webpack_require__(/*! ./_object-dp */ "./node_modules/core-js/library/modules/_object-dp.js").f; var create = __webpack_require__(/*! ./_object-create */ "./node_modules/core-js/library/modules/_object-create.js"); var redefineAll = __webpack_require__(/*! ./_redefine-all */ "./node_modules/core-js/library/modules/_redefine-all.js"); var ctx = __webpack_require__(/*! ./_ctx */ "./node_modules/core-js/library/modules/_ctx.js"); var anInstance = __webpack_require__(/*! ./_an-instance */ "./node_modules/core-js/library/modules/_an-instance.js"); var forOf = __webpack_require__(/*! ./_for-of */ "./node_modules/core-js/library/modules/_for-of.js"); var $iterDefine = __webpack_require__(/*! ./_iter-define */ "./node_modules/core-js/library/modules/_iter-define.js"); var step = __webpack_require__(/*! ./_iter-step */ "./node_modules/core-js/library/modules/_iter-step.js"); var setSpecies = __webpack_require__(/*! ./_set-species */ "./node_modules/core-js/library/modules/_set-species.js"); var DESCRIPTORS = __webpack_require__(/*! ./_descriptors */ "./node_modules/core-js/library/modules/_descriptors.js"); var fastKey = __webpack_require__(/*! ./_meta */ "./node_modules/core-js/library/modules/_meta.js").fastKey; var validate = __webpack_require__(/*! ./_validate-collection */ "./node_modules/core-js/library/modules/_validate-collection.js"); var SIZE = DESCRIPTORS ? 
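// with descriptor support, size is stored on the internal '_s' slot and exposed via a getter below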
'_s' : 'size'; var getEntry = function (that, key) { // fast case var index = fastKey(key); var entry; if (index !== 'F') return that._i[index]; // frozen object case for (entry = that._f; entry; entry = entry.n) { if (entry.k == key) return entry; } }; module.exports = { getConstructor: function (wrapper, NAME, IS_MAP, ADDER) { var C = wrapper(function (that, iterable) { anInstance(that, C, NAME, '_i'); that._t = NAME; // collection type that._i = create(null); // index that._f = undefined; // first entry that._l = undefined; // last entry that[SIZE] = 0; // size if (iterable != undefined) forOf(iterable, IS_MAP, that[ADDER], that); }); redefineAll(C.prototype, { // 23.1.3.1 Map.prototype.clear() // 23.2.3.2 Set.prototype.clear() clear: function clear() { for (var that = validate(this, NAME), data = that._i, entry = that._f; entry; entry = entry.n) { entry.r = true; if (entry.p) entry.p = entry.p.n = undefined; delete data[entry.i]; } that._f = that._l = undefined; that[SIZE] = 0; }, // 23.1.3.3 Map.prototype.delete(key) // 23.2.3.4 Set.prototype.delete(value) 'delete': function (key) { var that = validate(this, NAME); var entry = getEntry(that, key); if (entry) { var next = entry.n; var prev = entry.p; delete that._i[entry.i]; entry.r = true; if (prev) prev.n = next; if (next) next.p = prev; if (that._f == entry) that._f = next; if (that._l == entry) that._l = prev; that[SIZE]--; } return !!entry; }, // 23.2.3.6 Set.prototype.forEach(callbackfn, thisArg = undefined) // 23.1.3.5 Map.prototype.forEach(callbackfn, thisArg = undefined) forEach: function forEach(callbackfn /* , that = undefined */) { validate(this, NAME); var f = ctx(callbackfn, arguments.length > 1 ? arguments[1] : undefined, 3); var entry; while (entry = entry ? entry.n : this._f) { f(entry.v, entry.k, this); // revert to the last existing entry while (entry && entry.r) entry = entry.p; } }, // 23.1.3.7 Map.prototype.has(key) // 23.2.3.7 Set.prototype.has(value) has: function has(key) { return !!getEntry(validate(this, NAME), key); } }); if (DESCRIPTORS) dP(C.prototype, 'size', { get: function () { return validate(this, NAME)[SIZE]; } }); return C; }, def: function (that, key, value) { var entry = getEntry(that, key); var prev, index; // change existing entry if (entry) { entry.v = value; // create new entry } else { that._l = entry = { i: index = fastKey(key, true), // <- index k: key, // <- key v: value, // <- value p: prev = that._l, // <- previous entry n: undefined, // <- next entry r: false // <- removed }; if (!that._f) that._f = entry; if (prev) prev.n = entry; that[SIZE]++; // add to index if (index !== 'F') that._i[index] = entry; } return that; }, getEntry: getEntry, setStrong: function (C, NAME, IS_MAP) { // add .keys, .values, .entries, [@@iterator] // 23.1.3.4, 23.1.3.8, 23.1.3.11, 23.1.3.12, 23.2.3.5, 23.2.3.8, 23.2.3.10, 23.2.3.11 $iterDefine(C, NAME, function (iterated, kind) { this._t = validate(iterated, NAME); // target this._k = kind; // kind this._l = undefined; // previous }, function () { var that = this; var kind = that._k; var entry = that._l; // revert to the last existing entry while (entry && entry.r) entry = entry.p; // get next entry if (!that._t || !(that._l = entry = entry ? entry.n : that._t._f)) { // or finish the iteration that._t = undefined; return step(1); } // return step by kind if (kind == 'keys') return step(0, entry.k); if (kind == 'values') return step(0, entry.v); return step(0, [entry.k, entry.v]); }, IS_MAP ? 
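// default iteration kind: Map iterates entries, Set iterates values (23.1.3.12 / 23.2.3.11)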
'entries' : 'values', !IS_MAP, true); // add [@@species], 23.1.2.2, 23.2.2.2 setSpecies(NAME); } }; /***/ }), /***/ "./node_modules/core-js/library/modules/_collection-to-json.js": /*!*********************************************************************!*\ !*** ./node_modules/core-js/library/modules/_collection-to-json.js ***! \*********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { // https://github.com/DavidBruant/Map-Set.prototype.toJSON var classof = __webpack_require__(/*! ./_classof */ "./node_modules/core-js/library/modules/_classof.js"); var from = __webpack_require__(/*! ./_array-from-iterable */ "./node_modules/core-js/library/modules/_array-from-iterable.js"); module.exports = function (NAME) { return function toJSON() { if (classof(this) != NAME) throw TypeError(NAME + "#toJSON isn't generic"); return from(this); }; }; /***/ }), /***/ "./node_modules/core-js/library/modules/_collection.js": /*!*************************************************************!*\ !*** ./node_modules/core-js/library/modules/_collection.js ***! \*************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var global = __webpack_require__(/*! ./_global */ "./node_modules/core-js/library/modules/_global.js"); var $export = __webpack_require__(/*! ./_export */ "./node_modules/core-js/library/modules/_export.js"); var meta = __webpack_require__(/*! ./_meta */ "./node_modules/core-js/library/modules/_meta.js"); var fails = __webpack_require__(/*! ./_fails */ "./node_modules/core-js/library/modules/_fails.js"); var hide = __webpack_require__(/*! ./_hide */ "./node_modules/core-js/library/modules/_hide.js"); var redefineAll = __webpack_require__(/*! ./_redefine-all */ "./node_modules/core-js/library/modules/_redefine-all.js"); var forOf = __webpack_require__(/*! ./_for-of */ "./node_modules/core-js/library/modules/_for-of.js"); var anInstance = __webpack_require__(/*! ./_an-instance */ "./node_modules/core-js/library/modules/_an-instance.js"); var isObject = __webpack_require__(/*! ./_is-object */ "./node_modules/core-js/library/modules/_is-object.js"); var setToStringTag = __webpack_require__(/*! ./_set-to-string-tag */ "./node_modules/core-js/library/modules/_set-to-string-tag.js"); var dP = __webpack_require__(/*! ./_object-dp */ "./node_modules/core-js/library/modules/_object-dp.js").f; var each = __webpack_require__(/*! ./_array-methods */ "./node_modules/core-js/library/modules/_array-methods.js")(0); var DESCRIPTORS = __webpack_require__(/*! ./_descriptors */ "./node_modules/core-js/library/modules/_descriptors.js"); module.exports = function (NAME, wrapper, methods, common, IS_MAP, IS_WEAK) { var Base = global[NAME]; var C = Base; var ADDER = IS_MAP ? 
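// the method used to populate the collection from an iterable: Map#set vs Set#add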
'set' : 'add'; var proto = C && C.prototype; var O = {}; if (!DESCRIPTORS || typeof C != 'function' || !(IS_WEAK || proto.forEach && !fails(function () { new C().entries().next(); }))) { // create collection constructor C = common.getConstructor(wrapper, NAME, IS_MAP, ADDER); redefineAll(C.prototype, methods); meta.NEED = true; } else { C = wrapper(function (target, iterable) { anInstance(target, C, NAME, '_c'); target._c = new Base(); if (iterable != undefined) forOf(iterable, IS_MAP, target[ADDER], target); }); each('add,clear,delete,forEach,get,has,set,keys,values,entries,toJSON'.split(','), function (KEY) { var IS_ADDER = KEY == 'add' || KEY == 'set'; if (KEY in proto && !(IS_WEAK && KEY == 'clear')) hide(C.prototype, KEY, function (a, b) { anInstance(this, C, KEY); if (!IS_ADDER && IS_WEAK && !isObject(a)) return KEY == 'get' ? undefined : false; var result = this._c[KEY](a === 0 ? 0 : a, b); return IS_ADDER ? this : result; }); }); IS_WEAK || dP(C.prototype, 'size', { get: function () { return this._c.size; } }); } setToStringTag(C, NAME); O[NAME] = C; $export($export.G + $export.W + $export.F, O); if (!IS_WEAK) common.setStrong(C, NAME, IS_MAP); return C; }; /***/ }), /***/ "./node_modules/core-js/library/modules/_core.js": /*!*******************************************************!*\ !*** ./node_modules/core-js/library/modules/_core.js ***! \*******************************************************/ /*! no static exports found */ /***/ (function(module, exports) { var core = module.exports = { version: '2.6.5' }; if (typeof __e == 'number') __e = core; // eslint-disable-line no-undef /***/ }), /***/ "./node_modules/core-js/library/modules/_ctx.js": /*!******************************************************!*\ !*** ./node_modules/core-js/library/modules/_ctx.js ***! \******************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { // optional / simple context binding var aFunction = __webpack_require__(/*! ./_a-function */ "./node_modules/core-js/library/modules/_a-function.js"); module.exports = function (fn, that, length) { aFunction(fn); if (that === undefined) return fn; switch (length) { case 1: return function (a) { return fn.call(that, a); }; case 2: return function (a, b) { return fn.call(that, a, b); }; case 3: return function (a, b, c) { return fn.call(that, a, b, c); }; } return function (/* ...args */) { return fn.apply(that, arguments); }; }; /***/ }), /***/ "./node_modules/core-js/library/modules/_defined.js": /*!**********************************************************!*\ !*** ./node_modules/core-js/library/modules/_defined.js ***! \**********************************************************/ /*! no static exports found */ /***/ (function(module, exports) { // 7.2.1 RequireObjectCoercible(argument) module.exports = function (it) { if (it == undefined) throw TypeError("Can't call method on " + it); return it; }; /***/ }), /***/ "./node_modules/core-js/library/modules/_descriptors.js": /*!**************************************************************!*\ !*** ./node_modules/core-js/library/modules/_descriptors.js ***! \**************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { // Thank's IE8 for his funny defineProperty module.exports = !__webpack_require__(/*! 
./_fails */ "./node_modules/core-js/library/modules/_fails.js")(function () { return Object.defineProperty({}, 'a', { get: function () { return 7; } }).a != 7; }); /***/ }), /***/ "./node_modules/core-js/library/modules/_dom-create.js": /*!*************************************************************!*\ !*** ./node_modules/core-js/library/modules/_dom-create.js ***! \*************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var isObject = __webpack_require__(/*! ./_is-object */ "./node_modules/core-js/library/modules/_is-object.js"); var document = __webpack_require__(/*! ./_global */ "./node_modules/core-js/library/modules/_global.js").document; // typeof document.createElement is 'object' in old IE var is = isObject(document) && isObject(document.createElement); module.exports = function (it) { return is ? document.createElement(it) : {}; }; /***/ }), /***/ "./node_modules/core-js/library/modules/_enum-bug-keys.js": /*!****************************************************************!*\ !*** ./node_modules/core-js/library/modules/_enum-bug-keys.js ***! \****************************************************************/ /*! no static exports found */ /***/ (function(module, exports) { // IE 8- don't enum bug keys module.exports = ( 'constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf' ).split(','); /***/ }), /***/ "./node_modules/core-js/library/modules/_enum-keys.js": /*!************************************************************!*\ !*** ./node_modules/core-js/library/modules/_enum-keys.js ***! \************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { // all enumerable object keys, includes symbols var getKeys = __webpack_require__(/*! ./_object-keys */ "./node_modules/core-js/library/modules/_object-keys.js"); var gOPS = __webpack_require__(/*! ./_object-gops */ "./node_modules/core-js/library/modules/_object-gops.js"); var pIE = __webpack_require__(/*! ./_object-pie */ "./node_modules/core-js/library/modules/_object-pie.js"); module.exports = function (it) { var result = getKeys(it); var getSymbols = gOPS.f; if (getSymbols) { var symbols = getSymbols(it); var isEnum = pIE.f; var i = 0; var key; while (symbols.length > i) if (isEnum.call(it, key = symbols[i++])) result.push(key); } return result; }; /***/ }), /***/ "./node_modules/core-js/library/modules/_export.js": /*!*********************************************************!*\ !*** ./node_modules/core-js/library/modules/_export.js ***! \*********************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var global = __webpack_require__(/*! ./_global */ "./node_modules/core-js/library/modules/_global.js"); var core = __webpack_require__(/*! ./_core */ "./node_modules/core-js/library/modules/_core.js"); var ctx = __webpack_require__(/*! ./_ctx */ "./node_modules/core-js/library/modules/_ctx.js"); var hide = __webpack_require__(/*! ./_hide */ "./node_modules/core-js/library/modules/_hide.js"); var has = __webpack_require__(/*! 
./_has */ "./node_modules/core-js/library/modules/_has.js"); var PROTOTYPE = 'prototype'; var $export = function (type, name, source) { var IS_FORCED = type & $export.F; var IS_GLOBAL = type & $export.G; var IS_STATIC = type & $export.S; var IS_PROTO = type & $export.P; var IS_BIND = type & $export.B; var IS_WRAP = type & $export.W; var exports = IS_GLOBAL ? core : core[name] || (core[name] = {}); var expProto = exports[PROTOTYPE]; var target = IS_GLOBAL ? global : IS_STATIC ? global[name] : (global[name] || {})[PROTOTYPE]; var key, own, out; if (IS_GLOBAL) source = name; for (key in source) { // contains in native own = !IS_FORCED && target && target[key] !== undefined; if (own && has(exports, key)) continue; // export native or passed out = own ? target[key] : source[key]; // prevent global pollution for namespaces exports[key] = IS_GLOBAL && typeof target[key] != 'function' ? source[key] // bind timers to global for call from export context : IS_BIND && own ? ctx(out, global) // wrap global constructors for prevent change them in library : IS_WRAP && target[key] == out ? (function (C) { var F = function (a, b, c) { if (this instanceof C) { switch (arguments.length) { case 0: return new C(); case 1: return new C(a); case 2: return new C(a, b); } return new C(a, b, c); } return C.apply(this, arguments); }; F[PROTOTYPE] = C[PROTOTYPE]; return F; // make static versions for prototype methods })(out) : IS_PROTO && typeof out == 'function' ? ctx(Function.call, out) : out; // export proto methods to core.%CONSTRUCTOR%.methods.%NAME% if (IS_PROTO) { (exports.virtual || (exports.virtual = {}))[key] = out; // export proto methods to core.%CONSTRUCTOR%.prototype.%NAME% if (type & $export.R && expProto && !expProto[key]) hide(expProto, key, out); } } }; // type bitmap $export.F = 1; // forced $export.G = 2; // global $export.S = 4; // static $export.P = 8; // proto $export.B = 16; // bind $export.W = 32; // wrap $export.U = 64; // safe $export.R = 128; // real proto method for `library` module.exports = $export; /***/ }), /***/ "./node_modules/core-js/library/modules/_fails.js": /*!********************************************************!*\ !*** ./node_modules/core-js/library/modules/_fails.js ***! \********************************************************/ /*! no static exports found */ /***/ (function(module, exports) { module.exports = function (exec) { try { return !!exec(); } catch (e) { return true; } }; /***/ }), /***/ "./node_modules/core-js/library/modules/_for-of.js": /*!*********************************************************!*\ !*** ./node_modules/core-js/library/modules/_for-of.js ***! \*********************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var ctx = __webpack_require__(/*! ./_ctx */ "./node_modules/core-js/library/modules/_ctx.js"); var call = __webpack_require__(/*! ./_iter-call */ "./node_modules/core-js/library/modules/_iter-call.js"); var isArrayIter = __webpack_require__(/*! ./_is-array-iter */ "./node_modules/core-js/library/modules/_is-array-iter.js"); var anObject = __webpack_require__(/*! ./_an-object */ "./node_modules/core-js/library/modules/_an-object.js"); var toLength = __webpack_require__(/*! ./_to-length */ "./node_modules/core-js/library/modules/_to-length.js"); var getIterFn = __webpack_require__(/*! 
/***/ "./node_modules/core-js/library/modules/_for-of.js":
/*!*********************************************************!*\
  !*** ./node_modules/core-js/library/modules/_for-of.js ***!
  \*********************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

var ctx = __webpack_require__(/*! ./_ctx */ "./node_modules/core-js/library/modules/_ctx.js");
var call = __webpack_require__(/*! ./_iter-call */ "./node_modules/core-js/library/modules/_iter-call.js");
var isArrayIter = __webpack_require__(/*! ./_is-array-iter */ "./node_modules/core-js/library/modules/_is-array-iter.js");
var anObject = __webpack_require__(/*! ./_an-object */ "./node_modules/core-js/library/modules/_an-object.js");
var toLength = __webpack_require__(/*! ./_to-length */ "./node_modules/core-js/library/modules/_to-length.js");
var getIterFn = __webpack_require__(/*! ./core.get-iterator-method */ "./node_modules/core-js/library/modules/core.get-iterator-method.js");
var BREAK = {};
var RETURN = {};
var exports = module.exports = function (iterable, entries, fn, that, ITERATOR) {
  var iterFn = ITERATOR ? function () { return iterable; } : getIterFn(iterable);
  var f = ctx(fn, that, entries ? 2 : 1);
  var index = 0;
  var length, step, iterator, result;
  if (typeof iterFn != 'function') throw TypeError(iterable + ' is not iterable!');
  // fast case for arrays with the default iterator
  if (isArrayIter(iterFn)) for (length = toLength(iterable.length); length > index; index++) {
    result = entries ? f(anObject(step = iterable[index])[0], step[1]) : f(iterable[index]);
    if (result === BREAK || result === RETURN) return result;
  } else for (iterator = iterFn.call(iterable); !(step = iterator.next()).done;) {
    result = call(iterator, f, step.value, entries);
    if (result === BREAK || result === RETURN) return result;
  }
};
exports.BREAK = BREAK;
exports.RETURN = RETURN;

/***/ }),

/***/ "./node_modules/core-js/library/modules/_global.js":
/*!*********************************************************!*\
  !*** ./node_modules/core-js/library/modules/_global.js ***!
  \*********************************************************/
/*! no static exports found */
/***/ (function(module, exports) {

// https://github.com/zloirock/core-js/issues/86#issuecomment-115759028
var global = module.exports = typeof window != 'undefined' && window.Math == Math
  ? window : typeof self != 'undefined' && self.Math == Math ? self
  // eslint-disable-next-line no-new-func
  : Function('return this')();
if (typeof __g == 'number') __g = global; // eslint-disable-line no-undef

/***/ }),

/***/ "./node_modules/core-js/library/modules/_has.js":
/*!******************************************************!*\
  !*** ./node_modules/core-js/library/modules/_has.js ***!
  \******************************************************/
/*! no static exports found */
/***/ (function(module, exports) {

var hasOwnProperty = {}.hasOwnProperty;
module.exports = function (it, key) {
  return hasOwnProperty.call(it, key);
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_hide.js":
/*!*******************************************************!*\
  !*** ./node_modules/core-js/library/modules/_hide.js ***!
  \*******************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

var dP = __webpack_require__(/*! ./_object-dp */ "./node_modules/core-js/library/modules/_object-dp.js");
var createDesc = __webpack_require__(/*! ./_property-desc */ "./node_modules/core-js/library/modules/_property-desc.js");
module.exports = __webpack_require__(/*! ./_descriptors */ "./node_modules/core-js/library/modules/_descriptors.js") ? function (object, key, value) {
  return dP.f(object, key, createDesc(1, value));
} : function (object, key, value) {
  object[key] = value;
  return object;
};

/***/ }),
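// NOTE: illustrative sketch, not part of the bundled output. The internal
// _for-of module above compares the callback's return value against the
// sentinel objects BREAK/RETURN to stop iteration early (hypothetical usage):
//
//   // var forOf = require('core-js/library/modules/_for-of');
//   // forOf([1, 2, 3], false, function (x) {
//   //   console.log(x);
//   //   if (x === 2) return forOf.BREAK; // stops after the second element
//   // });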
./_global */ "./node_modules/core-js/library/modules/_global.js").document; module.exports = document && document.documentElement; /***/ }), /***/ "./node_modules/core-js/library/modules/_ie8-dom-define.js": /*!*****************************************************************!*\ !*** ./node_modules/core-js/library/modules/_ie8-dom-define.js ***! \*****************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = !__webpack_require__(/*! ./_descriptors */ "./node_modules/core-js/library/modules/_descriptors.js") && !__webpack_require__(/*! ./_fails */ "./node_modules/core-js/library/modules/_fails.js")(function () { return Object.defineProperty(__webpack_require__(/*! ./_dom-create */ "./node_modules/core-js/library/modules/_dom-create.js")('div'), 'a', { get: function () { return 7; } }).a != 7; }); /***/ }), /***/ "./node_modules/core-js/library/modules/_invoke.js": /*!*********************************************************!*\ !*** ./node_modules/core-js/library/modules/_invoke.js ***! \*********************************************************/ /*! no static exports found */ /***/ (function(module, exports) { // fast apply, http://jsperf.lnkit.com/fast-apply/5 module.exports = function (fn, args, that) { var un = that === undefined; switch (args.length) { case 0: return un ? fn() : fn.call(that); case 1: return un ? fn(args[0]) : fn.call(that, args[0]); case 2: return un ? fn(args[0], args[1]) : fn.call(that, args[0], args[1]); case 3: return un ? fn(args[0], args[1], args[2]) : fn.call(that, args[0], args[1], args[2]); case 4: return un ? fn(args[0], args[1], args[2], args[3]) : fn.call(that, args[0], args[1], args[2], args[3]); } return fn.apply(that, args); }; /***/ }), /***/ "./node_modules/core-js/library/modules/_iobject.js": /*!**********************************************************!*\ !*** ./node_modules/core-js/library/modules/_iobject.js ***! \**********************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { // fallback for non-array-like ES3 and non-enumerable old V8 strings var cof = __webpack_require__(/*! ./_cof */ "./node_modules/core-js/library/modules/_cof.js"); // eslint-disable-next-line no-prototype-builtins module.exports = Object('z').propertyIsEnumerable(0) ? Object : function (it) { return cof(it) == 'String' ? it.split('') : Object(it); }; /***/ }), /***/ "./node_modules/core-js/library/modules/_is-array-iter.js": /*!****************************************************************!*\ !*** ./node_modules/core-js/library/modules/_is-array-iter.js ***! \****************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { // check on default Array iterator var Iterators = __webpack_require__(/*! ./_iterators */ "./node_modules/core-js/library/modules/_iterators.js"); var ITERATOR = __webpack_require__(/*! ./_wks */ "./node_modules/core-js/library/modules/_wks.js")('iterator'); var ArrayProto = Array.prototype; module.exports = function (it) { return it !== undefined && (Iterators.Array === it || ArrayProto[ITERATOR] === it); }; /***/ }), /***/ "./node_modules/core-js/library/modules/_is-array.js": /*!***********************************************************!*\ !*** ./node_modules/core-js/library/modules/_is-array.js ***! \***********************************************************/ /*! 
/***/ "./node_modules/core-js/library/modules/_is-array.js":
/*!***********************************************************!*\
  !*** ./node_modules/core-js/library/modules/_is-array.js ***!
  \***********************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

// 7.2.2 IsArray(argument)
var cof = __webpack_require__(/*! ./_cof */ "./node_modules/core-js/library/modules/_cof.js");
module.exports = Array.isArray || function isArray(arg) {
  return cof(arg) == 'Array';
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_is-object.js":
/*!************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_is-object.js ***!
  \************************************************************/
/*! no static exports found */
/***/ (function(module, exports) {

module.exports = function (it) {
  return typeof it === 'object' ? it !== null : typeof it === 'function';
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_iter-call.js":
/*!************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_iter-call.js ***!
  \************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

// call something on iterator step with safe closing on error
var anObject = __webpack_require__(/*! ./_an-object */ "./node_modules/core-js/library/modules/_an-object.js");
module.exports = function (iterator, fn, value, entries) {
  try {
    return entries ? fn(anObject(value)[0], value[1]) : fn(value);
  // 7.4.6 IteratorClose(iterator, completion)
  } catch (e) {
    var ret = iterator['return'];
    if (ret !== undefined) anObject(ret.call(iterator));
    throw e;
  }
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_iter-create.js":
/*!**************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_iter-create.js ***!
  \**************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

"use strict";

var create = __webpack_require__(/*! ./_object-create */ "./node_modules/core-js/library/modules/_object-create.js");
var descriptor = __webpack_require__(/*! ./_property-desc */ "./node_modules/core-js/library/modules/_property-desc.js");
var setToStringTag = __webpack_require__(/*! ./_set-to-string-tag */ "./node_modules/core-js/library/modules/_set-to-string-tag.js");
var IteratorPrototype = {};

// 25.1.2.1.1 %IteratorPrototype%[@@iterator]()
__webpack_require__(/*! ./_hide */ "./node_modules/core-js/library/modules/_hide.js")(IteratorPrototype, __webpack_require__(/*! ./_wks */ "./node_modules/core-js/library/modules/_wks.js")('iterator'), function () { return this; });

module.exports = function (Constructor, NAME, next) {
  Constructor.prototype = create(IteratorPrototype, { next: descriptor(1, next) });
  setToStringTag(Constructor, NAME + ' Iterator');
};

/***/ }),
./_hide */ "./node_modules/core-js/library/modules/_hide.js"); var Iterators = __webpack_require__(/*! ./_iterators */ "./node_modules/core-js/library/modules/_iterators.js"); var $iterCreate = __webpack_require__(/*! ./_iter-create */ "./node_modules/core-js/library/modules/_iter-create.js"); var setToStringTag = __webpack_require__(/*! ./_set-to-string-tag */ "./node_modules/core-js/library/modules/_set-to-string-tag.js"); var getPrototypeOf = __webpack_require__(/*! ./_object-gpo */ "./node_modules/core-js/library/modules/_object-gpo.js"); var ITERATOR = __webpack_require__(/*! ./_wks */ "./node_modules/core-js/library/modules/_wks.js")('iterator'); var BUGGY = !([].keys && 'next' in [].keys()); // Safari has buggy iterators w/o `next` var FF_ITERATOR = '@@iterator'; var KEYS = 'keys'; var VALUES = 'values'; var returnThis = function () { return this; }; module.exports = function (Base, NAME, Constructor, next, DEFAULT, IS_SET, FORCED) { $iterCreate(Constructor, NAME, next); var getMethod = function (kind) { if (!BUGGY && kind in proto) return proto[kind]; switch (kind) { case KEYS: return function keys() { return new Constructor(this, kind); }; case VALUES: return function values() { return new Constructor(this, kind); }; } return function entries() { return new Constructor(this, kind); }; }; var TAG = NAME + ' Iterator'; var DEF_VALUES = DEFAULT == VALUES; var VALUES_BUG = false; var proto = Base.prototype; var $native = proto[ITERATOR] || proto[FF_ITERATOR] || DEFAULT && proto[DEFAULT]; var $default = $native || getMethod(DEFAULT); var $entries = DEFAULT ? !DEF_VALUES ? $default : getMethod('entries') : undefined; var $anyNative = NAME == 'Array' ? proto.entries || $native : $native; var methods, key, IteratorPrototype; // Fix native if ($anyNative) { IteratorPrototype = getPrototypeOf($anyNative.call(new Base())); if (IteratorPrototype !== Object.prototype && IteratorPrototype.next) { // Set @@toStringTag to native iterators setToStringTag(IteratorPrototype, TAG, true); // fix for some old engines if (!LIBRARY && typeof IteratorPrototype[ITERATOR] != 'function') hide(IteratorPrototype, ITERATOR, returnThis); } } // fix Array#{values, @@iterator}.name in V8 / FF if (DEF_VALUES && $native && $native.name !== VALUES) { VALUES_BUG = true; $default = function values() { return $native.call(this); }; } // Define iterator if ((!LIBRARY || FORCED) && (BUGGY || VALUES_BUG || !proto[ITERATOR])) { hide(proto, ITERATOR, $default); } // Plug for library Iterators[NAME] = $default; Iterators[TAG] = returnThis; if (DEFAULT) { methods = { values: DEF_VALUES ? $default : getMethod(VALUES), keys: IS_SET ? $default : getMethod(KEYS), entries: $entries }; if (FORCED) for (key in methods) { if (!(key in proto)) redefine(proto, key, methods[key]); } else $export($export.P + $export.F * (BUGGY || VALUES_BUG), NAME, methods); } return methods; }; /***/ }), /***/ "./node_modules/core-js/library/modules/_iter-detect.js": /*!**************************************************************!*\ !*** ./node_modules/core-js/library/modules/_iter-detect.js ***! \**************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var ITERATOR = __webpack_require__(/*! 
./_wks */ "./node_modules/core-js/library/modules/_wks.js")('iterator'); var SAFE_CLOSING = false; try { var riter = [7][ITERATOR](); riter['return'] = function () { SAFE_CLOSING = true; }; // eslint-disable-next-line no-throw-literal Array.from(riter, function () { throw 2; }); } catch (e) { /* empty */ } module.exports = function (exec, skipClosing) { if (!skipClosing && !SAFE_CLOSING) return false; var safe = false; try { var arr = [7]; var iter = arr[ITERATOR](); iter.next = function () { return { done: safe = true }; }; arr[ITERATOR] = function () { return iter; }; exec(arr); } catch (e) { /* empty */ } return safe; }; /***/ }), /***/ "./node_modules/core-js/library/modules/_iter-step.js": /*!************************************************************!*\ !*** ./node_modules/core-js/library/modules/_iter-step.js ***! \************************************************************/ /*! no static exports found */ /***/ (function(module, exports) { module.exports = function (done, value) { return { value: value, done: !!done }; }; /***/ }), /***/ "./node_modules/core-js/library/modules/_iterators.js": /*!************************************************************!*\ !*** ./node_modules/core-js/library/modules/_iterators.js ***! \************************************************************/ /*! no static exports found */ /***/ (function(module, exports) { module.exports = {}; /***/ }), /***/ "./node_modules/core-js/library/modules/_library.js": /*!**********************************************************!*\ !*** ./node_modules/core-js/library/modules/_library.js ***! \**********************************************************/ /*! no static exports found */ /***/ (function(module, exports) { module.exports = true; /***/ }), /***/ "./node_modules/core-js/library/modules/_meta.js": /*!*******************************************************!*\ !*** ./node_modules/core-js/library/modules/_meta.js ***! \*******************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var META = __webpack_require__(/*! ./_uid */ "./node_modules/core-js/library/modules/_uid.js")('meta'); var isObject = __webpack_require__(/*! ./_is-object */ "./node_modules/core-js/library/modules/_is-object.js"); var has = __webpack_require__(/*! ./_has */ "./node_modules/core-js/library/modules/_has.js"); var setDesc = __webpack_require__(/*! ./_object-dp */ "./node_modules/core-js/library/modules/_object-dp.js").f; var id = 0; var isExtensible = Object.isExtensible || function () { return true; }; var FREEZE = !__webpack_require__(/*! ./_fails */ "./node_modules/core-js/library/modules/_fails.js")(function () { return isExtensible(Object.preventExtensions({})); }); var setMeta = function (it) { setDesc(it, META, { value: { i: 'O' + ++id, // object ID w: {} // weak collections IDs } }); }; var fastKey = function (it, create) { // return primitive with prefix if (!isObject(it)) return typeof it == 'symbol' ? it : (typeof it == 'string' ? 
/***/ "./node_modules/core-js/library/modules/_meta.js":
/*!*******************************************************!*\
  !*** ./node_modules/core-js/library/modules/_meta.js ***!
  \*******************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

var META = __webpack_require__(/*! ./_uid */ "./node_modules/core-js/library/modules/_uid.js")('meta');
var isObject = __webpack_require__(/*! ./_is-object */ "./node_modules/core-js/library/modules/_is-object.js");
var has = __webpack_require__(/*! ./_has */ "./node_modules/core-js/library/modules/_has.js");
var setDesc = __webpack_require__(/*! ./_object-dp */ "./node_modules/core-js/library/modules/_object-dp.js").f;
var id = 0;
var isExtensible = Object.isExtensible || function () {
  return true;
};
var FREEZE = !__webpack_require__(/*! ./_fails */ "./node_modules/core-js/library/modules/_fails.js")(function () {
  return isExtensible(Object.preventExtensions({}));
});
var setMeta = function (it) {
  setDesc(it, META, { value: {
    i: 'O' + ++id, // object ID
    w: {}          // weak collections IDs
  } });
};
var fastKey = function (it, create) {
  // return primitive with prefix
  if (!isObject(it)) return typeof it == 'symbol' ? it : (typeof it == 'string' ? 'S' : 'P') + it;
  if (!has(it, META)) {
    // can't set metadata to an uncaught frozen object
    if (!isExtensible(it)) return 'F';
    // not necessary to add metadata
    if (!create) return 'E';
    // add missing metadata
    setMeta(it);
  // return object ID
  } return it[META].i;
};
var getWeak = function (it, create) {
  if (!has(it, META)) {
    // can't set metadata to an uncaught frozen object
    if (!isExtensible(it)) return true;
    // not necessary to add metadata
    if (!create) return false;
    // add missing metadata
    setMeta(it);
  // return the hash of weak collections IDs
  } return it[META].w;
};
// add metadata on calls to freeze-family methods
var onFreeze = function (it) {
  if (FREEZE && meta.NEED && isExtensible(it) && !has(it, META)) setMeta(it);
  return it;
};
var meta = module.exports = {
  KEY: META,
  NEED: false,
  fastKey: fastKey,
  getWeak: getWeak,
  onFreeze: onFreeze
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_microtask.js":
/*!************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_microtask.js ***!
  \************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

var global = __webpack_require__(/*! ./_global */ "./node_modules/core-js/library/modules/_global.js");
var macrotask = __webpack_require__(/*! ./_task */ "./node_modules/core-js/library/modules/_task.js").set;
var Observer = global.MutationObserver || global.WebKitMutationObserver;
var process = global.process;
var Promise = global.Promise;
var isNode = __webpack_require__(/*! ./_cof */ "./node_modules/core-js/library/modules/_cof.js")(process) == 'process';

module.exports = function () {
  var head, last, notify;

  var flush = function () {
    var parent, fn;
    if (isNode && (parent = process.domain)) parent.exit();
    while (head) {
      fn = head.fn;
      head = head.next;
      try {
        fn();
      } catch (e) {
        if (head) notify();
        else last = undefined;
        throw e;
      }
    } last = undefined;
    if (parent) parent.enter();
  };

  // Node.js
  if (isNode) {
    notify = function () {
      process.nextTick(flush);
    };
  // browsers with MutationObserver, except iOS Safari - https://github.com/zloirock/core-js/issues/339
  } else if (Observer && !(global.navigator && global.navigator.standalone)) {
    var toggle = true;
    var node = document.createTextNode('');
    new Observer(flush).observe(node, { characterData: true }); // eslint-disable-line no-new
    notify = function () {
      node.data = toggle = !toggle;
    };
  // environments with a possibly non-conformant but existent Promise
  } else if (Promise && Promise.resolve) {
    // Promise.resolve without an argument throws an error in LG WebOS 2
    var promise = Promise.resolve(undefined);
    notify = function () {
      promise.then(flush);
    };
  // for other environments - macrotask based on:
  // - setImmediate
  // - MessageChannel
  // - window.postMessage
  // - onreadystatechange
  // - setTimeout
  } else {
    notify = function () {
      // strange IE + webpack dev server bug - use .call(global)
      macrotask.call(global, flush);
    };
  }

  return function (fn) {
    var task = { fn: fn, next: undefined };
    if (last) last.next = task;
    if (!head) {
      head = task;
      notify();
    } last = task;
  };
};

/***/ }),
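// NOTE: illustrative sketch, not part of the bundled output. The _microtask
// module above is a factory returning a scheduler; the Promise polyfill uses
// it roughly like this (the require path is an assumption for core-js@2):
//
//   // var microtask = require('core-js/library/modules/_microtask')();
//   // microtask(function () { console.log('runs after the current stack unwinds'); });
//
// Callbacks are chained in a linked list (head/last) so one underlying
// notification flushes every task queued during the same tick.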
/***/ "./node_modules/core-js/library/modules/_new-promise-capability.js":
/*!*************************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_new-promise-capability.js ***!
  \*************************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

"use strict";

// 25.4.1.5 NewPromiseCapability(C)
var aFunction = __webpack_require__(/*! ./_a-function */ "./node_modules/core-js/library/modules/_a-function.js");

function PromiseCapability(C) {
  var resolve, reject;
  this.promise = new C(function ($$resolve, $$reject) {
    if (resolve !== undefined || reject !== undefined) throw TypeError('Bad Promise constructor');
    resolve = $$resolve;
    reject = $$reject;
  });
  this.resolve = aFunction(resolve);
  this.reject = aFunction(reject);
}

module.exports.f = function (C) {
  return new PromiseCapability(C);
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_object-assign.js":
/*!****************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_object-assign.js ***!
  \****************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

"use strict";

// 19.1.2.1 Object.assign(target, source, ...)
var getKeys = __webpack_require__(/*! ./_object-keys */ "./node_modules/core-js/library/modules/_object-keys.js");
var gOPS = __webpack_require__(/*! ./_object-gops */ "./node_modules/core-js/library/modules/_object-gops.js");
var pIE = __webpack_require__(/*! ./_object-pie */ "./node_modules/core-js/library/modules/_object-pie.js");
var toObject = __webpack_require__(/*! ./_to-object */ "./node_modules/core-js/library/modules/_to-object.js");
var IObject = __webpack_require__(/*! ./_iobject */ "./node_modules/core-js/library/modules/_iobject.js");
var $assign = Object.assign;

// should work with symbols and should have deterministic property order (V8 bug)
module.exports = !$assign || __webpack_require__(/*! ./_fails */ "./node_modules/core-js/library/modules/_fails.js")(function () {
  var A = {};
  var B = {};
  // eslint-disable-next-line no-undef
  var S = Symbol();
  var K = 'abcdefghijklmnopqrst';
  A[S] = 7;
  K.split('').forEach(function (k) { B[k] = k; });
  return $assign({}, A)[S] != 7 || Object.keys($assign({}, B)).join('') != K;
}) ? function assign(target, source) { // eslint-disable-line no-unused-vars
  var T = toObject(target);
  var aLen = arguments.length;
  var index = 1;
  var getSymbols = gOPS.f;
  var isEnum = pIE.f;
  while (aLen > index) {
    var S = IObject(arguments[index++]);
    var keys = getSymbols ? getKeys(S).concat(getSymbols(S)) : getKeys(S);
    var length = keys.length;
    var j = 0;
    var key;
    while (length > j) if (isEnum.call(S, key = keys[j++])) T[key] = S[key];
  } return T;
} : $assign;

/***/ }),
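// NOTE: illustrative sketch, not part of the bundled output. The
// _new-promise-capability module above produces the classic "deferred"
// triple from a Promise constructor (hypothetical usage):
//
//   // var newPromiseCapability = require('core-js/library/modules/_new-promise-capability');
//   // var capability = newPromiseCapability.f(Promise);
//   // capability.promise.then(function (v) { console.log(v); });
//   // capability.resolve(42); // logs 42
//
// The `resolve !== undefined` guard rejects constructors that call the
// executor more than once, per the NewPromiseCapability spec steps.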
/***/ "./node_modules/core-js/library/modules/_object-create.js":
/*!****************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_object-create.js ***!
  \****************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

// 19.1.2.2 / 15.2.3.5 Object.create(O [, Properties])
var anObject = __webpack_require__(/*! ./_an-object */ "./node_modules/core-js/library/modules/_an-object.js");
var dPs = __webpack_require__(/*! ./_object-dps */ "./node_modules/core-js/library/modules/_object-dps.js");
var enumBugKeys = __webpack_require__(/*! ./_enum-bug-keys */ "./node_modules/core-js/library/modules/_enum-bug-keys.js");
var IE_PROTO = __webpack_require__(/*! ./_shared-key */ "./node_modules/core-js/library/modules/_shared-key.js")('IE_PROTO');
var Empty = function () { /* empty */ };
var PROTOTYPE = 'prototype';

// Create object with fake `null` prototype: use iframe Object with cleared prototype
var createDict = function () {
  // Thrash, waste and sodomy: IE GC bug
  var iframe = __webpack_require__(/*! ./_dom-create */ "./node_modules/core-js/library/modules/_dom-create.js")('iframe');
  var i = enumBugKeys.length;
  var lt = '<';
  var gt = '>';
  var iframeDocument;
  iframe.style.display = 'none';
  __webpack_require__(/*! ./_html */ "./node_modules/core-js/library/modules/_html.js").appendChild(iframe);
  iframe.src = 'javascript:'; // eslint-disable-line no-script-url
  // createDict = iframe.contentWindow.Object;
  // html.removeChild(iframe);
  iframeDocument = iframe.contentWindow.document;
  iframeDocument.open();
  iframeDocument.write(lt + 'script' + gt + 'document.F=Object' + lt + '/script' + gt);
  iframeDocument.close();
  createDict = iframeDocument.F;
  while (i--) delete createDict[PROTOTYPE][enumBugKeys[i]];
  return createDict();
};

module.exports = Object.create || function create(O, Properties) {
  var result;
  if (O !== null) {
    Empty[PROTOTYPE] = anObject(O);
    result = new Empty();
    Empty[PROTOTYPE] = null;
    // add "__proto__" for Object.getPrototypeOf polyfill
    result[IE_PROTO] = O;
  } else result = createDict();
  return Properties === undefined ? result : dPs(result, Properties);
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_object-dp.js":
/*!************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_object-dp.js ***!
  \************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

var anObject = __webpack_require__(/*! ./_an-object */ "./node_modules/core-js/library/modules/_an-object.js");
var IE8_DOM_DEFINE = __webpack_require__(/*! ./_ie8-dom-define */ "./node_modules/core-js/library/modules/_ie8-dom-define.js");
var toPrimitive = __webpack_require__(/*! ./_to-primitive */ "./node_modules/core-js/library/modules/_to-primitive.js");
var dP = Object.defineProperty;

exports.f = __webpack_require__(/*! ./_descriptors */ "./node_modules/core-js/library/modules/_descriptors.js") ? Object.defineProperty : function defineProperty(O, P, Attributes) {
  anObject(O);
  P = toPrimitive(P, true);
  anObject(Attributes);
  if (IE8_DOM_DEFINE) try {
    return dP(O, P, Attributes);
  } catch (e) { /* empty */ }
  if ('get' in Attributes || 'set' in Attributes) throw TypeError('Accessors not supported!');
  if ('value' in Attributes) O[P] = Attributes.value;
  return O;
};

/***/ }),
/***/ "./node_modules/core-js/library/modules/_object-dps.js":
/*!*************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_object-dps.js ***!
  \*************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

var dP = __webpack_require__(/*! ./_object-dp */ "./node_modules/core-js/library/modules/_object-dp.js");
var anObject = __webpack_require__(/*! ./_an-object */ "./node_modules/core-js/library/modules/_an-object.js");
var getKeys = __webpack_require__(/*! ./_object-keys */ "./node_modules/core-js/library/modules/_object-keys.js");

module.exports = __webpack_require__(/*! ./_descriptors */ "./node_modules/core-js/library/modules/_descriptors.js") ? Object.defineProperties : function defineProperties(O, Properties) {
  anObject(O);
  var keys = getKeys(Properties);
  var length = keys.length;
  var i = 0;
  var P;
  while (length > i) dP.f(O, P = keys[i++], Properties[P]);
  return O;
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_object-gopd.js":
/*!**************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_object-gopd.js ***!
  \**************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

var pIE = __webpack_require__(/*! ./_object-pie */ "./node_modules/core-js/library/modules/_object-pie.js");
var createDesc = __webpack_require__(/*! ./_property-desc */ "./node_modules/core-js/library/modules/_property-desc.js");
var toIObject = __webpack_require__(/*! ./_to-iobject */ "./node_modules/core-js/library/modules/_to-iobject.js");
var toPrimitive = __webpack_require__(/*! ./_to-primitive */ "./node_modules/core-js/library/modules/_to-primitive.js");
var has = __webpack_require__(/*! ./_has */ "./node_modules/core-js/library/modules/_has.js");
var IE8_DOM_DEFINE = __webpack_require__(/*! ./_ie8-dom-define */ "./node_modules/core-js/library/modules/_ie8-dom-define.js");
var gOPD = Object.getOwnPropertyDescriptor;

exports.f = __webpack_require__(/*! ./_descriptors */ "./node_modules/core-js/library/modules/_descriptors.js") ? gOPD : function getOwnPropertyDescriptor(O, P) {
  O = toIObject(O);
  P = toPrimitive(P, true);
  if (IE8_DOM_DEFINE) try {
    return gOPD(O, P);
  } catch (e) { /* empty */ }
  if (has(O, P)) return createDesc(!pIE.f.call(O, P), O[P]);
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_object-gopn-ext.js":
/*!******************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_object-gopn-ext.js ***!
  \******************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

// fallback for IE11 buggy Object.getOwnPropertyNames with iframe and window
var toIObject = __webpack_require__(/*! ./_to-iobject */ "./node_modules/core-js/library/modules/_to-iobject.js");
var gOPN = __webpack_require__(/*! ./_object-gopn */ "./node_modules/core-js/library/modules/_object-gopn.js").f;
var toString = {}.toString;

var windowNames = typeof window == 'object' && window && Object.getOwnPropertyNames
  ? Object.getOwnPropertyNames(window) : [];

var getWindowNames = function (it) {
  try {
    return gOPN(it);
  } catch (e) {
    return windowNames.slice();
  }
};

module.exports.f = function getOwnPropertyNames(it) {
  return windowNames && toString.call(it) == '[object Window]' ? getWindowNames(it) : gOPN(toIObject(it));
};

/***/ }),
/***/ "./node_modules/core-js/library/modules/_object-gopn.js":
/*!**************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_object-gopn.js ***!
  \**************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

// 19.1.2.7 / 15.2.3.4 Object.getOwnPropertyNames(O)
var $keys = __webpack_require__(/*! ./_object-keys-internal */ "./node_modules/core-js/library/modules/_object-keys-internal.js");
var hiddenKeys = __webpack_require__(/*! ./_enum-bug-keys */ "./node_modules/core-js/library/modules/_enum-bug-keys.js").concat('length', 'prototype');

exports.f = Object.getOwnPropertyNames || function getOwnPropertyNames(O) {
  return $keys(O, hiddenKeys);
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_object-gops.js":
/*!**************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_object-gops.js ***!
  \**************************************************************/
/*! no static exports found */
/***/ (function(module, exports) {

exports.f = Object.getOwnPropertySymbols;

/***/ }),

/***/ "./node_modules/core-js/library/modules/_object-gpo.js":
/*!*************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_object-gpo.js ***!
  \*************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

// 19.1.2.9 / 15.2.3.2 Object.getPrototypeOf(O)
var has = __webpack_require__(/*! ./_has */ "./node_modules/core-js/library/modules/_has.js");
var toObject = __webpack_require__(/*! ./_to-object */ "./node_modules/core-js/library/modules/_to-object.js");
var IE_PROTO = __webpack_require__(/*! ./_shared-key */ "./node_modules/core-js/library/modules/_shared-key.js")('IE_PROTO');
var ObjectProto = Object.prototype;

module.exports = Object.getPrototypeOf || function (O) {
  O = toObject(O);
  if (has(O, IE_PROTO)) return O[IE_PROTO];
  if (typeof O.constructor == 'function' && O instanceof O.constructor) {
    return O.constructor.prototype;
  } return O instanceof Object ? ObjectProto : null;
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_object-keys-internal.js":
/*!***********************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_object-keys-internal.js ***!
  \***********************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

var has = __webpack_require__(/*! ./_has */ "./node_modules/core-js/library/modules/_has.js");
var toIObject = __webpack_require__(/*! ./_to-iobject */ "./node_modules/core-js/library/modules/_to-iobject.js");
var arrayIndexOf = __webpack_require__(/*! ./_array-includes */ "./node_modules/core-js/library/modules/_array-includes.js")(false);
var IE_PROTO = __webpack_require__(/*! ./_shared-key */ "./node_modules/core-js/library/modules/_shared-key.js")('IE_PROTO');

module.exports = function (object, names) {
  var O = toIObject(object);
  var i = 0;
  var result = [];
  var key;
  for (key in O) if (key != IE_PROTO) has(O, key) && result.push(key);
  // Don't enum bug & hidden keys
  while (names.length > i) if (has(O, key = names[i++])) {
    ~arrayIndexOf(result, key) || result.push(key);
  }
  return result;
};

/***/ }),
/***/ "./node_modules/core-js/library/modules/_object-keys.js":
/*!**************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_object-keys.js ***!
  \**************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

// 19.1.2.14 / 15.2.3.14 Object.keys(O)
var $keys = __webpack_require__(/*! ./_object-keys-internal */ "./node_modules/core-js/library/modules/_object-keys-internal.js");
var enumBugKeys = __webpack_require__(/*! ./_enum-bug-keys */ "./node_modules/core-js/library/modules/_enum-bug-keys.js");

module.exports = Object.keys || function keys(O) {
  return $keys(O, enumBugKeys);
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_object-pie.js":
/*!*************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_object-pie.js ***!
  \*************************************************************/
/*! no static exports found */
/***/ (function(module, exports) {

exports.f = {}.propertyIsEnumerable;

/***/ }),

/***/ "./node_modules/core-js/library/modules/_object-sap.js":
/*!*************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_object-sap.js ***!
  \*************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

// most Object methods by ES6 should accept primitives
var $export = __webpack_require__(/*! ./_export */ "./node_modules/core-js/library/modules/_export.js");
var core = __webpack_require__(/*! ./_core */ "./node_modules/core-js/library/modules/_core.js");
var fails = __webpack_require__(/*! ./_fails */ "./node_modules/core-js/library/modules/_fails.js");
module.exports = function (KEY, exec) {
  var fn = (core.Object || {})[KEY] || Object[KEY];
  var exp = {};
  exp[KEY] = exec(fn);
  $export($export.S + $export.F * fails(function () { fn(1); }), 'Object', exp);
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_perform.js":
/*!**********************************************************!*\
  !*** ./node_modules/core-js/library/modules/_perform.js ***!
  \**********************************************************/
/*! no static exports found */
/***/ (function(module, exports) {

module.exports = function (exec) {
  try {
    return { e: false, v: exec() };
  } catch (e) {
    return { e: true, v: e };
  }
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_promise-resolve.js":
/*!******************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_promise-resolve.js ***!
  \******************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

var anObject = __webpack_require__(/*! ./_an-object */ "./node_modules/core-js/library/modules/_an-object.js");
var isObject = __webpack_require__(/*! ./_is-object */ "./node_modules/core-js/library/modules/_is-object.js");
var newPromiseCapability = __webpack_require__(/*! ./_new-promise-capability */ "./node_modules/core-js/library/modules/_new-promise-capability.js");

module.exports = function (C, x) {
  anObject(C);
  if (isObject(x) && x.constructor === C) return x;
  var promiseCapability = newPromiseCapability.f(C);
  var resolve = promiseCapability.resolve;
  resolve(x);
  return promiseCapability.promise;
};

/***/ }),
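// NOTE: illustrative sketch, not part of the bundled output. The
// _property-desc helper that follows builds a descriptor from a bitmap:
// bit 1 = non-enumerable, bit 2 = non-configurable, bit 4 = non-writable.
// So createDesc(1, v), as used by _hide earlier, means "configurable and
// writable, but not enumerable":
//
//   // createDesc(1, 42)
//   // -> { enumerable: false, configurable: true, writable: true, value: 42 }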
/***/ "./node_modules/core-js/library/modules/_property-desc.js":
/*!****************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_property-desc.js ***!
  \****************************************************************/
/*! no static exports found */
/***/ (function(module, exports) {

module.exports = function (bitmap, value) {
  return {
    enumerable: !(bitmap & 1),
    configurable: !(bitmap & 2),
    writable: !(bitmap & 4),
    value: value
  };
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_redefine-all.js":
/*!***************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_redefine-all.js ***!
  \***************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

var hide = __webpack_require__(/*! ./_hide */ "./node_modules/core-js/library/modules/_hide.js");
module.exports = function (target, src, safe) {
  for (var key in src) {
    if (safe && target[key]) target[key] = src[key];
    else hide(target, key, src[key]);
  } return target;
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_redefine.js":
/*!***********************************************************!*\
  !*** ./node_modules/core-js/library/modules/_redefine.js ***!
  \***********************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

module.exports = __webpack_require__(/*! ./_hide */ "./node_modules/core-js/library/modules/_hide.js");

/***/ }),

/***/ "./node_modules/core-js/library/modules/_set-collection-from.js":
/*!**********************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_set-collection-from.js ***!
  \**********************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

"use strict";

// https://tc39.github.io/proposal-setmap-offrom/
var $export = __webpack_require__(/*! ./_export */ "./node_modules/core-js/library/modules/_export.js");
var aFunction = __webpack_require__(/*! ./_a-function */ "./node_modules/core-js/library/modules/_a-function.js");
var ctx = __webpack_require__(/*! ./_ctx */ "./node_modules/core-js/library/modules/_ctx.js");
var forOf = __webpack_require__(/*! ./_for-of */ "./node_modules/core-js/library/modules/_for-of.js");

module.exports = function (COLLECTION) {
  $export($export.S, COLLECTION, { from: function from(source /* , mapFn, thisArg */) {
    var mapFn = arguments[1];
    var mapping, A, n, cb;
    aFunction(this);
    mapping = mapFn !== undefined;
    if (mapping) aFunction(mapFn);
    if (source == undefined) return new this();
    A = [];
    if (mapping) {
      n = 0;
      cb = ctx(mapFn, arguments[2], 2);
      forOf(source, false, function (nextItem) {
        A.push(cb(nextItem, n++));
      });
    } else {
      forOf(source, false, A.push, A);
    }
    return new this(A);
  } });
};

/***/ }),
/***/ "./node_modules/core-js/library/modules/_set-collection-of.js":
/*!********************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_set-collection-of.js ***!
  \********************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

"use strict";

// https://tc39.github.io/proposal-setmap-offrom/
var $export = __webpack_require__(/*! ./_export */ "./node_modules/core-js/library/modules/_export.js");

module.exports = function (COLLECTION) {
  $export($export.S, COLLECTION, { of: function of() {
    var length = arguments.length;
    var A = new Array(length);
    while (length--) A[length] = arguments[length];
    return new this(A);
  } });
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_set-proto.js":
/*!************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_set-proto.js ***!
  \************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

// Works with __proto__ only. Old v8 can't work with null proto objects.
/* eslint-disable no-proto */
var isObject = __webpack_require__(/*! ./_is-object */ "./node_modules/core-js/library/modules/_is-object.js");
var anObject = __webpack_require__(/*! ./_an-object */ "./node_modules/core-js/library/modules/_an-object.js");
var check = function (O, proto) {
  anObject(O);
  if (!isObject(proto) && proto !== null) throw TypeError(proto + ": can't set as prototype!");
};
module.exports = {
  set: Object.setPrototypeOf || ('__proto__' in {} ? // eslint-disable-line
    function (test, buggy, set) {
      try {
        set = __webpack_require__(/*! ./_ctx */ "./node_modules/core-js/library/modules/_ctx.js")(Function.call, __webpack_require__(/*! ./_object-gopd */ "./node_modules/core-js/library/modules/_object-gopd.js").f(Object.prototype, '__proto__').set, 2);
        set(test, []);
        buggy = !(test instanceof Array);
      } catch (e) { buggy = true; }
      return function setPrototypeOf(O, proto) {
        check(O, proto);
        if (buggy) O.__proto__ = proto;
        else set(O, proto);
        return O;
      };
    }({}, false) : undefined),
  check: check
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_set-species.js":
/*!**************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_set-species.js ***!
  \**************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

"use strict";

var global = __webpack_require__(/*! ./_global */ "./node_modules/core-js/library/modules/_global.js");
var core = __webpack_require__(/*! ./_core */ "./node_modules/core-js/library/modules/_core.js");
var dP = __webpack_require__(/*! ./_object-dp */ "./node_modules/core-js/library/modules/_object-dp.js");
var DESCRIPTORS = __webpack_require__(/*! ./_descriptors */ "./node_modules/core-js/library/modules/_descriptors.js");
var SPECIES = __webpack_require__(/*! ./_wks */ "./node_modules/core-js/library/modules/_wks.js")('species');

module.exports = function (KEY) {
  var C = typeof core[KEY] == 'function' ? core[KEY] : global[KEY];
  if (DESCRIPTORS && C && !C[SPECIES]) dP.f(C, SPECIES, {
    configurable: true,
    get: function () { return this; }
  });
};

/***/ }),
/***/ "./node_modules/core-js/library/modules/_set-to-string-tag.js":
/*!********************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_set-to-string-tag.js ***!
  \********************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

var def = __webpack_require__(/*! ./_object-dp */ "./node_modules/core-js/library/modules/_object-dp.js").f;
var has = __webpack_require__(/*! ./_has */ "./node_modules/core-js/library/modules/_has.js");
var TAG = __webpack_require__(/*! ./_wks */ "./node_modules/core-js/library/modules/_wks.js")('toStringTag');

module.exports = function (it, tag, stat) {
  if (it && !has(it = stat ? it : it.prototype, TAG)) def(it, TAG, { configurable: true, value: tag });
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_shared-key.js":
/*!*************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_shared-key.js ***!
  \*************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

var shared = __webpack_require__(/*! ./_shared */ "./node_modules/core-js/library/modules/_shared.js")('keys');
var uid = __webpack_require__(/*! ./_uid */ "./node_modules/core-js/library/modules/_uid.js");
module.exports = function (key) {
  return shared[key] || (shared[key] = uid(key));
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_shared.js":
/*!*********************************************************!*\
  !*** ./node_modules/core-js/library/modules/_shared.js ***!
  \*********************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

var core = __webpack_require__(/*! ./_core */ "./node_modules/core-js/library/modules/_core.js");
var global = __webpack_require__(/*! ./_global */ "./node_modules/core-js/library/modules/_global.js");
var SHARED = '__core-js_shared__';
var store = global[SHARED] || (global[SHARED] = {});

(module.exports = function (key, value) {
  return store[key] || (store[key] = value !== undefined ? value : {});
})('versions', []).push({
  version: core.version,
  mode: __webpack_require__(/*! ./_library */ "./node_modules/core-js/library/modules/_library.js") ? 'pure' : 'global',
  copyright: '© 2019 Denis Pushkarev (zloirock.ru)'
});

/***/ }),

/***/ "./node_modules/core-js/library/modules/_species-constructor.js":
/*!**********************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_species-constructor.js ***!
  \**********************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

// 7.3.20 SpeciesConstructor(O, defaultConstructor)
var anObject = __webpack_require__(/*! ./_an-object */ "./node_modules/core-js/library/modules/_an-object.js");
var aFunction = __webpack_require__(/*! ./_a-function */ "./node_modules/core-js/library/modules/_a-function.js");
var SPECIES = __webpack_require__(/*! ./_wks */ "./node_modules/core-js/library/modules/_wks.js")('species');
module.exports = function (O, D) {
  var C = anObject(O).constructor;
  var S;
  return C === undefined || (S = anObject(C)[SPECIES]) == undefined ? D : aFunction(S);
};

/***/ }),
/***/ "./node_modules/core-js/library/modules/_string-at.js":
/*!************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_string-at.js ***!
  \************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

var toInteger = __webpack_require__(/*! ./_to-integer */ "./node_modules/core-js/library/modules/_to-integer.js");
var defined = __webpack_require__(/*! ./_defined */ "./node_modules/core-js/library/modules/_defined.js");
// true  -> String#at
// false -> String#codePointAt
module.exports = function (TO_STRING) {
  return function (that, pos) {
    var s = String(defined(that));
    var i = toInteger(pos);
    var l = s.length;
    var a, b;
    if (i < 0 || i >= l) return TO_STRING ? '' : undefined;
    a = s.charCodeAt(i);
    return a < 0xd800 || a > 0xdbff || i + 1 === l || (b = s.charCodeAt(i + 1)) < 0xdc00 || b > 0xdfff
      ? TO_STRING ? s.charAt(i) : a
      : TO_STRING ? s.slice(i, i + 2) : (a - 0xd800 << 10) + (b - 0xdc00) + 0x10000;
  };
};

/***/ }),
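// NOTE: illustrative sketch, not part of the bundled output. With
// TO_STRING = true, the _string-at factory above yields a String#at-style
// accessor that keeps surrogate pairs intact (require path assumed for
// core-js@2):
//
//   // var at = require('core-js/library/modules/_string-at')(true);
//   // at('a\uD83D\uDE00b', 1);      // '\uD83D\uDE00' - the full emoji, two code units
//   // 'a\uD83D\uDE00b'.charAt(1);   // only the lone high surrogate
//
// With TO_STRING = false it returns the code point number instead, which is
// how the String#codePointAt polyfill is built from the same factory.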
/***/ "./node_modules/core-js/library/modules/_task.js":
/*!*******************************************************!*\
  !*** ./node_modules/core-js/library/modules/_task.js ***!
  \*******************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

var ctx = __webpack_require__(/*! ./_ctx */ "./node_modules/core-js/library/modules/_ctx.js");
var invoke = __webpack_require__(/*! ./_invoke */ "./node_modules/core-js/library/modules/_invoke.js");
var html = __webpack_require__(/*! ./_html */ "./node_modules/core-js/library/modules/_html.js");
var cel = __webpack_require__(/*! ./_dom-create */ "./node_modules/core-js/library/modules/_dom-create.js");
var global = __webpack_require__(/*! ./_global */ "./node_modules/core-js/library/modules/_global.js");
var process = global.process;
var setTask = global.setImmediate;
var clearTask = global.clearImmediate;
var MessageChannel = global.MessageChannel;
var Dispatch = global.Dispatch;
var counter = 0;
var queue = {};
var ONREADYSTATECHANGE = 'onreadystatechange';
var defer, channel, port;
var run = function () {
  var id = +this;
  // eslint-disable-next-line no-prototype-builtins
  if (queue.hasOwnProperty(id)) {
    var fn = queue[id];
    delete queue[id];
    fn();
  }
};
var listener = function (event) {
  run.call(event.data);
};
// Node.js 0.9+ & IE10+ have setImmediate, otherwise:
if (!setTask || !clearTask) {
  setTask = function setImmediate(fn) {
    var args = [];
    var i = 1;
    while (arguments.length > i) args.push(arguments[i++]);
    queue[++counter] = function () {
      // eslint-disable-next-line no-new-func
      invoke(typeof fn == 'function' ? fn : Function(fn), args);
    };
    defer(counter);
    return counter;
  };
  clearTask = function clearImmediate(id) {
    delete queue[id];
  };
  // Node.js 0.8-
  if (__webpack_require__(/*! ./_cof */ "./node_modules/core-js/library/modules/_cof.js")(process) == 'process') {
    defer = function (id) {
      process.nextTick(ctx(run, id, 1));
    };
  // Sphere (JS game engine) Dispatch API
  } else if (Dispatch && Dispatch.now) {
    defer = function (id) {
      Dispatch.now(ctx(run, id, 1));
    };
  // Browsers with MessageChannel, includes WebWorkers
  } else if (MessageChannel) {
    channel = new MessageChannel();
    port = channel.port2;
    channel.port1.onmessage = listener;
    defer = ctx(port.postMessage, port, 1);
  // Browsers with postMessage, skip WebWorkers
  // IE8 has postMessage, but it's sync & typeof its postMessage is 'object'
  } else if (global.addEventListener && typeof postMessage == 'function' && !global.importScripts) {
    defer = function (id) {
      global.postMessage(id + '', '*');
    };
    global.addEventListener('message', listener, false);
  // IE8-
  } else if (ONREADYSTATECHANGE in cel('script')) {
    defer = function (id) {
      html.appendChild(cel('script'))[ONREADYSTATECHANGE] = function () {
        html.removeChild(this);
        run.call(id);
      };
    };
  // Rest old browsers
  } else {
    defer = function (id) {
      setTimeout(ctx(run, id, 1), 0);
    };
  }
}
module.exports = {
  set: setTask,
  clear: clearTask
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_to-absolute-index.js":
/*!********************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_to-absolute-index.js ***!
  \********************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

var toInteger = __webpack_require__(/*! ./_to-integer */ "./node_modules/core-js/library/modules/_to-integer.js");
var max = Math.max;
var min = Math.min;
module.exports = function (index, length) {
  index = toInteger(index);
  return index < 0 ? max(index + length, 0) : min(index, length);
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_to-integer.js":
/*!*************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_to-integer.js ***!
  \*************************************************************/
/*! no static exports found */
/***/ (function(module, exports) {

// 7.1.4 ToInteger
var ceil = Math.ceil;
var floor = Math.floor;
module.exports = function (it) {
  return isNaN(it = +it) ? 0 : (it > 0 ? floor : ceil)(it);
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_to-iobject.js":
/*!*************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_to-iobject.js ***!
  \*************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

// to indexed object, toObject with fallback for non-array-like ES3 strings
var IObject = __webpack_require__(/*! ./_iobject */ "./node_modules/core-js/library/modules/_iobject.js");
var defined = __webpack_require__(/*! ./_defined */ "./node_modules/core-js/library/modules/_defined.js");
module.exports = function (it) {
  return IObject(defined(it));
};

/***/ }),
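// NOTE: illustrative sketch, not part of the bundled output. _to-integer
// above implements spec ToInteger: truncation toward zero, NaN -> 0:
//
//   // toInteger(4.7)   // -> 4   (floor for positives)
//   // toInteger(-4.7)  // -> -4  (ceil for negatives)
//   // toInteger('3x')  // -> 0   (+'3x' is NaN)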
/***/ "./node_modules/core-js/library/modules/_to-length.js":
/*!************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_to-length.js ***!
  \************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

// 7.1.15 ToLength
var toInteger = __webpack_require__(/*! ./_to-integer */ "./node_modules/core-js/library/modules/_to-integer.js");
var min = Math.min;
module.exports = function (it) {
  return it > 0 ? min(toInteger(it), 0x1fffffffffffff) : 0; // pow(2, 53) - 1 == 9007199254740991
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_to-object.js":
/*!************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_to-object.js ***!
  \************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

// 7.1.13 ToObject(argument)
var defined = __webpack_require__(/*! ./_defined */ "./node_modules/core-js/library/modules/_defined.js");
module.exports = function (it) {
  return Object(defined(it));
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_to-primitive.js":
/*!***************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_to-primitive.js ***!
  \***************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

// 7.1.1 ToPrimitive(input [, PreferredType])
var isObject = __webpack_require__(/*! ./_is-object */ "./node_modules/core-js/library/modules/_is-object.js");
// instead of the ES6 spec version, we didn't implement the @@toPrimitive case
// and the second argument - flag - means the preferred type is a string
module.exports = function (it, S) {
  if (!isObject(it)) return it;
  var fn, val;
  if (S && typeof (fn = it.toString) == 'function' && !isObject(val = fn.call(it))) return val;
  if (typeof (fn = it.valueOf) == 'function' && !isObject(val = fn.call(it))) return val;
  if (!S && typeof (fn = it.toString) == 'function' && !isObject(val = fn.call(it))) return val;
  throw TypeError("Can't convert object to primitive value");
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_uid.js":
/*!******************************************************!*\
  !*** ./node_modules/core-js/library/modules/_uid.js ***!
  \******************************************************/
/*! no static exports found */
/***/ (function(module, exports) {

var id = 0;
var px = Math.random();
module.exports = function (key) {
  return 'Symbol('.concat(key === undefined ? '' : key, ')_', (++id + px).toString(36));
};

/***/ }),

/***/ "./node_modules/core-js/library/modules/_user-agent.js":
/*!*************************************************************!*\
  !*** ./node_modules/core-js/library/modules/_user-agent.js ***!
  \*************************************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {

var global = __webpack_require__(/*! ./_global */ "./node_modules/core-js/library/modules/_global.js");
var navigator = global.navigator;

module.exports = navigator && navigator.userAgent || '';

/***/ }),
./_is-object */ "./node_modules/core-js/library/modules/_is-object.js"); module.exports = function (it, TYPE) { if (!isObject(it) || it._t !== TYPE) throw TypeError('Incompatible receiver, ' + TYPE + ' required!'); return it; }; /***/ }), /***/ "./node_modules/core-js/library/modules/_wks-define.js": /*!*************************************************************!*\ !*** ./node_modules/core-js/library/modules/_wks-define.js ***! \*************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var global = __webpack_require__(/*! ./_global */ "./node_modules/core-js/library/modules/_global.js"); var core = __webpack_require__(/*! ./_core */ "./node_modules/core-js/library/modules/_core.js"); var LIBRARY = __webpack_require__(/*! ./_library */ "./node_modules/core-js/library/modules/_library.js"); var wksExt = __webpack_require__(/*! ./_wks-ext */ "./node_modules/core-js/library/modules/_wks-ext.js"); var defineProperty = __webpack_require__(/*! ./_object-dp */ "./node_modules/core-js/library/modules/_object-dp.js").f; module.exports = function (name) { var $Symbol = core.Symbol || (core.Symbol = LIBRARY ? {} : global.Symbol || {}); if (name.charAt(0) != '_' && !(name in $Symbol)) defineProperty($Symbol, name, { value: wksExt.f(name) }); }; /***/ }), /***/ "./node_modules/core-js/library/modules/_wks-ext.js": /*!**********************************************************!*\ !*** ./node_modules/core-js/library/modules/_wks-ext.js ***! \**********************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { exports.f = __webpack_require__(/*! ./_wks */ "./node_modules/core-js/library/modules/_wks.js"); /***/ }), /***/ "./node_modules/core-js/library/modules/_wks.js": /*!******************************************************!*\ !*** ./node_modules/core-js/library/modules/_wks.js ***! \******************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var store = __webpack_require__(/*! ./_shared */ "./node_modules/core-js/library/modules/_shared.js")('wks'); var uid = __webpack_require__(/*! ./_uid */ "./node_modules/core-js/library/modules/_uid.js"); var Symbol = __webpack_require__(/*! ./_global */ "./node_modules/core-js/library/modules/_global.js").Symbol; var USE_SYMBOL = typeof Symbol == 'function'; var $exports = module.exports = function (name) { return store[name] || (store[name] = USE_SYMBOL && Symbol[name] || (USE_SYMBOL ? Symbol : uid)('Symbol.' + name)); }; $exports.store = store; /***/ }), /***/ "./node_modules/core-js/library/modules/core.get-iterator-method.js": /*!**************************************************************************!*\ !*** ./node_modules/core-js/library/modules/core.get-iterator-method.js ***! \**************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var classof = __webpack_require__(/*! ./_classof */ "./node_modules/core-js/library/modules/_classof.js"); var ITERATOR = __webpack_require__(/*! ./_wks */ "./node_modules/core-js/library/modules/_wks.js")('iterator'); var Iterators = __webpack_require__(/*! ./_iterators */ "./node_modules/core-js/library/modules/_iterators.js"); module.exports = __webpack_require__(/*! 
./_core */ "./node_modules/core-js/library/modules/_core.js").getIteratorMethod = function (it) { if (it != undefined) return it[ITERATOR] || it['@@iterator'] || Iterators[classof(it)]; }; /***/ }), /***/ "./node_modules/core-js/library/modules/core.get-iterator.js": /*!*******************************************************************!*\ !*** ./node_modules/core-js/library/modules/core.get-iterator.js ***! \*******************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var anObject = __webpack_require__(/*! ./_an-object */ "./node_modules/core-js/library/modules/_an-object.js"); var get = __webpack_require__(/*! ./core.get-iterator-method */ "./node_modules/core-js/library/modules/core.get-iterator-method.js"); module.exports = __webpack_require__(/*! ./_core */ "./node_modules/core-js/library/modules/_core.js").getIterator = function (it) { var iterFn = get(it); if (typeof iterFn != 'function') throw TypeError(it + ' is not iterable!'); return anObject(iterFn.call(it)); }; /***/ }), /***/ "./node_modules/core-js/library/modules/es6.array.is-array.js": /*!********************************************************************!*\ !*** ./node_modules/core-js/library/modules/es6.array.is-array.js ***! \********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { // 22.1.2.2 / 15.4.3.2 Array.isArray(arg) var $export = __webpack_require__(/*! ./_export */ "./node_modules/core-js/library/modules/_export.js"); $export($export.S, 'Array', { isArray: __webpack_require__(/*! ./_is-array */ "./node_modules/core-js/library/modules/_is-array.js") }); /***/ }), /***/ "./node_modules/core-js/library/modules/es6.array.iterator.js": /*!********************************************************************!*\ !*** ./node_modules/core-js/library/modules/es6.array.iterator.js ***! \********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var addToUnscopables = __webpack_require__(/*! ./_add-to-unscopables */ "./node_modules/core-js/library/modules/_add-to-unscopables.js"); var step = __webpack_require__(/*! ./_iter-step */ "./node_modules/core-js/library/modules/_iter-step.js"); var Iterators = __webpack_require__(/*! ./_iterators */ "./node_modules/core-js/library/modules/_iterators.js"); var toIObject = __webpack_require__(/*! ./_to-iobject */ "./node_modules/core-js/library/modules/_to-iobject.js"); // 22.1.3.4 Array.prototype.entries() // 22.1.3.13 Array.prototype.keys() // 22.1.3.29 Array.prototype.values() // 22.1.3.30 Array.prototype[@@iterator]() module.exports = __webpack_require__(/*! 
./_iter-define */ "./node_modules/core-js/library/modules/_iter-define.js")(Array, 'Array', function (iterated, kind) { this._t = toIObject(iterated); // target this._i = 0; // next index this._k = kind; // kind // 22.1.5.2.1 %ArrayIteratorPrototype%.next() }, function () { var O = this._t; var kind = this._k; var index = this._i++; if (!O || index >= O.length) { this._t = undefined; return step(1); } if (kind == 'keys') return step(0, index); if (kind == 'values') return step(0, O[index]); return step(0, [index, O[index]]); }, 'values'); // argumentsList[@@iterator] is %ArrayProto_values% (9.4.4.6, 9.4.4.7) Iterators.Arguments = Iterators.Array; addToUnscopables('keys'); addToUnscopables('values'); addToUnscopables('entries'); /***/ }), /***/ "./node_modules/core-js/library/modules/es6.object.assign.js": /*!*******************************************************************!*\ !*** ./node_modules/core-js/library/modules/es6.object.assign.js ***! \*******************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { // 19.1.3.1 Object.assign(target, source) var $export = __webpack_require__(/*! ./_export */ "./node_modules/core-js/library/modules/_export.js"); $export($export.S + $export.F, 'Object', { assign: __webpack_require__(/*! ./_object-assign */ "./node_modules/core-js/library/modules/_object-assign.js") }); /***/ }), /***/ "./node_modules/core-js/library/modules/es6.object.create.js": /*!*******************************************************************!*\ !*** ./node_modules/core-js/library/modules/es6.object.create.js ***! \*******************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var $export = __webpack_require__(/*! ./_export */ "./node_modules/core-js/library/modules/_export.js"); // 19.1.2.2 / 15.2.3.5 Object.create(O [, Properties]) $export($export.S, 'Object', { create: __webpack_require__(/*! ./_object-create */ "./node_modules/core-js/library/modules/_object-create.js") }); /***/ }), /***/ "./node_modules/core-js/library/modules/es6.object.define-property.js": /*!****************************************************************************!*\ !*** ./node_modules/core-js/library/modules/es6.object.define-property.js ***! \****************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { var $export = __webpack_require__(/*! ./_export */ "./node_modules/core-js/library/modules/_export.js"); // 19.1.2.4 / 15.2.3.6 Object.defineProperty(O, P, Attributes) $export($export.S + $export.F * !__webpack_require__(/*! ./_descriptors */ "./node_modules/core-js/library/modules/_descriptors.js"), 'Object', { defineProperty: __webpack_require__(/*! ./_object-dp */ "./node_modules/core-js/library/modules/_object-dp.js").f }); /***/ }), /***/ "./node_modules/core-js/library/modules/es6.object.get-prototype-of.js": /*!*****************************************************************************!*\ !*** ./node_modules/core-js/library/modules/es6.object.get-prototype-of.js ***! \*****************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { // 19.1.2.9 Object.getPrototypeOf(O) var toObject = __webpack_require__(/*! 
./_to-object */ "./node_modules/core-js/library/modules/_to-object.js"); var $getPrototypeOf = __webpack_require__(/*! ./_object-gpo */ "./node_modules/core-js/library/modules/_object-gpo.js"); __webpack_require__(/*! ./_object-sap */ "./node_modules/core-js/library/modules/_object-sap.js")('getPrototypeOf', function () { return function getPrototypeOf(it) { return $getPrototypeOf(toObject(it)); }; }); /***/ }), /***/ "./node_modules/core-js/library/modules/es6.object.keys.js": /*!*****************************************************************!*\ !*** ./node_modules/core-js/library/modules/es6.object.keys.js ***! \*****************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { // 19.1.2.14 Object.keys(O) var toObject = __webpack_require__(/*! ./_to-object */ "./node_modules/core-js/library/modules/_to-object.js"); var $keys = __webpack_require__(/*! ./_object-keys */ "./node_modules/core-js/library/modules/_object-keys.js"); __webpack_require__(/*! ./_object-sap */ "./node_modules/core-js/library/modules/_object-sap.js")('keys', function () { return function keys(it) { return $keys(toObject(it)); }; }); /***/ }), /***/ "./node_modules/core-js/library/modules/es6.object.set-prototype-of.js": /*!*****************************************************************************!*\ !*** ./node_modules/core-js/library/modules/es6.object.set-prototype-of.js ***! \*****************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { // 19.1.3.19 Object.setPrototypeOf(O, proto) var $export = __webpack_require__(/*! ./_export */ "./node_modules/core-js/library/modules/_export.js"); $export($export.S, 'Object', { setPrototypeOf: __webpack_require__(/*! ./_set-proto */ "./node_modules/core-js/library/modules/_set-proto.js").set }); /***/ }), /***/ "./node_modules/core-js/library/modules/es6.object.to-string.js": /*!**********************************************************************!*\ !*** ./node_modules/core-js/library/modules/es6.object.to-string.js ***! \**********************************************************************/ /*! no static exports found */ /***/ (function(module, exports) { /***/ }), /***/ "./node_modules/core-js/library/modules/es6.promise.js": /*!*************************************************************!*\ !*** ./node_modules/core-js/library/modules/es6.promise.js ***! \*************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var LIBRARY = __webpack_require__(/*! ./_library */ "./node_modules/core-js/library/modules/_library.js"); var global = __webpack_require__(/*! ./_global */ "./node_modules/core-js/library/modules/_global.js"); var ctx = __webpack_require__(/*! ./_ctx */ "./node_modules/core-js/library/modules/_ctx.js"); var classof = __webpack_require__(/*! ./_classof */ "./node_modules/core-js/library/modules/_classof.js"); var $export = __webpack_require__(/*! ./_export */ "./node_modules/core-js/library/modules/_export.js"); var isObject = __webpack_require__(/*! ./_is-object */ "./node_modules/core-js/library/modules/_is-object.js"); var aFunction = __webpack_require__(/*! ./_a-function */ "./node_modules/core-js/library/modules/_a-function.js"); var anInstance = __webpack_require__(/*! 
./_an-instance */ "./node_modules/core-js/library/modules/_an-instance.js"); var forOf = __webpack_require__(/*! ./_for-of */ "./node_modules/core-js/library/modules/_for-of.js"); var speciesConstructor = __webpack_require__(/*! ./_species-constructor */ "./node_modules/core-js/library/modules/_species-constructor.js"); var task = __webpack_require__(/*! ./_task */ "./node_modules/core-js/library/modules/_task.js").set; var microtask = __webpack_require__(/*! ./_microtask */ "./node_modules/core-js/library/modules/_microtask.js")(); var newPromiseCapabilityModule = __webpack_require__(/*! ./_new-promise-capability */ "./node_modules/core-js/library/modules/_new-promise-capability.js"); var perform = __webpack_require__(/*! ./_perform */ "./node_modules/core-js/library/modules/_perform.js"); var userAgent = __webpack_require__(/*! ./_user-agent */ "./node_modules/core-js/library/modules/_user-agent.js"); var promiseResolve = __webpack_require__(/*! ./_promise-resolve */ "./node_modules/core-js/library/modules/_promise-resolve.js"); var PROMISE = 'Promise'; var TypeError = global.TypeError; var process = global.process; var versions = process && process.versions; var v8 = versions && versions.v8 || ''; var $Promise = global[PROMISE]; var isNode = classof(process) == 'process'; var empty = function () { /* empty */ }; var Internal, newGenericPromiseCapability, OwnPromiseCapability, Wrapper; var newPromiseCapability = newGenericPromiseCapability = newPromiseCapabilityModule.f; var USE_NATIVE = !!function () { try { // correct subclassing with @@species support var promise = $Promise.resolve(1); var FakePromise = (promise.constructor = {})[__webpack_require__(/*! ./_wks */ "./node_modules/core-js/library/modules/_wks.js")('species')] = function (exec) { exec(empty, empty); }; // unhandled rejections tracking support, NodeJS Promise without it fails @@species test return (isNode || typeof PromiseRejectionEvent == 'function') && promise.then(empty) instanceof FakePromise // v8 6.6 (Node 10 and Chrome 66) have a bug with resolving custom thenables // https://bugs.chromium.org/p/chromium/issues/detail?id=830565 // we can't detect it synchronously, so just check versions && v8.indexOf('6.6') !== 0 && userAgent.indexOf('Chrome/66') === -1; } catch (e) { /* empty */ } }(); // helpers var isThenable = function (it) { var then; return isObject(it) && typeof (then = it.then) == 'function' ? then : false; }; var notify = function (promise, isReject) { if (promise._n) return; promise._n = true; var chain = promise._c; microtask(function () { var value = promise._v; var ok = promise._s == 1; var i = 0; var run = function (reaction) { var handler = ok ? 
reaction.ok : reaction.fail; var resolve = reaction.resolve; var reject = reaction.reject; var domain = reaction.domain; var result, then, exited; try { if (handler) { if (!ok) { if (promise._h == 2) onHandleUnhandled(promise); promise._h = 1; } if (handler === true) result = value; else { if (domain) domain.enter(); result = handler(value); // may throw if (domain) { domain.exit(); exited = true; } } if (result === reaction.promise) { reject(TypeError('Promise-chain cycle')); } else if (then = isThenable(result)) { then.call(result, resolve, reject); } else resolve(result); } else reject(value); } catch (e) { if (domain && !exited) domain.exit(); reject(e); } }; while (chain.length > i) run(chain[i++]); // variable length - can't use forEach promise._c = []; promise._n = false; if (isReject && !promise._h) onUnhandled(promise); }); }; var onUnhandled = function (promise) { task.call(global, function () { var value = promise._v; var unhandled = isUnhandled(promise); var result, handler, console; if (unhandled) { result = perform(function () { if (isNode) { process.emit('unhandledRejection', value, promise); } else if (handler = global.onunhandledrejection) { handler({ promise: promise, reason: value }); } else if ((console = global.console) && console.error) { console.error('Unhandled promise rejection', value); } }); // Browsers should not trigger `rejectionHandled` event if it was handled here, NodeJS - should promise._h = isNode || isUnhandled(promise) ? 2 : 1; } promise._a = undefined; if (unhandled && result.e) throw result.v; }); }; var isUnhandled = function (promise) { return promise._h !== 1 && (promise._a || promise._c).length === 0; }; var onHandleUnhandled = function (promise) { task.call(global, function () { var handler; if (isNode) { process.emit('rejectionHandled', promise); } else if (handler = global.onrejectionhandled) { handler({ promise: promise, reason: promise._v }); } }); }; var $reject = function (value) { var promise = this; if (promise._d) return; promise._d = true; promise = promise._w || promise; // unwrap promise._v = value; promise._s = 2; if (!promise._a) promise._a = promise._c.slice(); notify(promise, true); }; var $resolve = function (value) { var promise = this; var then; if (promise._d) return; promise._d = true; promise = promise._w || promise; // unwrap try { if (promise === value) throw TypeError("Promise can't be resolved itself"); if (then = isThenable(value)) { microtask(function () { var wrapper = { _w: promise, _d: false }; // wrap try { then.call(value, ctx($resolve, wrapper, 1), ctx($reject, wrapper, 1)); } catch (e) { $reject.call(wrapper, e); } }); } else { promise._v = value; promise._s = 1; notify(promise, false); } } catch (e) { $reject.call({ _w: promise, _d: false }, e); // wrap } }; // constructor polyfill if (!USE_NATIVE) { // 25.4.3.1 Promise(executor) $Promise = function Promise(executor) { anInstance(this, $Promise, PROMISE, '_h'); aFunction(executor); Internal.call(this); try { executor(ctx($resolve, this, 1), ctx($reject, this, 1)); } catch (err) { $reject.call(this, err); } }; // eslint-disable-next-line no-unused-vars Internal = function Promise(executor) { this._c = []; // <- awaiting reactions this._a = undefined; // <- checked in isUnhandled reactions this._s = 0; // <- state this._d = false; // <- done this._v = undefined; // <- value this._h = 0; // <- rejection state, 0 - default, 1 - handled, 2 - unhandled this._n = false; // <- notify }; Internal.prototype = __webpack_require__(/*! 
./_redefine-all */ "./node_modules/core-js/library/modules/_redefine-all.js")($Promise.prototype, { // 25.4.5.3 Promise.prototype.then(onFulfilled, onRejected) then: function then(onFulfilled, onRejected) { var reaction = newPromiseCapability(speciesConstructor(this, $Promise)); reaction.ok = typeof onFulfilled == 'function' ? onFulfilled : true; reaction.fail = typeof onRejected == 'function' && onRejected; reaction.domain = isNode ? process.domain : undefined; this._c.push(reaction); if (this._a) this._a.push(reaction); if (this._s) notify(this, false); return reaction.promise; }, // 25.4.5.1 Promise.prototype.catch(onRejected) 'catch': function (onRejected) { return this.then(undefined, onRejected); } }); OwnPromiseCapability = function () { var promise = new Internal(); this.promise = promise; this.resolve = ctx($resolve, promise, 1); this.reject = ctx($reject, promise, 1); }; newPromiseCapabilityModule.f = newPromiseCapability = function (C) { return C === $Promise || C === Wrapper ? new OwnPromiseCapability(C) : newGenericPromiseCapability(C); }; } $export($export.G + $export.W + $export.F * !USE_NATIVE, { Promise: $Promise }); __webpack_require__(/*! ./_set-to-string-tag */ "./node_modules/core-js/library/modules/_set-to-string-tag.js")($Promise, PROMISE); __webpack_require__(/*! ./_set-species */ "./node_modules/core-js/library/modules/_set-species.js")(PROMISE); Wrapper = __webpack_require__(/*! ./_core */ "./node_modules/core-js/library/modules/_core.js")[PROMISE]; // statics $export($export.S + $export.F * !USE_NATIVE, PROMISE, { // 25.4.4.5 Promise.reject(r) reject: function reject(r) { var capability = newPromiseCapability(this); var $$reject = capability.reject; $$reject(r); return capability.promise; } }); $export($export.S + $export.F * (LIBRARY || !USE_NATIVE), PROMISE, { // 25.4.4.6 Promise.resolve(x) resolve: function resolve(x) { return promiseResolve(LIBRARY && this === Wrapper ? $Promise : this, x); } }); $export($export.S + $export.F * !(USE_NATIVE && __webpack_require__(/*! ./_iter-detect */ "./node_modules/core-js/library/modules/_iter-detect.js")(function (iter) { $Promise.all(iter)['catch'](empty); })), PROMISE, { // 25.4.4.1 Promise.all(iterable) all: function all(iterable) { var C = this; var capability = newPromiseCapability(C); var resolve = capability.resolve; var reject = capability.reject; var result = perform(function () { var values = []; var index = 0; var remaining = 1; forOf(iterable, false, function (promise) { var $index = index++; var alreadyCalled = false; values.push(undefined); remaining++; C.resolve(promise).then(function (value) { if (alreadyCalled) return; alreadyCalled = true; values[$index] = value; --remaining || resolve(values); }, reject); }); --remaining || resolve(values); }); if (result.e) reject(result.v); return capability.promise; }, // 25.4.4.4 Promise.race(iterable) race: function race(iterable) { var C = this; var capability = newPromiseCapability(C); var reject = capability.reject; var result = perform(function () { forOf(iterable, false, function (promise) { C.resolve(promise).then(capability.resolve, reject); }); }); if (result.e) reject(result.v); return capability.promise; } }); /***/ }), /***/ "./node_modules/core-js/library/modules/es6.reflect.construct.js": /*!***********************************************************************!*\ !*** ./node_modules/core-js/library/modules/es6.reflect.construct.js ***! \***********************************************************************/ /*! 
no static exports found */ /***/ (function(module, exports, __webpack_require__) { // 26.1.2 Reflect.construct(target, argumentsList [, newTarget]) var $export = __webpack_require__(/*! ./_export */ "./node_modules/core-js/library/modules/_export.js"); var create = __webpack_require__(/*! ./_object-create */ "./node_modules/core-js/library/modules/_object-create.js"); var aFunction = __webpack_require__(/*! ./_a-function */ "./node_modules/core-js/library/modules/_a-function.js"); var anObject = __webpack_require__(/*! ./_an-object */ "./node_modules/core-js/library/modules/_an-object.js"); var isObject = __webpack_require__(/*! ./_is-object */ "./node_modules/core-js/library/modules/_is-object.js"); var fails = __webpack_require__(/*! ./_fails */ "./node_modules/core-js/library/modules/_fails.js"); var bind = __webpack_require__(/*! ./_bind */ "./node_modules/core-js/library/modules/_bind.js"); var rConstruct = (__webpack_require__(/*! ./_global */ "./node_modules/core-js/library/modules/_global.js").Reflect || {}).construct; // MS Edge supports only 2 arguments and argumentsList argument is optional // FF Nightly sets third argument as `new.target`, but does not create `this` from it var NEW_TARGET_BUG = fails(function () { function F() { /* empty */ } return !(rConstruct(function () { /* empty */ }, [], F) instanceof F); }); var ARGS_BUG = !fails(function () { rConstruct(function () { /* empty */ }); }); $export($export.S + $export.F * (NEW_TARGET_BUG || ARGS_BUG), 'Reflect', { construct: function construct(Target, args /* , newTarget */) { aFunction(Target); anObject(args); var newTarget = arguments.length < 3 ? Target : aFunction(arguments[2]); if (ARGS_BUG && !NEW_TARGET_BUG) return rConstruct(Target, args, newTarget); if (Target == newTarget) { // w/o altered newTarget, optimization for 0-4 arguments switch (args.length) { case 0: return new Target(); case 1: return new Target(args[0]); case 2: return new Target(args[0], args[1]); case 3: return new Target(args[0], args[1], args[2]); case 4: return new Target(args[0], args[1], args[2], args[3]); } // w/o altered newTarget, many-arguments case var $args = [null]; $args.push.apply($args, args); return new (bind.apply(Target, $args))(); } // with altered newTarget, built-in constructors are not supported var proto = newTarget.prototype; var instance = create(isObject(proto) ? proto : Object.prototype); var result = Function.apply.call(Target, instance, args); return isObject(result) ? result : instance; } }); /***/ }), /***/ "./node_modules/core-js/library/modules/es6.set.js": /*!*********************************************************!*\ !*** ./node_modules/core-js/library/modules/es6.set.js ***! \*********************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var strong = __webpack_require__(/*! ./_collection-strong */ "./node_modules/core-js/library/modules/_collection-strong.js"); var validate = __webpack_require__(/*! ./_validate-collection */ "./node_modules/core-js/library/modules/_validate-collection.js"); var SET = 'Set'; // 23.2 Set Objects module.exports = __webpack_require__(/*! ./_collection */ "./node_modules/core-js/library/modules/_collection.js")(SET, function (get) { return function Set() { return get(this, arguments.length > 0 ? arguments[0] : undefined); }; }, { // 23.2.3.1 Set.prototype.add(value) add: function add(value) { return strong.def(validate(this, SET), value = value === 0 ? 
0 : value, value); } }, strong); /***/ }), /***/ "./node_modules/core-js/library/modules/es6.string.iterator.js": /*!*********************************************************************!*\ !*** ./node_modules/core-js/library/modules/es6.string.iterator.js ***! \*********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var $at = __webpack_require__(/*! ./_string-at */ "./node_modules/core-js/library/modules/_string-at.js")(true); // 21.1.3.27 String.prototype[@@iterator]() __webpack_require__(/*! ./_iter-define */ "./node_modules/core-js/library/modules/_iter-define.js")(String, 'String', function (iterated) { this._t = String(iterated); // target this._i = 0; // next index // 21.1.5.2.1 %StringIteratorPrototype%.next() }, function () { var O = this._t; var index = this._i; var point; if (index >= O.length) return { value: undefined, done: true }; point = $at(O, index); this._i += point.length; return { value: point, done: false }; }); /***/ }), /***/ "./node_modules/core-js/library/modules/es6.symbol.js": /*!************************************************************!*\ !*** ./node_modules/core-js/library/modules/es6.symbol.js ***! \************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; // ECMAScript 6 symbols shim var global = __webpack_require__(/*! ./_global */ "./node_modules/core-js/library/modules/_global.js"); var has = __webpack_require__(/*! ./_has */ "./node_modules/core-js/library/modules/_has.js"); var DESCRIPTORS = __webpack_require__(/*! ./_descriptors */ "./node_modules/core-js/library/modules/_descriptors.js"); var $export = __webpack_require__(/*! ./_export */ "./node_modules/core-js/library/modules/_export.js"); var redefine = __webpack_require__(/*! ./_redefine */ "./node_modules/core-js/library/modules/_redefine.js"); var META = __webpack_require__(/*! ./_meta */ "./node_modules/core-js/library/modules/_meta.js").KEY; var $fails = __webpack_require__(/*! ./_fails */ "./node_modules/core-js/library/modules/_fails.js"); var shared = __webpack_require__(/*! ./_shared */ "./node_modules/core-js/library/modules/_shared.js"); var setToStringTag = __webpack_require__(/*! ./_set-to-string-tag */ "./node_modules/core-js/library/modules/_set-to-string-tag.js"); var uid = __webpack_require__(/*! ./_uid */ "./node_modules/core-js/library/modules/_uid.js"); var wks = __webpack_require__(/*! ./_wks */ "./node_modules/core-js/library/modules/_wks.js"); var wksExt = __webpack_require__(/*! ./_wks-ext */ "./node_modules/core-js/library/modules/_wks-ext.js"); var wksDefine = __webpack_require__(/*! ./_wks-define */ "./node_modules/core-js/library/modules/_wks-define.js"); var enumKeys = __webpack_require__(/*! ./_enum-keys */ "./node_modules/core-js/library/modules/_enum-keys.js"); var isArray = __webpack_require__(/*! ./_is-array */ "./node_modules/core-js/library/modules/_is-array.js"); var anObject = __webpack_require__(/*! ./_an-object */ "./node_modules/core-js/library/modules/_an-object.js"); var isObject = __webpack_require__(/*! ./_is-object */ "./node_modules/core-js/library/modules/_is-object.js"); var toIObject = __webpack_require__(/*! ./_to-iobject */ "./node_modules/core-js/library/modules/_to-iobject.js"); var toPrimitive = __webpack_require__(/*! 
./_to-primitive */ "./node_modules/core-js/library/modules/_to-primitive.js"); var createDesc = __webpack_require__(/*! ./_property-desc */ "./node_modules/core-js/library/modules/_property-desc.js"); var _create = __webpack_require__(/*! ./_object-create */ "./node_modules/core-js/library/modules/_object-create.js"); var gOPNExt = __webpack_require__(/*! ./_object-gopn-ext */ "./node_modules/core-js/library/modules/_object-gopn-ext.js"); var $GOPD = __webpack_require__(/*! ./_object-gopd */ "./node_modules/core-js/library/modules/_object-gopd.js"); var $DP = __webpack_require__(/*! ./_object-dp */ "./node_modules/core-js/library/modules/_object-dp.js"); var $keys = __webpack_require__(/*! ./_object-keys */ "./node_modules/core-js/library/modules/_object-keys.js"); var gOPD = $GOPD.f; var dP = $DP.f; var gOPN = gOPNExt.f; var $Symbol = global.Symbol; var $JSON = global.JSON; var _stringify = $JSON && $JSON.stringify; var PROTOTYPE = 'prototype'; var HIDDEN = wks('_hidden'); var TO_PRIMITIVE = wks('toPrimitive'); var isEnum = {}.propertyIsEnumerable; var SymbolRegistry = shared('symbol-registry'); var AllSymbols = shared('symbols'); var OPSymbols = shared('op-symbols'); var ObjectProto = Object[PROTOTYPE]; var USE_NATIVE = typeof $Symbol == 'function'; var QObject = global.QObject; // Don't use setters in Qt Script, https://github.com/zloirock/core-js/issues/173 var setter = !QObject || !QObject[PROTOTYPE] || !QObject[PROTOTYPE].findChild; // fallback for old Android, https://code.google.com/p/v8/issues/detail?id=687 var setSymbolDesc = DESCRIPTORS && $fails(function () { return _create(dP({}, 'a', { get: function () { return dP(this, 'a', { value: 7 }).a; } })).a != 7; }) ? function (it, key, D) { var protoDesc = gOPD(ObjectProto, key); if (protoDesc) delete ObjectProto[key]; dP(it, key, D); if (protoDesc && it !== ObjectProto) dP(ObjectProto, key, protoDesc); } : dP; var wrap = function (tag) { var sym = AllSymbols[tag] = _create($Symbol[PROTOTYPE]); sym._k = tag; return sym; }; var isSymbol = USE_NATIVE && typeof $Symbol.iterator == 'symbol' ? function (it) { return typeof it == 'symbol'; } : function (it) { return it instanceof $Symbol; }; var $defineProperty = function defineProperty(it, key, D) { if (it === ObjectProto) $defineProperty(OPSymbols, key, D); anObject(it); key = toPrimitive(key, true); anObject(D); if (has(AllSymbols, key)) { if (!D.enumerable) { if (!has(it, HIDDEN)) dP(it, HIDDEN, createDesc(1, {})); it[HIDDEN][key] = true; } else { if (has(it, HIDDEN) && it[HIDDEN][key]) it[HIDDEN][key] = false; D = _create(D, { enumerable: createDesc(0, false) }); } return setSymbolDesc(it, key, D); } return dP(it, key, D); }; var $defineProperties = function defineProperties(it, P) { anObject(it); var keys = enumKeys(P = toIObject(P)); var i = 0; var l = keys.length; var key; while (l > i) $defineProperty(it, key = keys[i++], P[key]); return it; }; var $create = function create(it, P) { return P === undefined ? _create(it) : $defineProperties(_create(it), P); }; var $propertyIsEnumerable = function propertyIsEnumerable(key) { var E = isEnum.call(this, key = toPrimitive(key, true)); if (this === ObjectProto && has(AllSymbols, key) && !has(OPSymbols, key)) return false; return E || !has(this, key) || !has(AllSymbols, key) || has(this, HIDDEN) && this[HIDDEN][key] ? 
E : true; }; var $getOwnPropertyDescriptor = function getOwnPropertyDescriptor(it, key) { it = toIObject(it); key = toPrimitive(key, true); if (it === ObjectProto && has(AllSymbols, key) && !has(OPSymbols, key)) return; var D = gOPD(it, key); if (D && has(AllSymbols, key) && !(has(it, HIDDEN) && it[HIDDEN][key])) D.enumerable = true; return D; }; var $getOwnPropertyNames = function getOwnPropertyNames(it) { var names = gOPN(toIObject(it)); var result = []; var i = 0; var key; while (names.length > i) { if (!has(AllSymbols, key = names[i++]) && key != HIDDEN && key != META) result.push(key); } return result; }; var $getOwnPropertySymbols = function getOwnPropertySymbols(it) { var IS_OP = it === ObjectProto; var names = gOPN(IS_OP ? OPSymbols : toIObject(it)); var result = []; var i = 0; var key; while (names.length > i) { if (has(AllSymbols, key = names[i++]) && (IS_OP ? has(ObjectProto, key) : true)) result.push(AllSymbols[key]); } return result; }; // 19.4.1.1 Symbol([description]) if (!USE_NATIVE) { $Symbol = function Symbol() { if (this instanceof $Symbol) throw TypeError('Symbol is not a constructor!'); var tag = uid(arguments.length > 0 ? arguments[0] : undefined); var $set = function (value) { if (this === ObjectProto) $set.call(OPSymbols, value); if (has(this, HIDDEN) && has(this[HIDDEN], tag)) this[HIDDEN][tag] = false; setSymbolDesc(this, tag, createDesc(1, value)); }; if (DESCRIPTORS && setter) setSymbolDesc(ObjectProto, tag, { configurable: true, set: $set }); return wrap(tag); }; redefine($Symbol[PROTOTYPE], 'toString', function toString() { return this._k; }); $GOPD.f = $getOwnPropertyDescriptor; $DP.f = $defineProperty; __webpack_require__(/*! ./_object-gopn */ "./node_modules/core-js/library/modules/_object-gopn.js").f = gOPNExt.f = $getOwnPropertyNames; __webpack_require__(/*! ./_object-pie */ "./node_modules/core-js/library/modules/_object-pie.js").f = $propertyIsEnumerable; __webpack_require__(/*! ./_object-gops */ "./node_modules/core-js/library/modules/_object-gops.js").f = $getOwnPropertySymbols; if (DESCRIPTORS && !__webpack_require__(/*! ./_library */ "./node_modules/core-js/library/modules/_library.js")) { redefine(ObjectProto, 'propertyIsEnumerable', $propertyIsEnumerable, true); } wksExt.f = function (name) { return wrap(wks(name)); }; } $export($export.G + $export.W + $export.F * !USE_NATIVE, { Symbol: $Symbol }); for (var es6Symbols = ( // 19.4.2.2, 19.4.2.3, 19.4.2.4, 19.4.2.6, 19.4.2.8, 19.4.2.9, 19.4.2.10, 19.4.2.11, 19.4.2.12, 19.4.2.13, 19.4.2.14 'hasInstance,isConcatSpreadable,iterator,match,replace,search,species,split,toPrimitive,toStringTag,unscopables' ).split(','), j = 0; es6Symbols.length > j;)wks(es6Symbols[j++]); for (var wellKnownSymbols = $keys(wks.store), k = 0; wellKnownSymbols.length > k;) wksDefine(wellKnownSymbols[k++]); $export($export.S + $export.F * !USE_NATIVE, 'Symbol', { // 19.4.2.1 Symbol.for(key) 'for': function (key) { return has(SymbolRegistry, key += '') ? 
SymbolRegistry[key] : SymbolRegistry[key] = $Symbol(key); }, // 19.4.2.5 Symbol.keyFor(sym) keyFor: function keyFor(sym) { if (!isSymbol(sym)) throw TypeError(sym + ' is not a symbol!'); for (var key in SymbolRegistry) if (SymbolRegistry[key] === sym) return key; }, useSetter: function () { setter = true; }, useSimple: function () { setter = false; } }); $export($export.S + $export.F * !USE_NATIVE, 'Object', { // 19.1.2.2 Object.create(O [, Properties]) create: $create, // 19.1.2.4 Object.defineProperty(O, P, Attributes) defineProperty: $defineProperty, // 19.1.2.3 Object.defineProperties(O, Properties) defineProperties: $defineProperties, // 19.1.2.6 Object.getOwnPropertyDescriptor(O, P) getOwnPropertyDescriptor: $getOwnPropertyDescriptor, // 19.1.2.7 Object.getOwnPropertyNames(O) getOwnPropertyNames: $getOwnPropertyNames, // 19.1.2.8 Object.getOwnPropertySymbols(O) getOwnPropertySymbols: $getOwnPropertySymbols }); // 24.3.2 JSON.stringify(value [, replacer [, space]]) $JSON && $export($export.S + $export.F * (!USE_NATIVE || $fails(function () { var S = $Symbol(); // MS Edge converts symbol values to JSON as {} // WebKit converts symbol values to JSON as null // V8 throws on boxed symbols return _stringify([S]) != '[null]' || _stringify({ a: S }) != '{}' || _stringify(Object(S)) != '{}'; })), 'JSON', { stringify: function stringify(it) { var args = [it]; var i = 1; var replacer, $replacer; while (arguments.length > i) args.push(arguments[i++]); $replacer = replacer = args[1]; if (!isObject(replacer) && it === undefined || isSymbol(it)) return; // IE8 returns string on undefined if (!isArray(replacer)) replacer = function (key, value) { if (typeof $replacer == 'function') value = $replacer.call(this, key, value); if (!isSymbol(value)) return value; }; args[1] = replacer; return _stringify.apply($JSON, args); } }); // 19.4.3.4 Symbol.prototype[@@toPrimitive](hint) $Symbol[PROTOTYPE][TO_PRIMITIVE] || __webpack_require__(/*! ./_hide */ "./node_modules/core-js/library/modules/_hide.js")($Symbol[PROTOTYPE], TO_PRIMITIVE, $Symbol[PROTOTYPE].valueOf); // 19.4.3.5 Symbol.prototype[@@toStringTag] setToStringTag($Symbol, 'Symbol'); // 20.2.1.9 Math[@@toStringTag] setToStringTag(Math, 'Math', true); // 24.3.3 JSON[@@toStringTag] setToStringTag(global.JSON, 'JSON', true); /***/ }), /***/ "./node_modules/core-js/library/modules/es7.promise.finally.js": /*!*********************************************************************!*\ !*** ./node_modules/core-js/library/modules/es7.promise.finally.js ***! \*********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; // https://github.com/tc39/proposal-promise-finally var $export = __webpack_require__(/*! ./_export */ "./node_modules/core-js/library/modules/_export.js"); var core = __webpack_require__(/*! ./_core */ "./node_modules/core-js/library/modules/_core.js"); var global = __webpack_require__(/*! ./_global */ "./node_modules/core-js/library/modules/_global.js"); var speciesConstructor = __webpack_require__(/*! ./_species-constructor */ "./node_modules/core-js/library/modules/_species-constructor.js"); var promiseResolve = __webpack_require__(/*! 
./_promise-resolve */ "./node_modules/core-js/library/modules/_promise-resolve.js"); $export($export.P + $export.R, 'Promise', { 'finally': function (onFinally) { var C = speciesConstructor(this, core.Promise || global.Promise); var isFunction = typeof onFinally == 'function'; return this.then( isFunction ? function (x) { return promiseResolve(C, onFinally()).then(function () { return x; }); } : onFinally, isFunction ? function (e) { return promiseResolve(C, onFinally()).then(function () { throw e; }); } : onFinally ); } }); /***/ }), /***/ "./node_modules/core-js/library/modules/es7.promise.try.js": /*!*****************************************************************!*\ !*** ./node_modules/core-js/library/modules/es7.promise.try.js ***! \*****************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; // https://github.com/tc39/proposal-promise-try var $export = __webpack_require__(/*! ./_export */ "./node_modules/core-js/library/modules/_export.js"); var newPromiseCapability = __webpack_require__(/*! ./_new-promise-capability */ "./node_modules/core-js/library/modules/_new-promise-capability.js"); var perform = __webpack_require__(/*! ./_perform */ "./node_modules/core-js/library/modules/_perform.js"); $export($export.S, 'Promise', { 'try': function (callbackfn) { var promiseCapability = newPromiseCapability.f(this); var result = perform(callbackfn); (result.e ? promiseCapability.reject : promiseCapability.resolve)(result.v); return promiseCapability.promise; } }); /***/ }), /***/ "./node_modules/core-js/library/modules/es7.set.from.js": /*!**************************************************************!*\ !*** ./node_modules/core-js/library/modules/es7.set.from.js ***! \**************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { // https://tc39.github.io/proposal-setmap-offrom/#sec-set.from __webpack_require__(/*! ./_set-collection-from */ "./node_modules/core-js/library/modules/_set-collection-from.js")('Set'); /***/ }), /***/ "./node_modules/core-js/library/modules/es7.set.of.js": /*!************************************************************!*\ !*** ./node_modules/core-js/library/modules/es7.set.of.js ***! \************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { // https://tc39.github.io/proposal-setmap-offrom/#sec-set.of __webpack_require__(/*! ./_set-collection-of */ "./node_modules/core-js/library/modules/_set-collection-of.js")('Set'); /***/ }), /***/ "./node_modules/core-js/library/modules/es7.set.to-json.js": /*!*****************************************************************!*\ !*** ./node_modules/core-js/library/modules/es7.set.to-json.js ***! \*****************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { // https://github.com/DavidBruant/Map-Set.prototype.toJSON var $export = __webpack_require__(/*! ./_export */ "./node_modules/core-js/library/modules/_export.js"); $export($export.P + $export.R, 'Set', { toJSON: __webpack_require__(/*! 
./_collection-to-json */ "./node_modules/core-js/library/modules/_collection-to-json.js")('Set') }); /***/ }), /***/ "./node_modules/core-js/library/modules/es7.symbol.async-iterator.js": /*!***************************************************************************!*\ !*** ./node_modules/core-js/library/modules/es7.symbol.async-iterator.js ***! \***************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { __webpack_require__(/*! ./_wks-define */ "./node_modules/core-js/library/modules/_wks-define.js")('asyncIterator'); /***/ }), /***/ "./node_modules/core-js/library/modules/es7.symbol.observable.js": /*!***********************************************************************!*\ !*** ./node_modules/core-js/library/modules/es7.symbol.observable.js ***! \***********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { __webpack_require__(/*! ./_wks-define */ "./node_modules/core-js/library/modules/_wks-define.js")('observable'); /***/ }), /***/ "./node_modules/core-js/library/modules/web.dom.iterable.js": /*!******************************************************************!*\ !*** ./node_modules/core-js/library/modules/web.dom.iterable.js ***! \******************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { __webpack_require__(/*! ./es6.array.iterator */ "./node_modules/core-js/library/modules/es6.array.iterator.js"); var global = __webpack_require__(/*! ./_global */ "./node_modules/core-js/library/modules/_global.js"); var hide = __webpack_require__(/*! ./_hide */ "./node_modules/core-js/library/modules/_hide.js"); var Iterators = __webpack_require__(/*! ./_iterators */ "./node_modules/core-js/library/modules/_iterators.js"); var TO_STRING_TAG = __webpack_require__(/*! ./_wks */ "./node_modules/core-js/library/modules/_wks.js")('toStringTag'); var DOMIterables = ('CSSRuleList,CSSStyleDeclaration,CSSValueList,ClientRectList,DOMRectList,DOMStringList,' + 'DOMTokenList,DataTransferItemList,FileList,HTMLAllCollection,HTMLCollection,HTMLFormElement,HTMLSelectElement,' + 'MediaList,MimeTypeArray,NamedNodeMap,NodeList,PaintRequestList,Plugin,PluginArray,SVGLengthList,SVGNumberList,' + 'SVGPathSegList,SVGPointList,SVGStringList,SVGTransformList,SourceBufferList,StyleSheetList,TextTrackCueList,' + 'TextTrackList,TouchList').split(','); for (var i = 0; i < DOMIterables.length; i++) { var NAME = DOMIterables[i]; var Collection = global[NAME]; var proto = Collection && Collection.prototype; if (proto && !proto[TO_STRING_TAG]) hide(proto, TO_STRING_TAG, NAME); Iterators[NAME] = Iterators.Array; } /***/ }), /***/ "./node_modules/define-properties/index.js": /*!*************************************************!*\ !*** ./node_modules/define-properties/index.js ***! \*************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var keys = __webpack_require__(/*! 
object-keys */ "./node_modules/object-keys/index.js"); var hasSymbols = typeof Symbol === 'function' && typeof Symbol('foo') === 'symbol'; var toStr = Object.prototype.toString; var concat = Array.prototype.concat; var origDefineProperty = Object.defineProperty; var isFunction = function (fn) { return typeof fn === 'function' && toStr.call(fn) === '[object Function]'; }; var arePropertyDescriptorsSupported = function () { var obj = {}; try { origDefineProperty(obj, 'x', { enumerable: false, value: obj }); // eslint-disable-next-line no-unused-vars, no-restricted-syntax for (var _ in obj) { // jscs:ignore disallowUnusedVariables return false; } return obj.x === obj; } catch (e) { /* this is IE 8. */ return false; } }; var supportsDescriptors = origDefineProperty && arePropertyDescriptorsSupported(); var defineProperty = function (object, name, value, predicate) { if (name in object && (!isFunction(predicate) || !predicate())) { return; } if (supportsDescriptors) { origDefineProperty(object, name, { configurable: true, enumerable: false, value: value, writable: true }); } else { object[name] = value; } }; var defineProperties = function (object, map) { var predicates = arguments.length > 2 ? arguments[2] : {}; var props = keys(map); if (hasSymbols) { props = concat.call(props, Object.getOwnPropertySymbols(map)); } for (var i = 0; i < props.length; i += 1) { defineProperty(object, props[i], map[props[i]], predicates[props[i]]); } }; defineProperties.supportsDescriptors = !!supportsDescriptors; module.exports = defineProperties; /***/ }), /***/ "./node_modules/function-bind/implementation.js": /*!******************************************************!*\ !*** ./node_modules/function-bind/implementation.js ***! \******************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; /* eslint no-invalid-this: 1 */ var ERROR_MESSAGE = 'Function.prototype.bind called on incompatible '; var slice = Array.prototype.slice; var toStr = Object.prototype.toString; var funcType = '[object Function]'; module.exports = function bind(that) { var target = this; if (typeof target !== 'function' || toStr.call(target) !== funcType) { throw new TypeError(ERROR_MESSAGE + target); } var args = slice.call(arguments, 1); var bound; var binder = function () { if (this instanceof bound) { var result = target.apply( this, args.concat(slice.call(arguments)) ); if (Object(result) === result) { return result; } return this; } else { return target.apply( that, args.concat(slice.call(arguments)) ); } }; var boundLength = Math.max(0, target.length - args.length); var boundArgs = []; for (var i = 0; i < boundLength; i++) { boundArgs.push('$' + i); } bound = Function('binder', 'return function (' + boundArgs.join(',') + '){ return binder.apply(this,arguments); }')(binder); if (target.prototype) { var Empty = function Empty() {}; Empty.prototype = target.prototype; bound.prototype = new Empty(); Empty.prototype = null; } return bound; }; /***/ }), /***/ "./node_modules/function-bind/index.js": /*!*********************************************!*\ !*** ./node_modules/function-bind/index.js ***! \*********************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var implementation = __webpack_require__(/*! 
./implementation */ "./node_modules/function-bind/implementation.js"); module.exports = Function.prototype.bind || implementation; /***/ }), /***/ "./node_modules/has-symbols/shams.js": /*!*******************************************!*\ !*** ./node_modules/has-symbols/shams.js ***! \*******************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; /* eslint complexity: [2, 17], max-statements: [2, 33] */ module.exports = function hasSymbols() { if (typeof Symbol !== 'function' || typeof Object.getOwnPropertySymbols !== 'function') { return false; } if (typeof Symbol.iterator === 'symbol') { return true; } var obj = {}; var sym = Symbol('test'); var symObj = Object(sym); if (typeof sym === 'string') { return false; } if (Object.prototype.toString.call(sym) !== '[object Symbol]') { return false; } if (Object.prototype.toString.call(symObj) !== '[object Symbol]') { return false; } // temp disabled per https://github.com/ljharb/object.assign/issues/17 // if (sym instanceof Symbol) { return false; } // temp disabled per https://github.com/WebReflection/get-own-property-symbols/issues/4 // if (!(symObj instanceof Symbol)) { return false; } // if (typeof Symbol.prototype.toString !== 'function') { return false; } // if (String(sym) !== Symbol.prototype.toString.call(sym)) { return false; } var symVal = 42; obj[sym] = symVal; for (sym in obj) { return false; } // eslint-disable-line no-restricted-syntax if (typeof Object.keys === 'function' && Object.keys(obj).length !== 0) { return false; } if (typeof Object.getOwnPropertyNames === 'function' && Object.getOwnPropertyNames(obj).length !== 0) { return false; } var syms = Object.getOwnPropertySymbols(obj); if (syms.length !== 1 || syms[0] !== sym) { return false; } if (!Object.prototype.propertyIsEnumerable.call(obj, sym)) { return false; } if (typeof Object.getOwnPropertyDescriptor === 'function') { var descriptor = Object.getOwnPropertyDescriptor(obj, sym); if (descriptor.value !== symVal || descriptor.enumerable !== true) { return false; } } return true; }; /***/ }), /***/ "./node_modules/has/src/index.js": /*!***************************************!*\ !*** ./node_modules/has/src/index.js ***! \***************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var bind = __webpack_require__(/*! function-bind */ "./node_modules/function-bind/index.js"); module.exports = bind.call(Function.call, Object.prototype.hasOwnProperty); /***/ }), /***/ "./node_modules/hoist-non-react-statics/dist/hoist-non-react-statics.cjs.js": /*!**********************************************************************************!*\ !*** ./node_modules/hoist-non-react-statics/dist/hoist-non-react-statics.cjs.js ***! \**********************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; /** * Copyright 2015, Yahoo! Inc. * Copyrights licensed under the New BSD License. See the accompanying LICENSE file for terms. */ var ReactIs = __webpack_require__(/*! 
react-is */ "./node_modules/react-is/index.js"); var REACT_STATICS = { childContextTypes: true, contextType: true, contextTypes: true, defaultProps: true, displayName: true, getDefaultProps: true, getDerivedStateFromError: true, getDerivedStateFromProps: true, mixins: true, propTypes: true, type: true }; var KNOWN_STATICS = { name: true, length: true, prototype: true, caller: true, callee: true, arguments: true, arity: true }; var FORWARD_REF_STATICS = { '$$typeof': true, render: true }; var TYPE_STATICS = {}; TYPE_STATICS[ReactIs.ForwardRef] = FORWARD_REF_STATICS; var defineProperty = Object.defineProperty; var getOwnPropertyNames = Object.getOwnPropertyNames; var getOwnPropertySymbols = Object.getOwnPropertySymbols; var getOwnPropertyDescriptor = Object.getOwnPropertyDescriptor; var getPrototypeOf = Object.getPrototypeOf; var objectPrototype = Object.prototype; function hoistNonReactStatics(targetComponent, sourceComponent, blacklist) { if (typeof sourceComponent !== 'string') { // don't hoist over string (html) components if (objectPrototype) { var inheritedComponent = getPrototypeOf(sourceComponent); if (inheritedComponent && inheritedComponent !== objectPrototype) { hoistNonReactStatics(targetComponent, inheritedComponent, blacklist); } } var keys = getOwnPropertyNames(sourceComponent); if (getOwnPropertySymbols) { keys = keys.concat(getOwnPropertySymbols(sourceComponent)); } var targetStatics = TYPE_STATICS[targetComponent['$$typeof']] || REACT_STATICS; var sourceStatics = TYPE_STATICS[sourceComponent['$$typeof']] || REACT_STATICS; for (var i = 0; i < keys.length; ++i) { var key = keys[i]; if (!KNOWN_STATICS[key] && !(blacklist && blacklist[key]) && !(sourceStatics && sourceStatics[key]) && !(targetStatics && targetStatics[key])) { var descriptor = getOwnPropertyDescriptor(sourceComponent, key); try { // Avoid failures from read-only properties defineProperty(targetComponent, key, descriptor); } catch (e) {} } } return targetComponent; } return targetComponent; } module.exports = hoistNonReactStatics; /***/ }), /***/ "./node_modules/next-server/dist/lib/mitt.js": /*!***************************************************!*\ !*** ./node_modules/next-server/dist/lib/mitt.js ***! \***************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; /* MIT License Copyright (c) Jason Miller (https://jasonformat.com/) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ var _interopRequireDefault = __webpack_require__(/*! 
@babel/runtime-corejs2/helpers/interopRequireDefault */ "./node_modules/@babel/runtime-corejs2/helpers/interopRequireDefault.js"); var _create = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/core-js/object/create */ "./node_modules/@babel/runtime-corejs2/core-js/object/create.js")); Object.defineProperty(exports, "__esModule", { value: true }); function mitt() { var all = (0, _create.default)(null); return { on: function on(type, handler) { (all[type] || (all[type] = [])).push(handler); }, off: function off(type, handler) { if (all[type]) { // tslint:disable-next-line:no-bitwise all[type].splice(all[type].indexOf(handler) >>> 0, 1); } }, emit: function emit(type) { for (var _len = arguments.length, evts = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) { evts[_key - 1] = arguments[_key]; } (all[type] || []).slice().map(function (handler) { handler.apply(void 0, evts); }); } }; } exports.default = mitt; /***/ }), /***/ "./node_modules/next-server/dist/lib/router/router.js": /*!************************************************************!*\ !*** ./node_modules/next-server/dist/lib/router/router.js ***! \************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; /* global __NEXT_DATA__ */ var _interopRequireDefault = __webpack_require__(/*! @babel/runtime-corejs2/helpers/interopRequireDefault */ "./node_modules/@babel/runtime-corejs2/helpers/interopRequireDefault.js"); var _slicedToArray2 = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/helpers/esm/slicedToArray */ "./node_modules/@babel/runtime-corejs2/helpers/esm/slicedToArray.js")); var _typeof2 = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/helpers/esm/typeof */ "./node_modules/@babel/runtime-corejs2/helpers/esm/typeof.js")); var _regenerator = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/regenerator */ "./node_modules/@babel/runtime-corejs2/regenerator/index.js")); var _asyncToGenerator2 = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/helpers/esm/asyncToGenerator */ "./node_modules/@babel/runtime-corejs2/helpers/esm/asyncToGenerator.js")); var _assign = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/core-js/object/assign */ "./node_modules/@babel/runtime-corejs2/core-js/object/assign.js")); var _set = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/core-js/set */ "./node_modules/@babel/runtime-corejs2/core-js/set.js")); var _classCallCheck2 = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/helpers/esm/classCallCheck */ "./node_modules/@babel/runtime-corejs2/helpers/esm/classCallCheck.js")); var _createClass2 = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/helpers/esm/createClass */ "./node_modules/@babel/runtime-corejs2/helpers/esm/createClass.js")); var __importDefault = void 0 && (void 0).__importDefault || function (mod) { return mod && mod.__esModule ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); var url_1 = __webpack_require__(/*! url */ "./node_modules/url/url.js"); var mitt_1 = __importDefault(__webpack_require__(/*! ../mitt */ "./node_modules/next-server/dist/lib/mitt.js")); var shallow_equals_1 = __importDefault(__webpack_require__(/*! 
./shallow-equals */ "./node_modules/next-server/dist/lib/router/shallow-equals.js")); var utils_1 = __webpack_require__(/*! ../utils */ "./node_modules/next-server/dist/lib/utils.js"); var Router = /*#__PURE__*/ function () { function Router(pathname, query, as) { var _this = this; var _ref = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {}, initialProps = _ref.initialProps, pageLoader = _ref.pageLoader, App = _ref.App, Component = _ref.Component, err = _ref.err; (0, _classCallCheck2.default)(this, Router); this.onPopState = function (e) { if (!e.state) { // We get state as undefined for two reasons. // 1. With older Safari (< 8) and older Chrome (< 34) // 2. When the URL changed with # // // In both cases, we don't need to proceed and change the route. // (as it's already changed) // But we can simply replace the state with the new changes. // Actually, for (1) we don't need to do anything. But it's hard to detect that event. // So, doing the following for (1) does no harm. var _pathname = _this.pathname, _query = _this.query; _this.changeState('replaceState', utils_1.formatWithValidation({ pathname: _pathname, query: _query }), utils_1.getURL()); return; } // If the downstream application returns falsy, return. // They will then be responsible for handling the event. if (!_this._beforePopState(e.state)) { return; } var _e$state = e.state, url = _e$state.url, as = _e$state.as, options = _e$state.options; if (true) { if (typeof url === 'undefined' || typeof as === 'undefined') { console.warn('`popstate` event triggered but `event.state` did not have `url` or `as` https://err.sh/zeit/next.js/popstate-state-empty'); } } _this.replace(url, as, options); }; // represents the current component key this.route = toRoute(pathname); // set up the component cache (by route keys) this.components = {}; // We should not keep the cache if there's an error. // Otherwise, this causes issues when going back to the errored page again.
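// Illustrative shape of the component cache built here (assumed example
// values, not real data): after constructing the router on '/about',
//   this.components = {
//     '/about': { Component: AboutPage, props: {...}, err: undefined },
//     '/_app':  { Component: App }
//   }
// The '/_error' route is deliberately left out of the cache, per the check
// just below.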
if (pathname !== '/_error') { this.components[this.route] = { Component: Component, props: initialProps, err: err }; } this.components['/_app'] = { Component: App }; // Backwards compat for Router.router.events // TODO: Should be remove the following major version as it was never documented this.events = Router.events; this.pageLoader = pageLoader; this.pathname = pathname; this.query = query; this.asPath = as; this.subscriptions = new _set.default(); this.componentLoadCancel = null; this._beforePopState = function () { return true; }; if (typeof window !== 'undefined') { // in order for `e.state` to work on the `onpopstate` event // we have to register the initial route upon initialization this.changeState('replaceState', utils_1.formatWithValidation({ pathname: pathname, query: query }), as); window.addEventListener('popstate', this.onPopState); } } (0, _createClass2.default)(Router, [{ key: "update", value: function update(route, Component) { var data = this.components[route]; if (!data) { throw new Error("Cannot update unavailable route: ".concat(route)); } var newData = (0, _assign.default)({}, data, { Component: Component }); this.components[route] = newData; // pages/_app.js updated if (route === '/_app') { this.notify(this.components[this.route]); return; } if (route === this.route) { this.notify(newData); } } }, { key: "reload", value: function () { var _reload = (0, _asyncToGenerator2.default)( /*#__PURE__*/ _regenerator.default.mark(function _callee(route) { var pathname, query, url, as, routeInfo, error; return _regenerator.default.wrap(function _callee$(_context) { while (1) { switch (_context.prev = _context.next) { case 0: delete this.components[route]; this.pageLoader.clearCache(route); if (!(route !== this.route)) { _context.next = 4; break; } return _context.abrupt("return"); case 4: pathname = this.pathname, query = this.query; url = window.location.href; // This makes sure we only use pathname + query + hash, to mirror `asPath` coming from the server. as = window.location.pathname + window.location.search + window.location.hash; Router.events.emit('routeChangeStart', url); _context.next = 10; return this.getRouteInfo(route, pathname, query, as); case 10: routeInfo = _context.sent; error = routeInfo.error; if (!(error && error.cancelled)) { _context.next = 14; break; } return _context.abrupt("return"); case 14: this.notify(routeInfo); if (!error) { _context.next = 18; break; } Router.events.emit('routeChangeError', error, url); throw error; case 18: Router.events.emit('routeChangeComplete', url); case 19: case "end": return _context.stop(); } } }, _callee, this); })); function reload(_x) { return _reload.apply(this, arguments); } return reload; }() }, { key: "back", value: function back() { window.history.back(); } }, { key: "push", value: function push(url) { var as = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : url; var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}; return this.change('pushState', url, as, options); } }, { key: "replace", value: function replace(url) { var as = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : url; var options = arguments.length > 2 && arguments[2] !== undefined ? 
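// Usage sketch for push/replace above (illustrative url/as values):
//   router.push('/post?id=2', '/post/2');             // adds a history entry
//   router.replace('/post?id=3', '/post/3');          // swaps the current entry
//   router.push('/post?id=4', '/post/4', { shallow: true }); // skip getInitialProps
// Both delegate to change() and return a promise that resolves to true once
// the route change completes (false if it was cancelled).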
arguments[2] : {}; return this.change('replaceState', url, as, options); } }, { key: "change", value: function () { var _change = (0, _asyncToGenerator2.default)( /*#__PURE__*/ _regenerator.default.mark(function _callee2(method, _url, _as, options) { var url, as, _url_1$parse, pathname, query, route, _options$shallow, shallow, routeInfo, _routeInfo, error, hash; return _regenerator.default.wrap(function _callee2$(_context2) { while (1) { switch (_context2.prev = _context2.next) { case 0: // If url and as provided as an object representation, // we'll format them into the string version here. url = (0, _typeof2.default)(_url) === 'object' ? utils_1.formatWithValidation(_url) : _url; as = (0, _typeof2.default)(_as) === 'object' ? utils_1.formatWithValidation(_as) : _as; // Add the ending slash to the paths. So, we can serve the // "<page>/index.html" directly for the SSR page. if (__NEXT_DATA__.nextExport) { as = Router._rewriteUrlForNextExport(as); } this.abortComponentLoad(as); // If the url change is only related to a hash change // We should not proceed. We should only change the state. if (!this.onlyAHashChange(as)) { _context2.next = 10; break; } Router.events.emit('hashChangeStart', as); this.changeState(method, url, as); this.scrollToHash(as); Router.events.emit('hashChangeComplete', as); return _context2.abrupt("return", true); case 10: _url_1$parse = url_1.parse(url, true), pathname = _url_1$parse.pathname, query = _url_1$parse.query; // If asked to change the current URL we should reload the current page // (not location.reload() but reload getInitialProps and other Next.js stuffs) // We also need to set the method = replaceState always // as this should not go into the history (That's how browsers work) // We should compare the new asPath to the current asPath, not the url if (!this.urlIsNew(as)) { method = 'replaceState'; } route = toRoute(pathname); _options$shallow = options.shallow, shallow = _options$shallow === void 0 ? false : _options$shallow; routeInfo = null; Router.events.emit('routeChangeStart', as); // If shallow === false and other conditions met, we reuse the // existing routeInfo for this route. // Because of this, getInitialProps would not run. if (!(shallow && this.isShallowRoutingPossible(route))) { _context2.next = 20; break; } routeInfo = this.components[route]; _context2.next = 23; break; case 20: _context2.next = 22; return this.getRouteInfo(route, pathname, query, as); case 22: routeInfo = _context2.sent; case 23: _routeInfo = routeInfo, error = _routeInfo.error; if (!(error && error.cancelled)) { _context2.next = 26; break; } return _context2.abrupt("return", false); case 26: Router.events.emit('beforeHistoryChange', as); this.changeState(method, url, as, options); hash = window.location.hash.substring(1); this.set(route, pathname, query, as, (0, _assign.default)({}, routeInfo, { hash: hash })); if (!error) { _context2.next = 33; break; } Router.events.emit('routeChangeError', error, as); throw error; case 33: Router.events.emit('routeChangeComplete', as); return _context2.abrupt("return", true); case 35: case "end": return _context2.stop(); } } }, _callee2, this); })); function change(_x2, _x3, _x4, _x5) { return _change.apply(this, arguments); } return change; }() }, { key: "changeState", value: function changeState(method, url, as) { var options = arguments.length > 3 && arguments[3] !== undefined ? 
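// Shape of the history entry recorded below (sketch with assumed values):
//   window.history.pushState({ url: '/post?id=2', as: '/post/2', options: {} }, null, '/post/2');
// onPopState later reads url/as/options back off event.state, which is why
// the initial route is registered with replaceState in the constructor.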
arguments[3] : {}; if (true) { if (typeof window.history === 'undefined') { console.error("Warning: window.history is not available."); return; } if (typeof window.history[method] === 'undefined') { console.error("Warning: window.history.".concat(method, " is not available")); return; } } if (method !== 'pushState' || utils_1.getURL() !== as) { window.history[method]({ url: url, as: as, options: options }, null, as); } } }, { key: "getRouteInfo", value: function () { var _getRouteInfo = (0, _asyncToGenerator2.default)( /*#__PURE__*/ _regenerator.default.mark(function _callee3(route, pathname, query, as) { var routeInfo, _routeInfo2, Component, _require, isValidElementType, ctx, _Component, _ctx; return _regenerator.default.wrap(function _callee3$(_context3) { while (1) { switch (_context3.prev = _context3.next) { case 0: routeInfo = null; _context3.prev = 1; routeInfo = this.components[route]; if (routeInfo) { _context3.next = 8; break; } _context3.next = 6; return this.fetchComponent(route, as); case 6: _context3.t0 = _context3.sent; routeInfo = { Component: _context3.t0 }; case 8: _routeInfo2 = routeInfo, Component = _routeInfo2.Component; if (false) {} _require = __webpack_require__(/*! react-is */ "./node_modules/react-is/index.js"), isValidElementType = _require.isValidElementType; if (isValidElementType(Component)) { _context3.next = 13; break; } throw new Error("The default export is not a React Component in page: \"".concat(pathname, "\"")); case 13: ctx = { pathname: pathname, query: query, asPath: as }; _context3.next = 16; return this.getInitialProps(Component, ctx); case 16: routeInfo.props = _context3.sent; this.components[route] = routeInfo; _context3.next = 44; break; case 20: _context3.prev = 20; _context3.t1 = _context3["catch"](1); if (!(_context3.t1.code === 'PAGE_LOAD_ERROR')) { _context3.next = 26; break; } // If we can't load the page it could be one of following reasons // 1. Page doesn't exists // 2. Page does exist in a different zone // 3. Internal error while loading the page // So, doing a hard reload is the proper way to deal with this. window.location.href = as; // Changing the URL doesn't block executing the current code path. // So, we need to mark it as a cancelled error and stop the routing logic. 
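// The `cancelled` flag is an ad-hoc convention used throughout this router:
// getRouteInfo converts a cancelled failure into `{ error }`, and callers
// such as change()/reload() check error.cancelled and return early instead
// of surfacing it. Sketch (illustrative values):
//   var err = new Error('Abort fetching component for route: "/about"');
//   err.cancelled = true; // downstream code treats this as "stop quietly"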
_context3.t1.cancelled = true; return _context3.abrupt("return", { error: _context3.t1 }); case 26: if (!_context3.t1.cancelled) { _context3.next = 28; break; } return _context3.abrupt("return", { error: _context3.t1 }); case 28: _context3.next = 30; return this.fetchComponent('/_error'); case 30: _Component = _context3.sent; routeInfo = { Component: _Component, err: _context3.t1 }; _ctx = { err: _context3.t1, pathname: pathname, query: query }; _context3.prev = 33; _context3.next = 36; return this.getInitialProps(_Component, _ctx); case 36: routeInfo.props = _context3.sent; _context3.next = 43; break; case 39: _context3.prev = 39; _context3.t2 = _context3["catch"](33); console.error('Error in error page `getInitialProps`: ', _context3.t2); routeInfo.props = {}; case 43: routeInfo.error = _context3.t1; case 44: return _context3.abrupt("return", routeInfo); case 45: case "end": return _context3.stop(); } } }, _callee3, this, [[1, 20], [33, 39]]); })); function getRouteInfo(_x6, _x7, _x8, _x9) { return _getRouteInfo.apply(this, arguments); } return getRouteInfo; }() }, { key: "set", value: function set(route, pathname, query, as, data) { this.route = route; this.pathname = pathname; this.query = query; this.asPath = as; this.notify(data); } }, { key: "beforePopState", value: function beforePopState(cb) { this._beforePopState = cb; } }, { key: "onlyAHashChange", value: function onlyAHashChange(as) { if (!this.asPath) return false; var _this$asPath$split = this.asPath.split('#'), _this$asPath$split2 = (0, _slicedToArray2.default)(_this$asPath$split, 2), oldUrlNoHash = _this$asPath$split2[0], oldHash = _this$asPath$split2[1]; var _as$split = as.split('#'), _as$split2 = (0, _slicedToArray2.default)(_as$split, 2), newUrlNoHash = _as$split2[0], newHash = _as$split2[1]; // Makes sure we scroll to the provided hash if the url/hash are the same if (newHash && oldUrlNoHash === newUrlNoHash && oldHash === newHash) { return true; } // If the urls are change, there's more than a hash change if (oldUrlNoHash !== newUrlNoHash) { return false; } // If the hash has changed, then it's a hash only change. // This check is necessary to handle both the enter and // leave hash === '' cases. The identity case falls through // and is treated as a next reload. return oldHash !== newHash; } }, { key: "scrollToHash", value: function scrollToHash(as) { var _as$split3 = as.split('#'), _as$split4 = (0, _slicedToArray2.default)(_as$split3, 2), hash = _as$split4[1]; // Scroll to top if the hash is just `#` with no value if (hash === '') { window.scrollTo(0, 0); return; } // First we check if the element by id is found var idEl = document.getElementById(hash); if (idEl) { idEl.scrollIntoView(); return; } // If there's no element with the id, we check the `name` property // To mirror browsers var nameEl = document.getElementsByName(hash)[0]; if (nameEl) { nameEl.scrollIntoView(); } } }, { key: "urlIsNew", value: function urlIsNew(asPath) { var _url_1$parse2 = url_1.parse(asPath, true), pathname = _url_1$parse2.pathname, query = _url_1$parse2.query; var _url_1$parse3 = url_1.parse(this.asPath, true), curPathname = _url_1$parse3.pathname; return curPathname !== pathname || !shallow_equals_1.default(query, this.query); } }, { key: "isShallowRoutingPossible", value: function isShallowRoutingPossible(route) { return (// If there's cached routeInfo for the route. Boolean(this.components[route]) && // If the route is already rendered on the screen. 
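// i.e. shallow routing only reuses cached routeInfo when navigating within
// the same page component. Sketch (illustrative; starting from '/post?id=1'):
//   router.push('/post?id=2', undefined, { shallow: true }) // same route: cache reused
//   router.push('/about', undefined, { shallow: true })     // new route: full fetch
// (passing undefined for `as` falls back to the url argument above)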
this.route === route ); } }, { key: "prefetch", value: function () { var _prefetch = (0, _asyncToGenerator2.default)( /*#__PURE__*/ _regenerator.default.mark(function _callee4(url) { var _url_1$parse4, pathname, route; return _regenerator.default.wrap(function _callee4$(_context4) { while (1) { switch (_context4.prev = _context4.next) { case 0: if (false) {} return _context4.abrupt("return"); case 2: _url_1$parse4 = url_1.parse(url), pathname = _url_1$parse4.pathname; route = toRoute(pathname); return _context4.abrupt("return", this.pageLoader.prefetch(route)); case 5: case "end": return _context4.stop(); } } }, _callee4, this); })); function prefetch(_x10) { return _prefetch.apply(this, arguments); } return prefetch; }() }, { key: "fetchComponent", value: function () { var _fetchComponent = (0, _asyncToGenerator2.default)( /*#__PURE__*/ _regenerator.default.mark(function _callee5(route, as) { var cancelled, cancel, Component, error; return _regenerator.default.wrap(function _callee5$(_context5) { while (1) { switch (_context5.prev = _context5.next) { case 0: cancelled = false; cancel = this.componentLoadCancel = function () { cancelled = true; }; _context5.next = 4; return this.fetchRoute(route); case 4: Component = _context5.sent; if (!cancelled) { _context5.next = 9; break; } error = new Error("Abort fetching component for route: \"".concat(route, "\"")); error.cancelled = true; throw error; case 9: if (cancel === this.componentLoadCancel) { this.componentLoadCancel = null; } return _context5.abrupt("return", Component); case 11: case "end": return _context5.stop(); } } }, _callee5, this); })); function fetchComponent(_x11, _x12) { return _fetchComponent.apply(this, arguments); } return fetchComponent; }() }, { key: "getInitialProps", value: function () { var _getInitialProps = (0, _asyncToGenerator2.default)( /*#__PURE__*/ _regenerator.default.mark(function _callee6(Component, ctx) { var cancelled, cancel, App, props, err; return _regenerator.default.wrap(function _callee6$(_context6) { while (1) { switch (_context6.prev = _context6.next) { case 0: cancelled = false; cancel = function cancel() { cancelled = true; }; this.componentLoadCancel = cancel; App = this.components['/_app'].Component; _context6.next = 6; return utils_1.loadGetInitialProps(App, { Component: Component, router: this, ctx: ctx }); case 6: props = _context6.sent; if (cancel === this.componentLoadCancel) { this.componentLoadCancel = null; } if (!cancelled) { _context6.next = 12; break; } err = new Error('Loading initial props cancelled'); err.cancelled = true; throw err; case 12: return _context6.abrupt("return", props); case 13: case "end": return _context6.stop(); } } }, _callee6, this); })); function getInitialProps(_x13, _x14) { return _getInitialProps.apply(this, arguments); } return getInitialProps; }() }, { key: "fetchRoute", value: function () { var _fetchRoute = (0, _asyncToGenerator2.default)( /*#__PURE__*/ _regenerator.default.mark(function _callee7(route) { return _regenerator.default.wrap(function _callee7$(_context7) { while (1) { switch (_context7.prev = _context7.next) { case 0: return _context7.abrupt("return", this.pageLoader.loadPage(route)); case 1: case "end": return _context7.stop(); } } }, _callee7, this); })); function fetchRoute(_x15) { return _fetchRoute.apply(this, arguments); } return fetchRoute; }() }, { key: "abortComponentLoad", value: function abortComponentLoad(as) { if (this.componentLoadCancel) { Router.events.emit('routeChangeError', new Error('Route Cancelled'), as); 
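// Cancellation here is cooperative: fetchComponent/getInitialProps each stash
// a closure in this.componentLoadCancel that flips a local `cancelled` flag.
// Invoking it below does not interrupt the in-flight load; instead, the load
// notices the flag once its awaited work resolves and throws an error marked
// cancelled = true.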
this.componentLoadCancel(); this.componentLoadCancel = null; } } }, { key: "notify", value: function notify(data) { var App = this.components['/_app'].Component; this.subscriptions.forEach(function (fn) { return fn((0, _assign.default)({}, data, { App: App })); }); } }, { key: "subscribe", value: function subscribe(fn) { var _this2 = this; this.subscriptions.add(fn); return function () { return _this2.subscriptions.delete(fn); }; } }], [{ key: "_rewriteUrlForNextExport", value: function _rewriteUrlForNextExport(url) { var _url$split = url.split('#'), _url$split2 = (0, _slicedToArray2.default)(_url$split, 2), hash = _url$split2[1]; url = url.replace(/#.*/, ''); var _url$split3 = url.split('?'), _url$split4 = (0, _slicedToArray2.default)(_url$split3, 2), path = _url$split4[0], qs = _url$split4[1]; path = path.replace(/\/$/, ''); var newPath = path; // Append a trailing slash if this path does not have an extension if (!/\.[^/]+\/?$/.test(path)) { newPath = "".concat(path, "/"); } if (qs) { newPath = "".concat(newPath, "?").concat(qs); } if (hash) { newPath = "".concat(newPath, "#").concat(hash); } return newPath; } }]); return Router; }(); Router.events = mitt_1.default(); exports.default = Router; function toRoute(path) { return path.replace(/\/$/, '') || '/'; } /***/ }), /***/ "./node_modules/next-server/dist/lib/router/shallow-equals.js": /*!********************************************************************!*\ !*** ./node_modules/next-server/dist/lib/router/shallow-equals.js ***! \********************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); function shallowEquals(a, b) { for (var i in a) { if (b[i] !== a[i]) return false; } for (var _i in b) { if (b[_i] !== a[_i]) return false; } return true; } exports.default = shallowEquals; /***/ }), /***/ "./node_modules/next-server/dist/lib/utils.js": /*!****************************************************!*\ !*** ./node_modules/next-server/dist/lib/utils.js ***! \****************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var _interopRequireDefault = __webpack_require__(/*! @babel/runtime-corejs2/helpers/interopRequireDefault */ "./node_modules/@babel/runtime-corejs2/helpers/interopRequireDefault.js"); var _regenerator = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/regenerator */ "./node_modules/@babel/runtime-corejs2/regenerator/index.js")); var _keys = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/core-js/object/keys */ "./node_modules/@babel/runtime-corejs2/core-js/object/keys.js")); var _typeof2 = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/helpers/esm/typeof */ "./node_modules/@babel/runtime-corejs2/helpers/esm/typeof.js")); var _asyncToGenerator2 = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/helpers/esm/asyncToGenerator */ "./node_modules/@babel/runtime-corejs2/helpers/esm/asyncToGenerator.js")); Object.defineProperty(exports, "__esModule", { value: true }); var url_1 = __webpack_require__(/*! 
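// Usage sketch for execOnce defined below (illustrative):
//   var warnOnce = execOnce(console.warn);
//   warnOnce('deprecated API');  // warns once
//   warnOnce('deprecated API');  // every later call is a no-op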
url */ "./node_modules/url/url.js"); function execOnce(fn) { var _this = this; var used = false; return function () { if (!used) { used = true; for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { args[_key] = arguments[_key]; } fn.apply(_this, args); } }; } exports.execOnce = execOnce; function getLocationOrigin() { var _window$location = window.location, protocol = _window$location.protocol, hostname = _window$location.hostname, port = _window$location.port; return "".concat(protocol, "//").concat(hostname).concat(port ? ':' + port : ''); } exports.getLocationOrigin = getLocationOrigin; function getURL() { var href = window.location.href; var origin = getLocationOrigin(); return href.substring(origin.length); } exports.getURL = getURL; function getDisplayName(Component) { if (typeof Component === 'string') { return Component; } return Component.displayName || Component.name || 'Unknown'; } exports.getDisplayName = getDisplayName; function isResSent(res) { return res.finished || res.headersSent; } exports.isResSent = isResSent; function loadGetInitialProps(_x, _x2) { return _loadGetInitialProps.apply(this, arguments); } function _loadGetInitialProps() { _loadGetInitialProps = (0, _asyncToGenerator2.default)( /*#__PURE__*/ _regenerator.default.mark(function _callee(Component, ctx) { var message, props, _message; return _regenerator.default.wrap(function _callee$(_context) { while (1) { switch (_context.prev = _context.next) { case 0: if (false) {} if (!(Component.prototype && Component.prototype.getInitialProps)) { _context.next = 4; break; } message = "\"".concat(getDisplayName(Component), ".getInitialProps()\" is defined as an instance method - visit https://err.sh/zeit/next.js/get-initial-props-as-an-instance-method for more information."); throw new Error(message); case 4: if (Component.getInitialProps) { _context.next = 6; break; } return _context.abrupt("return", {}); case 6: _context.next = 8; return Component.getInitialProps(ctx); case 8: props = _context.sent; if (!(ctx.res && isResSent(ctx.res))) { _context.next = 11; break; } return _context.abrupt("return", props); case 11: if (props) { _context.next = 14; break; } _message = "\"".concat(getDisplayName(Component), ".getInitialProps()\" should resolve to an object. 
But found \"").concat(props, "\" instead."); throw new Error(_message); case 14: return _context.abrupt("return", props); case 15: case "end": return _context.stop(); } } }, _callee); })); return _loadGetInitialProps.apply(this, arguments); } exports.loadGetInitialProps = loadGetInitialProps; exports.urlObjectKeys = ['auth', 'hash', 'host', 'hostname', 'href', 'path', 'pathname', 'port', 'protocol', 'query', 'search', 'slashes']; function formatWithValidation(url, options) { if (true) { if (url !== null && (0, _typeof2.default)(url) === 'object') { (0, _keys.default)(url).forEach(function (key) { if (!exports.urlObjectKeys.includes(key)) { console.warn("Unknown key passed via urlObject into url.format: ".concat(key)); } }); } } return url_1.format(url, options); } exports.formatWithValidation = formatWithValidation; /***/ }), /***/ "./node_modules/next/dist/build/webpack/loaders/next-client-pages-loader.js?page=%2Fsobre&absolutePagePath=%2FUsers%2Fwillian%2FCursos%2Fnextjs%2Fpages%2Fsobre.js!./": /*!*************************************************************************************************************************************************************************!*\ !*** ./node_modules/next/dist/build/webpack/loaders/next-client-pages-loader.js?page=%2Fsobre&absolutePagePath=%2FUsers%2Fwillian%2FCursos%2Fnextjs%2Fpages%2Fsobre.js ***! \*************************************************************************************************************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { (window.__NEXT_P=window.__NEXT_P||[]).push(["/sobre", function() { var page = __webpack_require__(/*! ./pages/sobre.js */ "./pages/sobre.js") if(true) { module.hot.accept(/*! ./pages/sobre.js */ "./pages/sobre.js", function() { if(!next.router.components["/sobre"]) return var updatedPage = __webpack_require__(/*! ./pages/sobre.js */ "./pages/sobre.js") next.router.update("/sobre", updatedPage.default || updatedPage) }) } return { page: page.default || page } }]); /***/ }), /***/ "./node_modules/next/dist/client/link.js": /*!***********************************************!*\ !*** ./node_modules/next/dist/client/link.js ***! \***********************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; /* global __NEXT_DATA__ */ var _interopRequireDefault = __webpack_require__(/*! @babel/runtime-corejs2/helpers/interopRequireDefault */ "./node_modules/@babel/runtime-corejs2/helpers/interopRequireDefault.js"); var _stringify = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/core-js/json/stringify */ "./node_modules/@babel/runtime-corejs2/core-js/json/stringify.js")); var _typeof2 = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/helpers/typeof */ "./node_modules/@babel/runtime-corejs2/helpers/typeof.js")); var _classCallCheck2 = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/helpers/classCallCheck */ "./node_modules/@babel/runtime-corejs2/helpers/classCallCheck.js")); var _createClass2 = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/helpers/createClass */ "./node_modules/@babel/runtime-corejs2/helpers/createClass.js")); var _possibleConstructorReturn2 = _interopRequireDefault(__webpack_require__(/*! 
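// Usage sketch for the <Link> component this module exports (illustrative;
// shown as createElement calls since this bundle is compiled output):
//   react_1.default.createElement(
//     Link, { href: '/about', prefetch: true },
//     react_1.default.createElement('a', null, 'About us')
//   );
// Clicking the rendered <a> runs linkClicked below, which performs
// client-side navigation via the router instead of a full page load.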
@babel/runtime-corejs2/helpers/possibleConstructorReturn */ "./node_modules/@babel/runtime-corejs2/helpers/possibleConstructorReturn.js")); var _getPrototypeOf2 = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/helpers/getPrototypeOf */ "./node_modules/@babel/runtime-corejs2/helpers/getPrototypeOf.js")); var _inherits2 = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/helpers/inherits */ "./node_modules/@babel/runtime-corejs2/helpers/inherits.js")); var __importStar = void 0 && (void 0).__importStar || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) { if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; } result["default"] = mod; return result; }; var __importDefault = void 0 && (void 0).__importDefault || function (mod) { return mod && mod.__esModule ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); var url_1 = __webpack_require__(/*! url */ "./node_modules/url/url.js"); var react_1 = __importStar(__webpack_require__(/*! react */ "./node_modules/react/index.js")); var prop_types_1 = __importDefault(__webpack_require__(/*! prop-types */ "./node_modules/prop-types/index.js")); var router_1 = __importStar(__webpack_require__(/*! next/router */ "./node_modules/next/router.js")); var utils_1 = __webpack_require__(/*! next-server/dist/lib/utils */ "./node_modules/next-server/dist/lib/utils.js"); function isLocal(href) { var url = url_1.parse(href, false, true); var origin = url_1.parse(utils_1.getLocationOrigin(), false, true); return !url.host || url.protocol === origin.protocol && url.host === origin.host; } function memoizedFormatUrl(formatUrl) { var lastHref = null; var lastAs = null; var lastResult = null; return function (href, as) { if (href === lastHref && as === lastAs) { return lastResult; } var result = formatUrl(href, as); lastHref = href; lastAs = as; lastResult = result; return result; }; } var Link = /*#__PURE__*/ function (_react_1$Component) { (0, _inherits2.default)(Link, _react_1$Component); function Link() { var _this; (0, _classCallCheck2.default)(this, Link); _this = (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(Link).apply(this, arguments)); // The function is memoized so that no extra lifecycles are needed // as per https://reactjs.org/blog/2018/06/07/you-probably-dont-need-derived-state.html _this.formatUrls = memoizedFormatUrl(function (href, asHref) { return { href: href && (0, _typeof2.default)(href) === 'object' ? utils_1.formatWithValidation(href) : href, as: asHref && (0, _typeof2.default)(asHref) === 'object' ? utils_1.formatWithValidation(asHref) : asHref }; }); _this.linkClicked = function (e) { var _e$currentTarget = e.currentTarget, nodeName = _e$currentTarget.nodeName, target = _e$currentTarget.target; if (nodeName === 'A' && (target && target !== '_self' || e.metaKey || e.ctrlKey || e.shiftKey || e.nativeEvent && e.nativeEvent.which === 2)) { // ignore click for new tab / new window behavior return; } var _this$formatUrls = _this.formatUrls(_this.props.href, _this.props.as), href = _this$formatUrls.href, as = _this$formatUrls.as; if (!isLocal(href)) { // ignore click if it's outside our scope return; } var pathname = window.location.pathname; href = url_1.resolve(pathname, href); as = as ? 
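// url_1.resolve makes href/as absolute against the current pathname, e.g.
// (illustrative): resolve('/blog/first-post', '/about') gives '/about', while
// resolve('/blog/first-post', 'second-post') gives '/blog/second-post'.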
url_1.resolve(pathname, as) : href; e.preventDefault(); // avoid scroll for urls with anchor refs var scroll = _this.props.scroll; if (scroll == null) { scroll = as.indexOf('#') < 0; } // replace state instead of push if prop is present router_1.default[_this.props.replace ? 'replace' : 'push'](href, as, { shallow: _this.props.shallow }).then(function (success) { if (!success) return; if (scroll) { window.scrollTo(0, 0); document.body.focus(); } }).catch(function (err) { if (_this.props.onError) _this.props.onError(err); }); }; return _this; } (0, _createClass2.default)(Link, [{ key: "componentDidMount", value: function componentDidMount() { this.prefetch(); } }, { key: "componentDidUpdate", value: function componentDidUpdate(prevProps) { if ((0, _stringify.default)(this.props.href) !== (0, _stringify.default)(prevProps.href)) { this.prefetch(); } } }, { key: "prefetch", value: function prefetch() { if (!this.props.prefetch) return; if (typeof window === 'undefined') return; // Prefetch the JSON page if asked (only in the client) var pathname = window.location.pathname; var _this$formatUrls2 = this.formatUrls(this.props.href, this.props.as), parsedHref = _this$formatUrls2.href; var href = url_1.resolve(pathname, parsedHref); router_1.default.prefetch(href); } }, { key: "render", value: function render() { var _this2 = this; var children = this.props.children; var _this$formatUrls3 = this.formatUrls(this.props.href, this.props.as), href = _this$formatUrls3.href, as = _this$formatUrls3.as; // Deprecated. Warning shown by propType check. If the childen provided is a string (<Link>example</Link>) we wrap it in an <a> tag if (typeof children === 'string') { children = react_1.default.createElement("a", null, children); } // This will return the first child, if multiple are provided it will throw an error var child = react_1.Children.only(children); var props = { onClick: function onClick(e) { if (child.props && typeof child.props.onClick === 'function') { child.props.onClick(e); } if (!e.defaultPrevented) { _this2.linkClicked(e); } } }; // If child is an <a> tag and doesn't have a href attribute, or if the 'passHref' property is // defined, we specify the current 'href', so that repetition is not needed by the user if (this.props.passHref || child.type === 'a' && !('href' in child.props)) { props.href = as || href; } // Add the ending slash to the paths. So, we can serve the // "<page>/index.html" directly. if (props.href && typeof __NEXT_DATA__ !== 'undefined' && __NEXT_DATA__.nextExport) { props.href = router_1.Router._rewriteUrlForNextExport(props.href); } return react_1.default.cloneElement(child, props); } }]); return Link; }(react_1.Component); if (true) { var warn = utils_1.execOnce(console.error); // This module gets removed by webpack.IgnorePlugin var exact = __webpack_require__(/*! prop-types-exact */ "./node_modules/prop-types-exact/build/index.js"); Link.propTypes = exact({ href: prop_types_1.default.oneOfType([prop_types_1.default.string, prop_types_1.default.object]).isRequired, as: prop_types_1.default.oneOfType([prop_types_1.default.string, prop_types_1.default.object]), prefetch: prop_types_1.default.bool, replace: prop_types_1.default.bool, shallow: prop_types_1.default.bool, passHref: prop_types_1.default.bool, scroll: prop_types_1.default.bool, children: prop_types_1.default.oneOfType([prop_types_1.default.element, function (props, propName) { var value = props[propName]; if (typeof value === 'string') { warn("Warning: You're using a string directly inside <Link>. 
This usage has been deprecated. Please add an <a> tag as child of <Link>"); } return null; }]).isRequired }); } exports.default = Link; /***/ }), /***/ "./node_modules/next/dist/client/router.js": /*!*************************************************!*\ !*** ./node_modules/next/dist/client/router.js ***! \*************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var _interopRequireDefault = __webpack_require__(/*! @babel/runtime-corejs2/helpers/interopRequireDefault */ "./node_modules/@babel/runtime-corejs2/helpers/interopRequireDefault.js"); var _assign = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/core-js/object/assign */ "./node_modules/@babel/runtime-corejs2/core-js/object/assign.js")); var _typeof2 = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/helpers/typeof */ "./node_modules/@babel/runtime-corejs2/helpers/typeof.js")); var _construct2 = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/helpers/construct */ "./node_modules/@babel/runtime-corejs2/helpers/construct.js")); var _defineProperty = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/core-js/object/define-property */ "./node_modules/@babel/runtime-corejs2/core-js/object/define-property.js")); var __importDefault = void 0 && (void 0).__importDefault || function (mod) { return mod && mod.__esModule ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); /* global window */ var router_1 = __importDefault(__webpack_require__(/*! next-server/dist/lib/router/router */ "./node_modules/next-server/dist/lib/router/router.js")); var SingletonRouter = { router: null, readyCallbacks: [], ready: function ready(cb) { if (this.router) return cb(); if (typeof window !== 'undefined') { this.readyCallbacks.push(cb); } } }; // Create public properties and methods of the router in the SingletonRouter var urlPropertyFields = ['pathname', 'route', 'query', 'asPath']; var propertyFields = ['components']; var routerEvents = ['routeChangeStart', 'beforeHistoryChange', 'routeChangeComplete', 'routeChangeError', 'hashChangeStart', 'hashChangeComplete']; var coreMethodFields = ['push', 'replace', 'reload', 'back', 'prefetch', 'beforePopState']; // Events is a static property on the router, the router doesn't have to be initialized to use it Object.defineProperty(SingletonRouter, 'events', { get: function get() { return router_1.default.events; } }); propertyFields.concat(urlPropertyFields).forEach(function (field) { // Here we need to use Object.defineProperty because, we need to return // the property assigned to the actual router // The value might get changed as we change routes and this is the // proper way to access it (0, _defineProperty.default)(SingletonRouter, field, { get: function get() { throwIfNoRouter(); return SingletonRouter.router[field]; } }); }); coreMethodFields.forEach(function (field) { SingletonRouter[field] = function () { var _SingletonRouter$rout; throwIfNoRouter(); return (_SingletonRouter$rout = SingletonRouter.router)[field].apply(_SingletonRouter$rout, arguments); }; }); routerEvents.forEach(function (event) { SingletonRouter.ready(function () { router_1.default.events.on(event, function () { var eventField = "on".concat(event.charAt(0).toUpperCase()).concat(event.substring(1)); if (SingletonRouter[eventField]) { try { SingletonRouter[eventField].apply(SingletonRouter, arguments); } catch (err) { 
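// The eventField convention above maps each router event to an optional
// handler property on the singleton, e.g. (hypothetical handlers):
//   Router.onRouteChangeStart = function (url) { showSpinner(); };
//   Router.onRouteChangeComplete = function (url) { hideSpinner(); };
// Errors thrown by such handlers are caught and logged here rather than
// being allowed to break routing.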
console.error("Error when running the Router event: ".concat(eventField)); console.error("".concat(err.message, "\n").concat(err.stack)); } } }); }); }); function throwIfNoRouter() { if (!SingletonRouter.router) { var message = 'No router instance found.\n' + 'You should only use "next/router" inside the client side of your app.\n'; throw new Error(message); } } // Export the SingletonRouter and this is the public API. exports.default = SingletonRouter; // Reexport the withRoute HOC var with_router_1 = __webpack_require__(/*! ./with-router */ "./node_modules/next/dist/client/with-router.js"); exports.withRouter = with_router_1.default; // INTERNAL APIS // ------------- // (do not use following exports inside the app) // Create a router and assign it as the singleton instance. // This is used in client side when we are initilizing the app. // This should **not** use inside the server. exports.createRouter = function () { for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { args[_key] = arguments[_key]; } SingletonRouter.router = (0, _construct2.default)(router_1.default, args); SingletonRouter.readyCallbacks.forEach(function (cb) { return cb(); }); SingletonRouter.readyCallbacks = []; return SingletonRouter.router; }; // Export the actual Router class, which is usually used inside the server exports.Router = router_1.default; // This function is used to create the `withRouter` router instance function makePublicRouterInstance(router) { var instance = {}; for (var _i = 0; _i < urlPropertyFields.length; _i++) { var property = urlPropertyFields[_i]; if ((0, _typeof2.default)(router[property]) === 'object') { instance[property] = (0, _assign.default)({}, router[property]); // makes sure query is not stateful continue; } instance[property] = router[property]; } // Events is a static property on the router, the router doesn't have to be initialized to use it instance.events = router_1.default.events; propertyFields.forEach(function (field) { // Here we need to use Object.defineProperty because, we need to return // the property assigned to the actual router // The value might get changed as we change routes and this is the // proper way to access it (0, _defineProperty.default)(instance, field, { get: function get() { return router[field]; } }); }); coreMethodFields.forEach(function (field) { instance[field] = function () { return router[field].apply(router, arguments); }; }); return instance; } exports.makePublicRouterInstance = makePublicRouterInstance; /***/ }), /***/ "./node_modules/next/dist/client/with-router.js": /*!******************************************************!*\ !*** ./node_modules/next/dist/client/with-router.js ***! \******************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var _interopRequireDefault = __webpack_require__(/*! @babel/runtime-corejs2/helpers/interopRequireDefault */ "./node_modules/@babel/runtime-corejs2/helpers/interopRequireDefault.js"); var _assign = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/core-js/object/assign */ "./node_modules/@babel/runtime-corejs2/core-js/object/assign.js")); var _classCallCheck2 = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/helpers/classCallCheck */ "./node_modules/@babel/runtime-corejs2/helpers/classCallCheck.js")); var _createClass2 = _interopRequireDefault(__webpack_require__(/*! 
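// Usage sketch for withRouter defined below (illustrative; MyNav is a
// hypothetical component):
//   var NavWithRouter = withRouter(function MyNav(props) {
//     return props.router.pathname; // router is injected from React context
//   });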
@babel/runtime-corejs2/helpers/createClass */ "./node_modules/@babel/runtime-corejs2/helpers/createClass.js")); var _possibleConstructorReturn2 = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/helpers/possibleConstructorReturn */ "./node_modules/@babel/runtime-corejs2/helpers/possibleConstructorReturn.js")); var _getPrototypeOf2 = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/helpers/getPrototypeOf */ "./node_modules/@babel/runtime-corejs2/helpers/getPrototypeOf.js")); var _inherits2 = _interopRequireDefault(__webpack_require__(/*! @babel/runtime-corejs2/helpers/inherits */ "./node_modules/@babel/runtime-corejs2/helpers/inherits.js")); var __importStar = void 0 && (void 0).__importStar || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) { if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; } result["default"] = mod; return result; }; var __importDefault = void 0 && (void 0).__importDefault || function (mod) { return mod && mod.__esModule ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); var react_1 = __importStar(__webpack_require__(/*! react */ "./node_modules/react/index.js")); var prop_types_1 = __importDefault(__webpack_require__(/*! prop-types */ "./node_modules/prop-types/index.js")); var hoist_non_react_statics_1 = __importDefault(__webpack_require__(/*! hoist-non-react-statics */ "./node_modules/hoist-non-react-statics/dist/hoist-non-react-statics.cjs.js")); var utils_1 = __webpack_require__(/*! next-server/dist/lib/utils */ "./node_modules/next-server/dist/lib/utils.js"); function withRouter(ComposedComponent) { var displayName = utils_1.getDisplayName(ComposedComponent); var WithRouteWrapper = /*#__PURE__*/ function (_react_1$Component) { (0, _inherits2.default)(WithRouteWrapper, _react_1$Component); function WithRouteWrapper() { (0, _classCallCheck2.default)(this, WithRouteWrapper); return (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(WithRouteWrapper).apply(this, arguments)); } (0, _createClass2.default)(WithRouteWrapper, [{ key: "render", value: function render() { return react_1.default.createElement(ComposedComponent, (0, _assign.default)({ router: this.context.router }, this.props)); } }]); return WithRouteWrapper; }(react_1.Component); WithRouteWrapper.contextTypes = { router: prop_types_1.default.object }; WithRouteWrapper.displayName = "withRouter(".concat(displayName, ")"); return hoist_non_react_statics_1.default(WithRouteWrapper, ComposedComponent); } exports.default = withRouter; /***/ }), /***/ "./node_modules/next/link.js": /*!***********************************!*\ !*** ./node_modules/next/link.js ***! \***********************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__(/*! ./dist/client/link */ "./node_modules/next/dist/client/link.js") /***/ }), /***/ "./node_modules/next/router.js": /*!*************************************!*\ !*** ./node_modules/next/router.js ***! \*************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__(/*! 
./dist/client/router */ "./node_modules/next/dist/client/router.js") /***/ }), /***/ "./node_modules/object-assign/index.js": /*!***************************************************************************************************!*\ !*** delegated ./node_modules/object-assign/index.js from dll-reference dll_55dc4e2ecf7824085104 ***! \***************************************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = (__webpack_require__(/*! dll-reference dll_55dc4e2ecf7824085104 */ "dll-reference dll_55dc4e2ecf7824085104"))("./node_modules/object-assign/index.js"); /***/ }), /***/ "./node_modules/object-keys/implementation.js": /*!****************************************************!*\ !*** ./node_modules/object-keys/implementation.js ***! \****************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var keysShim; if (!Object.keys) { // modified from https://github.com/es-shims/es5-shim var has = Object.prototype.hasOwnProperty; var toStr = Object.prototype.toString; var isArgs = __webpack_require__(/*! ./isArguments */ "./node_modules/object-keys/isArguments.js"); // eslint-disable-line global-require var isEnumerable = Object.prototype.propertyIsEnumerable; var hasDontEnumBug = !isEnumerable.call({ toString: null }, 'toString'); var hasProtoEnumBug = isEnumerable.call(function () {}, 'prototype'); var dontEnums = [ 'toString', 'toLocaleString', 'valueOf', 'hasOwnProperty', 'isPrototypeOf', 'propertyIsEnumerable', 'constructor' ]; var equalsConstructorPrototype = function (o) { var ctor = o.constructor; return ctor && ctor.prototype === o; }; var excludedKeys = { $applicationCache: true, $console: true, $external: true, $frame: true, $frameElement: true, $frames: true, $innerHeight: true, $innerWidth: true, $outerHeight: true, $outerWidth: true, $pageXOffset: true, $pageYOffset: true, $parent: true, $scrollLeft: true, $scrollTop: true, $scrollX: true, $scrollY: true, $self: true, $webkitIndexedDB: true, $webkitStorageInfo: true, $window: true }; var hasAutomationEqualityBug = (function () { /* global window */ if (typeof window === 'undefined') { return false; } for (var k in window) { try { if (!excludedKeys['$' + k] && has.call(window, k) && window[k] !== null && typeof window[k] === 'object') { try { equalsConstructorPrototype(window[k]); } catch (e) { return true; } } } catch (e) { return true; } } return false; }()); var equalsConstructorPrototypeIfNotBuggy = function (o) { /* global window */ if (typeof window === 'undefined' || !hasAutomationEqualityBug) { return equalsConstructorPrototype(o); } try { return equalsConstructorPrototype(o); } catch (e) { return false; } }; keysShim = function keys(object) { var isObject = object !== null && typeof object === 'object'; var isFunction = toStr.call(object) === '[object Function]'; var isArguments = isArgs(object); var isString = isObject && toStr.call(object) === '[object String]'; var theKeys = []; if (!isObject && !isFunction && !isArguments) { throw new TypeError('Object.keys called on a non-object'); } var skipProto = hasProtoEnumBug && isFunction; if (isString && object.length > 0 && !has.call(object, 0)) { for (var i = 0; i < object.length; ++i) { theKeys.push(String(i)); } } if (isArguments && object.length > 0) { for (var j = 0; j < object.length; ++j) { theKeys.push(String(j)); } } else { for (var name in object) { if 
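// (note) This shim only exists for pre-ES5 engines: it walks for..in with a
// hasOwnProperty guard, then patches the old IE DontEnum bug handled just
// below, where own properties shadowing toString/valueOf and friends are
// skipped by enumeration.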
(!(skipProto && name === 'prototype') && has.call(object, name)) { theKeys.push(String(name)); } } } if (hasDontEnumBug) { var skipConstructor = equalsConstructorPrototypeIfNotBuggy(object); for (var k = 0; k < dontEnums.length; ++k) { if (!(skipConstructor && dontEnums[k] === 'constructor') && has.call(object, dontEnums[k])) { theKeys.push(dontEnums[k]); } } } return theKeys; }; } module.exports = keysShim; /***/ }), /***/ "./node_modules/object-keys/index.js": /*!*******************************************!*\ !*** ./node_modules/object-keys/index.js ***! \*******************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var slice = Array.prototype.slice; var isArgs = __webpack_require__(/*! ./isArguments */ "./node_modules/object-keys/isArguments.js"); var origKeys = Object.keys; var keysShim = origKeys ? function keys(o) { return origKeys(o); } : __webpack_require__(/*! ./implementation */ "./node_modules/object-keys/implementation.js"); var originalKeys = Object.keys; keysShim.shim = function shimObjectKeys() { if (Object.keys) { var keysWorksWithArguments = (function () { // Safari 5.0 bug var args = Object.keys(arguments); return args && args.length === arguments.length; }(1, 2)); if (!keysWorksWithArguments) { Object.keys = function keys(object) { // eslint-disable-line func-name-matching if (isArgs(object)) { return originalKeys(slice.call(object)); } return originalKeys(object); }; } } else { Object.keys = keysShim; } return Object.keys || keysShim; }; module.exports = keysShim; /***/ }), /***/ "./node_modules/object-keys/isArguments.js": /*!*************************************************!*\ !*** ./node_modules/object-keys/isArguments.js ***! \*************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var toStr = Object.prototype.toString; module.exports = function isArguments(value) { var str = toStr.call(value); var isArgs = str === '[object Arguments]'; if (!isArgs) { isArgs = str !== '[object Array]' && value !== null && typeof value === 'object' && typeof value.length === 'number' && value.length >= 0 && toStr.call(value.callee) === '[object Function]'; } return isArgs; }; /***/ }), /***/ "./node_modules/object.assign/implementation.js": /*!******************************************************!*\ !*** ./node_modules/object.assign/implementation.js ***! \******************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; // modified from https://github.com/es-shims/es6-shim var keys = __webpack_require__(/*! object-keys */ "./node_modules/object-keys/index.js"); var bind = __webpack_require__(/*! function-bind */ "./node_modules/function-bind/index.js"); var canBeObject = function (obj) { return typeof obj !== 'undefined' && obj !== null; }; var hasSymbols = __webpack_require__(/*! has-symbols/shams */ "./node_modules/has-symbols/shams.js")(); var toObject = Object; var push = bind.call(Function.call, Array.prototype.push); var propIsEnumerable = bind.call(Function.call, Object.prototype.propertyIsEnumerable); var originalGetSymbols = hasSymbols ? 
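// Behaviour sketch for the assign implementation below (illustrative):
//   assign({}, { a: 1 }, { b: 2 })  // gives { a: 1, b: 2 }
// Own enumerable string keys are copied in source order; when the runtime
// supports symbols, own enumerable symbol keys are appended and copied too.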
Object.getOwnPropertySymbols : null; module.exports = function assign(target, source1) { if (!canBeObject(target)) { throw new TypeError('target must be an object'); } var objTarget = toObject(target); var s, source, i, props, syms, value, key; for (s = 1; s < arguments.length; ++s) { source = toObject(arguments[s]); props = keys(source); var getSymbols = hasSymbols && (Object.getOwnPropertySymbols || originalGetSymbols); if (getSymbols) { syms = getSymbols(source); for (i = 0; i < syms.length; ++i) { key = syms[i]; if (propIsEnumerable(source, key)) { push(props, key); } } } for (i = 0; i < props.length; ++i) { key = props[i]; value = source[key]; if (propIsEnumerable(source, key)) { objTarget[key] = value; } } } return objTarget; }; /***/ }), /***/ "./node_modules/object.assign/index.js": /*!*********************************************!*\ !*** ./node_modules/object.assign/index.js ***! \*********************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var defineProperties = __webpack_require__(/*! define-properties */ "./node_modules/define-properties/index.js"); var implementation = __webpack_require__(/*! ./implementation */ "./node_modules/object.assign/implementation.js"); var getPolyfill = __webpack_require__(/*! ./polyfill */ "./node_modules/object.assign/polyfill.js"); var shim = __webpack_require__(/*! ./shim */ "./node_modules/object.assign/shim.js"); var polyfill = getPolyfill(); defineProperties(polyfill, { getPolyfill: getPolyfill, implementation: implementation, shim: shim }); module.exports = polyfill; /***/ }), /***/ "./node_modules/object.assign/polyfill.js": /*!************************************************!*\ !*** ./node_modules/object.assign/polyfill.js ***! \************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var implementation = __webpack_require__(/*! ./implementation */ "./node_modules/object.assign/implementation.js"); var lacksProperEnumerationOrder = function () { if (!Object.assign) { return false; } // v8, specifically in node 4.x, has a bug with incorrect property enumeration order // note: this does not detect the bug unless there's 20 characters var str = 'abcdefghijklmnopqrst'; var letters = str.split(''); var map = {}; for (var i = 0; i < letters.length; ++i) { map[letters[i]] = letters[i]; } var obj = Object.assign({}, map); var actual = ''; for (var k in obj) { actual += k; } return str !== actual; }; var assignHasPendingExceptions = function () { if (!Object.assign || !Object.preventExtensions) { return false; } // Firefox 37 still has "pending exception" logic in its Object.assign implementation, // which is 72% slower than our shim, and Firefox 40's native implementation. var thrower = Object.preventExtensions({ 1: 2 }); try { Object.assign(thrower, 'xy'); } catch (e) { return thrower[1] === 'y'; } return false; }; module.exports = function getPolyfill() { if (!Object.assign) { return implementation; } if (lacksProperEnumerationOrder()) { return implementation; } if (assignHasPendingExceptions()) { return implementation; } return Object.assign; }; /***/ }), /***/ "./node_modules/object.assign/shim.js": /*!********************************************!*\ !*** ./node_modules/object.assign/shim.js ***! \********************************************/ /*! 
no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var define = __webpack_require__(/*! define-properties */ "./node_modules/define-properties/index.js"); var getPolyfill = __webpack_require__(/*! ./polyfill */ "./node_modules/object.assign/polyfill.js"); module.exports = function shimAssign() { var polyfill = getPolyfill(); define( Object, { assign: polyfill }, { assign: function () { return Object.assign !== polyfill; } } ); return polyfill; }; /***/ }), /***/ "./node_modules/prop-types-exact/build/helpers/isPlainObject.js": /*!**********************************************************************!*\ !*** ./node_modules/prop-types-exact/build/helpers/isPlainObject.js ***! \**********************************************************************/ /*! no static exports found */ /***/ (function(module, exports) { Object.defineProperty(exports, "__esModule", { value: true }); var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; exports['default'] = isPlainObject; function isPlainObject(x) { return x && (typeof x === 'undefined' ? 'undefined' : _typeof(x)) === 'object' && !Array.isArray(x); } module.exports = exports['default']; //# sourceMappingURL=isPlainObject.js.map /***/ }), /***/ "./node_modules/prop-types-exact/build/index.js": /*!******************************************************!*\ !*** ./node_modules/prop-types-exact/build/index.js ***! \******************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { Object.defineProperty(exports, "__esModule", { value: true }); exports['default'] = forbidExtraProps; var _object = __webpack_require__(/*! object.assign */ "./node_modules/object.assign/index.js"); var _object2 = _interopRequireDefault(_object); var _has = __webpack_require__(/*! has */ "./node_modules/has/src/index.js"); var _has2 = _interopRequireDefault(_has); var _isPlainObject = __webpack_require__(/*! ./helpers/isPlainObject */ "./node_modules/prop-types-exact/build/helpers/isPlainObject.js"); var _isPlainObject2 = _interopRequireDefault(_isPlainObject); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; } function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } var zeroWidthSpace = '\u200B'; var specialProperty = 'prop-types-exact: ' + zeroWidthSpace; var semaphore = typeof Symbol === 'function' && typeof Symbol['for'] === 'function' ? 
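// Usage sketch for forbidExtraProps defined below (illustrative; MyComponent
// and its props are hypothetical):
//   MyComponent.propTypes = forbidExtraProps({ name: PropTypes.string });
// Passing any prop other than `name` then fails validation with a TypeError
// of the form 'MyComponent: unknown props found: extra'.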
Symbol['for'](specialProperty) : /* istanbul ignore next */specialProperty; function brand(fn) { return (0, _object2['default'])(fn, _defineProperty({}, specialProperty, semaphore)); } function isBranded(value) { return value && value[specialProperty] === semaphore; } function forbidExtraProps(propTypes) { if (!(0, _isPlainObject2['default'])(propTypes)) { throw new TypeError('given propTypes must be an object'); } if ((0, _has2['default'])(propTypes, specialProperty) && !isBranded(propTypes[specialProperty])) { throw new TypeError('Against all odds, you created a propType for a prop that uses both the zero-width space and our custom string - which, sadly, conflicts with `prop-types-exact`'); } return (0, _object2['default'])({}, propTypes, _defineProperty({}, specialProperty, brand(function () { function forbidUnknownProps(props, _, componentName) { var unknownProps = Object.keys(props).filter(function (prop) { return !(0, _has2['default'])(propTypes, prop); }); if (unknownProps.length > 0) { return new TypeError(String(componentName) + ': unknown props found: ' + String(unknownProps.join(', '))); } return null; } return forbidUnknownProps; }()))); } module.exports = exports['default']; //# sourceMappingURL=index.js.map /***/ }), /***/ "./node_modules/prop-types/checkPropTypes.js": /*!*********************************************************************************************************!*\ !*** delegated ./node_modules/prop-types/checkPropTypes.js from dll-reference dll_55dc4e2ecf7824085104 ***! \*********************************************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = (__webpack_require__(/*! dll-reference dll_55dc4e2ecf7824085104 */ "dll-reference dll_55dc4e2ecf7824085104"))("./node_modules/prop-types/checkPropTypes.js"); /***/ }), /***/ "./node_modules/prop-types/factoryWithTypeCheckers.js": /*!************************************************************!*\ !*** ./node_modules/prop-types/factoryWithTypeCheckers.js ***! \************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; /** * Copyright (c) 2013-present, Facebook, Inc. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ var assign = __webpack_require__(/*! object-assign */ "./node_modules/object-assign/index.js"); var ReactPropTypesSecret = __webpack_require__(/*! ./lib/ReactPropTypesSecret */ "./node_modules/prop-types/lib/ReactPropTypesSecret.js"); var checkPropTypes = __webpack_require__(/*! ./checkPropTypes */ "./node_modules/prop-types/checkPropTypes.js"); var printWarning = function() {}; if (true) { printWarning = function(text) { var message = 'Warning: ' + text; if (typeof console !== 'undefined') { console.error(message); } try { // --- Welcome to debugging React --- // This error was thrown as a convenience so that you can use this stack // to find the callsite that caused this warning to fire. throw new Error(message); } catch (x) {} }; } function emptyFunctionThatReturnsNull() { return null; } module.exports = function(isValidElement, throwOnDirectAccess) { /* global Symbol */ var ITERATOR_SYMBOL = typeof Symbol === 'function' && Symbol.iterator; var FAUX_ITERATOR_SYMBOL = '@@iterator'; // Before Symbol spec. /** * Returns the iterator method function contained on the iterable object. 
* * Be sure to invoke the function with the iterable as context: * * var iteratorFn = getIteratorFn(myIterable); * if (iteratorFn) { * var iterator = iteratorFn.call(myIterable); * ... * } * * @param {?object} maybeIterable * @return {?function} */ function getIteratorFn(maybeIterable) { var iteratorFn = maybeIterable && (ITERATOR_SYMBOL && maybeIterable[ITERATOR_SYMBOL] || maybeIterable[FAUX_ITERATOR_SYMBOL]); if (typeof iteratorFn === 'function') { return iteratorFn; } } /** * Collection of methods that allow declaration and validation of props that are * supplied to React components. Example usage: * * var Props = require('ReactPropTypes'); * var MyArticle = React.createClass({ * propTypes: { * // An optional string prop named "description". * description: Props.string, * * // A required enum prop named "category". * category: Props.oneOf(['News','Photos']).isRequired, * * // A prop named "dialog" that requires an instance of Dialog. * dialog: Props.instanceOf(Dialog).isRequired * }, * render: function() { ... } * }); * * A more formal specification of how these methods are used: * * type := array|bool|func|object|number|string|oneOf([...])|instanceOf(...) * decl := ReactPropTypes.{type}(.isRequired)? * * Each and every declaration produces a function with the same signature. This * allows the creation of custom validation functions. For example: * * var MyLink = React.createClass({ * propTypes: { * // An optional string or URI prop named "href". * href: function(props, propName, componentName) { * var propValue = props[propName]; * if (propValue != null && typeof propValue !== 'string' && * !(propValue instanceof URI)) { * return new Error( * 'Expected a string or an URI for ' + propName + ' in ' + * componentName * ); * } * } * }, * render: function() {...} * }); * * @internal */ var ANONYMOUS = '<<anonymous>>'; // Important! // Keep this list in sync with production version in `./factoryWithThrowingShims.js`. var ReactPropTypes = { array: createPrimitiveTypeChecker('array'), bool: createPrimitiveTypeChecker('boolean'), func: createPrimitiveTypeChecker('function'), number: createPrimitiveTypeChecker('number'), object: createPrimitiveTypeChecker('object'), string: createPrimitiveTypeChecker('string'), symbol: createPrimitiveTypeChecker('symbol'), any: createAnyTypeChecker(), arrayOf: createArrayOfTypeChecker, element: createElementTypeChecker(), instanceOf: createInstanceTypeChecker, node: createNodeChecker(), objectOf: createObjectOfTypeChecker, oneOf: createEnumTypeChecker, oneOfType: createUnionTypeChecker, shape: createShapeTypeChecker, exact: createStrictShapeTypeChecker, }; /** * inlined Object.is polyfill to avoid requiring consumers ship their own * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is */ /*eslint-disable no-self-compare*/ function is(x, y) { // SameValue algorithm if (x === y) { // Steps 1-5, 7-10 // Steps 6.b-6.e: +0 != -0 return x !== 0 || 1 / x === 1 / y; } else { // Step 6.a: NaN == NaN return x !== x && y !== y; } } /*eslint-enable no-self-compare*/ /** * We use an Error-like object for backward compatibility as people may call * PropTypes directly and inspect their output. However, we don't use real * Errors anymore. We don't inspect their stack anyway, and creating them * is prohibitively expensive if they are created too often, such as what * happens in oneOfType() for any type before the one that matched. 
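 *
 * For orientation, a hand-written validator in the same returning-not-throwing
 * style (a hypothetical sketch, not part of this library):
 *
 *   function nonEmptyString(props, propName, componentName) {
 *     if (typeof props[propName] !== 'string' || props[propName] === '') {
 *       return new Error('`' + propName + '` in `' + componentName +
 *         '` must be a non-empty string.');
 *     }
 *     return null;
 *   }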
*/ function PropTypeError(message) { this.message = message; this.stack = ''; } // Make `instanceof Error` still work for returned errors. PropTypeError.prototype = Error.prototype; function createChainableTypeChecker(validate) { if (true) { var manualPropTypeCallCache = {}; var manualPropTypeWarningCount = 0; } function checkType(isRequired, props, propName, componentName, location, propFullName, secret) { componentName = componentName || ANONYMOUS; propFullName = propFullName || propName; if (secret !== ReactPropTypesSecret) { if (throwOnDirectAccess) { // New behavior only for users of `prop-types` package var err = new Error( 'Calling PropTypes validators directly is not supported by the `prop-types` package. ' + 'Use `PropTypes.checkPropTypes()` to call them. ' + 'Read more at http://fb.me/use-check-prop-types' ); err.name = 'Invariant Violation'; throw err; } else if ( true && typeof console !== 'undefined') { // Old behavior for people using React.PropTypes var cacheKey = componentName + ':' + propName; if ( !manualPropTypeCallCache[cacheKey] && // Avoid spamming the console because they are often not actionable except for lib authors manualPropTypeWarningCount < 3 ) { printWarning( 'You are manually calling a React.PropTypes validation ' + 'function for the `' + propFullName + '` prop on `' + componentName + '`. This is deprecated ' + 'and will throw in the standalone `prop-types` package. ' + 'You may be seeing this warning due to a third-party PropTypes ' + 'library. See https://fb.me/react-warning-dont-call-proptypes ' + 'for details.' ); manualPropTypeCallCache[cacheKey] = true; manualPropTypeWarningCount++; } } } if (props[propName] == null) { if (isRequired) { if (props[propName] === null) { return new PropTypeError('The ' + location + ' `' + propFullName + '` is marked as required ' + ('in `' + componentName + '`, but its value is `null`.')); } return new PropTypeError('The ' + location + ' `' + propFullName + '` is marked as required in ' + ('`' + componentName + '`, but its value is `undefined`.')); } return null; } else { return validate(props, propName, componentName, location, propFullName); } } var chainedCheckType = checkType.bind(null, false); chainedCheckType.isRequired = checkType.bind(null, true); return chainedCheckType; } function createPrimitiveTypeChecker(expectedType) { function validate(props, propName, componentName, location, propFullName, secret) { var propValue = props[propName]; var propType = getPropType(propValue); if (propType !== expectedType) { // `propValue` being instance of, say, date/regexp, pass the 'object' // check, but we can offer a more precise error message here rather than // 'of type `object`'. 
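        // (Illustrative: a Date supplied where PropTypes.string was declared is
        // reported below as "of type `date`" rather than "of type `object`".)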
var preciseType = getPreciseType(propValue); return new PropTypeError('Invalid ' + location + ' `' + propFullName + '` of type ' + ('`' + preciseType + '` supplied to `' + componentName + '`, expected ') + ('`' + expectedType + '`.')); } return null; } return createChainableTypeChecker(validate); } function createAnyTypeChecker() { return createChainableTypeChecker(emptyFunctionThatReturnsNull); } function createArrayOfTypeChecker(typeChecker) { function validate(props, propName, componentName, location, propFullName) { if (typeof typeChecker !== 'function') { return new PropTypeError('Property `' + propFullName + '` of component `' + componentName + '` has invalid PropType notation inside arrayOf.'); } var propValue = props[propName]; if (!Array.isArray(propValue)) { var propType = getPropType(propValue); return new PropTypeError('Invalid ' + location + ' `' + propFullName + '` of type ' + ('`' + propType + '` supplied to `' + componentName + '`, expected an array.')); } for (var i = 0; i < propValue.length; i++) { var error = typeChecker(propValue, i, componentName, location, propFullName + '[' + i + ']', ReactPropTypesSecret); if (error instanceof Error) { return error; } } return null; } return createChainableTypeChecker(validate); } function createElementTypeChecker() { function validate(props, propName, componentName, location, propFullName) { var propValue = props[propName]; if (!isValidElement(propValue)) { var propType = getPropType(propValue); return new PropTypeError('Invalid ' + location + ' `' + propFullName + '` of type ' + ('`' + propType + '` supplied to `' + componentName + '`, expected a single ReactElement.')); } return null; } return createChainableTypeChecker(validate); } function createInstanceTypeChecker(expectedClass) { function validate(props, propName, componentName, location, propFullName) { if (!(props[propName] instanceof expectedClass)) { var expectedClassName = expectedClass.name || ANONYMOUS; var actualClassName = getClassName(props[propName]); return new PropTypeError('Invalid ' + location + ' `' + propFullName + '` of type ' + ('`' + actualClassName + '` supplied to `' + componentName + '`, expected ') + ('instance of `' + expectedClassName + '`.')); } return null; } return createChainableTypeChecker(validate); } function createEnumTypeChecker(expectedValues) { if (!Array.isArray(expectedValues)) { true ? 
printWarning('Invalid argument supplied to oneOf, expected an instance of array.') : undefined; return emptyFunctionThatReturnsNull; } function validate(props, propName, componentName, location, propFullName) { var propValue = props[propName]; for (var i = 0; i < expectedValues.length; i++) { if (is(propValue, expectedValues[i])) { return null; } } var valuesString = JSON.stringify(expectedValues); return new PropTypeError('Invalid ' + location + ' `' + propFullName + '` of value `' + propValue + '` ' + ('supplied to `' + componentName + '`, expected one of ' + valuesString + '.')); } return createChainableTypeChecker(validate); } function createObjectOfTypeChecker(typeChecker) { function validate(props, propName, componentName, location, propFullName) { if (typeof typeChecker !== 'function') { return new PropTypeError('Property `' + propFullName + '` of component `' + componentName + '` has invalid PropType notation inside objectOf.'); } var propValue = props[propName]; var propType = getPropType(propValue); if (propType !== 'object') { return new PropTypeError('Invalid ' + location + ' `' + propFullName + '` of type ' + ('`' + propType + '` supplied to `' + componentName + '`, expected an object.')); } for (var key in propValue) { if (propValue.hasOwnProperty(key)) { var error = typeChecker(propValue, key, componentName, location, propFullName + '.' + key, ReactPropTypesSecret); if (error instanceof Error) { return error; } } } return null; } return createChainableTypeChecker(validate); } function createUnionTypeChecker(arrayOfTypeCheckers) { if (!Array.isArray(arrayOfTypeCheckers)) { true ? printWarning('Invalid argument supplied to oneOfType, expected an instance of array.') : undefined; return emptyFunctionThatReturnsNull; } for (var i = 0; i < arrayOfTypeCheckers.length; i++) { var checker = arrayOfTypeCheckers[i]; if (typeof checker !== 'function') { printWarning( 'Invalid argument supplied to oneOfType. Expected an array of check functions, but ' + 'received ' + getPostfixForTypeWarning(checker) + ' at index ' + i + '.' ); return emptyFunctionThatReturnsNull; } } function validate(props, propName, componentName, location, propFullName) { for (var i = 0; i < arrayOfTypeCheckers.length; i++) { var checker = arrayOfTypeCheckers[i]; if (checker(props, propName, componentName, location, propFullName, ReactPropTypesSecret) == null) { return null; } } return new PropTypeError('Invalid ' + location + ' `' + propFullName + '` supplied to ' + ('`' + componentName + '`.')); } return createChainableTypeChecker(validate); } function createNodeChecker() { function validate(props, propName, componentName, location, propFullName) { if (!isNode(props[propName])) { return new PropTypeError('Invalid ' + location + ' `' + propFullName + '` supplied to ' + ('`' + componentName + '`, expected a ReactNode.')); } return null; } return createChainableTypeChecker(validate); } function createShapeTypeChecker(shapeTypes) { function validate(props, propName, componentName, location, propFullName) { var propValue = props[propName]; var propType = getPropType(propValue); if (propType !== 'object') { return new PropTypeError('Invalid ' + location + ' `' + propFullName + '` of type `' + propType + '` ' + ('supplied to `' + componentName + '`, expected `object`.')); } for (var key in shapeTypes) { var checker = shapeTypes[key]; if (!checker) { continue; } var error = checker(propValue, key, componentName, location, propFullName + '.' 
+ key, ReactPropTypesSecret); if (error) { return error; } } return null; } return createChainableTypeChecker(validate); } function createStrictShapeTypeChecker(shapeTypes) { function validate(props, propName, componentName, location, propFullName) { var propValue = props[propName]; var propType = getPropType(propValue); if (propType !== 'object') { return new PropTypeError('Invalid ' + location + ' `' + propFullName + '` of type `' + propType + '` ' + ('supplied to `' + componentName + '`, expected `object`.')); } // We need to check all keys in case some are required but missing from // props. var allKeys = assign({}, props[propName], shapeTypes); for (var key in allKeys) { var checker = shapeTypes[key]; if (!checker) { return new PropTypeError( 'Invalid ' + location + ' `' + propFullName + '` key `' + key + '` supplied to `' + componentName + '`.' + '\nBad object: ' + JSON.stringify(props[propName], null, ' ') + '\nValid keys: ' + JSON.stringify(Object.keys(shapeTypes), null, ' ') ); } var error = checker(propValue, key, componentName, location, propFullName + '.' + key, ReactPropTypesSecret); if (error) { return error; } } return null; } return createChainableTypeChecker(validate); } function isNode(propValue) { switch (typeof propValue) { case 'number': case 'string': case 'undefined': return true; case 'boolean': return !propValue; case 'object': if (Array.isArray(propValue)) { return propValue.every(isNode); } if (propValue === null || isValidElement(propValue)) { return true; } var iteratorFn = getIteratorFn(propValue); if (iteratorFn) { var iterator = iteratorFn.call(propValue); var step; if (iteratorFn !== propValue.entries) { while (!(step = iterator.next()).done) { if (!isNode(step.value)) { return false; } } } else { // Iterator will provide entry [k,v] tuples rather than values. while (!(step = iterator.next()).done) { var entry = step.value; if (entry) { if (!isNode(entry[1])) { return false; } } } } } else { return false; } return true; default: return false; } } function isSymbol(propType, propValue) { // Native Symbol. if (propType === 'symbol') { return true; } // 19.4.3.5 Symbol.prototype[@@toStringTag] === 'Symbol' if (propValue['@@toStringTag'] === 'Symbol') { return true; } // Fallback for non-spec compliant Symbols which are polyfilled. if (typeof Symbol === 'function' && propValue instanceof Symbol) { return true; } return false; } // Equivalent of `typeof` but with special handling for array and regexp. function getPropType(propValue) { var propType = typeof propValue; if (Array.isArray(propValue)) { return 'array'; } if (propValue instanceof RegExp) { // Old webkits (at least until Android 4.0) return 'function' rather than // 'object' for typeof a RegExp. We'll normalize this here so that /bla/ // passes PropTypes.object. return 'object'; } if (isSymbol(propType, propValue)) { return 'symbol'; } return propType; } // This handles more types than `getPropType`. Only used for error messages. // See `createPrimitiveTypeChecker`. function getPreciseType(propValue) { if (typeof propValue === 'undefined' || propValue === null) { return '' + propValue; } var propType = getPropType(propValue); if (propType === 'object') { if (propValue instanceof Date) { return 'date'; } else if (propValue instanceof RegExp) { return 'regexp'; } } return propType; } // Returns a string that is postfixed to a warning about an invalid type. 
// For example, "undefined" or "of type array" function getPostfixForTypeWarning(value) { var type = getPreciseType(value); switch (type) { case 'array': case 'object': return 'an ' + type; case 'boolean': case 'date': case 'regexp': return 'a ' + type; default: return type; } } // Returns class name of the object, if any. function getClassName(propValue) { if (!propValue.constructor || !propValue.constructor.name) { return ANONYMOUS; } return propValue.constructor.name; } ReactPropTypes.checkPropTypes = checkPropTypes; ReactPropTypes.PropTypes = ReactPropTypes; return ReactPropTypes; }; /***/ }), /***/ "./node_modules/prop-types/index.js": /*!******************************************!*\ !*** ./node_modules/prop-types/index.js ***! \******************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { /** * Copyright (c) 2013-present, Facebook, Inc. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ if (true) { var REACT_ELEMENT_TYPE = (typeof Symbol === 'function' && Symbol.for && Symbol.for('react.element')) || 0xeac7; var isValidElement = function(object) { return typeof object === 'object' && object !== null && object.$$typeof === REACT_ELEMENT_TYPE; }; // By explicitly using `prop-types` you are opting into new development behavior. // http://fb.me/prop-types-in-prod var throwOnDirectAccess = true; module.exports = __webpack_require__(/*! ./factoryWithTypeCheckers */ "./node_modules/prop-types/factoryWithTypeCheckers.js")(isValidElement, throwOnDirectAccess); } else {} /***/ }), /***/ "./node_modules/prop-types/lib/ReactPropTypesSecret.js": /*!*******************************************************************************************************************!*\ !*** delegated ./node_modules/prop-types/lib/ReactPropTypesSecret.js from dll-reference dll_55dc4e2ecf7824085104 ***! \*******************************************************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = (__webpack_require__(/*! dll-reference dll_55dc4e2ecf7824085104 */ "dll-reference dll_55dc4e2ecf7824085104"))("./node_modules/prop-types/lib/ReactPropTypesSecret.js"); /***/ }), /***/ "./node_modules/punycode/punycode.js": /*!*******************************************!*\ !*** ./node_modules/punycode/punycode.js ***! \*******************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { /* WEBPACK VAR INJECTION */(function(module, global) {var __WEBPACK_AMD_DEFINE_RESULT__;/*! https://mths.be/punycode v1.3.2 by @mathias */ ;(function(root) { /** Detect free variables */ var freeExports = true && exports && !exports.nodeType && exports; var freeModule = true && module && !module.nodeType && module; var freeGlobal = typeof global == 'object' && global; if ( freeGlobal.global === freeGlobal || freeGlobal.window === freeGlobal || freeGlobal.self === freeGlobal ) { root = freeGlobal; } /** * The `punycode` object. * @name punycode * @type Object */ var punycode, /** Highest positive signed 32-bit float value */ maxInt = 2147483647, // aka. 
0x7FFFFFFF or 2^31-1 /** Bootstring parameters */ base = 36, tMin = 1, tMax = 26, skew = 38, damp = 700, initialBias = 72, initialN = 128, // 0x80 delimiter = '-', // '\x2D' /** Regular expressions */ regexPunycode = /^xn--/, regexNonASCII = /[^\x20-\x7E]/, // unprintable ASCII chars + non-ASCII chars regexSeparators = /[\x2E\u3002\uFF0E\uFF61]/g, // RFC 3490 separators /** Error messages */ errors = { 'overflow': 'Overflow: input needs wider integers to process', 'not-basic': 'Illegal input >= 0x80 (not a basic code point)', 'invalid-input': 'Invalid input' }, /** Convenience shortcuts */ baseMinusTMin = base - tMin, floor = Math.floor, stringFromCharCode = String.fromCharCode, /** Temporary variable */ key; /*--------------------------------------------------------------------------*/ /** * A generic error utility function. * @private * @param {String} type The error type. * @returns {Error} Throws a `RangeError` with the applicable error message. */ function error(type) { throw RangeError(errors[type]); } /** * A generic `Array#map` utility function. * @private * @param {Array} array The array to iterate over. * @param {Function} callback The function that gets called for every array * item. * @returns {Array} A new array of values returned by the callback function. */ function map(array, fn) { var length = array.length; var result = []; while (length--) { result[length] = fn(array[length]); } return result; } /** * A simple `Array#map`-like wrapper to work with domain name strings or email * addresses. * @private * @param {String} domain The domain name or email address. * @param {Function} callback The function that gets called for every * character. * @returns {Array} A new string of characters returned by the callback * function. */ function mapDomain(string, fn) { var parts = string.split('@'); var result = ''; if (parts.length > 1) { // In email addresses, only the domain name should be punycoded. Leave // the local part (i.e. everything up to `@`) intact. result = parts[0] + '@'; string = parts[1]; } // Avoid `split(regex)` for IE8 compatibility. See #17. string = string.replace(regexSeparators, '\x2E'); var labels = string.split('.'); var encoded = map(labels, fn).join('.'); return result + encoded; } /** * Creates an array containing the numeric code points of each Unicode * character in the string. While JavaScript uses UCS-2 internally, * this function will convert a pair of surrogate halves (each of which * UCS-2 exposes as separate characters) into a single code point, * matching UTF-16. * @see `punycode.ucs2.encode` * @see <https://mathiasbynens.be/notes/javascript-encoding> * @memberOf punycode.ucs2 * @name decode * @param {String} string The Unicode input string (UCS-2). * @returns {Array} The new array of code points. */ function ucs2decode(string) { var output = [], counter = 0, length = string.length, value, extra; while (counter < length) { value = string.charCodeAt(counter++); if (value >= 0xD800 && value <= 0xDBFF && counter < length) { // high surrogate, and there is a next character extra = string.charCodeAt(counter++); if ((extra & 0xFC00) == 0xDC00) { // low surrogate output.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000); } else { // unmatched surrogate; only append this code unit, in case the next // code unit is the high surrogate of a surrogate pair output.push(value); counter--; } } else { output.push(value); } } return output; } /** * Creates a string based on an array of numeric code points. 
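	 * For example (illustrative): `ucs2encode([0x1F600])` yields the surrogate
	 * pair `'\uD83D\uDE00'`, the UCS-2 form of a single astral code point.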
* @see `punycode.ucs2.decode` * @memberOf punycode.ucs2 * @name encode * @param {Array} codePoints The array of numeric code points. * @returns {String} The new Unicode string (UCS-2). */ function ucs2encode(array) { return map(array, function(value) { var output = ''; if (value > 0xFFFF) { value -= 0x10000; output += stringFromCharCode(value >>> 10 & 0x3FF | 0xD800); value = 0xDC00 | value & 0x3FF; } output += stringFromCharCode(value); return output; }).join(''); } /** * Converts a basic code point into a digit/integer. * @see `digitToBasic()` * @private * @param {Number} codePoint The basic numeric code point value. * @returns {Number} The numeric value of a basic code point (for use in * representing integers) in the range `0` to `base - 1`, or `base` if * the code point does not represent a value. */ function basicToDigit(codePoint) { if (codePoint - 48 < 10) { return codePoint - 22; } if (codePoint - 65 < 26) { return codePoint - 65; } if (codePoint - 97 < 26) { return codePoint - 97; } return base; } /** * Converts a digit/integer into a basic code point. * @see `basicToDigit()` * @private * @param {Number} digit The numeric value of a basic code point. * @returns {Number} The basic code point whose value (when used for * representing integers) is `digit`, which needs to be in the range * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is * used; else, the lowercase form is used. The behavior is undefined * if `flag` is non-zero and `digit` has no uppercase form. */ function digitToBasic(digit, flag) { // 0..25 map to ASCII a..z or A..Z // 26..35 map to ASCII 0..9 return digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5); } /** * Bias adaptation function as per section 3.4 of RFC 3492. * http://tools.ietf.org/html/rfc3492#section-3.4 * @private */ function adapt(delta, numPoints, firstTime) { var k = 0; delta = firstTime ? floor(delta / damp) : delta >> 1; delta += floor(delta / numPoints); for (/* no initialization */; delta > baseMinusTMin * tMax >> 1; k += base) { delta = floor(delta / baseMinusTMin); } return floor(k + (baseMinusTMin + 1) * delta / (delta + skew)); } /** * Converts a Punycode string of ASCII-only symbols to a string of Unicode * symbols. * @memberOf punycode * @param {String} input The Punycode string of ASCII-only symbols. * @returns {String} The resulting string of Unicode symbols. */ function decode(input) { // Don't use UCS-2 var output = [], inputLength = input.length, out, i = 0, n = initialN, bias = initialBias, basic, j, index, oldi, w, k, digit, t, /** Cached calculation results */ baseMinusT; // Handle the basic code points: let `basic` be the number of input code // points before the last delimiter, or `0` if there is none, then copy // the first basic code points to the output. basic = input.lastIndexOf(delimiter); if (basic < 0) { basic = 0; } for (j = 0; j < basic; ++j) { // if it's not a basic code point if (input.charCodeAt(j) >= 0x80) { error('not-basic'); } output.push(input.charCodeAt(j)); } // Main decoding loop: start just after the last delimiter if any basic code // points were copied; start at the beginning otherwise. for (index = basic > 0 ? basic + 1 : 0; index < inputLength; /* no final expression */) { // `index` is the index of the next character to be consumed. // Decode a generalized variable-length integer into `delta`, // which gets added to `i`. The overflow checking is easier // if we increase `i` as we go, then subtract off its starting // value at the end to obtain `delta`. 
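			// (Concrete anchor, the library's documented sample: decode('maana-pta')
			// consumes the digits 'pta' here as one generalized integer and splices
			// U+00F1 back into 'maana', yielding 'mañana'.)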
for (oldi = i, w = 1, k = base; /* no condition */; k += base) { if (index >= inputLength) { error('invalid-input'); } digit = basicToDigit(input.charCodeAt(index++)); if (digit >= base || digit > floor((maxInt - i) / w)) { error('overflow'); } i += digit * w; t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias); if (digit < t) { break; } baseMinusT = base - t; if (w > floor(maxInt / baseMinusT)) { error('overflow'); } w *= baseMinusT; } out = output.length + 1; bias = adapt(i - oldi, out, oldi == 0); // `i` was supposed to wrap around from `out` to `0`, // incrementing `n` each time, so we'll fix that now: if (floor(i / out) > maxInt - n) { error('overflow'); } n += floor(i / out); i %= out; // Insert `n` at position `i` of the output output.splice(i++, 0, n); } return ucs2encode(output); } /** * Converts a string of Unicode symbols (e.g. a domain name label) to a * Punycode string of ASCII-only symbols. * @memberOf punycode * @param {String} input The string of Unicode symbols. * @returns {String} The resulting Punycode string of ASCII-only symbols. */ function encode(input) { var n, delta, handledCPCount, basicLength, bias, j, m, q, k, t, currentValue, output = [], /** `inputLength` will hold the number of code points in `input`. */ inputLength, /** Cached calculation results */ handledCPCountPlusOne, baseMinusT, qMinusT; // Convert the input in UCS-2 to Unicode input = ucs2decode(input); // Cache the length inputLength = input.length; // Initialize the state n = initialN; delta = 0; bias = initialBias; // Handle the basic code points for (j = 0; j < inputLength; ++j) { currentValue = input[j]; if (currentValue < 0x80) { output.push(stringFromCharCode(currentValue)); } } handledCPCount = basicLength = output.length; // `handledCPCount` is the number of code points that have been handled; // `basicLength` is the number of basic code points. // Finish the basic string - if it is not empty - with a delimiter if (basicLength) { output.push(delimiter); } // Main encoding loop: while (handledCPCount < inputLength) { // All non-basic code points < n have been handled already. Find the next // larger one: for (m = maxInt, j = 0; j < inputLength; ++j) { currentValue = input[j]; if (currentValue >= n && currentValue < m) { m = currentValue; } } // Increase `delta` enough to advance the decoder's <n,i> state to <m,0>, // but guard against overflow handledCPCountPlusOne = handledCPCount + 1; if (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) { error('overflow'); } delta += (m - n) * handledCPCountPlusOne; n = m; for (j = 0; j < inputLength; ++j) { currentValue = input[j]; if (currentValue < n && ++delta > maxInt) { error('overflow'); } if (currentValue == n) { // Represent delta as a generalized variable-length integer for (q = delta, k = base; /* no condition */; k += base) { t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias); if (q < t) { break; } qMinusT = q - t; baseMinusT = base - t; output.push( stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0)) ); q = floor(qMinusT / baseMinusT); } output.push(stringFromCharCode(digitToBasic(q, 0))); bias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength); delta = 0; ++handledCPCount; } } ++delta; ++n; } return output.join(''); } /** * Converts a Punycode string representing a domain name or an email address * to Unicode. Only the Punycoded parts of the input will be converted, i.e. * it doesn't matter if you call it on a string that has already been * converted to Unicode. 
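	 * For example (the library's documented behavior):
	 * `toUnicode('xn--maana-pta.com')` returns `'mañana.com'`, while an
	 * all-ASCII name such as `'example.com'` passes through unchanged.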
* @memberOf punycode * @param {String} input The Punycoded domain name or email address to * convert to Unicode. * @returns {String} The Unicode representation of the given Punycode * string. */ function toUnicode(input) { return mapDomain(input, function(string) { return regexPunycode.test(string) ? decode(string.slice(4).toLowerCase()) : string; }); } /** * Converts a Unicode string representing a domain name or an email address to * Punycode. Only the non-ASCII parts of the domain name will be converted, * i.e. it doesn't matter if you call it with a domain that's already in * ASCII. * @memberOf punycode * @param {String} input The domain name or email address to convert, as a * Unicode string. * @returns {String} The Punycode representation of the given domain name or * email address. */ function toASCII(input) { return mapDomain(input, function(string) { return regexNonASCII.test(string) ? 'xn--' + encode(string) : string; }); } /*--------------------------------------------------------------------------*/ /** Define the public API */ punycode = { /** * A string representing the current Punycode.js version number. * @memberOf punycode * @type String */ 'version': '1.3.2', /** * An object of methods to convert from JavaScript's internal character * representation (UCS-2) to Unicode code points, and back. * @see <https://mathiasbynens.be/notes/javascript-encoding> * @memberOf punycode * @type Object */ 'ucs2': { 'decode': ucs2decode, 'encode': ucs2encode }, 'decode': decode, 'encode': encode, 'toASCII': toASCII, 'toUnicode': toUnicode }; /** Expose `punycode` */ // Some AMD build optimizers, like r.js, check for specific condition patterns // like the following: if ( true ) { !(__WEBPACK_AMD_DEFINE_RESULT__ = (function() { return punycode; }).call(exports, __webpack_require__, exports, module), __WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__)); } else {} }(this)); /* WEBPACK VAR INJECTION */}.call(this, __webpack_require__(/*! ./../webpack/buildin/module.js */ "./node_modules/webpack/buildin/module.js")(module), __webpack_require__(/*! ./../webpack/buildin/global.js */ "./node_modules/webpack/buildin/global.js"))) /***/ }), /***/ "./node_modules/querystring-es3/decode.js": /*!************************************************!*\ !*** ./node_modules/querystring-es3/decode.js ***! \************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; // Copyright Joyent, Inc. and other Node contributors. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. // If obj.hasOwnProperty has been overridden, then calling // obj.hasOwnProperty(prop) will break. // See: https://github.com/joyent/node/issues/1707 function hasOwnProperty(obj, prop) { return Object.prototype.hasOwnProperty.call(obj, prop); } module.exports = function(qs, sep, eq, options) { sep = sep || '&'; eq = eq || '='; var obj = {}; if (typeof qs !== 'string' || qs.length === 0) { return obj; } var regexp = /\+/g; qs = qs.split(sep); var maxKeys = 1000; if (options && typeof options.maxKeys === 'number') { maxKeys = options.maxKeys; } var len = qs.length; // maxKeys <= 0 means that we should not limit keys count if (maxKeys > 0 && len > maxKeys) { len = maxKeys; } for (var i = 0; i < len; ++i) { var x = qs[i].replace(regexp, '%20'), idx = x.indexOf(eq), kstr, vstr, k, v; if (idx >= 0) { kstr = x.substr(0, idx); vstr = x.substr(idx + 1); } else { kstr = x; vstr = ''; } k = decodeURIComponent(kstr); v = decodeURIComponent(vstr); if (!hasOwnProperty(obj, k)) { obj[k] = v; } else if (isArray(obj[k])) { obj[k].push(v); } else { obj[k] = [obj[k], v]; } } return obj; }; var isArray = Array.isArray || function (xs) { return Object.prototype.toString.call(xs) === '[object Array]'; }; /***/ }), /***/ "./node_modules/querystring-es3/encode.js": /*!************************************************!*\ !*** ./node_modules/querystring-es3/encode.js ***! \************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; // Copyright Joyent, Inc. and other Node contributors. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. var stringifyPrimitive = function(v) { switch (typeof v) { case 'string': return v; case 'boolean': return v ? 'true' : 'false'; case 'number': return isFinite(v) ? 
v : ''; default: return ''; } }; module.exports = function(obj, sep, eq, name) { sep = sep || '&'; eq = eq || '='; if (obj === null) { obj = undefined; } if (typeof obj === 'object') { return map(objectKeys(obj), function(k) { var ks = encodeURIComponent(stringifyPrimitive(k)) + eq; if (isArray(obj[k])) { return map(obj[k], function(v) { return ks + encodeURIComponent(stringifyPrimitive(v)); }).join(sep); } else { return ks + encodeURIComponent(stringifyPrimitive(obj[k])); } }).join(sep); } if (!name) return ''; return encodeURIComponent(stringifyPrimitive(name)) + eq + encodeURIComponent(stringifyPrimitive(obj)); }; var isArray = Array.isArray || function (xs) { return Object.prototype.toString.call(xs) === '[object Array]'; }; function map (xs, f) { if (xs.map) return xs.map(f); var res = []; for (var i = 0; i < xs.length; i++) { res.push(f(xs[i], i)); } return res; } var objectKeys = Object.keys || function (obj) { var res = []; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) res.push(key); } return res; }; /***/ }), /***/ "./node_modules/querystring-es3/index.js": /*!***********************************************!*\ !*** ./node_modules/querystring-es3/index.js ***! \***********************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; exports.decode = exports.parse = __webpack_require__(/*! ./decode */ "./node_modules/querystring-es3/decode.js"); exports.encode = exports.stringify = __webpack_require__(/*! ./encode */ "./node_modules/querystring-es3/encode.js"); /***/ }), /***/ "./node_modules/react-is/cjs/react-is.development.js": /*!***********************************************************!*\ !*** ./node_modules/react-is/cjs/react-is.development.js ***! \***********************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; /** @license React v16.6.1 * react-is.development.js * * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ if (true) { (function() { 'use strict'; Object.defineProperty(exports, '__esModule', { value: true }); // The Symbol used to tag the ReactElement-like types. If there is no native Symbol // nor polyfill, then a plain number is used for performance. var hasSymbol = typeof Symbol === 'function' && Symbol.for; var REACT_ELEMENT_TYPE = hasSymbol ? Symbol.for('react.element') : 0xeac7; var REACT_PORTAL_TYPE = hasSymbol ? Symbol.for('react.portal') : 0xeaca; var REACT_FRAGMENT_TYPE = hasSymbol ? Symbol.for('react.fragment') : 0xeacb; var REACT_STRICT_MODE_TYPE = hasSymbol ? Symbol.for('react.strict_mode') : 0xeacc; var REACT_PROFILER_TYPE = hasSymbol ? Symbol.for('react.profiler') : 0xead2; var REACT_PROVIDER_TYPE = hasSymbol ? Symbol.for('react.provider') : 0xeacd; var REACT_CONTEXT_TYPE = hasSymbol ? Symbol.for('react.context') : 0xeace; var REACT_ASYNC_MODE_TYPE = hasSymbol ? Symbol.for('react.async_mode') : 0xeacf; var REACT_CONCURRENT_MODE_TYPE = hasSymbol ? Symbol.for('react.concurrent_mode') : 0xeacf; var REACT_FORWARD_REF_TYPE = hasSymbol ? Symbol.for('react.forward_ref') : 0xead0; var REACT_SUSPENSE_TYPE = hasSymbol ? Symbol.for('react.suspense') : 0xead1; var REACT_MEMO_TYPE = hasSymbol ? Symbol.for('react.memo') : 0xead3; var REACT_LAZY_TYPE = hasSymbol ? 
Symbol.for('react.lazy') : 0xead4; function isValidElementType(type) { return typeof type === 'string' || typeof type === 'function' || // Note: its typeof might be other than 'symbol' or 'number' if it's a polyfill. type === REACT_FRAGMENT_TYPE || type === REACT_CONCURRENT_MODE_TYPE || type === REACT_PROFILER_TYPE || type === REACT_STRICT_MODE_TYPE || type === REACT_SUSPENSE_TYPE || typeof type === 'object' && type !== null && (type.$$typeof === REACT_LAZY_TYPE || type.$$typeof === REACT_MEMO_TYPE || type.$$typeof === REACT_PROVIDER_TYPE || type.$$typeof === REACT_CONTEXT_TYPE || type.$$typeof === REACT_FORWARD_REF_TYPE); } /** * Forked from fbjs/warning: * https://github.com/facebook/fbjs/blob/e66ba20ad5be433eb54423f2b097d829324d9de6/packages/fbjs/src/__forks__/warning.js * * Only change is we use console.warn instead of console.error, * and do nothing when 'console' is not supported. * This really simplifies the code. * --- * Similar to invariant but only logs a warning if the condition is not met. * This can be used to log issues in development environments in critical * paths. Removing the logging code for production environments will keep the * same logic and follow the same code paths. */ var lowPriorityWarning = function () {}; { var printWarning = function (format) { for (var _len = arguments.length, args = Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) { args[_key - 1] = arguments[_key]; } var argIndex = 0; var message = 'Warning: ' + format.replace(/%s/g, function () { return args[argIndex++]; }); if (typeof console !== 'undefined') { console.warn(message); } try { // --- Welcome to debugging React --- // This error was thrown as a convenience so that you can use this stack // to find the callsite that caused this warning to fire. throw new Error(message); } catch (x) {} }; lowPriorityWarning = function (condition, format) { if (format === undefined) { throw new Error('`lowPriorityWarning(condition, format, ...args)` requires a warning ' + 'message argument'); } if (!condition) { for (var _len2 = arguments.length, args = Array(_len2 > 2 ? 
_len2 - 2 : 0), _key2 = 2; _key2 < _len2; _key2++) { args[_key2 - 2] = arguments[_key2]; } printWarning.apply(undefined, [format].concat(args)); } }; } var lowPriorityWarning$1 = lowPriorityWarning; function typeOf(object) { if (typeof object === 'object' && object !== null) { var $$typeof = object.$$typeof; switch ($$typeof) { case REACT_ELEMENT_TYPE: var type = object.type; switch (type) { case REACT_ASYNC_MODE_TYPE: case REACT_CONCURRENT_MODE_TYPE: case REACT_FRAGMENT_TYPE: case REACT_PROFILER_TYPE: case REACT_STRICT_MODE_TYPE: return type; default: var $$typeofType = type && type.$$typeof; switch ($$typeofType) { case REACT_CONTEXT_TYPE: case REACT_FORWARD_REF_TYPE: case REACT_PROVIDER_TYPE: return $$typeofType; default: return $$typeof; } } case REACT_PORTAL_TYPE: return $$typeof; } } return undefined; } // AsyncMode is deprecated along with isAsyncMode var AsyncMode = REACT_ASYNC_MODE_TYPE; var ConcurrentMode = REACT_CONCURRENT_MODE_TYPE; var ContextConsumer = REACT_CONTEXT_TYPE; var ContextProvider = REACT_PROVIDER_TYPE; var Element = REACT_ELEMENT_TYPE; var ForwardRef = REACT_FORWARD_REF_TYPE; var Fragment = REACT_FRAGMENT_TYPE; var Profiler = REACT_PROFILER_TYPE; var Portal = REACT_PORTAL_TYPE; var StrictMode = REACT_STRICT_MODE_TYPE; var hasWarnedAboutDeprecatedIsAsyncMode = false; // AsyncMode should be deprecated function isAsyncMode(object) { { if (!hasWarnedAboutDeprecatedIsAsyncMode) { hasWarnedAboutDeprecatedIsAsyncMode = true; lowPriorityWarning$1(false, 'The ReactIs.isAsyncMode() alias has been deprecated, ' + 'and will be removed in React 17+. Update your code to use ' + 'ReactIs.isConcurrentMode() instead. It has the exact same API.'); } } return isConcurrentMode(object) || typeOf(object) === REACT_ASYNC_MODE_TYPE; } function isConcurrentMode(object) { return typeOf(object) === REACT_CONCURRENT_MODE_TYPE; } function isContextConsumer(object) { return typeOf(object) === REACT_CONTEXT_TYPE; } function isContextProvider(object) { return typeOf(object) === REACT_PROVIDER_TYPE; } function isElement(object) { return typeof object === 'object' && object !== null && object.$$typeof === REACT_ELEMENT_TYPE; } function isForwardRef(object) { return typeOf(object) === REACT_FORWARD_REF_TYPE; } function isFragment(object) { return typeOf(object) === REACT_FRAGMENT_TYPE; } function isProfiler(object) { return typeOf(object) === REACT_PROFILER_TYPE; } function isPortal(object) { return typeOf(object) === REACT_PORTAL_TYPE; } function isStrictMode(object) { return typeOf(object) === REACT_STRICT_MODE_TYPE; } exports.typeOf = typeOf; exports.AsyncMode = AsyncMode; exports.ConcurrentMode = ConcurrentMode; exports.ContextConsumer = ContextConsumer; exports.ContextProvider = ContextProvider; exports.Element = Element; exports.ForwardRef = ForwardRef; exports.Fragment = Fragment; exports.Profiler = Profiler; exports.Portal = Portal; exports.StrictMode = StrictMode; exports.isValidElementType = isValidElementType; exports.isAsyncMode = isAsyncMode; exports.isConcurrentMode = isConcurrentMode; exports.isContextConsumer = isContextConsumer; exports.isContextProvider = isContextProvider; exports.isElement = isElement; exports.isForwardRef = isForwardRef; exports.isFragment = isFragment; exports.isProfiler = isProfiler; exports.isPortal = isPortal; exports.isStrictMode = isStrictMode; })(); } /***/ }), /***/ "./node_modules/react-is/index.js": /*!****************************************!*\ !*** ./node_modules/react-is/index.js ***! \****************************************/ /*! 
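//
// A minimal usage sketch for the helpers exported above (illustrative only,
// assuming `react` and `react-is` resolve as in a plain CommonJS context):
//
//   var React = require('react');
//   var ReactIs = require('react-is');
//   ReactIs.isElement(React.createElement('div'));                   // true
//   ReactIs.isFragment(React.createElement(React.Fragment));         // true
//   ReactIs.typeOf(React.createElement('div')) === ReactIs.Element;  // true
//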
no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; if (false) {} else { module.exports = __webpack_require__(/*! ./cjs/react-is.development.js */ "./node_modules/react-is/cjs/react-is.development.js"); } /***/ }), /***/ "./node_modules/react/index.js": /*!*******************************************************************************************!*\ !*** delegated ./node_modules/react/index.js from dll-reference dll_55dc4e2ecf7824085104 ***! \*******************************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = (__webpack_require__(/*! dll-reference dll_55dc4e2ecf7824085104 */ "dll-reference dll_55dc4e2ecf7824085104"))("./node_modules/react/index.js"); /***/ }), /***/ "./node_modules/regenerator-runtime/runtime-module.js": /*!************************************************************!*\ !*** ./node_modules/regenerator-runtime/runtime-module.js ***! \************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { /** * Copyright (c) 2014-present, Facebook, Inc. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ // This method of obtaining a reference to the global object needs to be // kept identical to the way it is obtained in runtime.js var g = (function() { return this || (typeof self === "object" && self); })() || Function("return this")(); // Use `getOwnPropertyNames` because not all browsers support calling // `hasOwnProperty` on the global `self` object in a worker. See #183. var hadRuntime = g.regeneratorRuntime && Object.getOwnPropertyNames(g).indexOf("regeneratorRuntime") >= 0; // Save the old regeneratorRuntime in case it needs to be restored later. var oldRuntime = hadRuntime && g.regeneratorRuntime; // Force reevalutation of runtime.js. g.regeneratorRuntime = undefined; module.exports = __webpack_require__(/*! ./runtime */ "./node_modules/regenerator-runtime/runtime.js"); if (hadRuntime) { // Restore the original runtime. g.regeneratorRuntime = oldRuntime; } else { // Remove the global property added by runtime.js. try { delete g.regeneratorRuntime; } catch(e) { g.regeneratorRuntime = undefined; } } /***/ }), /***/ "./node_modules/regenerator-runtime/runtime.js": /*!*****************************************************!*\ !*** ./node_modules/regenerator-runtime/runtime.js ***! \*****************************************************/ /*! no static exports found */ /***/ (function(module, exports) { /** * Copyright (c) 2014-present, Facebook, Inc. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ !(function(global) { "use strict"; var Op = Object.prototype; var hasOwn = Op.hasOwnProperty; var undefined; // More compressible than void 0. var $Symbol = typeof Symbol === "function" ? Symbol : {}; var iteratorSymbol = $Symbol.iterator || "@@iterator"; var asyncIteratorSymbol = $Symbol.asyncIterator || "@@asyncIterator"; var toStringTagSymbol = $Symbol.toStringTag || "@@toStringTag"; var inModule = typeof module === "object"; var runtime = global.regeneratorRuntime; if (runtime) { if (inModule) { // If regeneratorRuntime is defined globally and we're in a module, // make the exports object identical to regeneratorRuntime. 
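      // (Net effect: whichever copy of the runtime evaluates first wins, and
      // any later copy, e.g. from a duplicated dependency, simply aliases it.)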
module.exports = runtime; } // Don't bother evaluating the rest of this file if the runtime was // already defined globally. return; } // Define the runtime globally (as expected by generated code) as either // module.exports (if we're in a module) or a new, empty object. runtime = global.regeneratorRuntime = inModule ? module.exports : {}; function wrap(innerFn, outerFn, self, tryLocsList) { // If outerFn provided and outerFn.prototype is a Generator, then outerFn.prototype instanceof Generator. var protoGenerator = outerFn && outerFn.prototype instanceof Generator ? outerFn : Generator; var generator = Object.create(protoGenerator.prototype); var context = new Context(tryLocsList || []); // The ._invoke method unifies the implementations of the .next, // .throw, and .return methods. generator._invoke = makeInvokeMethod(innerFn, self, context); return generator; } runtime.wrap = wrap; // Try/catch helper to minimize deoptimizations. Returns a completion // record like context.tryEntries[i].completion. This interface could // have been (and was previously) designed to take a closure to be // invoked without arguments, but in all the cases we care about we // already have an existing method we want to call, so there's no need // to create a new function object. We can even get away with assuming // the method takes exactly one argument, since that happens to be true // in every case, so we don't have to touch the arguments object. The // only additional allocation required is the completion record, which // has a stable shape and so hopefully should be cheap to allocate. function tryCatch(fn, obj, arg) { try { return { type: "normal", arg: fn.call(obj, arg) }; } catch (err) { return { type: "throw", arg: err }; } } var GenStateSuspendedStart = "suspendedStart"; var GenStateSuspendedYield = "suspendedYield"; var GenStateExecuting = "executing"; var GenStateCompleted = "completed"; // Returning this object from the innerFn has the same effect as // breaking out of the dispatch switch statement. var ContinueSentinel = {}; // Dummy constructor functions that we use as the .constructor and // .constructor.prototype properties for functions that return Generator // objects. For full spec compliance, you may wish to configure your // minifier not to mangle the names of these two functions. function Generator() {} function GeneratorFunction() {} function GeneratorFunctionPrototype() {} // This is a polyfill for %IteratorPrototype% for environments that // don't natively support it. var IteratorPrototype = {}; IteratorPrototype[iteratorSymbol] = function () { return this; }; var getProto = Object.getPrototypeOf; var NativeIteratorPrototype = getProto && getProto(getProto(values([]))); if (NativeIteratorPrototype && NativeIteratorPrototype !== Op && hasOwn.call(NativeIteratorPrototype, iteratorSymbol)) { // This environment has a native %IteratorPrototype%; use it instead // of the polyfill. IteratorPrototype = NativeIteratorPrototype; } var Gp = GeneratorFunctionPrototype.prototype = Generator.prototype = Object.create(IteratorPrototype); GeneratorFunction.prototype = Gp.constructor = GeneratorFunctionPrototype; GeneratorFunctionPrototype.constructor = GeneratorFunction; GeneratorFunctionPrototype[toStringTagSymbol] = GeneratorFunction.displayName = "GeneratorFunction"; // Helper for defining the .next, .throw, and .return methods of the // Iterator interface in terms of a single ._invoke method. 
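  // (Concretely: gen.next(v), gen.throw(e) and gen.return(v) all become
  // `this._invoke(method, arg)`, whose behavior is supplied per generator by
  // makeInvokeMethod below: one state machine instead of three methods.)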
function defineIteratorMethods(prototype) { ["next", "throw", "return"].forEach(function(method) { prototype[method] = function(arg) { return this._invoke(method, arg); }; }); } runtime.isGeneratorFunction = function(genFun) { var ctor = typeof genFun === "function" && genFun.constructor; return ctor ? ctor === GeneratorFunction || // For the native GeneratorFunction constructor, the best we can // do is to check its .name property. (ctor.displayName || ctor.name) === "GeneratorFunction" : false; }; runtime.mark = function(genFun) { if (Object.setPrototypeOf) { Object.setPrototypeOf(genFun, GeneratorFunctionPrototype); } else { genFun.__proto__ = GeneratorFunctionPrototype; if (!(toStringTagSymbol in genFun)) { genFun[toStringTagSymbol] = "GeneratorFunction"; } } genFun.prototype = Object.create(Gp); return genFun; }; // Within the body of any async function, `await x` is transformed to // `yield regeneratorRuntime.awrap(x)`, so that the runtime can test // `hasOwn.call(value, "__await")` to determine if the yielded value is // meant to be awaited. runtime.awrap = function(arg) { return { __await: arg }; }; function AsyncIterator(generator) { function invoke(method, arg, resolve, reject) { var record = tryCatch(generator[method], generator, arg); if (record.type === "throw") { reject(record.arg); } else { var result = record.arg; var value = result.value; if (value && typeof value === "object" && hasOwn.call(value, "__await")) { return Promise.resolve(value.__await).then(function(value) { invoke("next", value, resolve, reject); }, function(err) { invoke("throw", err, resolve, reject); }); } return Promise.resolve(value).then(function(unwrapped) { // When a yielded Promise is resolved, its final value becomes // the .value of the Promise<{value,done}> result for the // current iteration. result.value = unwrapped; resolve(result); }, function(error) { // If a rejected Promise was yielded, throw the rejection back // into the async generator function so it can be handled there. return invoke("throw", error, resolve, reject); }); } } var previousPromise; function enqueue(method, arg) { function callInvokeWithMethodAndArg() { return new Promise(function(resolve, reject) { invoke(method, arg, resolve, reject); }); } return previousPromise = // If enqueue has been called before, then we want to wait until // all previous Promises have been resolved before calling invoke, // so that results are always delivered in the correct order. If // enqueue has not been called before, then it is important to // call invoke immediately, without waiting on a callback to fire, // so that the async generator function has the opportunity to do // any necessary setup in a predictable way. This predictability // is why the Promise constructor synchronously invokes its // executor callback, and why async functions synchronously // execute code before the first await. Since we implement simple // async functions in terms of async generators, it is especially // important to get this right, even though it requires care. previousPromise ? previousPromise.then( callInvokeWithMethodAndArg, // Avoid propagating failures to Promises returned by later // invocations of the iterator. callInvokeWithMethodAndArg ) : callInvokeWithMethodAndArg(); } // Define the unified helper method that is used to implement .next, // .throw, and .return (see defineIteratorMethods). 
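    // For orientation, the rough shape of (hypothetical) Babel output that
    // drives this queue; a sketch, not something emitted by this bundle:
    //
    //   async function f() { return await g(); }
    //
    // compiles to approximately:
    //
    //   function f() {
    //     return regeneratorRuntime.async(function f$(_context) {
    //       while (1) switch (_context.prev = _context.next) {
    //         case 0: _context.next = 2; return regeneratorRuntime.awrap(g());
    //         case 2: return _context.abrupt("return", _context.sent);
    //         case 3: case "end": return _context.stop();
    //       }
    //     }, null, this);
    //   }
    //
    // so every `await` round-trips through the enqueue/invoke pair here.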
this._invoke = enqueue; } defineIteratorMethods(AsyncIterator.prototype); AsyncIterator.prototype[asyncIteratorSymbol] = function () { return this; }; runtime.AsyncIterator = AsyncIterator; // Note that simple async functions are implemented on top of // AsyncIterator objects; they just return a Promise for the value of // the final result produced by the iterator. runtime.async = function(innerFn, outerFn, self, tryLocsList) { var iter = new AsyncIterator( wrap(innerFn, outerFn, self, tryLocsList) ); return runtime.isGeneratorFunction(outerFn) ? iter // If outerFn is a generator, return the full iterator. : iter.next().then(function(result) { return result.done ? result.value : iter.next(); }); }; function makeInvokeMethod(innerFn, self, context) { var state = GenStateSuspendedStart; return function invoke(method, arg) { if (state === GenStateExecuting) { throw new Error("Generator is already running"); } if (state === GenStateCompleted) { if (method === "throw") { throw arg; } // Be forgiving, per 25.3.3.3.3 of the spec: // https://people.mozilla.org/~jorendorff/es6-draft.html#sec-generatorresume return doneResult(); } context.method = method; context.arg = arg; while (true) { var delegate = context.delegate; if (delegate) { var delegateResult = maybeInvokeDelegate(delegate, context); if (delegateResult) { if (delegateResult === ContinueSentinel) continue; return delegateResult; } } if (context.method === "next") { // Setting context._sent for legacy support of Babel's // function.sent implementation. context.sent = context._sent = context.arg; } else if (context.method === "throw") { if (state === GenStateSuspendedStart) { state = GenStateCompleted; throw context.arg; } context.dispatchException(context.arg); } else if (context.method === "return") { context.abrupt("return", context.arg); } state = GenStateExecuting; var record = tryCatch(innerFn, self, context); if (record.type === "normal") { // If an exception is thrown from innerFn, we leave state === // GenStateExecuting and loop back for another invocation. state = context.done ? GenStateCompleted : GenStateSuspendedYield; if (record.arg === ContinueSentinel) { continue; } return { value: record.arg, done: context.done }; } else if (record.type === "throw") { state = GenStateCompleted; // Dispatch the exception by looping back around to the // context.dispatchException(context.arg) call above. context.method = "throw"; context.arg = record.arg; } } }; } // Call delegate.iterator[context.method](context.arg) and handle the // result, either by returning a { value, done } result from the // delegate iterator, or by modifying context.method and context.arg, // setting context.delegate to null, and returning the ContinueSentinel. function maybeInvokeDelegate(delegate, context) { var method = delegate.iterator[context.method]; if (method === undefined) { // A .throw or .return when the delegate iterator has no .throw // method always terminates the yield* loop. context.delegate = null; if (context.method === "throw") { if (delegate.iterator.return) { // If the delegate iterator has a return method, give it a // chance to clean up. context.method = "return"; context.arg = undefined; maybeInvokeDelegate(delegate, context); if (context.method === "throw") { // If maybeInvokeDelegate(context) changed context.method from // "return" to "throw", let that override the TypeError below. 
return ContinueSentinel; } } context.method = "throw"; context.arg = new TypeError( "The iterator does not provide a 'throw' method"); } return ContinueSentinel; } var record = tryCatch(method, delegate.iterator, context.arg); if (record.type === "throw") { context.method = "throw"; context.arg = record.arg; context.delegate = null; return ContinueSentinel; } var info = record.arg; if (! info) { context.method = "throw"; context.arg = new TypeError("iterator result is not an object"); context.delegate = null; return ContinueSentinel; } if (info.done) { // Assign the result of the finished delegate to the temporary // variable specified by delegate.resultName (see delegateYield). context[delegate.resultName] = info.value; // Resume execution at the desired location (see delegateYield). context.next = delegate.nextLoc; // If context.method was "throw" but the delegate handled the // exception, let the outer generator proceed normally. If // context.method was "next", forget context.arg since it has been // "consumed" by the delegate iterator. If context.method was // "return", allow the original .return call to continue in the // outer generator. if (context.method !== "return") { context.method = "next"; context.arg = undefined; } } else { // Re-yield the result returned by the delegate method. return info; } // The delegate iterator is finished, so forget it and continue with // the outer generator. context.delegate = null; return ContinueSentinel; } // Define Generator.prototype.{next,throw,return} in terms of the // unified ._invoke helper method. defineIteratorMethods(Gp); Gp[toStringTagSymbol] = "Generator"; // A Generator should always return itself as the iterator object when the // @@iterator function is called on it. Some browsers' implementations of the // iterator prototype chain incorrectly implement this, causing the Generator // object to not be returned from this call. This ensures that doesn't happen. // See https://github.com/facebook/regenerator/issues/274 for more details. Gp[iteratorSymbol] = function() { return this; }; Gp.toString = function() { return "[object Generator]"; }; function pushTryEntry(locs) { var entry = { tryLoc: locs[0] }; if (1 in locs) { entry.catchLoc = locs[1]; } if (2 in locs) { entry.finallyLoc = locs[2]; entry.afterLoc = locs[3]; } this.tryEntries.push(entry); } function resetTryEntry(entry) { var record = entry.completion || {}; record.type = "normal"; delete record.arg; entry.completion = record; } function Context(tryLocsList) { // The root entry object (effectively a try statement without a catch // or a finally block) gives us a place to store values thrown from // locations where there is no enclosing try statement. this.tryEntries = [{ tryLoc: "root" }]; tryLocsList.forEach(pushTryEntry, this); this.reset(true); } runtime.keys = function(object) { var keys = []; for (var key in object) { keys.push(key); } keys.reverse(); // Rather than returning an object with a next method, we keep // things simple and return the next function itself. return function next() { while (keys.length) { var key = keys.pop(); if (key in object) { next.value = key; next.done = false; return next; } } // To avoid creating an additional object, we just hang the .value // and .done properties off the next function object itself. This // also ensures that the minifier will not anonymize the function. 
next.done = true; return next; }; }; function values(iterable) { if (iterable) { var iteratorMethod = iterable[iteratorSymbol]; if (iteratorMethod) { return iteratorMethod.call(iterable); } if (typeof iterable.next === "function") { return iterable; } if (!isNaN(iterable.length)) { var i = -1, next = function next() { while (++i < iterable.length) { if (hasOwn.call(iterable, i)) { next.value = iterable[i]; next.done = false; return next; } } next.value = undefined; next.done = true; return next; }; return next.next = next; } } // Return an iterator with no values. return { next: doneResult }; } runtime.values = values; function doneResult() { return { value: undefined, done: true }; } Context.prototype = { constructor: Context, reset: function(skipTempReset) { this.prev = 0; this.next = 0; // Resetting context._sent for legacy support of Babel's // function.sent implementation. this.sent = this._sent = undefined; this.done = false; this.delegate = null; this.method = "next"; this.arg = undefined; this.tryEntries.forEach(resetTryEntry); if (!skipTempReset) { for (var name in this) { // Not sure about the optimal order of these conditions: if (name.charAt(0) === "t" && hasOwn.call(this, name) && !isNaN(+name.slice(1))) { this[name] = undefined; } } } }, stop: function() { this.done = true; var rootEntry = this.tryEntries[0]; var rootRecord = rootEntry.completion; if (rootRecord.type === "throw") { throw rootRecord.arg; } return this.rval; }, dispatchException: function(exception) { if (this.done) { throw exception; } var context = this; function handle(loc, caught) { record.type = "throw"; record.arg = exception; context.next = loc; if (caught) { // If the dispatched exception was caught by a catch block, // then let that catch block handle the exception normally. context.method = "next"; context.arg = undefined; } return !! caught; } for (var i = this.tryEntries.length - 1; i >= 0; --i) { var entry = this.tryEntries[i]; var record = entry.completion; if (entry.tryLoc === "root") { // Exception thrown outside of any try block that could handle // it, so set the completion value of the entire function to // throw the exception. return handle("end"); } if (entry.tryLoc <= this.prev) { var hasCatch = hasOwn.call(entry, "catchLoc"); var hasFinally = hasOwn.call(entry, "finallyLoc"); if (hasCatch && hasFinally) { if (this.prev < entry.catchLoc) { return handle(entry.catchLoc, true); } else if (this.prev < entry.finallyLoc) { return handle(entry.finallyLoc); } } else if (hasCatch) { if (this.prev < entry.catchLoc) { return handle(entry.catchLoc, true); } } else if (hasFinally) { if (this.prev < entry.finallyLoc) { return handle(entry.finallyLoc); } } else { throw new Error("try statement without catch or finally"); } } } }, abrupt: function(type, arg) { for (var i = this.tryEntries.length - 1; i >= 0; --i) { var entry = this.tryEntries[i]; if (entry.tryLoc <= this.prev && hasOwn.call(entry, "finallyLoc") && this.prev < entry.finallyLoc) { var finallyEntry = entry; break; } } if (finallyEntry && (type === "break" || type === "continue") && finallyEntry.tryLoc <= arg && arg <= finallyEntry.finallyLoc) { // Ignore the finally entry if control is not jumping to a // location outside the try/catch block. finallyEntry = null; } var record = finallyEntry ? 
finallyEntry.completion : {}; record.type = type; record.arg = arg; if (finallyEntry) { this.method = "next"; this.next = finallyEntry.finallyLoc; return ContinueSentinel; } return this.complete(record); }, complete: function(record, afterLoc) { if (record.type === "throw") { throw record.arg; } if (record.type === "break" || record.type === "continue") { this.next = record.arg; } else if (record.type === "return") { this.rval = this.arg = record.arg; this.method = "return"; this.next = "end"; } else if (record.type === "normal" && afterLoc) { this.next = afterLoc; } return ContinueSentinel; }, finish: function(finallyLoc) { for (var i = this.tryEntries.length - 1; i >= 0; --i) { var entry = this.tryEntries[i]; if (entry.finallyLoc === finallyLoc) { this.complete(entry.completion, entry.afterLoc); resetTryEntry(entry); return ContinueSentinel; } } }, "catch": function(tryLoc) { for (var i = this.tryEntries.length - 1; i >= 0; --i) { var entry = this.tryEntries[i]; if (entry.tryLoc === tryLoc) { var record = entry.completion; if (record.type === "throw") { var thrown = record.arg; resetTryEntry(entry); } return thrown; } } // The context.catch method must only be called with a location // argument that corresponds to a known catch block. throw new Error("illegal catch attempt"); }, delegateYield: function(iterable, resultName, nextLoc) { this.delegate = { iterator: values(iterable), resultName: resultName, nextLoc: nextLoc }; if (this.method === "next") { // Deliberately forget the last sent value so that we don't // accidentally pass it on to the delegate. this.arg = undefined; } return ContinueSentinel; } }; })( // In sloppy mode, unbound `this` refers to the global object, fallback to // Function constructor if we're in global strict mode. That is sadly a form // of indirect eval which violates Content Security Policy. (function() { return this || (typeof self === "object" && self); })() || Function("return this")() ); /***/ }), /***/ "./node_modules/url/url.js": /*!*********************************!*\ !*** ./node_modules/url/url.js ***! \*********************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; // Copyright Joyent, Inc. and other Node contributors. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. var punycode = __webpack_require__(/*! punycode */ "./node_modules/punycode/punycode.js"); var util = __webpack_require__(/*! 
./util */ "./node_modules/url/util.js"); exports.parse = urlParse; exports.resolve = urlResolve; exports.resolveObject = urlResolveObject; exports.format = urlFormat; exports.Url = Url; function Url() { this.protocol = null; this.slashes = null; this.auth = null; this.host = null; this.port = null; this.hostname = null; this.hash = null; this.search = null; this.query = null; this.pathname = null; this.path = null; this.href = null; } // Reference: RFC 3986, RFC 1808, RFC 2396 // define these here so at least they only have to be // compiled once on the first module load. var protocolPattern = /^([a-z0-9.+-]+:)/i, portPattern = /:[0-9]*$/, // Special case for a simple path URL simplePathPattern = /^(\/\/?(?!\/)[^\?\s]*)(\?[^\s]*)?$/, // RFC 2396: characters reserved for delimiting URLs. // We actually just auto-escape these. delims = ['<', '>', '"', '`', ' ', '\r', '\n', '\t'], // RFC 2396: characters not allowed for various reasons. unwise = ['{', '}', '|', '\\', '^', '`'].concat(delims), // Allowed by RFCs, but cause of XSS attacks. Always escape these. autoEscape = ['\''].concat(unwise), // Characters that are never ever allowed in a hostname. // Note that any invalid chars are also handled, but these // are the ones that are *expected* to be seen, so we fast-path // them. nonHostChars = ['%', '/', '?', ';', '#'].concat(autoEscape), hostEndingChars = ['/', '?', '#'], hostnameMaxLen = 255, hostnamePartPattern = /^[+a-z0-9A-Z_-]{0,63}$/, hostnamePartStart = /^([+a-z0-9A-Z_-]{0,63})(.*)$/, // protocols that can allow "unsafe" and "unwise" chars. unsafeProtocol = { 'javascript': true, 'javascript:': true }, // protocols that never have a hostname. hostlessProtocol = { 'javascript': true, 'javascript:': true }, // protocols that always contain a // bit. slashedProtocol = { 'http': true, 'https': true, 'ftp': true, 'gopher': true, 'file': true, 'http:': true, 'https:': true, 'ftp:': true, 'gopher:': true, 'file:': true }, querystring = __webpack_require__(/*! querystring */ "./node_modules/querystring-es3/index.js"); function urlParse(url, parseQueryString, slashesDenoteHost) { if (url && util.isObject(url) && url instanceof Url) return url; var u = new Url; u.parse(url, parseQueryString, slashesDenoteHost); return u; } Url.prototype.parse = function(url, parseQueryString, slashesDenoteHost) { if (!util.isString(url)) { throw new TypeError("Parameter 'url' must be a string, not " + typeof url); } // Copy chrome, IE, opera backslash-handling behavior. // Back slashes before the query string get converted to forward slashes // See: https://code.google.com/p/chromium/issues/detail?id=25916 var queryIndex = url.indexOf('?'), splitter = (queryIndex !== -1 && queryIndex < url.indexOf('#')) ? '?' : '#', uSplit = url.split(splitter), slashRegex = /\\/g; uSplit[0] = uSplit[0].replace(slashRegex, '/'); url = uSplit.join(splitter); var rest = url; // trim before proceeding. 
// This is to support parse stuff like " http://foo.com \n" rest = rest.trim(); if (!slashesDenoteHost && url.split('#').length === 1) { // Try fast path regexp var simplePath = simplePathPattern.exec(rest); if (simplePath) { this.path = rest; this.href = rest; this.pathname = simplePath[1]; if (simplePath[2]) { this.search = simplePath[2]; if (parseQueryString) { this.query = querystring.parse(this.search.substr(1)); } else { this.query = this.search.substr(1); } } else if (parseQueryString) { this.search = ''; this.query = {}; } return this; } } var proto = protocolPattern.exec(rest); if (proto) { proto = proto[0]; var lowerProto = proto.toLowerCase(); this.protocol = lowerProto; rest = rest.substr(proto.length); } // figure out if it's got a host // user@server is *always* interpreted as a hostname, and url // resolution will treat //foo/bar as host=foo,path=bar because that's // how the browser resolves relative URLs. if (slashesDenoteHost || proto || rest.match(/^\/\/[^@\/]+@[^@\/]+/)) { var slashes = rest.substr(0, 2) === '//'; if (slashes && !(proto && hostlessProtocol[proto])) { rest = rest.substr(2); this.slashes = true; } } if (!hostlessProtocol[proto] && (slashes || (proto && !slashedProtocol[proto]))) { // there's a hostname. // the first instance of /, ?, ;, or # ends the host. // // If there is an @ in the hostname, then non-host chars *are* allowed // to the left of the last @ sign, unless some host-ending character // comes *before* the @-sign. // URLs are obnoxious. // // ex: // http://a@b@c/ => user:a@b host:c // http://a@b?@c => user:a host:c path:/?@c // v0.12 TODO(isaacs): This is not quite how Chrome does things. // Review our test case against browsers more comprehensively. // find the first instance of any hostEndingChars var hostEnd = -1; for (var i = 0; i < hostEndingChars.length; i++) { var hec = rest.indexOf(hostEndingChars[i]); if (hec !== -1 && (hostEnd === -1 || hec < hostEnd)) hostEnd = hec; } // at this point, either we have an explicit point where the // auth portion cannot go past, or the last @ char is the decider. var auth, atSign; if (hostEnd === -1) { // atSign can be anywhere. atSign = rest.lastIndexOf('@'); } else { // atSign must be in auth portion. // http://a@b/c@d => host:b auth:a path:/c@d atSign = rest.lastIndexOf('@', hostEnd); } // Now we have a portion which is definitely the auth. // Pull that off. if (atSign !== -1) { auth = rest.slice(0, atSign); rest = rest.slice(atSign + 1); this.auth = decodeURIComponent(auth); } // the host is the remaining to the left of the first non-host char hostEnd = -1; for (var i = 0; i < nonHostChars.length; i++) { var hec = rest.indexOf(nonHostChars[i]); if (hec !== -1 && (hostEnd === -1 || hec < hostEnd)) hostEnd = hec; } // if we still have not hit it, then the entire thing is a host. if (hostEnd === -1) hostEnd = rest.length; this.host = rest.slice(0, hostEnd); rest = rest.slice(hostEnd); // pull out port. this.parseHost(); // we've indicated that there is a hostname, // so even if it's empty, it has to be present. this.hostname = this.hostname || ''; // if hostname begins with [ and ends with ] // assume that it's an IPv6 address. var ipv6Hostname = this.hostname[0] === '[' && this.hostname[this.hostname.length - 1] === ']'; // validate a little. 
if (!ipv6Hostname) { var hostparts = this.hostname.split(/\./); for (var i = 0, l = hostparts.length; i < l; i++) { var part = hostparts[i]; if (!part) continue; if (!part.match(hostnamePartPattern)) { var newpart = ''; for (var j = 0, k = part.length; j < k; j++) { if (part.charCodeAt(j) > 127) { // we replace non-ASCII char with a temporary placeholder // we need this to make sure size of hostname is not // broken by replacing non-ASCII by nothing newpart += 'x'; } else { newpart += part[j]; } } // we test again with ASCII char only if (!newpart.match(hostnamePartPattern)) { var validParts = hostparts.slice(0, i); var notHost = hostparts.slice(i + 1); var bit = part.match(hostnamePartStart); if (bit) { validParts.push(bit[1]); notHost.unshift(bit[2]); } if (notHost.length) { rest = '/' + notHost.join('.') + rest; } this.hostname = validParts.join('.'); break; } } } } if (this.hostname.length > hostnameMaxLen) { this.hostname = ''; } else { // hostnames are always lower case. this.hostname = this.hostname.toLowerCase(); } if (!ipv6Hostname) { // IDNA Support: Returns a punycoded representation of "domain". // It only converts parts of the domain name that // have non-ASCII characters, i.e. it doesn't matter if // you call it with a domain that already is ASCII-only. this.hostname = punycode.toASCII(this.hostname); } var p = this.port ? ':' + this.port : ''; var h = this.hostname || ''; this.host = h + p; this.href += this.host; // strip [ and ] from the hostname // the host field still retains them, though if (ipv6Hostname) { this.hostname = this.hostname.substr(1, this.hostname.length - 2); if (rest[0] !== '/') { rest = '/' + rest; } } } // now rest is set to the post-host stuff. // chop off any delim chars. if (!unsafeProtocol[lowerProto]) { // First, make 100% sure that any "autoEscape" chars get // escaped, even if encodeURIComponent doesn't think they // need to be. for (var i = 0, l = autoEscape.length; i < l; i++) { var ae = autoEscape[i]; if (rest.indexOf(ae) === -1) continue; var esc = encodeURIComponent(ae); if (esc === ae) { esc = escape(ae); } rest = rest.split(ae).join(esc); } } // chop off from the tail first. var hash = rest.indexOf('#'); if (hash !== -1) { // got a fragment string. this.hash = rest.substr(hash); rest = rest.slice(0, hash); } var qm = rest.indexOf('?'); if (qm !== -1) { this.search = rest.substr(qm); this.query = rest.substr(qm + 1); if (parseQueryString) { this.query = querystring.parse(this.query); } rest = rest.slice(0, qm); } else if (parseQueryString) { // no query string, but parseQueryString still requested this.search = ''; this.query = {}; } if (rest) this.pathname = rest; if (slashedProtocol[lowerProto] && this.hostname && !this.pathname) { this.pathname = '/'; } //to support http.request if (this.pathname || this.search) { var p = this.pathname || ''; var s = this.search || ''; this.path = p + s; } // finally, reconstruct the href based on what has been validated. this.href = this.format(); return this; }; // format a parsed object into a url string function urlFormat(obj) { // ensure it's an object, and not a string url. // If it's an obj, this is a no-op. // this way, you can call url_format() on strings // to clean up potentially wonky urls. 
if (util.isString(obj)) obj = urlParse(obj); if (!(obj instanceof Url)) return Url.prototype.format.call(obj); return obj.format(); } Url.prototype.format = function() { var auth = this.auth || ''; if (auth) { auth = encodeURIComponent(auth); auth = auth.replace(/%3A/i, ':'); auth += '@'; } var protocol = this.protocol || '', pathname = this.pathname || '', hash = this.hash || '', host = false, query = ''; if (this.host) { host = auth + this.host; } else if (this.hostname) { host = auth + (this.hostname.indexOf(':') === -1 ? this.hostname : '[' + this.hostname + ']'); if (this.port) { host += ':' + this.port; } } if (this.query && util.isObject(this.query) && Object.keys(this.query).length) { query = querystring.stringify(this.query); } var search = this.search || (query && ('?' + query)) || ''; if (protocol && protocol.substr(-1) !== ':') protocol += ':'; // only the slashedProtocols get the //. Not mailto:, xmpp:, etc. // unless they had them to begin with. if (this.slashes || (!protocol || slashedProtocol[protocol]) && host !== false) { host = '//' + (host || ''); if (pathname && pathname.charAt(0) !== '/') pathname = '/' + pathname; } else if (!host) { host = ''; } if (hash && hash.charAt(0) !== '#') hash = '#' + hash; if (search && search.charAt(0) !== '?') search = '?' + search; pathname = pathname.replace(/[?#]/g, function(match) { return encodeURIComponent(match); }); search = search.replace('#', '%23'); return protocol + host + pathname + search + hash; }; function urlResolve(source, relative) { return urlParse(source, false, true).resolve(relative); } Url.prototype.resolve = function(relative) { return this.resolveObject(urlParse(relative, false, true)).format(); }; function urlResolveObject(source, relative) { if (!source) return relative; return urlParse(source, false, true).resolveObject(relative); } Url.prototype.resolveObject = function(relative) { if (util.isString(relative)) { var rel = new Url(); rel.parse(relative, false, true); relative = rel; } var result = new Url(); var tkeys = Object.keys(this); for (var tk = 0; tk < tkeys.length; tk++) { var tkey = tkeys[tk]; result[tkey] = this[tkey]; } // hash is always overridden, no matter what. // even href="" will remove it. result.hash = relative.hash; // if the relative url is empty, then there's nothing left to do here. if (relative.href === '') { result.href = result.format(); return result; } // hrefs like //foo/bar always cut to the protocol. if (relative.slashes && !relative.protocol) { // take everything except the protocol from relative var rkeys = Object.keys(relative); for (var rk = 0; rk < rkeys.length; rk++) { var rkey = rkeys[rk]; if (rkey !== 'protocol') result[rkey] = relative[rkey]; } //urlParse appends trailing / to urls like http://www.example.com if (slashedProtocol[result.protocol] && result.hostname && !result.pathname) { result.path = result.pathname = '/'; } result.href = result.format(); return result; } if (relative.protocol && relative.protocol !== result.protocol) { // if it's a known url protocol, then changing // the protocol does weird things // first, if it's not file:, then we MUST have a host, // and if there was a path // to begin with, then we MUST have a path. // if it is file:, then the host is dropped, // because that's known to be hostless. // anything else is assumed to be absolute. 
if (!slashedProtocol[relative.protocol]) { var keys = Object.keys(relative); for (var v = 0; v < keys.length; v++) { var k = keys[v]; result[k] = relative[k]; } result.href = result.format(); return result; } result.protocol = relative.protocol; if (!relative.host && !hostlessProtocol[relative.protocol]) { var relPath = (relative.pathname || '').split('/'); while (relPath.length && !(relative.host = relPath.shift())); if (!relative.host) relative.host = ''; if (!relative.hostname) relative.hostname = ''; if (relPath[0] !== '') relPath.unshift(''); if (relPath.length < 2) relPath.unshift(''); result.pathname = relPath.join('/'); } else { result.pathname = relative.pathname; } result.search = relative.search; result.query = relative.query; result.host = relative.host || ''; result.auth = relative.auth; result.hostname = relative.hostname || relative.host; result.port = relative.port; // to support http.request if (result.pathname || result.search) { var p = result.pathname || ''; var s = result.search || ''; result.path = p + s; } result.slashes = result.slashes || relative.slashes; result.href = result.format(); return result; } var isSourceAbs = (result.pathname && result.pathname.charAt(0) === '/'), isRelAbs = ( relative.host || relative.pathname && relative.pathname.charAt(0) === '/' ), mustEndAbs = (isRelAbs || isSourceAbs || (result.host && relative.pathname)), removeAllDots = mustEndAbs, srcPath = result.pathname && result.pathname.split('/') || [], relPath = relative.pathname && relative.pathname.split('/') || [], psychotic = result.protocol && !slashedProtocol[result.protocol]; // if the url is a non-slashed url, then relative // links like ../.. should be able // to crawl up to the hostname, as well. This is strange. // result.protocol has already been set by now. // Later on, put the first path part into the host field. if (psychotic) { result.hostname = ''; result.port = null; if (result.host) { if (srcPath[0] === '') srcPath[0] = result.host; else srcPath.unshift(result.host); } result.host = ''; if (relative.protocol) { relative.hostname = null; relative.port = null; if (relative.host) { if (relPath[0] === '') relPath[0] = relative.host; else relPath.unshift(relative.host); } relative.host = null; } mustEndAbs = mustEndAbs && (relPath[0] === '' || srcPath[0] === ''); } if (isRelAbs) { // it's absolute. result.host = (relative.host || relative.host === '') ? relative.host : result.host; result.hostname = (relative.hostname || relative.hostname === '') ? relative.hostname : result.hostname; result.search = relative.search; result.query = relative.query; srcPath = relPath; // fall through to the dot-handling below. } else if (relPath.length) { // it's relative // throw away the existing file, and take the new path instead. if (!srcPath) srcPath = []; srcPath.pop(); srcPath = srcPath.concat(relPath); result.search = relative.search; result.query = relative.query; } else if (!util.isNullOrUndefined(relative.search)) { // just pull out the search. // like href='?foo'. // Put this after the other two cases because it simplifies the booleans if (psychotic) { result.hostname = result.host = srcPath.shift(); //occationaly the auth can get stuck only in host //this especially happens in cases like //url.resolveObject('mailto:local1@domain1', 'local2@domain2') var authInHost = result.host && result.host.indexOf('@') > 0 ? 
result.host.split('@') : false; if (authInHost) { result.auth = authInHost.shift(); result.host = result.hostname = authInHost.shift(); } } result.search = relative.search; result.query = relative.query; //to support http.request if (!util.isNull(result.pathname) || !util.isNull(result.search)) { result.path = (result.pathname ? result.pathname : '') + (result.search ? result.search : ''); } result.href = result.format(); return result; } if (!srcPath.length) { // no path at all. easy. // we've already handled the other stuff above. result.pathname = null; //to support http.request if (result.search) { result.path = '/' + result.search; } else { result.path = null; } result.href = result.format(); return result; } // if a url ENDs in . or .., then it must get a trailing slash. // however, if it ends in anything else non-slashy, // then it must NOT get a trailing slash. var last = srcPath.slice(-1)[0]; var hasTrailingSlash = ( (result.host || relative.host || srcPath.length > 1) && (last === '.' || last === '..') || last === ''); // strip single dots, resolve double dots to parent dir // if the path tries to go above the root, `up` ends up > 0 var up = 0; for (var i = srcPath.length; i >= 0; i--) { last = srcPath[i]; if (last === '.') { srcPath.splice(i, 1); } else if (last === '..') { srcPath.splice(i, 1); up++; } else if (up) { srcPath.splice(i, 1); up--; } } // if the path is allowed to go above the root, restore leading ..s if (!mustEndAbs && !removeAllDots) { for (; up--; up) { srcPath.unshift('..'); } } if (mustEndAbs && srcPath[0] !== '' && (!srcPath[0] || srcPath[0].charAt(0) !== '/')) { srcPath.unshift(''); } if (hasTrailingSlash && (srcPath.join('/').substr(-1) !== '/')) { srcPath.push(''); } var isAbsolute = srcPath[0] === '' || (srcPath[0] && srcPath[0].charAt(0) === '/'); // put the host back if (psychotic) { result.hostname = result.host = isAbsolute ? '' : srcPath.length ? srcPath.shift() : ''; //occationaly the auth can get stuck only in host //this especially happens in cases like //url.resolveObject('mailto:local1@domain1', 'local2@domain2') var authInHost = result.host && result.host.indexOf('@') > 0 ? result.host.split('@') : false; if (authInHost) { result.auth = authInHost.shift(); result.host = result.hostname = authInHost.shift(); } } mustEndAbs = mustEndAbs || (result.host && srcPath.length); if (mustEndAbs && !isAbsolute) { srcPath.unshift(''); } if (!srcPath.length) { result.pathname = null; result.path = null; } else { result.pathname = srcPath.join('/'); } //to support request.http if (!util.isNull(result.pathname) || !util.isNull(result.search)) { result.path = (result.pathname ? result.pathname : '') + (result.search ? result.search : ''); } result.auth = relative.auth || result.auth; result.slashes = result.slashes || relative.slashes; result.href = result.format(); return result; }; Url.prototype.parseHost = function() { var host = this.host; var port = portPattern.exec(host); if (port) { port = port[0]; if (port !== ':') { this.port = port.substr(1); } host = host.substr(0, host.length - port.length); } if (host) this.hostname = host; }; /***/ }), /***/ "./node_modules/url/util.js": /*!**********************************!*\ !*** ./node_modules/url/util.js ***! \**********************************/ /*! 
no static exports found */ /***/ (function(module, exports, __webpack_require__) { "use strict"; module.exports = { isString: function(arg) { return typeof(arg) === 'string'; }, isObject: function(arg) { return typeof(arg) === 'object' && arg !== null; }, isNull: function(arg) { return arg === null; }, isNullOrUndefined: function(arg) { return arg == null; } }; /***/ }), /***/ "./node_modules/webpack/buildin/global.js": /*!******************************************************************************************************!*\ !*** delegated ./node_modules/webpack/buildin/global.js from dll-reference dll_55dc4e2ecf7824085104 ***! \******************************************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = (__webpack_require__(/*! dll-reference dll_55dc4e2ecf7824085104 */ "dll-reference dll_55dc4e2ecf7824085104"))("./node_modules/webpack/buildin/global.js"); /***/ }), /***/ "./node_modules/webpack/buildin/module.js": /*!***********************************!*\ !*** (webpack)/buildin/module.js ***! \***********************************/ /*! no static exports found */ /***/ (function(module, exports) { module.exports = function(module) { if (!module.webpackPolyfill) { module.deprecate = function() {}; module.paths = []; // module.parent = undefined by default if (!module.children) module.children = []; Object.defineProperty(module, "loaded", { enumerable: true, get: function() { return module.l; } }); Object.defineProperty(module, "id", { enumerable: true, get: function() { return module.i; } }); module.webpackPolyfill = 1; } return module; }; /***/ }), /***/ "./pages/sobre.js": /*!************************!*\ !*** ./pages/sobre.js ***! \************************/ /*! exports provided: default */ /***/ (function(module, __webpack_exports__, __webpack_require__) { "use strict"; __webpack_require__.r(__webpack_exports__); /* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! react */ "./node_modules/react/index.js"); /* harmony import */ var react__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(react__WEBPACK_IMPORTED_MODULE_0__); /* harmony import */ var next_link__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! next/link */ "./node_modules/next/link.js"); /* harmony import */ var next_link__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(next_link__WEBPACK_IMPORTED_MODULE_1__); var _jsxFileName = "/Users/willian/Cursos/nextjs/pages/sobre.js"; /* harmony default export */ __webpack_exports__["default"] = (function () { return react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("div", { __source: { fileName: _jsxFileName, lineNumber: 5 }, __self: this }, react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement(next_link__WEBPACK_IMPORTED_MODULE_1___default.a, { href: "/", __source: { fileName: _jsxFileName, lineNumber: 6 }, __self: this }, "home"), react__WEBPACK_IMPORTED_MODULE_0___default.a.createElement("h1", { __source: { fileName: _jsxFileName, lineNumber: 7 }, __self: this }, "Sobre")); }); /***/ }), /***/ 3: /*!*****************************************************************************************************************************!*\ !*** multi next-client-pages-loader?page=%2Fsobre&absolutePagePath=%2FUsers%2Fwillian%2FCursos%2Fnextjs%2Fpages%2Fsobre.js ***! 
\*****************************************************************************************************************************/ /*! no static exports found */ /***/ (function(module, exports, __webpack_require__) { module.exports = __webpack_require__(/*! next-client-pages-loader?page=%2Fsobre&absolutePagePath=%2FUsers%2Fwillian%2FCursos%2Fnextjs%2Fpages%2Fsobre.js! */"./node_modules/next/dist/build/webpack/loaders/next-client-pages-loader.js?page=%2Fsobre&absolutePagePath=%2FUsers%2Fwillian%2FCursos%2Fnextjs%2Fpages%2Fsobre.js!./"); /***/ }), /***/ "dll-reference dll_55dc4e2ecf7824085104": /*!*******************************************!*\ !*** external "dll_55dc4e2ecf7824085104" ***! \*******************************************/ /*! no static exports found */ /***/ (function(module, exports) { module.exports = dll_55dc4e2ecf7824085104; /***/ }) },[[3,"static/runtime/webpack.js"]]]));; //# sourceMappingURL=sobre.js.map
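For quick orientation, the `url` module bundled above implements Node's legacy URL API (`parse`, `format`, `resolve`). Below is a minimal TypeScript sketch of how those entry points behave; the example URLs are illustrative only, everything else is the documented legacy API:

import * as url from 'url'

const u = url.parse('http://user@example.com:8080/docs/page?x=1#top')
// Legacy parse splits the string into the Url fields defined above:
// u.protocol === 'http:'      u.auth === 'user'
// u.hostname === 'example.com'  u.port === '8080'
// u.pathname === '/docs/page'   u.search === '?x=1'   u.hash === '#top'

// resolve() applies relative-reference resolution (RFC 3986 style):
url.resolve('http://example.com/one/two', 'three') // 'http://example.com/one/three'
url.resolve('http://example.com/one/two', '/four') // 'http://example.com/four'

// format() is the inverse of parse():
url.format(u) // 'http://user@example.com:8080/docs/page?x=1#top'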
function _arrayWithHoles(arr) {
  if (_core_js_array_is_array__WEBPACK_IMPORTED_MODULE_0___default()(arr)) return arr;
}
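`_arrayWithHoles` above is one branch of Babel's array-destructuring helper chain: a destructuring like `const [a, b] = xs` compiles to a `_slicedToArray(xs, 2)` call that tries the plain-array fast path first and falls back to pulling values from an iterator. A rough TypeScript reconstruction of that chain follows; the helper names mirror Babel's output, but this is a simplified illustrative sketch (the real helpers also throw a TypeError for non-iterables), not the exact emitted code:

function arrayWithHoles(arr: unknown): unknown[] | undefined {
  // Fast path: real arrays are used as-is, holes and all.
  if (Array.isArray(arr)) return arr
}

function iterableToArrayLimit(arr: Iterable<unknown>, i: number): unknown[] {
  // Fallback: pull at most `i` values from the iterator.
  const out: unknown[] = []
  for (const item of arr) {
    out.push(item)
    if (out.length === i) break
  }
  return out
}

function slicedToArray(arr: unknown, i: number): unknown[] {
  return arrayWithHoles(arr) ?? iterableToArrayLimit(arr as Iterable<unknown>, i)
}

// const [a, b] = slicedToArray(anyIterable, 2)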
http.go
package main

import (
	"fmt"
	"net/http"

	"github.com/pengshang1995/wechat-sdk"
	"github.com/pengshang1995/wechat-sdk/message"
)

func
(rw http.ResponseWriter, req *http.Request) {
	// Configure the WeChat parameters.
	config := &wechat.Config{
		AppID:          "your app id",
		AppSecret:      "your app secret",
		Token:          "your token",
		EncodingAESKey: "your encoding aes key",
	}
	wc := wechat.NewWechat(config)

	// Pass in the request and responseWriter.
	server := wc.GetServer(req, rw)

	// Set the handler for incoming messages.
	server.SetMessageHandler(func(msg message.MixMessage) *message.Reply {
		// Reply: echo the text the user sent back to them.
		text := message.NewText(msg.Content)
		return &message.Reply{
			ResponseType: message.ResponseTypeXML,
			ReplyScene:   message.ReplySceneKefu,
			MsgData:      text,
		}
	})

	// Process the incoming message and build the reply.
	err := server.Serve()
	if err != nil {
		fmt.Println(err)
		return
	}

	// Send the reply message.
	server.Send()
}

func main() {
	http.HandleFunc("/", hello)
	err := http.ListenAndServe(":8001", nil)
	if err != nil {
		fmt.Printf("start server error, err=%v", err)
	}
}
hello
boxed.rs
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! A pointer type for heap allocation. //! //! `Box<T>`, casually referred to as a 'box', provides the simplest form of //! heap allocation in Rust. Boxes provide ownership for this allocation, and //! drop their contents when they go out of scope. //! //! # Examples //! //! Creating a box: //! //! ``` //! let x = Box::new(5); //! ``` //! //! Creating a recursive data structure: //! //! ``` //! #[derive(Debug)] //! enum List<T> { //! Cons(T, Box<List<T>>), //! Nil, //! } //! //! fn main() { //! let list: List<i32> = List::Cons(1, Box::new(List::Cons(2, Box::new(List::Nil)))); //! println!("{:?}", list); //! } //! ``` //! //! This will print `Cons(1, Cons(2, Nil))`. //! //! Recursive structures must be boxed, because if the definition of `Cons` //! looked like this: //! //! ```compile_fail,E0072 //! # enum List<T> { //! Cons(T, List<T>), //! # } //! ``` //! //! It wouldn't work. This is because the size of a `List` depends on how many //! elements are in the list, and so we don't know how much memory to allocate //! for a `Cons`. By introducing a `Box`, which has a defined size, we know how //! big `Cons` needs to be. #![stable(feature = "rust1", since = "1.0.0")] use heap::{Heap, Layout, Alloc}; use raw_vec::RawVec; use core::any::Any; use core::borrow; use core::cmp::Ordering; use core::fmt; use core::hash::{self, Hash, Hasher}; use core::iter::FusedIterator; use core::marker::{self, Unsize}; use core::mem; use core::ops::{CoerceUnsized, Deref, DerefMut, Generator, GeneratorState}; use core::ops::{BoxPlace, Boxed, InPlace, Place, Placer}; use core::ptr::{self, Unique}; use core::convert::From; use str::from_boxed_utf8_unchecked; /// A value that represents the heap. This is the default place that the `box` /// keyword allocates into when no place is supplied. /// /// The following two examples are equivalent: /// /// ``` /// #![feature(box_heap)] /// /// #![feature(box_syntax, placement_in_syntax)] /// use std::boxed::HEAP; /// /// fn main() { /// let foo: Box<i32> = in HEAP { 5 }; /// let foo = box 5; /// } /// ``` #[unstable(feature = "box_heap", reason = "may be renamed; uncertain about custom allocator design", issue = "27779")] pub const HEAP: ExchangeHeapSingleton = ExchangeHeapSingleton { _force_singleton: () }; /// This the singleton type used solely for `boxed::HEAP`. #[unstable(feature = "box_heap", reason = "may be renamed; uncertain about custom allocator design", issue = "27779")] #[allow(missing_debug_implementations)] #[derive(Copy, Clone)] pub struct ExchangeHeapSingleton { _force_singleton: (), } /// A pointer type for heap allocation. /// /// See the [module-level documentation](../../std/boxed/index.html) for more. #[lang = "owned_box"] #[fundamental] #[stable(feature = "rust1", since = "1.0.0")] pub struct Box<T: ?Sized>(Unique<T>); /// `IntermediateBox` represents uninitialized backing storage for `Box`. /// /// FIXME (pnkfelix): Ideally we would just reuse `Box<T>` instead of /// introducing a separate `IntermediateBox<T>`; but then you hit /// issues when you e.g. 
attempt to destructure an instance of `Box`, /// since it is a lang item and so it gets special handling by the /// compiler. Easier just to make this parallel type for now. /// /// FIXME (pnkfelix): Currently the `box` protocol only supports /// creating instances of sized types. This IntermediateBox is /// designed to be forward-compatible with a future protocol that /// supports creating instances of unsized types; that is why the type /// parameter has the `?Sized` generalization marker, and is also why /// this carries an explicit size. However, it probably does not need /// to carry the explicit alignment; that is just a work-around for /// the fact that the `align_of` intrinsic currently requires the /// input type to be Sized (which I do not think is strictly /// necessary). #[unstable(feature = "placement_in", reason = "placement box design is still being worked out.", issue = "27779")] #[allow(missing_debug_implementations)] pub struct IntermediateBox<T: ?Sized> { ptr: *mut u8, layout: Layout, marker: marker::PhantomData<*mut T>, } #[unstable(feature = "placement_in", reason = "placement box design is still being worked out.", issue = "27779")] impl<T> Place<T> for IntermediateBox<T> { fn pointer(&mut self) -> *mut T { self.ptr as *mut T } } unsafe fn finalize<T>(b: IntermediateBox<T>) -> Box<T> { let p = b.ptr as *mut T; mem::forget(b); mem::transmute(p) } fn make_place<T>() -> IntermediateBox<T> { let layout = Layout::new::<T>(); let p = if layout.size() == 0 { mem::align_of::<T>() as *mut u8 } else { unsafe { Heap.alloc(layout.clone()).unwrap_or_else(|err| { Heap.oom(err) }) } }; IntermediateBox { ptr: p, layout, marker: marker::PhantomData, } } #[unstable(feature = "placement_in", reason = "placement box design is still being worked out.", issue = "27779")] impl<T> BoxPlace<T> for IntermediateBox<T> { fn make_place() -> IntermediateBox<T> { make_place() } } #[unstable(feature = "placement_in", reason = "placement box design is still being worked out.", issue = "27779")] impl<T> InPlace<T> for IntermediateBox<T> { type Owner = Box<T>; unsafe fn finalize(self) -> Box<T> { finalize(self) } } #[unstable(feature = "placement_new_protocol", issue = "27779")] impl<T> Boxed for Box<T> { type Data = T; type Place = IntermediateBox<T>; unsafe fn finalize(b: IntermediateBox<T>) -> Box<T> { finalize(b) } } #[unstable(feature = "placement_in", reason = "placement box design is still being worked out.", issue = "27779")] impl<T> Placer<T> for ExchangeHeapSingleton { type Place = IntermediateBox<T>; fn make_place(self) -> IntermediateBox<T> { make_place() } } #[unstable(feature = "placement_in", reason = "placement box design is still being worked out.", issue = "27779")] impl<T: ?Sized> Drop for IntermediateBox<T> { fn drop(&mut self) { if self.layout.size() > 0 { unsafe { Heap.dealloc(self.ptr, self.layout.clone()) } } } } impl<T> Box<T> { /// Allocates memory on the heap and then places `x` into it. /// /// This doesn't actually allocate if `T` is zero-sized. /// /// # Examples /// /// ``` /// let five = Box::new(5); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline(always)] pub fn new(x: T) -> Box<T> { box x } } impl<T: ?Sized> Box<T> { /// Constructs a box from a raw pointer. /// /// After calling this function, the raw pointer is owned by the /// resulting `Box`. Specifically, the `Box` destructor will call /// the destructor of `T` and free the allocated memory. 
Since the /// way `Box` allocates and releases memory is unspecified, the /// only valid pointer to pass to this function is the one taken /// from another `Box` via the [`Box::into_raw`] function. /// /// This function is unsafe because improper use may lead to /// memory problems. For example, a double-free may occur if the /// function is called twice on the same raw pointer. /// /// [`Box::into_raw`]: struct.Box.html#method.into_raw /// /// # Examples /// /// ``` /// let x = Box::new(5); /// let ptr = Box::into_raw(x); /// let x = unsafe { Box::from_raw(ptr) }; /// ``` #[stable(feature = "box_raw", since = "1.4.0")] #[inline] pub unsafe fn from_raw(raw: *mut T) -> Self { Box::from_unique(Unique::new_unchecked(raw)) } /// Constructs a `Box` from a `Unique<T>` pointer. /// /// After calling this function, the memory is owned by a `Box` and `T` can /// then be destroyed and released upon drop. /// /// # Safety /// /// A `Unique<T>` can be safely created via [`Unique::new`] and thus doesn't /// necessarily own the data pointed to nor is the data guaranteed to live /// as long as the pointer. /// /// [`Unique::new`]: ../../core/ptr/struct.Unique.html#method.new /// /// # Examples /// /// ``` /// #![feature(unique)] /// /// fn main() { /// let x = Box::new(5); /// let ptr = Box::into_unique(x); /// let x = unsafe { Box::from_unique(ptr) }; /// } /// ``` #[unstable(feature = "unique", reason = "needs an RFC to flesh out design", issue = "27730")] #[inline] pub unsafe fn from_unique(u: Unique<T>) -> Self { mem::transmute(u) } /// Consumes the `Box`, returning the wrapped raw pointer. /// /// After calling this function, the caller is responsible for the /// memory previously managed by the `Box`. In particular, the /// caller should properly destroy `T` and release the memory. The /// proper way to do so is to convert the raw pointer back into a /// `Box` with the [`Box::from_raw`] function. /// /// Note: this is an associated function, which means that you have /// to call it as `Box::into_raw(b)` instead of `b.into_raw()`. This /// is so that there is no conflict with a method on the inner type. /// /// [`Box::from_raw`]: struct.Box.html#method.from_raw /// /// # Examples /// /// ``` /// let x = Box::new(5); /// let ptr = Box::into_raw(x); /// ``` #[stable(feature = "box_raw", since = "1.4.0")] #[inline] pub fn into_raw(b: Box<T>) -> *mut T { Box::into_unique(b).as_ptr() } /// Consumes the `Box`, returning the wrapped pointer as `Unique<T>`. /// /// After calling this function, the caller is responsible for the /// memory previously managed by the `Box`. In particular, the /// caller should properly destroy `T` and release the memory. The /// proper way to do so is to either convert the `Unique<T>` pointer: /// /// - Into a `Box` with the [`Box::from_unique`] function. /// /// - Into a raw pointer and back into a `Box` with the [`Box::from_raw`] /// function. /// /// Note: this is an associated function, which means that you have /// to call it as `Box::into_unique(b)` instead of `b.into_unique()`. This /// is so that there is no conflict with a method on the inner type. 
/// /// [`Box::from_unique`]: struct.Box.html#method.from_unique /// [`Box::from_raw`]: struct.Box.html#method.from_raw /// /// # Examples /// /// ``` /// #![feature(unique)] /// /// fn main() { /// let x = Box::new(5); /// let ptr = Box::into_unique(x); /// } /// ``` #[unstable(feature = "unique", reason = "needs an RFC to flesh out design", issue = "27730")] #[inline] pub fn into_unique(b: Box<T>) -> Unique<T> { unsafe { mem::transmute(b) } } } #[stable(feature = "rust1", since = "1.0.0")] unsafe impl<#[may_dangle] T: ?Sized> Drop for Box<T> { fn drop(&mut self) { // FIXME: Do nothing, drop is currently performed by compiler. } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: Default> Default for Box<T> { /// Creates a `Box<T>`, with the `Default` value for T. fn default() -> Box<T> { box Default::default() } } #[stable(feature = "rust1", since = "1.0.0")] impl<T> Default for Box<[T]> { fn default() -> Box<[T]> { Box::<[T; 0]>::new([]) } } #[stable(feature = "default_box_extra", since = "1.17.0")] impl Default for Box<str> { fn default() -> Box<str> { unsafe { from_boxed_utf8_unchecked(Default::default()) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: Clone> Clone for Box<T> { /// Returns a new box with a `clone()` of this box's contents. /// /// # Examples /// /// ``` /// let x = Box::new(5); /// let y = x.clone(); /// ``` #[rustfmt_skip] #[inline] fn clone(&self) -> Box<T> { box { (**self).clone() } } /// Copies `source`'s contents into `self` without creating a new allocation. /// /// # Examples /// /// ``` /// let x = Box::new(5); /// let mut y = Box::new(10); /// /// y.clone_from(&x); /// /// assert_eq!(*y, 5); /// ``` #[inline] fn clone_from(&mut self, source: &Box<T>) { (**self).clone_from(&(**source)); } } #[stable(feature = "box_slice_clone", since = "1.3.0")] impl Clone for Box<str> { fn clone(&self) -> Self { let len = self.len(); let buf = RawVec::with_capacity(len); unsafe { ptr::copy_nonoverlapping(self.as_ptr(), buf.ptr(), len); from_boxed_utf8_unchecked(buf.into_box()) } } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: ?Sized + PartialEq> PartialEq for Box<T> { #[inline] fn eq(&self, other: &Box<T>) -> bool { PartialEq::eq(&**self, &**other) } #[inline] fn ne(&self, other: &Box<T>) -> bool { PartialEq::ne(&**self, &**other) } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: ?Sized + PartialOrd> PartialOrd for Box<T> { #[inline] fn partial_cmp(&self, other: &Box<T>) -> Option<Ordering> { PartialOrd::partial_cmp(&**self, &**other) } #[inline] fn lt(&self, other: &Box<T>) -> bool { PartialOrd::lt(&**self, &**other) } #[inline] fn le(&self, other: &Box<T>) -> bool { PartialOrd::le(&**self, &**other) } #[inline] fn ge(&self, other: &Box<T>) -> bool { PartialOrd::ge(&**self, &**other) } #[inline] fn gt(&self, other: &Box<T>) -> bool { PartialOrd::gt(&**self, &**other) } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: ?Sized + Ord> Ord for Box<T> { #[inline] fn cmp(&self, other: &Box<T>) -> Ordering { Ord::cmp(&**self, &**other) } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: ?Sized + Eq> Eq for Box<T> {} #[stable(feature = "rust1", since = "1.0.0")] impl<T: ?Sized + Hash> Hash for Box<T> { fn hash<H: hash::Hasher>(&self, state: &mut H) { (**self).hash(state); } } #[stable(feature = "indirect_hasher_impl", since = "1.22.0")] impl<T: ?Sized + Hasher> Hasher for Box<T> { fn finish(&self) -> u64 { (**self).finish() } fn write(&mut self, bytes: &[u8]) { (**self).write(bytes) } fn write_u8(&mut self, i: u8) { (**self).write_u8(i) 
} fn write_u16(&mut self, i: u16) { (**self).write_u16(i) } fn write_u32(&mut self, i: u32) { (**self).write_u32(i) } fn write_u64(&mut self, i: u64) { (**self).write_u64(i) } fn write_u128(&mut self, i: u128) { (**self).write_u128(i) } fn write_usize(&mut self, i: usize) { (**self).write_usize(i) } fn write_i8(&mut self, i: i8) { (**self).write_i8(i) } fn write_i16(&mut self, i: i16) { (**self).write_i16(i) } fn write_i32(&mut self, i: i32) { (**self).write_i32(i) } fn write_i64(&mut self, i: i64) { (**self).write_i64(i) } fn write_i128(&mut self, i: i128) { (**self).write_i128(i) } fn write_isize(&mut self, i: isize) { (**self).write_isize(i) } } #[stable(feature = "from_for_ptrs", since = "1.6.0")] impl<T> From<T> for Box<T> { fn from(t: T) -> Self { Box::new(t) } } #[stable(feature = "box_from_slice", since = "1.17.0")] impl<'a, T: Copy> From<&'a [T]> for Box<[T]> { fn from(slice: &'a [T]) -> Box<[T]> { let mut boxed = unsafe { RawVec::with_capacity(slice.len()).into_box() }; boxed.copy_from_slice(slice); boxed } } #[stable(feature = "box_from_slice", since = "1.17.0")] impl<'a> From<&'a str> for Box<str> { fn from(s: &'a str) -> Box<str> { unsafe { from_boxed_utf8_unchecked(Box::from(s.as_bytes())) } } } #[stable(feature = "boxed_str_conv", since = "1.19.0")] impl From<Box<str>> for Box<[u8]> { fn from(s: Box<str>) -> Self { unsafe { Box::from_raw(Box::into_raw(s) as *mut [u8]) } } } impl Box<Any> { #[inline] #[stable(feature = "rust1", since = "1.0.0")] /// Attempt to downcast the box to a concrete type. /// /// # Examples /// /// ``` /// use std::any::Any; /// /// fn print_if_string(value: Box<Any>) { /// if let Ok(string) = value.downcast::<String>() { /// println!("String ({}): {}", string.len(), string); /// } /// } /// /// fn main() { /// let my_string = "Hello World".to_string(); /// print_if_string(Box::new(my_string)); /// print_if_string(Box::new(0i8)); /// } /// ``` pub fn downcast<T: Any>(self) -> Result<Box<T>, Box<Any>> { if self.is::<T>() { unsafe { let raw: *mut Any = Box::into_raw(self); Ok(Box::from_raw(raw as *mut T)) } } else { Err(self) } } } impl Box<Any + Send> { #[inline] #[stable(feature = "rust1", since = "1.0.0")] /// Attempt to downcast the box to a concrete type. 
/// /// # Examples /// /// ``` /// use std::any::Any; /// /// fn print_if_string(value: Box<Any + Send>) { /// if let Ok(string) = value.downcast::<String>() { /// println!("String ({}): {}", string.len(), string); /// } /// } /// /// fn main() { /// let my_string = "Hello World".to_string(); /// print_if_string(Box::new(my_string)); /// print_if_string(Box::new(0i8)); /// } /// ``` pub fn downcast<T: Any>(self) -> Result<Box<T>, Box<Any + Send>> { <Box<Any>>::downcast(self).map_err(|s| unsafe { // reapply the Send marker mem::transmute::<Box<Any>, Box<Any + Send>>(s) }) } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: fmt::Display + ?Sized> fmt::Display for Box<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: fmt::Debug + ?Sized> fmt::Debug for Box<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Debug::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: ?Sized> fmt::Pointer for Box<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // It's not possible to extract the inner Uniq directly from the Box, // instead we cast it to a *const which aliases the Unique let ptr: *const T = &**self; fmt::Pointer::fmt(&ptr, f) } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: ?Sized> Deref for Box<T> { type Target = T; fn deref(&self) -> &T { &**self } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: ?Sized> DerefMut for Box<T> { fn deref_mut(&mut self) -> &mut T { &mut **self } } #[stable(feature = "rust1", since = "1.0.0")] impl<I: Iterator + ?Sized> Iterator for Box<I> { type Item = I::Item; fn next(&mut self) -> Option<I::Item> { (**self).next() } fn size_hint(&self) -> (usize, Option<usize>) { (**self).size_hint() } fn nth(&mut self, n: usize) -> Option<I::Item> { (**self).nth(n) } } #[stable(feature = "rust1", since = "1.0.0")] impl<I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for Box<I> { fn next_back(&mut self) -> Option<I::Item> { (**self).next_back() } } #[stable(feature = "rust1", since = "1.0.0")] impl<I: ExactSizeIterator + ?Sized> ExactSizeIterator for Box<I> { fn len(&self) -> usize { (**self).len() } fn is_empty(&self) -> bool { (**self).is_empty() } } #[unstable(feature = "fused", issue = "35602")] impl<I: FusedIterator + ?Sized> FusedIterator for Box<I> {} /// `FnBox` is a version of the `FnOnce` intended for use with boxed /// closure objects. The idea is that where one would normally store a /// `Box<FnOnce()>` in a data structure, you should use /// `Box<FnBox()>`. The two traits behave essentially the same, except /// that a `FnBox` closure can only be called if it is boxed. (Note /// that `FnBox` may be deprecated in the future if `Box<FnOnce()>` /// closures become directly usable.) /// /// # Examples /// /// Here is a snippet of code which creates a hashmap full of boxed /// once closures and then removes them one by one, calling each /// closure as it is removed. Note that the type of the closures /// stored in the map is `Box<FnBox() -> i32>` and not `Box<FnOnce() /// -> i32>`. /// /// ``` /// #![feature(fnbox)]
/// use std::boxed::FnBox; /// use std::collections::HashMap; /// /// fn make_map() -> HashMap<i32, Box<FnBox() -> i32>> { /// let mut map: HashMap<i32, Box<FnBox() -> i32>> = HashMap::new(); /// map.insert(1, Box::new(|| 22)); /// map.insert(2, Box::new(|| 44)); /// map /// } /// /// fn main() { /// let mut map = make_map(); /// for i in &[1, 2] { /// let f = map.remove(&i).unwrap(); /// assert_eq!(f(), i * 22); /// } /// } /// ``` #[rustc_paren_sugar] #[unstable(feature = "fnbox", reason = "will be deprecated if and when `Box<FnOnce>` becomes usable", issue = "28796")] pub trait FnBox<A> { type Output; fn call_box(self: Box<Self>, args: A) -> Self::Output; } #[unstable(feature = "fnbox", reason = "will be deprecated if and when `Box<FnOnce>` becomes usable", issue = "28796")] impl<A, F> FnBox<A> for F where F: FnOnce<A> { type Output = F::Output; fn call_box(self: Box<F>, args: A) -> F::Output { self.call_once(args) } } #[unstable(feature = "fnbox", reason = "will be deprecated if and when `Box<FnOnce>` becomes usable", issue = "28796")] impl<'a, A, R> FnOnce<A> for Box<FnBox<A, Output = R> + 'a> { type Output = R; extern "rust-call" fn call_once(self, args: A) -> R { self.call_box(args) } } #[unstable(feature = "fnbox", reason = "will be deprecated if and when `Box<FnOnce>` becomes usable", issue = "28796")] impl<'a, A, R> FnOnce<A> for Box<FnBox<A, Output = R> + Send + 'a> { type Output = R; extern "rust-call" fn call_once(self, args: A) -> R { self.call_box(args) } } #[unstable(feature = "coerce_unsized", issue = "27732")] impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Box<U>> for Box<T> {} #[stable(feature = "box_slice_clone", since = "1.3.0")] impl<T: Clone> Clone for Box<[T]> { fn clone(&self) -> Self { let mut new = BoxBuilder { data: RawVec::with_capacity(self.len()), len: 0, }; let mut target = new.data.ptr(); for item in self.iter() { unsafe { ptr::write(target, item.clone()); target = target.offset(1); }; new.len += 1; } return unsafe { new.into_box() }; // Helper type for responding to panics correctly. struct BoxBuilder<T> { data: RawVec<T>, len: usize, } impl<T> BoxBuilder<T> { unsafe fn into_box(self) -> Box<[T]> { let raw = ptr::read(&self.data); mem::forget(self); raw.into_box() } } impl<T> Drop for BoxBuilder<T> { fn drop(&mut self) { let mut data = self.data.ptr(); let max = unsafe { data.offset(self.len as isize) }; while data != max { unsafe { ptr::read(data); data = data.offset(1); } } } } } } #[stable(feature = "box_borrow", since = "1.1.0")] impl<T: ?Sized> borrow::Borrow<T> for Box<T> { fn borrow(&self) -> &T { &**self } } #[stable(feature = "box_borrow", since = "1.1.0")] impl<T: ?Sized> borrow::BorrowMut<T> for Box<T> { fn borrow_mut(&mut self) -> &mut T { &mut **self } } #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")] impl<T: ?Sized> AsRef<T> for Box<T> { fn as_ref(&self) -> &T { &**self } } #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")] impl<T: ?Sized> AsMut<T> for Box<T> { fn as_mut(&mut self) -> &mut T { &mut **self } } #[unstable(feature = "generator_trait", issue = "43122")] impl<T> Generator for Box<T> where T: Generator + ?Sized { type Yield = T::Yield; type Return = T::Return; fn resume(&mut self) -> GeneratorState<Self::Yield, Self::Return> { (**self).resume() } }
types.d.ts
/* eslint-disable camelcase */ import { AxiosResponse, AxiosPromise } from 'axios' export interface IDRFListResponse<T> { count: number next: string | null previous: string | null results: T[] } export interface IDRFRequestListParameters { page: number page_size: number ordering?: string fields?: string } export type IDRFAxiosResponse<T> = AxiosResponse<T> export type IDRFAxiosResponsePromise<T> = AxiosPromise<T> export type IDRFAxiosResponseListPromise<T> = AxiosResponse<IDRFListResponse<T>> // Simple response export interface ISimpleResponseResult { text: string status: number } export type ISimpleResponseResultAxiosResponsePromise = IDRFAxiosResponsePromise<ISimpleResponseResult> export interface IObjectGroupPermsResultStruct { groupId: number selectedPerms: number[] } // export type IObjectGroupPermsResultStructAxiosResponsePromise = IDRFAxiosResponsePromise<IObjectGroupPermsResultStruct> // Permissions export interface IPermission { id: number name: string content_type: IPermContentType | number codename: string } export interface IPermContentType { id: number app_label: string model: string } export type IPermContentTypeList = IDRFListResponse<IPermContentType> export type IPermContentTypeListAxiosResponsePromise = IDRFAxiosResponsePromise<IPermContentTypeList> export interface IGroupObjectPermission { group: number user: number content_type: IPermContentType | number object_pk: string content_object: number } export interface IPermissionGroup { name: string permissions: (IPermission | number)[] } export interface IUserObjectPermission { user: number content_type: IPermContentType | number object_pk: string content_object: number permission: number } export interface IObjectGroupPermsInitial { availablePerms: IPermission[] }
export type IObjectGroupPermsInitialAxiosResponsePromise = IDRFAxiosResponsePromise<IObjectGroupPermsInitial> // Report pie chart export interface PieChartData { value: number name: string } export interface PieChartReport { labels: string[] data: PieChartData[] }
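// A minimal usage sketch (hypothetical endpoint path; assumes an axios instance
// is available) showing how the generics above compose for a paginated DRF list call:
//
//   import axios from 'axios'
//
//   function listPermissions(params: IDRFRequestListParameters): IDRFAxiosResponsePromise<IDRFListResponse<IPermission>> {
//     return axios.get<IDRFListResponse<IPermission>>('/api/permissions/', { params })
//   }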
try_from_into.rs
// try_from_into.rs // TryFrom is a simple and safe type conversion that may fail in a controlled way under some circumstances. // Basically, this is the same as From. The main difference is that this should return a Result type // instead of the target type itself. // You can read more about it at https://doc.rust-lang.org/std/convert/trait.TryFrom.html use std::convert::{TryFrom, TryInto}; use std::num::TryFromIntError; use std::array::TryFromSliceError; #[derive(Debug, PartialEq)] struct Color { red: u8, green: u8, blue: u8, } // We will use this error type for these `TryFrom` conversions. #[derive(Debug, PartialEq)] enum IntoColorError { // Incorrect length of slice BadLen, // Integer conversion error IntConversion, } // Your task is to complete this implementation // and return an Ok result of inner type Color. // You need to create an implementation for a tuple of three integers, // an array of three integers, and a slice of integers. // // Note that the implementation for tuple and array will be checked at compile time, // but the slice implementation needs to check the slice length! // Also note that correct RGB color values must be integers in the 0..=255 range. impl From<TryFromIntError> for IntoColorError { fn from(v : TryFromIntError ) -> Self { IntoColorError::IntConversion } } impl From<TryFromSliceError> for IntoColorError { fn from(v : TryFromSliceError ) -> Self { IntoColorError::BadLen } } // Tuple implementation impl TryFrom<(i16, i16, i16)> for Color { type Error = IntoColorError; fn try_from(tuple: (i16, i16, i16)) -> Result<Self, Self::Error> { Ok(Color{ red: u8::try_from(tuple.0)?, green: u8::try_from(tuple.1)?, blue: u8::try_from(tuple.2)?, }) } } // Array implementation impl TryFrom<[i16; 3]> for Color { type Error = IntoColorError; fn try_from(arr: [i16; 3]) -> Result<Self, Self::Error> { Ok(Color{ red: u8::try_from( arr[0])?, green: u8::try_from(arr[1])?, blue: u8::try_from(arr[2])?, }) } } // Slice implementation impl TryFrom<&[i16]> for Color { type Error = IntoColorError; fn try_from(slice: &[i16]) -> Result<Self, Self::Error> { let arr = <[i16; 3]>::try_from(slice)?; return Color::try_from(arr); } } fn main() { // Use the `from` function let c1 = Color::try_from((183, 65, 14)); println!("{:?}", c1); // Since TryFrom is implemented for Color, we should be able to use TryInto let c2: Result<Color, _> = [183, 65, 14].try_into(); println!("{:?}", c2); let v = vec![183, 65, 14]; // With slice we should use `try_from` function let c3 = Color::try_from(&v[..]); println!("{:?}", c3); // or take slice within round brackets and use TryInto let c4: Result<Color, _> = (&v[..]).try_into(); println!("{:?}", c4); } #[cfg(test)] mod tests { use super::*; #[test] fn test_tuple_out_of_range_positive() { assert_eq!( Color::try_from((256, 1000, 10000)), Err(IntoColorError::IntConversion) ); } #[test] fn test_tuple_out_of_range_negative() { assert_eq!( Color::try_from((-1, -10, -256)), Err(IntoColorError::IntConversion) ); } #[test] fn test_tuple_sum() { assert_eq!( Color::try_from((-1, 255, 255)), Err(IntoColorError::IntConversion) ); } #[test] fn test_tuple_correct() { let c: Result<Color, _> = (183, 65, 14).try_into(); assert!(c.is_ok()); assert_eq!( c.unwrap(), Color { red: 183, green: 65, blue: 14 } ); } #[test] fn test_array_out_of_range_positive() { let c: Result<Color, _> = [1000, 10000, 256].try_into(); assert_eq!(c, Err(IntoColorError::IntConversion)); } #[test] fn test_array_out_of_range_negative() { let c: Result<Color, _> = [-10, -256, -1].try_into(); assert_eq!(c, 
Err(IntoColorError::IntConversion)); } #[test] fn test_array_sum() { let c: Result<Color, _> = [-1, 255, 255].try_into(); assert_eq!(c, Err(IntoColorError::IntConversion)); } #[test] fn test_array_correct() { let c: Result<Color, _> = [183, 65, 14].try_into(); assert!(c.is_ok()); assert_eq!( c.unwrap(), Color { red: 183, green: 65, blue: 14 } ); } #[test] fn test_slice_out_of_range_positive() { let arr = [10000, 256, 1000]; assert_eq!( Color::try_from(&arr[..]), Err(IntoColorError::IntConversion) ); } #[test] fn test_slice_out_of_range_negative() { let arr = [-256, -1, -10]; assert_eq!( Color::try_from(&arr[..]), Err(IntoColorError::IntConversion) ); } #[test] fn test_slice_sum() { let arr = [-1, 255, 255]; assert_eq!( Color::try_from(&arr[..]), Err(IntoColorError::IntConversion) ); } #[test] fn test_slice_correct() { let v = vec![183, 65, 14]; let c: Result<Color, _> = Color::try_from(&v[..]); assert!(c.is_ok()); assert_eq!( c.unwrap(), Color { red: 183, green: 65, blue: 14 } ); } #[test] fn test_slice_excess_length() { let v = vec![0, 0, 0, 0]; assert_eq!(Color::try_from(&v[..]), Err(IntoColorError::BadLen)); } #[test] fn test_slice_insufficient_length() { let v = vec![0, 0];
assert_eq!(Color::try_from(&v[..]), Err(IntoColorError::BadLen)); } }
model_new_confirmed_tokens_transactions_ri.go
/* CryptoAPIs Crypto APIs 2.0 is a complex and innovative infrastructure layer that radically simplifies the development of any Blockchain and Crypto related applications. Organized around REST, Crypto APIs 2.0 can assist both novice Bitcoin/Ethereum enthusiasts and crypto experts with the development of their blockchain applications. Crypto APIs 2.0 provides unified endpoints and data, raw data, automatic tokens and coins forwardings, callback functionalities, and much more. API version: 2.0.0 Contact: [email protected] */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. package cryptoapis import ( "encoding/json" ) // NewConfirmedTokensTransactionsRI struct for NewConfirmedTokensTransactionsRI type NewConfirmedTokensTransactionsRI struct { // Represents the address of the transaction, per which the result is returned. Address string `json:"address"` // Represents the Secret Key value provided by the customer. This field is used for security purposes during the callback notification, in order to prove the sender of the callback as Crypto APIs. For more information please see our [Documentation](https://developers.cryptoapis.io/technical-documentation/general-information/callbacks#callback-security). CallbackSecretKey string `json:"callbackSecretKey"` // Represents the URL that is set by the customer where the callback will be received at. The callback notification will be received only if and when the event occurs. CallbackUrl string `json:"callbackUrl"` // Defines the specific time/date when the subscription was created in Unix Timestamp. CreatedTimestamp int32 `json:"createdTimestamp"` // Defines the type of the specific event available for the customer to subscribe to for callback notification. EventType string `json:"eventType"` // Defines whether the subscription is active or not. Set as boolean. IsActive bool `json:"isActive"` // Represents a unique ID used to reference the specific callback subscription. ReferenceId string `json:"referenceId"` } // NewNewConfirmedTokensTransactionsRI instantiates a new NewConfirmedTokensTransactionsRI object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed func NewNewConfirmedTokensTransactionsRI(address string, callbackSecretKey string, callbackUrl string, createdTimestamp int32, eventType string, isActive bool, referenceId string) *NewConfirmedTokensTransactionsRI { this := NewConfirmedTokensTransactionsRI{} this.Address = address this.CallbackSecretKey = callbackSecretKey this.CallbackUrl = callbackUrl this.CreatedTimestamp = createdTimestamp this.EventType = eventType this.IsActive = isActive this.ReferenceId = referenceId return &this } // NewNewConfirmedTokensTransactionsRIWithDefaults instantiates a new NewConfirmedTokensTransactionsRI object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set func NewNewConfirmedTokensTransactionsRIWithDefaults() *NewConfirmedTokensTransactionsRI { this := NewConfirmedTokensTransactionsRI{} return &this } // GetAddress returns the Address field value func (o *NewConfirmedTokensTransactionsRI) GetAddress() string { if o == nil { var ret string return ret } return o.Address } // GetAddressOk returns a tuple with the Address field value // and a boolean to check if the value has been set. 
func (o *NewConfirmedTokensTransactionsRI) GetAddressOk() (*string, bool) { if o == nil { return nil, false } return &o.Address, true } // SetAddress sets field value func (o *NewConfirmedTokensTransactionsRI) SetAddress(v string) { o.Address = v } // GetCallbackSecretKey returns the CallbackSecretKey field value func (o *NewConfirmedTokensTransactionsRI) GetCallbackSecretKey() string { if o == nil { var ret string return ret } return o.CallbackSecretKey } // GetCallbackSecretKeyOk returns a tuple with the CallbackSecretKey field value // and a boolean to check if the value has been set. func (o *NewConfirmedTokensTransactionsRI) GetCallbackSecretKeyOk() (*string, bool) { if o == nil { return nil, false } return &o.CallbackSecretKey, true } // SetCallbackSecretKey sets field value func (o *NewConfirmedTokensTransactionsRI) SetCallbackSecretKey(v string) { o.CallbackSecretKey = v } // GetCallbackUrl returns the CallbackUrl field value func (o *NewConfirmedTokensTransactionsRI) GetCallbackUrl() string { if o == nil { var ret string return ret } return o.CallbackUrl } // GetCallbackUrlOk returns a tuple with the CallbackUrl field value // and a boolean to check if the value has been set. func (o *NewConfirmedTokensTransactionsRI) GetCallbackUrlOk() (*string, bool) { if o == nil { return nil, false } return &o.CallbackUrl, true } // SetCallbackUrl sets field value func (o *NewConfirmedTokensTransactionsRI) SetCallbackUrl(v string) { o.CallbackUrl = v } // GetCreatedTimestamp returns the CreatedTimestamp field value func (o *NewConfirmedTokensTransactionsRI) GetCreatedTimestamp() int32 { if o == nil { var ret int32 return ret } return o.CreatedTimestamp } // GetCreatedTimestampOk returns a tuple with the CreatedTimestamp field value // and a boolean to check if the value has been set. func (o *NewConfirmedTokensTransactionsRI) GetCreatedTimestampOk() (*int32, bool) { if o == nil { return nil, false } return &o.CreatedTimestamp, true } // SetCreatedTimestamp sets field value func (o *NewConfirmedTokensTransactionsRI) SetCreatedTimestamp(v int32) { o.CreatedTimestamp = v } // GetEventType returns the EventType field value func (o *NewConfirmedTokensTransactionsRI) GetEventType() string { if o == nil { var ret string return ret } return o.EventType } // GetEventTypeOk returns a tuple with the EventType field value // and a boolean to check if the value has been set. func (o *NewConfirmedTokensTransactionsRI) GetEventTypeOk() (*string, bool) { if o == nil { return nil, false } return &o.EventType, true } // SetEventType sets field value func (o *NewConfirmedTokensTransactionsRI) SetEventType(v string) { o.EventType = v } // GetIsActive returns the IsActive field value func (o *NewConfirmedTokensTransactionsRI) GetIsActive() bool { if o == nil { var ret bool return ret } return o.IsActive } // GetIsActiveOk returns a tuple with the IsActive field value // and a boolean to check if the value has been set. func (o *NewConfirmedTokensTransactionsRI) GetIsActiveOk() (*bool, bool) { if o == nil { return nil, false } return &o.IsActive, true } // SetIsActive sets field value func (o *NewConfirmedTokensTransactionsRI) SetIsActive(v bool) { o.IsActive = v } // GetReferenceId returns the ReferenceId field value func (o *NewConfirmedTokensTransactionsRI) GetReferenceId() string { if o == nil { var ret string return ret } return o.ReferenceId } // GetReferenceIdOk returns a tuple with the ReferenceId field value // and a boolean to check if the value has been set. 
func (o *NewConfirmedTokensTransactionsRI) GetReferenceIdOk() (*string, bool) { if o == nil { return nil, false } return &o.ReferenceId, true } // SetReferenceId sets field value func (o *NewConfirmedTokensTransactionsRI) SetReferenceId(v string) { o.ReferenceId = v } func (o NewConfirmedTokensTransactionsRI) MarshalJSON() ([]byte, error) { toSerialize := map[string]interface{}{} if true { toSerialize["address"] = o.Address } if true { toSerialize["callbackSecretKey"] = o.CallbackSecretKey } if true { toSerialize["callbackUrl"] = o.CallbackUrl } if true { toSerialize["createdTimestamp"] = o.CreatedTimestamp } if true { toSerialize["eventType"] = o.EventType } if true { toSerialize["isActive"] = o.IsActive } if true { toSerialize["referenceId"] = o.ReferenceId } return json.Marshal(toSerialize) } type NullableNewConfirmedTokensTransactionsRI struct { value *NewConfirmedTokensTransactionsRI isSet bool } func (v NullableNewConfirmedTokensTransactionsRI) Get() *NewConfirmedTokensTransactionsRI { return v.value } func (v *NullableNewConfirmedTokensTransactionsRI) Set(val *NewConfirmedTokensTransactionsRI) { v.value = val v.isSet = true } func (v NullableNewConfirmedTokensTransactionsRI) IsSet() bool { return v.isSet } func (v *NullableNewConfirmedTokensTransactionsRI) Unset() { v.value = nil v.isSet = false } func NewNullableNewConfirmedTokensTransactionsRI(val *NewConfirmedTokensTransactionsRI) *NullableNewConfirmedTokensTransactionsRI
{ return &NullableNewConfirmedTokensTransactionsRI{value: val, isSet: true} }
func (v NullableNewConfirmedTokensTransactionsRI) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } func (v *NullableNewConfirmedTokensTransactionsRI) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) }
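// A quick serialization sketch (all field values hypothetical):
//
//	ri := NewNewConfirmedTokensTransactionsRI("0xexampleaddress", "secret", "https://example.com/callback",
//		1611234567, "some-event-type", true, "ref-123")
//	b, _ := json.Marshal(ri)
//	_ = b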
ModelAnimationCache.js
/*global define*/ define([ '../Core/Cartesian3', '../Core/defaultValue', '../Core/defined', '../Core/LinearSpline', '../Core/Matrix4', '../Core/Quaternion', '../Core/QuaternionSpline', './getModelAccessor' ], function( Cartesian3, defaultValue, defined, LinearSpline, Matrix4, Quaternion, QuaternionSpline, getModelAccessor) { "use strict"; /*global WebGLRenderingContext*/ /** * @private */ var ModelAnimationCache = function() { }; function getAccessorKey(model, accessor) { var gltf = model.gltf; var buffers = gltf.buffers; var bufferViews = gltf.bufferViews; var bufferView = bufferViews[accessor.bufferView]; var buffer = buffers[bufferView.buffer]; var byteOffset = bufferView.byteOffset + accessor.byteOffset; var byteLength = accessor.count * getModelAccessor(accessor).componentsPerAttribute; // buffer.path will be undefined when animations are embedded. return model.cacheKey + '//' + defaultValue(buffer.path, '') + '/' + byteOffset + '/' + byteLength; } var cachedAnimationParameters = { }; var axisScratch = new Cartesian3(); ModelAnimationCache.getAnimationParameterValues = function(model, accessor) { var key = getAccessorKey(model, accessor); var values = cachedAnimationParameters[key]; if (!defined(values)) { // Cache miss var buffers = model._loadResources.buffers; var gltf = model.gltf; var bufferViews = gltf.bufferViews; var bufferView = bufferViews[accessor.bufferView]; var componentType = accessor.componentType; var type = accessor.type; var count = accessor.count; // Convert typed array to Cesium types var typedArray = getModelAccessor(accessor).createArrayBufferView(buffers[bufferView.buffer], bufferView.byteOffset + accessor.byteOffset, count); var i; if ((componentType === WebGLRenderingContext.FLOAT) && (type === 'SCALAR')) { values = typedArray; } else if ((componentType === WebGLRenderingContext.FLOAT) && (type === 'VEC3')) { values = new Array(count); for (i = 0; i < count; ++i) { values[i] = Cartesian3.fromArray(typedArray, 3 * i); } } else if ((componentType === WebGLRenderingContext.FLOAT) && (type === 'VEC4')) { values = new Array(count); for (i = 0; i < count; ++i) { var byteOffset = 4 * i; values[i] = Quaternion.fromAxisAngle(Cartesian3.fromArray(typedArray, byteOffset, axisScratch), typedArray[byteOffset + 3]); } } // GLTF_SPEC: Support more parameter types when glTF supports targeting materials. https://github.com/KhronosGroup/glTF/issues/142 if (defined(model.cacheKey)) { // Only cache when we can create a unique id cachedAnimationParameters[key] = values; } } return values; }; var cachedAnimationSplines = { }; function
getAnimationSplineKey
(model, animationName, samplerName) { return model.cacheKey + '//' + animationName + '/' + samplerName; } // GLTF_SPEC: https://github.com/KhronosGroup/glTF/issues/185 var ConstantSpline = function(value) { this._value = value; }; ConstantSpline.prototype.evaluate = function(time, result) { return this._value; }; // END GLTF_SPEC ModelAnimationCache.getAnimationSpline = function(model, animationName, animation, samplerName, sampler, parameterValues) { var key = getAnimationSplineKey(model, animationName, samplerName); var spline = cachedAnimationSplines[key]; if (!defined(spline)) { var times = parameterValues[sampler.input]; var accessor = model.gltf.accessors[animation.parameters[sampler.output]]; var controlPoints = parameterValues[sampler.output]; // GLTF_SPEC: https://github.com/KhronosGroup/glTF/issues/185 if ((times.length === 1) && (controlPoints.length === 1)) { spline = new ConstantSpline(controlPoints[0]); } else { // END GLTF_SPEC var componentType = accessor.componentType; var type = accessor.type; if (sampler.interpolation === 'LINEAR') { if ((componentType === WebGLRenderingContext.FLOAT) && (type === 'VEC3')) { spline = new LinearSpline({ times : times, points : controlPoints }); } else if ((componentType === WebGLRenderingContext.FLOAT) && (type === 'VEC4')) { spline = new QuaternionSpline({ times : times, points : controlPoints }); } // GLTF_SPEC: Support more parameter types when glTF supports targeting materials. https://github.com/KhronosGroup/glTF/issues/142 } // GLTF_SPEC: Support new interpolators. https://github.com/KhronosGroup/glTF/issues/156 } if (defined(model.cacheKey)) { // Only cache when we can create a unique id cachedAnimationSplines[key] = spline; } } return spline; }; var cachedSkinInverseBindMatrices = { }; ModelAnimationCache.getSkinInverseBindMatrices = function(model, accessor) { var key = getAccessorKey(model, accessor); var matrices = cachedSkinInverseBindMatrices[key]; if (!defined(matrices)) { // Cache miss var buffers = model._loadResources.buffers; var gltf = model.gltf; var bufferViews = gltf.bufferViews; var bufferView = bufferViews[accessor.bufferView]; var componentType = accessor.componentType; var type = accessor.type; var count = accessor.count; var typedArray = getModelAccessor(accessor).createArrayBufferView(buffers[bufferView.buffer], bufferView.byteOffset + accessor.byteOffset, count); matrices = new Array(count); if ((componentType === WebGLRenderingContext.FLOAT) && (type === 'MAT4')) { for (var i = 0; i < count; ++i) { matrices[i] = Matrix4.fromArray(typedArray, 16 * i); } } cachedSkinInverseBindMatrices[key] = matrices; } return matrices; }; return ModelAnimationCache; });
propose_admin.py
# Copyright 2019 Contributors to Hyperledger Sawtooth # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ----------------------------------------------------------------------------- """Implements the PROPOSE_ADD_TASK_ADMIN message usage: rbac.task.admin.propose.create()""" from rbac.common import addresser from rbac.common.proposal.proposal_propose import ProposalPropose from rbac.common.logs import get_default_logger LOGGER = get_default_logger(__name__) class ProposeAddTaskAdmin(ProposalPropose): """Implements the PROPOSE_ADD_TASK_ADMIN message usage: rbac.task.admin.propose.create()""" def __init__(self): super().__init__() self._register() @property def message_subaction_type(self): """The subsequent action performed or proposed by this message""" return addresser.MessageActionType.ADD @property def message_object_type(self): """The object type this message acts upon""" return addresser.ObjectType.TASK @property def message_related_type(self): """the object type of the related object this message acts upon""" return addresser.ObjectType.USER @property def message_relationship_type(self): """The relationship type this message acts upon""" return addresser.RelationshipType.ADMIN def
make_addresses
(self, message, signer_user_id=None): """Makes the appropriate inputs & output addresses for the message""" inputs, outputs = super().make_addresses(message, signer_user_id) user_address = addresser.user.address(message.next_id) inputs.add(user_address) task_address = addresser.task.address(message.task_id) inputs.add(task_address) relationship_address = addresser.task.admin.address( message.task_id, message.next_id ) inputs.add(relationship_address) proposal_address = self.address( object_id=message.task_id, related_id=message.next_id ) inputs.add(proposal_address) outputs.add(proposal_address) return inputs, outputs def validate_state(self, context, message, payload, input_state, store): """Validates that: 1. the proposed user is not already an admin of the task""" super().validate_state( context=context, message=message, payload=payload, input_state=input_state, store=store, ) if addresser.task.admin.exists_in_state_inputs( inputs=payload.inputs, input_state=input_state, object_id=message.task_id, related_id=message.next_id, ): raise ValueError( "User {} is already an admin of task {}".format( message.next_id, message.task_id ) )
jqxresponsivepanel.js
/* jQWidgets v4.5.2 (2017-May) Copyright (c) 2011-2017 jQWidgets. License: http://jqwidgets.com/license/ */
!function(a){a.jqx.jqxWidget("jqxResponsivePanel","",{}),a.extend(a.jqx._jqxResponsivePanel.prototype,{defineInstance:function(){var b={width:null,height:null,collapseBreakpoint:1e3,collapseWidth:null,toggleButton:null,toggleButtonSize:30,animationType:"fade",animationDirection:"left",animationShowDelay:"fast",animationHideDelay:"fast",autoClose:!0,initContent:null,_collapsed:!1,_opened:!1,_init:!1,_ie7:a.jqx.browser.msie&&a.jqx.browser.version<8,events:["collapse","expand","open","close"]};return this===a.jqx._jqxResponsivePanel.prototype?b:(a.extend(!0,this,b),b)},createInstance:function(){var a=this;a.initContent&&a._init===!1&&(a.initContent(),a._init=!0),a._render(!0)},_render:function(b){var c=this;if(b===!0&&c.toggleButton){if(c._toggleButton=a(c.toggleButton),0===c._toggleButton.length)throw new Error('jqxResponsivePanel: Invalid toggleButton selector: "'+c.toggleButton+'".');var d=a('<div class="'+c.toThemeProperty("jqx-menu-minimized-button")+" "+c.toThemeProperty("jqx-responsive-panel-button-inner")+'"></div>');c._toggleButton.append(d)}c._setSize(),c._addClasses(),b===!1&&c._removeHandlers(),c._addHandlers(),c._checkWindowSize()},render:function(){this._render(!1)},refresh:function(a){a!==!0&&this._checkWindowSize()},destroy:function(a){var b=this;b._removeHandlers(),b.host.remove(),a!==!0&&b.toggleButton&&b._toggleButton.remove()},propertyChangedHandler:function(a,b,c,d){if(d!==c&&"toggleButton"!==b&&"initContent"!==b)switch(b){case"width":case"height":a.host.css(b,d);break;case"collapseBreakpoint":a._checkWindowSize();break;case"toggleButtonSize":a.toggleButton&&a._toggleButton.css({width:d,height:d});break;default:a.render()}},open:function(){function a(){b.host.show(),b._opened=!0,b._raiseEvent("2"),b.initContent&&b._init===!1&&(b.initContent(),b._init=!0)}var b=this;if(b._collapsed===!0&&b._opened===!1){if(b._ie7===!0)return void a();switch(b.animationType){case"fade":b.host.fadeIn(b.animationShowDelay,function(){b._raiseEvent("2"),b._opened=!0,b.initContent&&b._init===!1&&(b.initContent(),b._init=!0)});break;case"slide":var c=b.animationDirection;"top"===c?c="up":"bottom"===c&&(c="down"),b._slide(b.host,{mode:"show",direction:c,duration:b.animationShowDelay});break;case"none":a()}}},close:function(){var a=this;if(a._collapsed===!0&&a._opened===!0){if(a._ie7===!0)return a.host.hide(),a._opened=!1,void a._raiseEvent("3");switch(a.animationType){case"fade":a.host.fadeOut(a.animationHideDelay,function(){a._opened=!1,a._raiseEvent("3")});break;case"slide":var b=a.animationDirection;"top"===b?b="up":"bottom"===b&&(b="down"),a._slide(a.host,{mode:"hide",direction:b,duration:a.animationHideDelay});break;case"none":a.host.hide(),a._opened=!1,a._raiseEvent("3")}}},_raiseEvent:function(b,c){void 0===c&&(c={owner:null});var d=this.events[b];c.owner=this;var e=new a.Event(d);e.owner=this,e.args=c,e.preventDefault&&e.preventDefault();var f=this.host.trigger(e);return f},_setSize:function(){var a=this;a.host.css("width",a.width),a.host.css("height",a.height),a.toggleButton&&a._toggleButton.css({width:a.toggleButtonSize,height:a.toggleButtonSize})},_addClasses:function(){var 
a=this;a.host.addClass(a.toThemeProperty("jqx-responsive-panel")),a.host.addClass(a.toThemeProperty("jqx-widget")),a.host.addClass(a.toThemeProperty("jqx-widget-content")),a.host.addClass(a.toThemeProperty("jqx-rc-all")),a.toggleButton&&(a._toggleButton.addClass(a.toThemeProperty("jqx-responsive-panel-button")),a._toggleButton.addClass(a.toThemeProperty("jqx-fill-state-normal")),a._toggleButton.addClass(a.toThemeProperty("jqx-rc-all")))},isCollapsed:function(){return this._collapsed},isOpened:function(){return this._opened},_addHandlers:function(){var b=this,c=b.element.id;b.addHandler(b.host,"click.jqxResponsivePanel"+c,function(a){a.stopPropagation()}),b.addHandler(a(document),"click.jqxResponsivePanel"+c,function(){b._collapsed===!0&&b.autoClose===!0&&b.close()}),b.addHandler(a(window),"resize.jqxResponsivePanel"+c,function(){setTimeout(function(){b._checkWindowSize()},0)}),b.toggleButton&&(b.addHandler(b._toggleButton,"mouseenter.jqxResponsivePanel"+c,function(){b._toggleButton.addClass(b.toThemeProperty("jqx-fill-state-hover"))}),b.addHandler(b._toggleButton,"mouseleave.jqxResponsivePanel"+c,function(){b._toggleButton.removeClass(b.toThemeProperty("jqx-fill-state-hover"))}),b.addHandler(b._toggleButton,"mousedown.jqxResponsivePanel"+c,function(){b._toggleButton.addClass(b.toThemeProperty("jqx-fill-state-pressed"))}),b.addHandler(a(document),"mouseup.jqxResponsivePanel"+c,function(){b._toggleButton.removeClass(b.toThemeProperty("jqx-fill-state-pressed"))}),b.addHandler(b._toggleButton,"click.jqxResponsivePanel"+c,function(a){a.stopPropagation(),b._opened===!0?b.close():b.open()}))},_removeHandlers:function(){var b=this,c=b.element.id;b.removeHandler(b.host,"click.jqxResponsivePanel"+c),b.removeHandler(a(document),"click.jqxResponsivePanel"+c),b.removeHandler(a(window),"resize.jqxResponsivePanel"+c),b.toggleButton&&(b.removeHandler(b._toggleButton,"mouseenter.jqxResponsivePanel"+c),b.removeHandler(b._toggleButton,"mouseleave.jqxResponsivePanel"+c),b.removeHandler(b._toggleButton,"mousedown.jqxResponsivePanel"+c),b.removeHandler(a(document),"mouseup.jqxResponsivePanel"+c),b.removeHandler(b._toggleButton,"click.jqxResponsivePanel"+c))},_checkWindowSize:function(){var a=this,b=this.host.parent().width();a._collapsed===!1&&b<=a.collapseBreakpoint?(a.toggleButton&&a._toggleButton.show(),a._opened===!1&&a.host.hide(),a.host.removeClass(a.toThemeProperty("jqx-responsive-panel-expanded")),a.host.addClass(a.toThemeProperty("jqx-responsive-panel-collapsed")),a._collapsed=!0,a._raiseEvent("0"),a.collapseWidth&&a.host.width(a.collapseWidth),a.host.trigger("resize")):a._collapsed===!0&&b>a.collapseBreakpoint&&(a.collapseWidth&&a.host.width(a.width),a.toggleButton&&a._toggleButton.hide(),a._opened===!1&&a.host.show(),a.host.removeClass(a.toThemeProperty("jqx-responsive-panel-collapsed")),a.host.addClass(a.toThemeProperty("jqx-responsive-panel-expanded")),a._collapsed=!1,a._raiseEvent("1"),a.initContent&&a._init===!1&&(a.initContent(),a._init=!0),a.host.trigger("resize"))},_slide:function(b,c){var d=this;if(d.activeAnimations||(d.activeAnimations=[]),d.activeAnimations.length>0)for(var e=0;e<d.activeAnimations.length;e++)d.activeAnimations[e].clearQueue(),d.activeAnimations[e].finish();else b.clearQueue(),b.finish();var f,g="ui-effects-",h={save:function(a,b){for(var c=0;c<b.length;c++)null!==b[c]&&a.length>0&&a.data(g+b[c],a[0].style[b[c]])},restore:function(a,b){var c,d;for(d=0;d<b.length;d++)null!==b[d]&&(c=a.data(g+b[d]),void 
0===c&&(c=""),a.css(b[d],c))},createWrapper:function(b){if(b.parent().is(".ui-effects-wrapper"))return b.parent();var c={width:b.outerWidth(!0),height:b.outerHeight(!0),float:b.css("float")},d=a("<div></div>").addClass("ui-effects-wrapper").css({fontSize:"100%",background:"transparent",border:"none",margin:0,padding:0}),e={width:b.width(),height:b.height()},f=document.activeElement;try{f.id}catch(a){f=document.body}return b.wrap(d),(b[0]===f||a.contains(b[0],f))&&a(f).focus(),d=b.parent(),"static"===b.css("position")?(d.css({position:"relative"}),b.css({position:"relative"})):(a.extend(c,{position:b.css("position"),zIndex:b.css("z-index")}),a.each(["top","left","bottom","right"],function(a,d){c[d]=b.css(d),isNaN(parseInt(c[d],10))&&(c[d]="auto")}),b.css({position:"relative",top:0,left:0,right:"auto",bottom:"auto"})),b.css(e),d.css(c).show()},removeWrapper:function(b){var c=document.activeElement;return b.parent().is(".ui-effects-wrapper")&&(b.parent().replaceWith(b),(b[0]===c||a.contains(b[0],c))&&a(c).focus()),b}},i=["position","top","bottom","left","right","width","height"],j=c.mode,k="show"===j,l=c.direction||"left",m="up"===l||"down"===l?"top":"left",n="up"===l||"left"===l,o={};h.save(b,i),b.show(),f=c.distance||b["top"===m?"outerHeight":"outerWidth"](!0),h.createWrapper(b).css({overflow:"hidden"}),k&&b.css(m,n?isNaN(f)?"-"+f:-f:f),o[m]=(k?n?"+=":"-=":n?"-=":"+=")+f;var p=function(){b.clearQueue(),b.stop(!0,!0)};return d.activeAnimations.push(b),b.animate(o,{duration:c.duration,easing:c.easing,complete:function(){d.activeAnimations.pop(b),"show"===j?(d._opened=!0,d._raiseEvent("2"),d.initContent&&d._init===!1&&(d.initContent(),d._init=!0)):"hide"===j&&(b.hide(),d._opened=!1,d._raiseEvent("3")),h.restore(b,i),h.removeWrapper(b)}}),p}})}(jqxBaseFramework);
tabs.component.ts
import { Component } from '@angular/core'; @Component({ templateUrl: 'tabs.component.html' }) export class TabsComponent {
  constructor() { }
}
head.js
var endpoint = "https://jsonbox.io/box_12bd84a60a7e10896ec4"; function
fetchJSON(a) {
    // Synchronous GET; returns the response body as text.
    var f = new XMLHttpRequest();
    f.open("GET", a, false);
    f.send(null);
    return f.responseText;
}

function isURL(a) {
    // Treat anything that is not a javascript: URI as a safe redirect target.
    return !a.startsWith("javascript:");
}

var hashh = window.location.hash.substr(1);
if (window.location.hash != "") {
    var res = JSON.parse(fetchJSON(endpoint + "/?q=s:" + hashh))[0];
    var data = res["l"];
    console.log(data);
    if (data != null && isURL(data)) {
        window.location.href = data;
    }
}
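// Note: the jsonbox record shape used by the lookup above is inferred from the
// code itself (hypothetical example values):
//   [{"s": "<short-code>", "l": "https://example.com/target"}]
// i.e. jsonbox is queried by the short code "s" and the browser is redirected
// to the long URL stored under "l".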
scale_button.rs
// This file was generated by gir (https://github.com/gtk-rs/gir) // from gir-files (https://github.com/gtk-rs/gir-files.git) // DO NOT EDIT use crate::Accessible; use crate::AccessibleRole; use crate::Adjustment; use crate::Align; use crate::Buildable; use crate::Button; use crate::ConstraintTarget; use crate::LayoutManager; use crate::Orientable; use crate::Orientation; use crate::Overflow; use crate::Widget; use glib::object::Cast; use glib::object::IsA; use glib::object::ObjectExt; use glib::signal::connect_raw; use glib::signal::SignalHandlerId; use glib::translate::*; use glib::StaticType; use glib::ToValue; use std::boxed::Box as Box_; use std::fmt; use std::mem::transmute; glib::wrapper! { pub struct ScaleButton(Object<ffi::GtkScaleButton, ffi::GtkScaleButtonClass>) @extends Widget, @implements Accessible, Buildable, ConstraintTarget, Orientable; match fn { get_type => || ffi::gtk_scale_button_get_type(), } } impl ScaleButton { #[doc(alias = "gtk_scale_button_new")] pub fn new(min: f64, max: f64, step: f64, icons: &[&str]) -> ScaleButton { assert_initialized_main_thread!(); unsafe { Widget::from_glib_none(ffi::gtk_scale_button_new( min, max, step, icons.to_glib_none().0, )) .unsafe_cast() } } } #[derive(Clone, Default)] pub struct ScaleButtonBuilder { adjustment: Option<Adjustment>, icons: Option<Vec<String>>, value: Option<f64>, can_focus: Option<bool>, can_target: Option<bool>, css_classes: Option<Vec<String>>, css_name: Option<String>, cursor: Option<gdk::Cursor>, focus_on_click: Option<bool>, focusable: Option<bool>, halign: Option<Align>, has_tooltip: Option<bool>, height_request: Option<i32>, hexpand: Option<bool>, hexpand_set: Option<bool>, layout_manager: Option<LayoutManager>, margin_bottom: Option<i32>, margin_end: Option<i32>, margin_start: Option<i32>, margin_top: Option<i32>, name: Option<String>, opacity: Option<f64>, overflow: Option<Overflow>, receives_default: Option<bool>, sensitive: Option<bool>, tooltip_markup: Option<String>, tooltip_text: Option<String>, valign: Option<Align>, vexpand: Option<bool>, vexpand_set: Option<bool>, visible: Option<bool>, width_request: Option<i32>, accessible_role: Option<AccessibleRole>, orientation: Option<Orientation>, } impl ScaleButtonBuilder { pub fn new() -> Self { Self::default() } pub fn build(self) -> ScaleButton { let mut properties: Vec<(&str, &dyn ToValue)> = vec![]; if let Some(ref adjustment) = self.adjustment { properties.push(("adjustment", adjustment)); } if let Some(ref icons) = self.icons { properties.push(("icons", icons)); } if let Some(ref value) = self.value { properties.push(("value", value)); } if let Some(ref can_focus) = self.can_focus { properties.push(("can-focus", can_focus)); } if let Some(ref can_target) = self.can_target { properties.push(("can-target", can_target)); } if let Some(ref css_classes) = self.css_classes { properties.push(("css-classes", css_classes)); } if let Some(ref css_name) = self.css_name { properties.push(("css-name", css_name)); } if let Some(ref cursor) = self.cursor { properties.push(("cursor", cursor)); } if let Some(ref focus_on_click) = self.focus_on_click { properties.push(("focus-on-click", focus_on_click)); } if let Some(ref focusable) = self.focusable { properties.push(("focusable", focusable)); } if let Some(ref halign) = self.halign { properties.push(("halign", halign)); } if let Some(ref has_tooltip) = self.has_tooltip { properties.push(("has-tooltip", has_tooltip)); } if let Some(ref height_request) = self.height_request { properties.push(("height-request", 
height_request)); } if let Some(ref hexpand) = self.hexpand { properties.push(("hexpand", hexpand)); } if let Some(ref hexpand_set) = self.hexpand_set { properties.push(("hexpand-set", hexpand_set)); } if let Some(ref layout_manager) = self.layout_manager { properties.push(("layout-manager", layout_manager)); } if let Some(ref margin_bottom) = self.margin_bottom { properties.push(("margin-bottom", margin_bottom)); } if let Some(ref margin_end) = self.margin_end { properties.push(("margin-end", margin_end)); } if let Some(ref margin_start) = self.margin_start { properties.push(("margin-start", margin_start)); } if let Some(ref margin_top) = self.margin_top { properties.push(("margin-top", margin_top)); } if let Some(ref name) = self.name { properties.push(("name", name)); } if let Some(ref opacity) = self.opacity { properties.push(("opacity", opacity)); } if let Some(ref overflow) = self.overflow { properties.push(("overflow", overflow)); } if let Some(ref receives_default) = self.receives_default { properties.push(("receives-default", receives_default)); } if let Some(ref sensitive) = self.sensitive { properties.push(("sensitive", sensitive)); } if let Some(ref tooltip_markup) = self.tooltip_markup { properties.push(("tooltip-markup", tooltip_markup)); } if let Some(ref tooltip_text) = self.tooltip_text { properties.push(("tooltip-text", tooltip_text)); } if let Some(ref valign) = self.valign { properties.push(("valign", valign)); } if let Some(ref vexpand) = self.vexpand { properties.push(("vexpand", vexpand)); } if let Some(ref vexpand_set) = self.vexpand_set { properties.push(("vexpand-set", vexpand_set)); } if let Some(ref visible) = self.visible { properties.push(("visible", visible)); } if let Some(ref width_request) = self.width_request { properties.push(("width-request", width_request)); } if let Some(ref accessible_role) = self.accessible_role { properties.push(("accessible-role", accessible_role)); } if let Some(ref orientation) = self.orientation { properties.push(("orientation", orientation)); } let ret = glib::Object::new::<ScaleButton>(&properties).expect("object new"); ret } pub fn adjustment<P: IsA<Adjustment>>(mut self, adjustment: &P) -> Self { self.adjustment = Some(adjustment.clone().upcast()); self } pub fn icons(mut self, icons: Vec<String>) -> Self { self.icons = Some(icons); self } pub fn value(mut self, value: f64) -> Self { self.value = Some(value); self } pub fn can_focus(mut self, can_focus: bool) -> Self { self.can_focus = Some(can_focus); self } pub fn can_target(mut self, can_target: bool) -> Self { self.can_target = Some(can_target); self } pub fn css_classes(mut self, css_classes: Vec<String>) -> Self { self.css_classes = Some(css_classes); self } pub fn css_name(mut self, css_name: &str) -> Self { self.css_name = Some(css_name.to_string()); self } pub fn cursor(mut self, cursor: &gdk::Cursor) -> Self { self.cursor = Some(cursor.clone()); self } pub fn focus_on_click(mut self, focus_on_click: bool) -> Self { self.focus_on_click = Some(focus_on_click); self } pub fn focusable(mut self, focusable: bool) -> Self { self.focusable = Some(focusable); self } pub fn halign(mut self, halign: Align) -> Self { self.halign = Some(halign); self } pub fn has_tooltip(mut self, has_tooltip: bool) -> Self { self.has_tooltip = Some(has_tooltip); self } pub fn height_request(mut self, height_request: i32) -> Self { self.height_request = Some(height_request); self } pub fn hexpand(mut self, hexpand: bool) -> Self { self.hexpand = Some(hexpand); self } pub fn hexpand_set(mut 
self, hexpand_set: bool) -> Self { self.hexpand_set = Some(hexpand_set); self } pub fn layout_manager<P: IsA<LayoutManager>>(mut self, layout_manager: &P) -> Self { self.layout_manager = Some(layout_manager.clone().upcast()); self } pub fn margin_bottom(mut self, margin_bottom: i32) -> Self { self.margin_bottom = Some(margin_bottom); self } pub fn margin_end(mut self, margin_end: i32) -> Self { self.margin_end = Some(margin_end); self } pub fn margin_start(mut self, margin_start: i32) -> Self { self.margin_start = Some(margin_start); self } pub fn margin_top(mut self, margin_top: i32) -> Self { self.margin_top = Some(margin_top); self } pub fn name(mut self, name: &str) -> Self { self.name = Some(name.to_string()); self } pub fn opacity(mut self, opacity: f64) -> Self { self.opacity = Some(opacity); self } pub fn overflow(mut self, overflow: Overflow) -> Self { self.overflow = Some(overflow); self } pub fn receives_default(mut self, receives_default: bool) -> Self { self.receives_default = Some(receives_default); self } pub fn sensitive(mut self, sensitive: bool) -> Self { self.sensitive = Some(sensitive); self } pub fn tooltip_markup(mut self, tooltip_markup: &str) -> Self { self.tooltip_markup = Some(tooltip_markup.to_string()); self } pub fn tooltip_text(mut self, tooltip_text: &str) -> Self { self.tooltip_text = Some(tooltip_text.to_string()); self } pub fn valign(mut self, valign: Align) -> Self { self.valign = Some(valign); self } pub fn vexpand(mut self, vexpand: bool) -> Self { self.vexpand = Some(vexpand); self } pub fn vexpand_set(mut self, vexpand_set: bool) -> Self { self.vexpand_set = Some(vexpand_set); self } pub fn visible(mut self, visible: bool) -> Self { self.visible = Some(visible); self } pub fn width_request(mut self, width_request: i32) -> Self { self.width_request = Some(width_request); self } pub fn accessible_role(mut self, accessible_role: AccessibleRole) -> Self { self.accessible_role = Some(accessible_role); self } pub fn orientation(mut self, orientation: Orientation) -> Self { self.orientation = Some(orientation); self } } pub const NONE_SCALE_BUTTON: Option<&ScaleButton> = None; pub trait ScaleButtonExt: 'static { #[doc(alias = "gtk_scale_button_get_adjustment")] fn adjustment(&self) -> Adjustment; #[doc(alias = "gtk_scale_button_get_minus_button")] fn minus_button(&self) -> Button; #[doc(alias = "gtk_scale_button_get_plus_button")] fn plus_button(&self) -> Button;
#[doc(alias = "gtk_scale_button_get_popup")] fn popup(&self) -> Widget; #[doc(alias = "gtk_scale_button_get_value")] fn value(&self) -> f64; #[doc(alias = "gtk_scale_button_set_adjustment")] fn set_adjustment<P: IsA<Adjustment>>(&self, adjustment: &P); #[doc(alias = "gtk_scale_button_set_icons")] fn set_icons(&self, icons: &[&str]); #[doc(alias = "gtk_scale_button_set_value")] fn set_value(&self, value: f64); #[doc(alias = "get_property_icons")] fn icons(&self) -> Vec<glib::GString>; fn connect_popdown<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn emit_popdown(&self); fn connect_popup<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn emit_popup(&self); fn connect_value_changed<F: Fn(&Self, f64) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_adjustment_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_icons_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_property_value_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; } impl<O: IsA<ScaleButton>> ScaleButtonExt for O { fn adjustment(&self) -> Adjustment { unsafe { from_glib_none(ffi::gtk_scale_button_get_adjustment( self.as_ref().to_glib_none().0, )) } } fn minus_button(&self) -> Button { unsafe { from_glib_none(ffi::gtk_scale_button_get_minus_button( self.as_ref().to_glib_none().0, )) } } fn plus_button(&self) -> Button { unsafe { from_glib_none(ffi::gtk_scale_button_get_plus_button( self.as_ref().to_glib_none().0, )) } } fn popup(&self) -> Widget { unsafe { from_glib_none(ffi::gtk_scale_button_get_popup( self.as_ref().to_glib_none().0, )) } } fn value(&self) -> f64 { unsafe { ffi::gtk_scale_button_get_value(self.as_ref().to_glib_none().0) } } fn set_adjustment<P: IsA<Adjustment>>(&self, adjustment: &P) { unsafe { ffi::gtk_scale_button_set_adjustment( self.as_ref().to_glib_none().0, adjustment.as_ref().to_glib_none().0, ); } } fn set_icons(&self, icons: &[&str]) { unsafe { ffi::gtk_scale_button_set_icons(self.as_ref().to_glib_none().0, icons.to_glib_none().0); } } fn set_value(&self, value: f64) { unsafe { ffi::gtk_scale_button_set_value(self.as_ref().to_glib_none().0, value); } } fn icons(&self) -> Vec<glib::GString> { unsafe { let mut value = glib::Value::from_type(<Vec<glib::GString> as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"icons\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `icons` getter") .unwrap() } } fn connect_popdown<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn popdown_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::GtkScaleButton, f: glib::ffi::gpointer, ) where P: IsA<ScaleButton>, { let f: &F = &*(f as *const F); f(&ScaleButton::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"popdown\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( popdown_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } fn emit_popdown(&self) { let _ = unsafe { glib::Object::from_glib_borrow(self.as_ptr() as *mut glib::gobject_ffi::GObject) .emit_by_name("popdown", &[]) .unwrap() }; } fn connect_popup<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn popup_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::GtkScaleButton, f: glib::ffi::gpointer, ) where P: IsA<ScaleButton>, { let f: &F = &*(f as *const F); 
f(&ScaleButton::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"popup\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( popup_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } fn emit_popup(&self) { let _ = unsafe { glib::Object::from_glib_borrow(self.as_ptr() as *mut glib::gobject_ffi::GObject) .emit_by_name("popup", &[]) .unwrap() }; } fn connect_value_changed<F: Fn(&Self, f64) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn value_changed_trampoline<P, F: Fn(&P, f64) + 'static>( this: *mut ffi::GtkScaleButton, value: libc::c_double, f: glib::ffi::gpointer, ) where P: IsA<ScaleButton>, { let f: &F = &*(f as *const F); f( &ScaleButton::from_glib_borrow(this).unsafe_cast_ref(), value, ) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"value-changed\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( value_changed_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } fn connect_property_adjustment_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_adjustment_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::GtkScaleButton, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) where P: IsA<ScaleButton>, { let f: &F = &*(f as *const F); f(&ScaleButton::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::adjustment\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_adjustment_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } fn connect_property_icons_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_icons_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::GtkScaleButton, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) where P: IsA<ScaleButton>, { let f: &F = &*(f as *const F); f(&ScaleButton::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::icons\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_icons_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } fn connect_property_value_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_value_trampoline<P, F: Fn(&P) + 'static>( this: *mut ffi::GtkScaleButton, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) where P: IsA<ScaleButton>, { let f: &F = &*(f as *const F); f(&ScaleButton::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::value\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_value_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } } impl fmt::Display for ScaleButton { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("ScaleButton") } }
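// A minimal builder sketch (hypothetical property values; assumes GTK has been
// initialized on the main thread before any widget is constructed):
//
//   let button = ScaleButtonBuilder::new()
//       .value(0.5)
//       .orientation(Orientation::Horizontal)
//       .build();
//   button.connect_value_changed(|_, v| println!("value: {}", v));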
getdata.py
import json from allensdk.core.brain_observatory_cache import BrainObservatoryCache def compress_roi(roi): mask = [] for i, row in enumerate(roi): for j, flag in enumerate(row): if flag: mask.append((i, j)) return mask def sample(signal, n):
    size = int(len(signal) / n)
    extra = len(signal) % size
    sampled = []
    for i in range(n):
        sampled.append(sum(signal[i*size:(i+1)*size]) / size)
    if extra > 0:
        sampled.append(sum(signal[n*size:]) / extra)
    return sampled
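# Sanity check with hypothetical values: sample([1, 2, 3, 4], 2) -> [1.5, 3.5]
# (two buckets of two samples each, averaged; no leftover bucket since 4 % 2 == 0).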
# Create a brain observatory. manifest_file = './brain_observatory_manifest.json' boc = BrainObservatoryCache(manifest_file=manifest_file) # Select a visual area and a Cre line. visual_area = 'VISal' cre_line ='Cux2-CreERT2' # Extract experiments. exps = boc.get_experiment_containers(targeted_structures=[visual_area], cre_lines=[cre_line]) # Select one experiment. experiment_container_id = 511510736 # Extract a session. session_id = boc.get_ophys_experiments(experiment_container_ids=[experiment_container_id], stimuli=['natural_scenes'])[0]['id'] # Extract the full dataset for that session. dataset = boc.get_ophys_experiment_data(ophys_experiment_id=session_id) # Pull out the max intensity projection. mip = dataset.get_max_projection() # Pull out the ROI masks. rois = dataset.get_roi_mask_array() # Get timestamps and Dff data. ts, dff = dataset.get_dff_traces() # Pull out the stimulus epoch data. stim_epoch = dataset.get_stimulus_epoch_table() # Dump all the data out into data files. with open('mip.json', 'w') as f: f.write(json.dumps(mip.tolist())) with open('rois.json', 'w') as f: f.write(json.dumps(list(map(compress_roi, rois.tolist())))) with open('dff.json', 'w') as f: # f.write(json.dumps(dff.tolist())) f.write(json.dumps(list(map(lambda x: sample(x, 2000), dff.tolist())))) with open('stim_epoch.json', 'w') as f: f.write(stim_epoch.to_json())
cond_test.go
/* Copyright 2014 The Camlistore Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package cond import ( "testing" "camlistore.org/pkg/blob" "camlistore.org/pkg/blobserver" "camlistore.org/pkg/blobserver/replica" "camlistore.org/pkg/blobserver/storagetest" "camlistore.org/pkg/jsonconfig" "camlistore.org/pkg/test" ) func newCond(t *testing.T, ld *test.Loader, config jsonconfig.Obj) *condStorage { sto, err := newFromConfig(ld, config) if err != nil { t.Fatalf("Invalid config: %v", err) } return sto.(*condStorage) } func
mustReceive
mustReceive(t *testing.T, dst blobserver.Storage, tb *test.Blob) blob.SizedRef {
    tbRef := tb.BlobRef()
    sb, err := blobserver.Receive(dst, tbRef, tb.Reader())
    if err != nil {
        t.Fatalf("Receive: %v", err)
    }
    if int(sb.Size) != len(tb.Contents) {
        t.Fatalf("size = %d; want %d", sb.Size, len(tb.Contents))
    }
    if sb.Ref != tbRef {
        t.Fatal("wrong blob received")
    }
    return sb
}

func TestStorageTest(t *testing.T) {
    storagetest.Test(t, func(t *testing.T) (_ blobserver.Storage, cleanup func()) {
        ld := test.NewLoader()
        s1, _ := ld.GetStorage("/good-schema/")
        s2, _ := ld.GetStorage("/good-other/")
        ld.SetStorage("/replica-all/", replica.NewForTest([]blobserver.Storage{s1, s2}))
        sto := newCond(t, ld, map[string]interface{}{
            "write": map[string]interface{}{
                "if":   "isSchema",
                "then": "/good-schema/",
                "else": "/good-other/",
            },
            "read":   "/replica-all/",
            "remove": "/replica-all/",
        })
        return sto, func() {}
    })
}

func TestReceiveIsSchema(t *testing.T) {
    ld := test.NewLoader()
    sto := newCond(t, ld, map[string]interface{}{
        "write": map[string]interface{}{
            "if":   "isSchema",
            "then": "/good-schema/",
            "else": "/good-other/",
        },
        "read": "/good-other/",
    })
    otherBlob := &test.Blob{Contents: "stuff"}
    schemaBlob := &test.Blob{Contents: `{"camliVersion": 1, "camliType": "foo"}`}
    ssb := mustReceive(t, sto, schemaBlob)
    osb := mustReceive(t, sto, otherBlob)

    ssto, _ := ld.GetStorage("/good-schema/")
    osto, _ := ld.GetStorage("/good-other/")

    if _, err := blobserver.StatBlob(ssto, ssb.Ref); err != nil {
        t.Errorf("schema blob didn't end up on schema storage")
    }
    if _, err := blobserver.StatBlob(osto, osb.Ref); err != nil {
        t.Errorf("other blob didn't end up on other storage")
    }
}
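// Note: "isSchema" is the condition evaluated per incoming blob; blobs that
// parse as camli schema JSON are routed to the "then" target and everything
// else to the "else" target, which is exactly what TestReceiveIsSchema asserts.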
wallet-hd.py
#!/usr/bin/env python3
# Copyright (c) 2016 The Sikacoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test Hierarchical Deterministic wallet function."""

from test_framework.test_framework import SikacoinTestFramework
from test_framework.util import (
    assert_equal,
    connect_nodes_bi,
)
import shutil


class WalletHDTest(SikacoinTestFramework):
    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 2
        self.extra_args = [['-usehd=0'], ['-usehd=1', '-keypool=0']]
    def run_test(self):
        tmpdir = self.options.tmpdir

        # Make sure we can't switch off usehd after wallet creation
        self.stop_node(1)
        self.assert_start_raises_init_error(1, ['-usehd=0'], 'already existing HD wallet')
        self.start_node(1)
        connect_nodes_bi(self.nodes, 0, 1)

        # Make sure we use hd, keep masterkeyid
        masterkeyid = self.nodes[1].getwalletinfo()['hdmasterkeyid']
        assert_equal(len(masterkeyid), 40)

        # Create an internal key
        change_addr = self.nodes[1].getrawchangeaddress()
        change_addrV = self.nodes[1].validateaddress(change_addr)
        assert_equal(change_addrV["hdkeypath"], "m/0'/1'/0'")  # first internal child key

        # Import a non-HD private key in the HD wallet
        non_hd_add = self.nodes[0].getnewaddress()
        self.nodes[1].importprivkey(self.nodes[0].dumpprivkey(non_hd_add))

        # This should be enough to keep the master key and the non-HD key
        self.nodes[1].backupwallet(tmpdir + "/hd.bak")
        #self.nodes[1].dumpwallet(tmpdir + "/hd.dump")

        # Derive some HD addresses and remember the last one
        # Also send funds to each address
        self.nodes[0].generate(101)
        hd_add = None
        num_hd_adds = 300
        for i in range(num_hd_adds):
            hd_add = self.nodes[1].getnewaddress()
            hd_info = self.nodes[1].validateaddress(hd_add)
            assert_equal(hd_info["hdkeypath"], "m/0'/0'/" + str(i+1) + "'")
            assert_equal(hd_info["hdmasterkeyid"], masterkeyid)
            self.nodes[0].sendtoaddress(hd_add, 1)
            self.nodes[0].generate(1)
        self.nodes[0].sendtoaddress(non_hd_add, 1)
        self.nodes[0].generate(1)

        # Create an internal key (again)
        change_addr = self.nodes[1].getrawchangeaddress()
        change_addrV = self.nodes[1].validateaddress(change_addr)
        assert_equal(change_addrV["hdkeypath"], "m/0'/1'/1'")  # second internal child key

        self.sync_all()
        assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)

        self.log.info("Restore backup ...")
        self.stop_node(1)
        # We need to delete the complete regtest directory,
        # otherwise node1 would auto-recover all funds and flag the keypool keys as used
        shutil.rmtree(tmpdir + "/node1/regtest/blocks")
        shutil.rmtree(tmpdir + "/node1/regtest/chainstate")
        shutil.copyfile(tmpdir + "/hd.bak", tmpdir + "/node1/regtest/wallet.dat")
        self.start_node(1)

        # Assert that derivation is deterministic
        hd_add_2 = None
        for i in range(num_hd_adds):
            hd_add_2 = self.nodes[1].getnewaddress()
            hd_info_2 = self.nodes[1].validateaddress(hd_add_2)
            assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/" + str(i+1) + "'")
            assert_equal(hd_info_2["hdmasterkeyid"], masterkeyid)
        assert_equal(hd_add, hd_add_2)
        connect_nodes_bi(self.nodes, 0, 1)
        self.sync_all()

        # Needs rescan
        self.stop_node(1)
        self.start_node(1, extra_args=self.extra_args[1] + ['-rescan'])
        assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)

        # Send a tx and make sure it is using the internal chain for the change output
        txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
        outs = self.nodes[1].decoderawtransaction(self.nodes[1].gettransaction(txid)['hex'])['vout']
        keypath = ""
        for out in outs:
            if out['value'] != 1:
                keypath = self.nodes[1].validateaddress(out['scriptPubKey']['addresses'][0])['hdkeypath']

        assert_equal(keypath[0:7], "m/0'/1'")

if __name__ == '__main__':
    WalletHDTest().main()
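# Note on the keypaths asserted above (BIP32-style, as reported by
# validateaddress): m/0'/0'/i' is the external chain used for receiving
# addresses, while m/0'/1'/j' is the internal chain used for change outputs,
# which is why the final change-output check only asserts the "m/0'/1'" prefix.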
test_xml_dataset.py
# Copyright (c) OpenMMLab. All rights reserved. import pytest from mmdet.datasets import DATASETS def test_xml_dataset():
    dataconfig = {
        'ann_file': 'data/VOCdevkit/VOC2007/ImageSets/Main/test.txt',
        'img_prefix': 'data/VOCdevkit/VOC2007/',
        'pipeline': [{
            'type': 'LoadImageFromFile'
        }]
    }
    XMLDataset = DATASETS.get('XMLDataset')

    class XMLDatasetSubClass(XMLDataset):
        CLASSES = None

    # get_ann_info and _filter_imgs of XMLDataset use self.CLASSES,
    # which is None here, so construction should fail with an AssertionError
    with pytest.raises(AssertionError):
        XMLDatasetSubClass(**dataconfig)
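# For contrast, a sketch of a subclass that would construct successfully,
# assuming the VOC annotation files exist at the paths above (the class tuple
# here is a hypothetical example, not part of the test):
#
#   class XMLDatasetWithClasses(XMLDataset):
#       CLASSES = ('cat', 'dog')
#
#   dataset = XMLDatasetWithClasses(**dataconfig)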
modify_cluster_app_secret_v5_parameters.go
// Code generated by go-swagger; DO NOT EDIT. package app_secrets // This file was generated by the swagger tool. // Editing this file might prove futile when you re-run the swagger generate command import ( "context" "net/http" "time" "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" strfmt "github.com/go-openapi/strfmt" models "github.com/giantswarm/gsclientgen/v2/models" ) // NewModifyClusterAppSecretV5Params creates a new ModifyClusterAppSecretV5Params object // with the default values initialized. func NewModifyClusterAppSecretV5Params() *ModifyClusterAppSecretV5Params { var () return &ModifyClusterAppSecretV5Params{ timeout: cr.DefaultTimeout, } } // NewModifyClusterAppSecretV5ParamsWithTimeout creates a new ModifyClusterAppSecretV5Params object // with the default values initialized, and the ability to set a timeout on a request func NewModifyClusterAppSecretV5ParamsWithTimeout(timeout time.Duration) *ModifyClusterAppSecretV5Params { var () return &ModifyClusterAppSecretV5Params{ timeout: timeout, } } // NewModifyClusterAppSecretV5ParamsWithContext creates a new ModifyClusterAppSecretV5Params object // with the default values initialized, and the ability to set a context for a request func NewModifyClusterAppSecretV5ParamsWithContext(ctx context.Context) *ModifyClusterAppSecretV5Params { var () return &ModifyClusterAppSecretV5Params{ Context: ctx, } } // NewModifyClusterAppSecretV5ParamsWithHTTPClient creates a new ModifyClusterAppSecretV5Params object // with the default values initialized, and the ability to set a custom HTTPClient for a request func NewModifyClusterAppSecretV5ParamsWithHTTPClient(client *http.Client) *ModifyClusterAppSecretV5Params { var () return &ModifyClusterAppSecretV5Params{ HTTPClient: client, } } /*ModifyClusterAppSecretV5Params contains all the parameters to send to the API endpoint for the modify cluster app secret v5 operation typically these are written to a http.Request */ type ModifyClusterAppSecretV5Params struct { /*XGiantSwarmActivity Name of an activity to track, like "list-clusters". This allows to analyze several API requests sent in context and gives an idea on the purpose. */ XGiantSwarmActivity *string /*XGiantSwarmCmdLine If activity has been issued by a CLI, this header can contain the command line */ XGiantSwarmCmdLine *string /*XRequestID A randomly generated key that can be used to track a request throughout services of Giant Swarm. 
*/ XRequestID *string /*AppName App Name */ AppName string /*Body*/ Body models.V4CreateClusterAppSecretRequest /*ClusterID Cluster ID */ ClusterID string timeout time.Duration Context context.Context HTTPClient *http.Client } // WithTimeout adds the timeout to the modify cluster app secret v5 params func (o *ModifyClusterAppSecretV5Params) WithTimeout(timeout time.Duration) *ModifyClusterAppSecretV5Params { o.SetTimeout(timeout) return o } // SetTimeout adds the timeout to the modify cluster app secret v5 params func (o *ModifyClusterAppSecretV5Params) SetTimeout(timeout time.Duration) { o.timeout = timeout } // WithContext adds the context to the modify cluster app secret v5 params func (o *ModifyClusterAppSecretV5Params) WithContext(ctx context.Context) *ModifyClusterAppSecretV5Params { o.SetContext(ctx) return o } // SetContext adds the context to the modify cluster app secret v5 params func (o *ModifyClusterAppSecretV5Params) SetContext(ctx context.Context) { o.Context = ctx } // WithHTTPClient adds the HTTPClient to the modify cluster app secret v5 params func (o *ModifyClusterAppSecretV5Params) WithHTTPClient(client *http.Client) *ModifyClusterAppSecretV5Params { o.SetHTTPClient(client) return o } // SetHTTPClient adds the HTTPClient to the modify cluster app secret v5 params func (o *ModifyClusterAppSecretV5Params) SetHTTPClient(client *http.Client) { o.HTTPClient = client } // WithXGiantSwarmActivity adds the xGiantSwarmActivity to the modify cluster app secret v5 params func (o *ModifyClusterAppSecretV5Params) WithXGiantSwarmActivity(xGiantSwarmActivity *string) *ModifyClusterAppSecretV5Params { o.SetXGiantSwarmActivity(xGiantSwarmActivity) return o } // SetXGiantSwarmActivity adds the xGiantSwarmActivity to the modify cluster app secret v5 params func (o *ModifyClusterAppSecretV5Params) SetXGiantSwarmActivity(xGiantSwarmActivity *string) { o.XGiantSwarmActivity = xGiantSwarmActivity } // WithXGiantSwarmCmdLine adds the xGiantSwarmCmdLine to the modify cluster app secret v5 params func (o *ModifyClusterAppSecretV5Params) WithXGiantSwarmCmdLine(xGiantSwarmCmdLine *string) *ModifyClusterAppSecretV5Params { o.SetXGiantSwarmCmdLine(xGiantSwarmCmdLine) return o } // SetXGiantSwarmCmdLine adds the xGiantSwarmCmdLine to the modify cluster app secret v5 params func (o *ModifyClusterAppSecretV5Params) SetXGiantSwarmCmdLine(xGiantSwarmCmdLine *string) { o.XGiantSwarmCmdLine = xGiantSwarmCmdLine } // WithXRequestID adds the xRequestID to the modify cluster app secret v5 params func (o *ModifyClusterAppSecretV5Params) WithXRequestID(xRequestID *string) *ModifyClusterAppSecretV5Params { o.SetXRequestID(xRequestID) return o } // SetXRequestID adds the xRequestId to the modify cluster app secret v5 params func (o *ModifyClusterAppSecretV5Params) SetXRequestID(xRequestID *string) { o.XRequestID = xRequestID } // WithAppName adds the appName to the modify cluster app secret v5 params func (o *ModifyClusterAppSecretV5Params) WithAppName(appName string) *ModifyClusterAppSecretV5Params { o.SetAppName(appName) return o } // SetAppName adds the appName to the modify cluster app secret v5 params func (o *ModifyClusterAppSecretV5Params) SetAppName(appName string) { o.AppName = appName } // WithBody adds the body to the modify cluster app secret v5 params func (o *ModifyClusterAppSecretV5Params) WithBody(body models.V4CreateClusterAppSecretRequest) *ModifyClusterAppSecretV5Params { o.SetBody(body) return o } // SetBody adds the body to the modify cluster app secret v5 params func (o 
*ModifyClusterAppSecretV5Params) SetBody(body models.V4CreateClusterAppSecretRequest) { o.Body = body } // WithClusterID adds the clusterID to the modify cluster app secret v5 params func (o *ModifyClusterAppSecretV5Params) WithClusterID(clusterID string) *ModifyClusterAppSecretV5Params { o.SetClusterID(clusterID) return o } // SetClusterID adds the clusterId to the modify cluster app secret v5 params func (o *ModifyClusterAppSecretV5Params) SetClusterID(clusterID string) { o.ClusterID = clusterID } // WriteToRequest writes these params to a swagger request func (o *ModifyClusterAppSecretV5Params) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { if err := r.SetTimeout(o.timeout); err != nil { return err } var res []error if o.XGiantSwarmActivity != nil { // header param X-Giant-Swarm-Activity if err := r.SetHeaderParam("X-Giant-Swarm-Activity", *o.XGiantSwarmActivity); err != nil { return err } } if o.XGiantSwarmCmdLine != nil { // header param X-Giant-Swarm-CmdLine if err := r.SetHeaderParam("X-Giant-Swarm-CmdLine", *o.XGiantSwarmCmdLine); err != nil { return err } }
if err := r.SetHeaderParam("X-Request-ID", *o.XRequestID); err != nil { return err } } // path param app_name if err := r.SetPathParam("app_name", o.AppName); err != nil { return err } if o.Body != nil { if err := r.SetBodyParam(o.Body); err != nil { return err } } // path param cluster_id if err := r.SetPathParam("cluster_id", o.ClusterID); err != nil { return err } if len(res) > 0 { return errors.CompositeValidationError(res...) } return nil }
if o.XRequestID != nil { // header param X-Request-ID
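// A minimal usage sketch (not part of the generated code; the cluster ID, app
// name, and body value are placeholders): the fluent With* setters are
// chainable, so callers typically build the params in one expression.
//
//	params := NewModifyClusterAppSecretV5Params().
//		WithTimeout(30 * time.Second).
//		WithClusterID("cluster-id").
//		WithAppName("app-name").
//		WithBody(body) // body is a models.V4CreateClusterAppSecretRequest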
getPool.go
// *** WARNING: this file was generated by the Pulumi SDK Generator. *** // *** Do not edit by hand unless you're certain you know what you are doing! *** package v20200601 import ( "github.com/pulumi/pulumi/sdk/v3/go/pulumi" ) func LookupPool(ctx *pulumi.Context, args *LookupPoolArgs, opts ...pulumi.InvokeOption) (*LookupPoolResult, error) { var rv LookupPoolResult err := ctx.Invoke("azure-native:netapp/v20200601:getPool", args, &rv, opts...) if err != nil { return nil, err } return &rv, nil } type LookupPoolArgs struct { AccountName string `pulumi:"accountName"` PoolName string `pulumi:"poolName"` ResourceGroupName string `pulumi:"resourceGroupName"` } // Capacity pool resource type LookupPoolResult struct { Id string `pulumi:"id"` Location string `pulumi:"location"`
	Name                    string            `pulumi:"name"`
	PoolId                  string            `pulumi:"poolId"`
	ProvisioningState       string            `pulumi:"provisioningState"`
	QosType                 *string           `pulumi:"qosType"`
	ServiceLevel            string            `pulumi:"serviceLevel"`
	Size                    float64           `pulumi:"size"`
	Tags                    map[string]string `pulumi:"tags"`
	TotalThroughputMibps    float64           `pulumi:"totalThroughputMibps"`
	Type                    string            `pulumi:"type"`
	UtilizedThroughputMibps float64           `pulumi:"utilizedThroughputMibps"`
}
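// A minimal usage sketch (the resource names are placeholders, not part of the
// generated SDK): LookupPool is typically called inside pulumi.Run with a
// *pulumi.Context.
//
//	pulumi.Run(func(ctx *pulumi.Context) error {
//		pool, err := LookupPool(ctx, &LookupPoolArgs{
//			AccountName:       "my-account",
//			PoolName:          "my-pool",
//			ResourceGroupName: "my-rg",
//		})
//		if err != nil {
//			return err
//		}
//		ctx.Export("serviceLevel", pulumi.String(pool.ServiceLevel))
//		return nil
//	})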
root.go
// Code generated - DO NOT EDIT. // This file is a generated binding and any manual changes will be lost. package contracts import ( "math/big" "strings" ethereum "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/accounts/abi" "github.com/ethereum/go-ethereum/accounts/abi/bind" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/event" ) // Reference imports to suppress errors if they are not otherwise used. var ( _ = big.NewInt _ = strings.NewReader _ = ethereum.NotFound _ = abi.U256 _ = bind.Bind _ = common.Big1 _ = types.BloomLookup _ = event.NewSubscription ) // RootABI is the input ABI used to generate the binding from. const RootABI = "[{\"constant\":true,\"inputs\":[{\"name\":\"interfaceID\",\"type\":\"bytes4\"}],\"name\":\"supportsInterface\",\"outputs\":[{\"name\":\"\",\"type\":\"bool\"}],\"payable\":false,\"stateMutability\":\"pure\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"name\",\"type\":\"bytes\"},{\"name\":\"input\",\"type\":\"bytes\"},{\"name\":\"proof\",\"type\":\"bytes\"}],\"name\":\"proveAndRegisterDefaultTLD\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"name\",\"type\":\"bytes\"},{\"name\":\"input\",\"type\":\"bytes\"},{\"name\":\"proof\",\"type\":\"bytes\"}],\"name\":\"proveAndRegisterTLD\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"constant\":true,\"inputs\":[],\"name\":\"oracle\",\"outputs\":[{\"name\":\"\",\"type\":\"address\"}],\"payable\":false,\"stateMutability\":\"view\",\"type\":\"function\"},{\"constant\":false,\"inputs\":[{\"name\":\"name\",\"type\":\"bytes\"},{\"name\":\"proof\",\"type\":\"bytes\"}],\"name\":\"registerTLD\",\"outputs\":[],\"payable\":false,\"stateMutability\":\"nonpayable\",\"type\":\"function\"}]" // RootBin is the compiled bytecode used for deploying new contracts. const RootBin = `0x` // DeployRoot deploys a new Ethereum contract, binding an instance of Root to it. func DeployRoot(auth *bind.TransactOpts, backend bind.ContractBackend) (common.Address, *types.Transaction, *Root, error) { parsed, err := abi.JSON(strings.NewReader(RootABI)) if err != nil { return common.Address{}, nil, nil, err } address, tx, contract, err := bind.DeployContract(auth, parsed, common.FromHex(RootBin), backend) if err != nil { return common.Address{}, nil, nil, err } return address, tx, &Root{RootCaller: RootCaller{contract: contract}, RootTransactor: RootTransactor{contract: contract}, RootFilterer: RootFilterer{contract: contract}}, nil } // Root is an auto generated Go binding around an Ethereum contract. type Root struct { RootCaller // Read-only binding to the contract RootTransactor // Write-only binding to the contract RootFilterer // Log filterer for contract events } // RootCaller is an auto generated read-only Go binding around an Ethereum contract. type RootCaller struct { contract *bind.BoundContract // Generic contract wrapper for the low level calls } // RootTransactor is an auto generated write-only Go binding around an Ethereum contract. type RootTransactor struct { contract *bind.BoundContract // Generic contract wrapper for the low level calls } // RootFilterer is an auto generated log filtering Go binding around an Ethereum contract events. 
type RootFilterer struct { contract *bind.BoundContract // Generic contract wrapper for the low level calls } // RootSession is an auto generated Go binding around an Ethereum contract, // with pre-set call and transact options. type RootSession struct { Contract *Root // Generic contract binding to set the session for CallOpts bind.CallOpts // Call options to use throughout this session TransactOpts bind.TransactOpts // Transaction auth options to use throughout this session } // RootCallerSession is an auto generated read-only Go binding around an Ethereum contract, // with pre-set call options. type RootCallerSession struct { Contract *RootCaller // Generic contract caller binding to set the session for CallOpts bind.CallOpts // Call options to use throughout this session } // RootTransactorSession is an auto generated write-only Go binding around an Ethereum contract, // with pre-set transact options. type RootTransactorSession struct { Contract *RootTransactor // Generic contract transactor binding to set the session for TransactOpts bind.TransactOpts // Transaction auth options to use throughout this session } // RootRaw is an auto generated low-level Go binding around an Ethereum contract. type RootRaw struct { Contract *Root // Generic contract binding to access the raw methods on } // RootCallerRaw is an auto generated low-level read-only Go binding around an Ethereum contract. type RootCallerRaw struct { Contract *RootCaller // Generic read-only contract binding to access the raw methods on } // RootTransactorRaw is an auto generated low-level write-only Go binding around an Ethereum contract. type RootTransactorRaw struct { Contract *RootTransactor // Generic write-only contract binding to access the raw methods on } // NewRoot creates a new instance of Root, bound to a specific deployed contract. func
NewRoot(address common.Address, backend bind.ContractBackend) (*Root, error) {
	contract, err := bindRoot(address, backend, backend, backend)
	if err != nil {
		return nil, err
	}
	return &Root{RootCaller: RootCaller{contract: contract}, RootTransactor: RootTransactor{contract: contract}, RootFilterer: RootFilterer{contract: contract}}, nil
}

// NewRootCaller creates a new read-only instance of Root, bound to a specific deployed contract.
func NewRootCaller(address common.Address, caller bind.ContractCaller) (*RootCaller, error) {
	contract, err := bindRoot(address, caller, nil, nil)
	if err != nil {
		return nil, err
	}
	return &RootCaller{contract: contract}, nil
}

// NewRootTransactor creates a new write-only instance of Root, bound to a specific deployed contract.
func NewRootTransactor(address common.Address, transactor bind.ContractTransactor) (*RootTransactor, error) {
	contract, err := bindRoot(address, nil, transactor, nil)
	if err != nil {
		return nil, err
	}
	return &RootTransactor{contract: contract}, nil
}

// NewRootFilterer creates a new log filterer instance of Root, bound to a specific deployed contract.
func NewRootFilterer(address common.Address, filterer bind.ContractFilterer) (*RootFilterer, error) {
	contract, err := bindRoot(address, nil, nil, filterer)
	if err != nil {
		return nil, err
	}
	return &RootFilterer{contract: contract}, nil
}

// bindRoot binds a generic wrapper to an already deployed contract.
func bindRoot(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) {
	parsed, err := abi.JSON(strings.NewReader(RootABI))
	if err != nil {
		return nil, err
	}
	return bind.NewBoundContract(address, parsed, caller, transactor, filterer), nil
}

// Call invokes the (constant) contract method with params as input values and
// sets the output to result. The result type might be a single field for simple
// returns, a slice of interfaces for anonymous returns and a struct for named
// returns.
func (_Root *RootRaw) Call(opts *bind.CallOpts, result interface{}, method string, params ...interface{}) error {
	return _Root.Contract.RootCaller.contract.Call(opts, result, method, params...)
}

// Transfer initiates a plain transaction to move funds to the contract, calling
// its default method if one is available.
func (_Root *RootRaw) Transfer(opts *bind.TransactOpts) (*types.Transaction, error) {
	return _Root.Contract.RootTransactor.contract.Transfer(opts)
}

// Transact invokes the (paid) contract method with params as input values.
func (_Root *RootRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) {
	return _Root.Contract.RootTransactor.contract.Transact(opts, method, params...)
}

// Call invokes the (constant) contract method with params as input values and
// sets the output to result. The result type might be a single field for simple
// returns, a slice of interfaces for anonymous returns and a struct for named
// returns.
func (_Root *RootCallerRaw) Call(opts *bind.CallOpts, result interface{}, method string, params ...interface{}) error {
	return _Root.Contract.contract.Call(opts, result, method, params...)
}

// Transfer initiates a plain transaction to move funds to the contract, calling
// its default method if one is available.
func (_Root *RootTransactorRaw) Transfer(opts *bind.TransactOpts) (*types.Transaction, error) {
	return _Root.Contract.contract.Transfer(opts)
}

// Transact invokes the (paid) contract method with params as input values.
func (_Root *RootTransactorRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) { return _Root.Contract.contract.Transact(opts, method, params...) } // Oracle is a free data retrieval call binding the contract method 0x7dc0d1d0. // // Solidity: function oracle() constant returns(address) func (_Root *RootCaller) Oracle(opts *bind.CallOpts) (common.Address, error) { var ( ret0 = new(common.Address) ) out := ret0 err := _Root.contract.Call(opts, out, "oracle") return *ret0, err } // Oracle is a free data retrieval call binding the contract method 0x7dc0d1d0. // // Solidity: function oracle() constant returns(address) func (_Root *RootSession) Oracle() (common.Address, error) { return _Root.Contract.Oracle(&_Root.CallOpts) } // Oracle is a free data retrieval call binding the contract method 0x7dc0d1d0. // // Solidity: function oracle() constant returns(address) func (_Root *RootCallerSession) Oracle() (common.Address, error) { return _Root.Contract.Oracle(&_Root.CallOpts) } // SupportsInterface is a free data retrieval call binding the contract method 0x01ffc9a7. // // Solidity: function supportsInterface(bytes4 interfaceID) constant returns(bool) func (_Root *RootCaller) SupportsInterface(opts *bind.CallOpts, interfaceID [4]byte) (bool, error) { var ( ret0 = new(bool) ) out := ret0 err := _Root.contract.Call(opts, out, "supportsInterface", interfaceID) return *ret0, err } // SupportsInterface is a free data retrieval call binding the contract method 0x01ffc9a7. // // Solidity: function supportsInterface(bytes4 interfaceID) constant returns(bool) func (_Root *RootSession) SupportsInterface(interfaceID [4]byte) (bool, error) { return _Root.Contract.SupportsInterface(&_Root.CallOpts, interfaceID) } // SupportsInterface is a free data retrieval call binding the contract method 0x01ffc9a7. // // Solidity: function supportsInterface(bytes4 interfaceID) constant returns(bool) func (_Root *RootCallerSession) SupportsInterface(interfaceID [4]byte) (bool, error) { return _Root.Contract.SupportsInterface(&_Root.CallOpts, interfaceID) } // ProveAndRegisterDefaultTLD is a paid mutator transaction binding the contract method 0x19f5b1e2. // // Solidity: function proveAndRegisterDefaultTLD(bytes name, bytes input, bytes proof) returns() func (_Root *RootTransactor) ProveAndRegisterDefaultTLD(opts *bind.TransactOpts, name []byte, input []byte, proof []byte) (*types.Transaction, error) { return _Root.contract.Transact(opts, "proveAndRegisterDefaultTLD", name, input, proof) } // ProveAndRegisterDefaultTLD is a paid mutator transaction binding the contract method 0x19f5b1e2. // // Solidity: function proveAndRegisterDefaultTLD(bytes name, bytes input, bytes proof) returns() func (_Root *RootSession) ProveAndRegisterDefaultTLD(name []byte, input []byte, proof []byte) (*types.Transaction, error) { return _Root.Contract.ProveAndRegisterDefaultTLD(&_Root.TransactOpts, name, input, proof) } // ProveAndRegisterDefaultTLD is a paid mutator transaction binding the contract method 0x19f5b1e2. // // Solidity: function proveAndRegisterDefaultTLD(bytes name, bytes input, bytes proof) returns() func (_Root *RootTransactorSession) ProveAndRegisterDefaultTLD(name []byte, input []byte, proof []byte) (*types.Transaction, error) { return _Root.Contract.ProveAndRegisterDefaultTLD(&_Root.TransactOpts, name, input, proof) } // ProveAndRegisterTLD is a paid mutator transaction binding the contract method 0x245b79ad. 
// // Solidity: function proveAndRegisterTLD(bytes name, bytes input, bytes proof) returns() func (_Root *RootTransactor) ProveAndRegisterTLD(opts *bind.TransactOpts, name []byte, input []byte, proof []byte) (*types.Transaction, error) { return _Root.contract.Transact(opts, "proveAndRegisterTLD", name, input, proof) } // ProveAndRegisterTLD is a paid mutator transaction binding the contract method 0x245b79ad. // // Solidity: function proveAndRegisterTLD(bytes name, bytes input, bytes proof) returns() func (_Root *RootSession) ProveAndRegisterTLD(name []byte, input []byte, proof []byte) (*types.Transaction, error) { return _Root.Contract.ProveAndRegisterTLD(&_Root.TransactOpts, name, input, proof) } // ProveAndRegisterTLD is a paid mutator transaction binding the contract method 0x245b79ad. // // Solidity: function proveAndRegisterTLD(bytes name, bytes input, bytes proof) returns() func (_Root *RootTransactorSession) ProveAndRegisterTLD(name []byte, input []byte, proof []byte) (*types.Transaction, error) { return _Root.Contract.ProveAndRegisterTLD(&_Root.TransactOpts, name, input, proof) } // RegisterTLD is a paid mutator transaction binding the contract method 0x87900f20. // // Solidity: function registerTLD(bytes name, bytes proof) returns() func (_Root *RootTransactor) RegisterTLD(opts *bind.TransactOpts, name []byte, proof []byte) (*types.Transaction, error) { return _Root.contract.Transact(opts, "registerTLD", name, proof) } // RegisterTLD is a paid mutator transaction binding the contract method 0x87900f20. // // Solidity: function registerTLD(bytes name, bytes proof) returns() func (_Root *RootSession) RegisterTLD(name []byte, proof []byte) (*types.Transaction, error) { return _Root.Contract.RegisterTLD(&_Root.TransactOpts, name, proof) } // RegisterTLD is a paid mutator transaction binding the contract method 0x87900f20. // // Solidity: function registerTLD(bytes name, bytes proof) returns() func (_Root *RootTransactorSession) RegisterTLD(name []byte, proof []byte) (*types.Transaction, error) { return _Root.Contract.RegisterTLD(&_Root.TransactOpts, name, proof) }
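// A minimal usage sketch (the address and backend are placeholders, not part
// of the generated bindings): bind to a deployed contract and make a read-only
// call through the embedded caller.
//
//	root, err := NewRoot(common.HexToAddress("0x..."), backend) // backend is a bind.ContractBackend
//	if err != nil {
//		return err
//	}
//	oracleAddr, err := root.Oracle(nil) // nil CallOpts uses defaults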
0008_auto_20210906_0623.py
# Generated by Django 3.2.7 on 2021-09-06 06:23 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('galleria', '0007_rename_cats_cat'), ]
    operations = [
        migrations.CreateModel(
            name='Location',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30)),
            ],
        ),
        migrations.AddField(
            model_name='images',
            name='location',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='galleria.location'),
        ),
    ]
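# Note: the ForeignKey is added with null=True and blank=True so the migration
# can run against a table that already contains Images rows; existing rows
# simply get a NULL location. A sketch of how the field might be used
# afterwards (model class names assumed from this app):
#
#   image = Images.objects.first()
#   image.location = Location.objects.create(name='Nairobi')
#   image.save()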
device.py
# coding: UTF-8

# Copyright 2012 Keita Kita
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Manages a single instance of MonkeyDevice during integration tests.
#
# If a MonkeyDevice is created per test method,
# MonkeyRunner.waitForConnection blocks for a long time on Android SDK 20.x.

from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice

device = None

def init():
    u'''Initialize the MonkeyDevice held by this module.'''
    global device
    if not device:
        device = MonkeyRunner.waitForConnection(5)

def get():
    u'''Get the MonkeyDevice held by this module.'''
    global device
    return device
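# A minimal usage sketch from a test module (the import path and the snapshot
# call are assumptions, not part of this module):
#
#   import device
#
#   device.init()        # connect once, before the tests run
#   d = device.get()     # reuse the same MonkeyDevice everywhere
#   d.takeSnapshot()     # then use the regular MonkeyDevice API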
vec.rs
use crate::message::{MessageData, MessageDirection};
use crate::{
    border::BorderBuilder,
    brush::Brush,
    core::{color::Color, math::vec3::Vec3, pool::Handle},
    grid::{Column, GridBuilder, Row},
    message::{NumericUpDownMessage, UiMessage, UiMessageData, Vec3EditorMessage},
    node::UINode,
    numeric::NumericUpDownBuilder,
    text::TextBuilder,
    BuildContext, Control, NodeHandleMapping, Thickness, UserInterface, VerticalAlignment,
    Widget, WidgetBuilder,
};
use std::ops::{Deref, DerefMut};

#[derive(Clone)]
pub struct Vec3Editor<M: MessageData, C: Control<M, C>> {
    widget: Widget<M, C>,
    x_field: Handle<UINode<M, C>>,
    y_field: Handle<UINode<M, C>>,
    z_field: Handle<UINode<M, C>>,
    value: Vec3,
}

impl<M: MessageData, C: Control<M, C>> Deref for Vec3Editor<M, C> {
    type Target = Widget<M, C>;

    fn deref(&self) -> &Self::Target {
        &self.widget
    }
}

impl<M: MessageData, C: Control<M, C>> DerefMut for Vec3Editor<M, C> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.widget
    }
}

impl<M: MessageData, C: Control<M, C>> Control<M, C> for Vec3Editor<M, C> {
    fn resolve
(&mut self, node_map: &NodeHandleMapping<M, C>) { node_map.resolve(&mut self.x_field); node_map.resolve(&mut self.y_field); node_map.resolve(&mut self.z_field); } fn handle_routed_message( &mut self, ui: &mut UserInterface<M, C>, message: &mut UiMessage<M, C>, ) { self.widget.handle_routed_message(ui, message); match &message.data() { UiMessageData::NumericUpDown(msg) if message.direction() == MessageDirection::FromWidget => { if let NumericUpDownMessage::Value(value) = *msg { if message.destination() == self.x_field { ui.send_message(Vec3EditorMessage::value( self.handle(), MessageDirection::ToWidget, Vec3::new(value, self.value.y, self.value.z), )); } else if message.destination() == self.y_field { ui.send_message(Vec3EditorMessage::value( self.handle(), MessageDirection::ToWidget, Vec3::new(self.value.x, value, self.value.z), )); } else if message.destination() == self.z_field { ui.send_message(Vec3EditorMessage::value( self.handle(), MessageDirection::ToWidget, Vec3::new(self.value.x, self.value.y, value), )); } } } UiMessageData::Vec3Editor(msg) if message.direction() == MessageDirection::ToWidget => { if let Vec3EditorMessage::Value(value) = *msg { let mut changed = false; if self.value.x != value.x { self.value.x = value.x; ui.send_message(NumericUpDownMessage::value( self.x_field, MessageDirection::ToWidget, value.x, )); changed = true; } if self.value.y != value.y { self.value.y = value.y; ui.send_message(NumericUpDownMessage::value( self.y_field, MessageDirection::ToWidget, value.y, )); changed = true; } if self.value.z != value.z { self.value.z = value.z; ui.send_message(NumericUpDownMessage::value( self.z_field, MessageDirection::ToWidget, value.z, )); changed = true; } if changed { ui.send_message(message.reverse()); } } } _ => (), } } } pub struct Vec3EditorBuilder<M: MessageData, C: Control<M, C>> { widget_builder: WidgetBuilder<M, C>, value: Vec3, } pub fn make_numeric_input<M: MessageData, C: Control<M, C>>( ctx: &mut BuildContext<M, C>, column: usize, ) -> Handle<UINode<M, C>> { NumericUpDownBuilder::new( WidgetBuilder::new() .on_row(0) .on_column(column) .with_margin(Thickness { left: 1.0, top: 0.0, right: 1.0, bottom: 0.0, }), ) .build(ctx) } pub fn make_mark<M: MessageData, C: Control<M, C>>( ctx: &mut BuildContext<M, C>, text: &str, column: usize, color: Color, ) -> Handle<UINode<M, C>> { BorderBuilder::new( WidgetBuilder::new() .on_row(0) .on_column(column) .with_background(Brush::Solid(color)) .with_foreground(Brush::Solid(Color::TRANSPARENT)) .with_child( TextBuilder::new(WidgetBuilder::new()) .with_vertical_text_alignment(VerticalAlignment::Center) .with_text(text) .build(ctx), ), ) .build(ctx) } impl<M: MessageData, C: Control<M, C>> Vec3EditorBuilder<M, C> { pub fn new(widget_builder: WidgetBuilder<M, C>) -> Self { Self { widget_builder, value: Default::default(), } } pub fn with_value(mut self, value: Vec3) -> Self { self.value = value; self } pub fn build(self, ctx: &mut BuildContext<M, C>) -> Handle<UINode<M, C>> { let x_field; let y_field; let z_field; let grid = GridBuilder::new( WidgetBuilder::new() .with_child(make_mark(ctx, "X", 0, Color::opaque(120, 0, 0))) .with_child({ x_field = make_numeric_input(ctx, 1); x_field }) .with_child(make_mark(ctx, "Y", 2, Color::opaque(0, 120, 0))) .with_child({ y_field = make_numeric_input(ctx, 3); y_field }) .with_child(make_mark(ctx, "Z", 4, Color::opaque(0, 0, 120))) .with_child({ z_field = make_numeric_input(ctx, 5); z_field }), ) .add_row(Row::stretch()) .add_column(Column::auto()) .add_column(Column::stretch()) 
.add_column(Column::auto()) .add_column(Column::stretch()) .add_column(Column::auto()) .add_column(Column::stretch()) .build(ctx); let node = Vec3Editor { widget: self.widget_builder.with_child(grid).build(), x_field, y_field, z_field, value: self.value, }; ctx.add_node(UINode::Vec3Editor(node)) } }
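// A minimal usage sketch (`ctx` is assumed to be a `&mut BuildContext` from
// the enclosing UI; not part of this module):
//
//     let editor = Vec3EditorBuilder::new(WidgetBuilder::new())
//         .with_value(Vec3::new(1.0, 2.0, 3.0))
//         .build(ctx);
//
// Editing any of the three NumericUpDown fields is translated by
// `handle_routed_message` above into a `Vec3EditorMessage::Value` carrying the
// updated vector.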
azure.go
// Copyright © 2019 Banzai Cloud
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package main

import (
	"go.uber.org/cadence/activity"
	"go.uber.org/cadence/workflow"

	"github.com/banzaicloud/pipeline/internal/providers/azure/pke"
	azurepkeworkflow "github.com/banzaicloud/pipeline/internal/providers/azure/pke/workflow"
	"github.com/banzaicloud/pipeline/internal/providers/pke/pkeworkflow"
	"github.com/banzaicloud/pipeline/internal/providers/pke/pkeworkflow/pkeworkflowadapter"
)

func registerAzureWorkflows(
secretStore pkeworkflow.SecretStore, tokenGenerator pkeworkflowadapter.TokenGenerator, store pke.ClusterStore) { // Azure PKE workflow.RegisterWithOptions(azurepkeworkflow.CreateClusterWorkflow, workflow.RegisterOptions{Name: azurepkeworkflow.CreateClusterWorkflowName}) workflow.RegisterWithOptions(azurepkeworkflow.CreateInfrastructureWorkflow, workflow.RegisterOptions{Name: azurepkeworkflow.CreateInfraWorkflowName}) workflow.RegisterWithOptions(azurepkeworkflow.DeleteClusterWorkflow, workflow.RegisterOptions{Name: azurepkeworkflow.DeleteClusterWorkflowName}) workflow.RegisterWithOptions(azurepkeworkflow.DeleteInfrastructureWorkflow, workflow.RegisterOptions{Name: azurepkeworkflow.DeleteInfraWorkflowName}) workflow.RegisterWithOptions(azurepkeworkflow.UpdateClusterWorkflow, workflow.RegisterOptions{Name: azurepkeworkflow.UpdateClusterWorkflowName}) azureClientFactory := azurepkeworkflow.NewAzureClientFactory(secretStore) createVnetActivity := azurepkeworkflow.MakeCreateVnetActivity(azureClientFactory) activity.RegisterWithOptions(createVnetActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.CreateVnetActivityName}) createNSGActivity := azurepkeworkflow.MakeCreateNSGActivity(azureClientFactory) activity.RegisterWithOptions(createNSGActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.CreateNSGActivityName}) createLBActivity := azurepkeworkflow.MakeCreateLoadBalancerActivity(azureClientFactory) activity.RegisterWithOptions(createLBActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.CreateLoadBalancerActivityName}) createVMSSActivity := azurepkeworkflow.MakeCreateVMSSActivity(azureClientFactory, tokenGenerator) activity.RegisterWithOptions(createVMSSActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.CreateVMSSActivityName}) createRouteTableActivity := azurepkeworkflow.MakeCreateRouteTableActivity(azureClientFactory) activity.RegisterWithOptions(createRouteTableActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.CreateRouteTableActivityName}) assignRoleActivity := azurepkeworkflow.MakeAssignRoleActivity(azureClientFactory) activity.RegisterWithOptions(assignRoleActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.AssignRoleActivityName}) createPublicIPActivity := azurepkeworkflow.MakeCreatePublicIPActivity(azureClientFactory) activity.RegisterWithOptions(createPublicIPActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.CreatePublicIPActivityName}) // delete infra activities deleteVMSSActivity := azurepkeworkflow.MakeDeleteVMSSActivity(azureClientFactory) activity.RegisterWithOptions(deleteVMSSActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.DeleteVMSSActivityName}) deleteLoadBalancerActivity := azurepkeworkflow.MakeDeleteLoadBalancerActivity(azureClientFactory) activity.RegisterWithOptions(deleteLoadBalancerActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.DeleteLoadBalancerActivityName}) deletePublicIPActivity := azurepkeworkflow.MakeDeletePublicIPActivity(azureClientFactory) activity.RegisterWithOptions(deletePublicIPActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.DeletePublicIPActivityName}) deleteVNetActivity := azurepkeworkflow.MakeDeleteVNetActivity(azureClientFactory) activity.RegisterWithOptions(deleteVNetActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.DeleteVNetActivityName}) deleteRouteTableActivity := azurepkeworkflow.MakeDeleteRouteTableActivity(azureClientFactory) 
activity.RegisterWithOptions(deleteRouteTableActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.DeleteRouteTableActivityName}) deleteNSGActivity := azurepkeworkflow.MakeDeleteNSGActivity(azureClientFactory) activity.RegisterWithOptions(deleteNSGActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.DeleteNSGActivityName}) deleteClusterFromStoreActivity := azurepkeworkflow.MakeDeleteClusterFromStoreActivity(store) activity.RegisterWithOptions(deleteClusterFromStoreActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.DeleteClusterFromStoreActivityName}) setClusterStatusActivity := azurepkeworkflow.MakeSetClusterStatusActivity(store) activity.RegisterWithOptions(setClusterStatusActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.SetClusterStatusActivityName}) updateVMSSActivity := azurepkeworkflow.MakeUpdateVMSSActivity(azureClientFactory) activity.RegisterWithOptions(updateVMSSActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.UpdateVMSSActivityName}) createSubnetActivity := azurepkeworkflow.MakeCreateSubnetActivity(azureClientFactory) activity.RegisterWithOptions(createSubnetActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.CreateSubnetActivityName}) deleteNodePoolFromStoreActivity := azurepkeworkflow.MakeDeleteNodePoolFromStoreActivity(store) activity.RegisterWithOptions(deleteNodePoolFromStoreActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.DeleteNodePoolFromStoreActivityName}) deleteSubnetActivity := azurepkeworkflow.MakeDeleteSubnetActivity(azureClientFactory) activity.RegisterWithOptions(deleteSubnetActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.DeleteSubnetActivityName}) collectUpdateClusterProvidersActivity := azurepkeworkflow.MakeCollectUpdateClusterProvidersActivity(azureClientFactory) activity.RegisterWithOptions(collectUpdateClusterProvidersActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.CollectUpdateClusterProvidersActivityName}) updateClusterAccessPointsActivity := azurepkeworkflow.MakeUpdateClusterAccessPointsActivity(store) activity.RegisterWithOptions(updateClusterAccessPointsActivity.Execute, activity.RegisterOptions{Name: azurepkeworkflow.UpdateClusterAccessPointsActivityName}) }
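// Note: each activity is registered under an explicit name constant so that
// workflow code can schedule it by name. A hypothetical sketch of scheduling
// one of them from inside a workflow (ctx, input, and output are placeholders):
//
//	err := workflow.ExecuteActivity(ctx, azurepkeworkflow.CreateVnetActivityName, input).Get(ctx, &output)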
_file_shares_operations.py
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class FileSharesOperations: """FileSharesOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.storage.v2019_06_01.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config def list( self, resource_group_name: str, account_name: str, maxpagesize: Optional[str] = None, filter: Optional[str] = None, expand: Optional[str] = "deleted", **kwargs ) -> AsyncIterable["_models.FileShareItems"]: """Lists all shares. :param resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive. :type resource_group_name: str :param account_name: The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower- case letters only. :type account_name: str :param maxpagesize: Optional. Specified maximum number of shares that can be included in the list. :type maxpagesize: str :param filter: Optional. When specified, only share names starting with the filter will be listed. :type filter: str :param expand: Optional, used to expand the properties within share's properties. 
:type expand: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either FileShareItems or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.storage.v2019_06_01.models.FileShareItems] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FileShareItems"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') if maxpagesize is not None: query_parameters['$maxpagesize'] = self._serialize.query("maxpagesize", maxpagesize, 'str') if filter is not None: query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') if expand is not None: query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response):
            deserialized = self._deserialize('FileShareItems', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares'}  # type: ignore

    async def create(
        self,
        resource_group_name: str,
        account_name: str,
        share_name: str,
        file_share: "_models.FileShare",
        **kwargs
    ) -> "_models.FileShare":
        """Creates a new share under the specified account as described by request body. The share
        resource includes metadata and properties for that share. It does not include a list of the
        files contained by the share.

        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and lower-
         case letters only.
        :type account_name: str
        :param share_name: The name of the file share within the specified storage account. File share
         names must be between 3 and 63 characters in length and use numbers, lower-case letters and
         dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter
         or number.
        :type share_name: str
        :param file_share: Properties of the file share to create.
:type file_share: ~azure.mgmt.storage.v2019_06_01.models.FileShare :keyword callable cls: A custom type or function that will be passed the direct response :return: FileShare, or the result of cls(response) :rtype: ~azure.mgmt.storage.v2019_06_01.models.FileShare :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FileShare"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.create.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), 'shareName': self._serialize.url("share_name", share_name, 'str', max_length=63, min_length=3), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(file_share, 'FileShare') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if response.status_code == 200: deserialized = self._deserialize('FileShare', pipeline_response) if response.status_code == 201: deserialized = self._deserialize('FileShare', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}'} # type: ignore async def update( self, resource_group_name: str, account_name: str, share_name: str, file_share: "_models.FileShare", **kwargs ) -> "_models.FileShare": """Updates share properties as specified in request body. Properties not mentioned in the request will not be changed. Update fails if the specified share does not already exist. :param resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive. :type resource_group_name: str :param account_name: The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower- case letters only. :type account_name: str :param share_name: The name of the file share within the specified storage account. 
File share names must be between 3 and 63 characters in length and use numbers, lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter or number. :type share_name: str :param file_share: Properties to update for the file share. :type file_share: ~azure.mgmt.storage.v2019_06_01.models.FileShare :keyword callable cls: A custom type or function that will be passed the direct response :return: FileShare, or the result of cls(response) :rtype: ~azure.mgmt.storage.v2019_06_01.models.FileShare :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FileShare"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.update.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), 'shareName': self._serialize.url("share_name", share_name, 'str', max_length=63, min_length=3), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(file_share, 'FileShare') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('FileShare', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}'} # type: ignore async def get( self, resource_group_name: str, account_name: str, share_name: str, expand: Optional[str] = "stats", **kwargs ) -> "_models.FileShare": """Gets properties of a specified share. :param resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive. :type resource_group_name: str :param account_name: The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower- case letters only. :type account_name: str :param share_name: The name of the file share within the specified storage account. 
File share names must be between 3 and 63 characters in length and use numbers, lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter or number. :type share_name: str :param expand: Optional, used to expand the properties within share's properties. :type expand: str :keyword callable cls: A custom type or function that will be passed the direct response :return: FileShare, or the result of cls(response) :rtype: ~azure.mgmt.storage.v2019_06_01.models.FileShare :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.FileShare"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01" accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), 'shareName': self._serialize.url("share_name", share_name, 'str', max_length=63, min_length=3), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') if expand is not None: query_parameters['$expand'] = self._serialize.query("expand", expand, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('FileShare', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}'} # type: ignore async def delete( self, resource_group_name: str, account_name: str, share_name: str, **kwargs ) -> None: """Deletes specified share under its account. :param resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive. :type resource_group_name: str :param account_name: The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower- case letters only. :type account_name: str :param share_name: The name of the file share within the specified storage account. File share names must be between 3 and 63 characters in length and use numbers, lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter or number. 
:type share_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01" accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), 'shareName': self._serialize.url("share_name", share_name, 'str', max_length=63, min_length=3), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}'} # type: ignore async def restore( self, resource_group_name: str, account_name: str, share_name: str, deleted_share: "_models.DeletedShare", **kwargs ) -> None: """Restore a file share within a valid retention days if share soft delete is enabled. :param resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive. :type resource_group_name: str :param account_name: The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower- case letters only. :type account_name: str :param share_name: The name of the file share within the specified storage account. File share names must be between 3 and 63 characters in length and use numbers, lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and followed by a letter or number. 
:type share_name: str :param deleted_share: :type deleted_share: ~azure.mgmt.storage.v2019_06_01.models.DeletedShare :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-06-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.restore.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), 'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3), 'shareName': self._serialize.url("share_name", share_name, 'str', max_length=63, min_length=3), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(deleted_share, 'DeletedShare') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) restore.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/fileServices/default/shares/{shareName}/restore'} # type: ignore
deserialized = self._deserialize('FileShareItems', pipeline_response)
list_of_elem = deserialized.value
if cls:
    list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
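The share operations above all follow the same build-request/deserialize pattern, so a single usage sketch covers them. This is a minimal sketch, not the SDK's documented sample: it assumes the generated operations are reached through the async StorageManagementClient from azure-mgmt-storage together with azure-identity, and the subscription, resource-group, account, and share names are placeholders.

# Hedged usage sketch for the async file-share operations above.
# Assumes azure-mgmt-storage (aio) and azure-identity; every name here
# is a placeholder, not a value taken from the code above.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.storage.aio import StorageManagementClient


async def main():
    async with DefaultAzureCredential() as credential:
        async with StorageManagementClient(credential, "<subscription-id>") as client:
            # get() defaults expand to "stats", so share usage statistics
            # come back alongside the basic properties.
            share = await client.file_shares.get(
                "my-resource-group", "mystorageacct", "my-share"
            )
            print(share.name)

            # delete() maps 200/204 responses to None, mirroring the
            # delete operation shown above.
            await client.file_shares.delete(
                "my-resource-group", "mystorageacct", "my-share"
            )


asyncio.run(main())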
index.js
import template from './sw-first-run-wizard-data-import.html.twig'; import './sw-first-run-wizard-data-import.scss'; const { Criteria } = Shopware.Data; const { Component } = Shopware; Component.register('sw-first-run-wizard-data-import', { template, inject: [ 'extensionStoreActionService', 'repositoryFactory', ], data() { return { plugins: { demodata: { name: 'SwagPlatformDemoData', isInstalled: false, }, migration: { name: 'SwagMigrationAssistant', isInstalled: false, }, }, demoDataPluginName: 'SwagPlatformDemoData', migrationPluginName: 'SwagMigrationAssistant', isPluginAlreadyInstalled: false, isInstallingPlugin: false, installationError: false, pluginError: null, pluginInstalledSuccessfully: { demodata: false, migration: false, }, }; }, computed: { pluginRepository() { return this.repositoryFactory.create('plugin'); }, buttonConfig() { return [ { key: 'skip', label: this.$tc('sw-first-run-wizard.general.buttonNext'), position: 'right', variant: 'primary', action: 'sw.first.run.wizard.index.defaults', disabled: this.isInstallingPlugin, }, ]; }, }, watch: { isInstallingPlugin() { this.updateButtons(); }, }, created() { this.createdComponent(); }, methods: { createdComponent() { this.updateButtons(); this.setTitle(); this.getInstalledPlugins(); }, updateButtons() { this.$emit('buttons-update', this.buttonConfig); }, setTitle() { this.$emit('frw-set-title', this.$tc('sw-first-run-wizard.dataImport.modalTitle')); }, notInstalled(pluginKey) { return !this.plugins[pluginKey].isInstalled; }, onInstall(pluginKey) { const plugin = this.plugins[pluginKey]; this.isInstallingPlugin = true; this.installationError = false; return this.extensionStoreActionService.downloadExtension(plugin.name) .then(() => { return this.extensionStoreActionService.installExtension(plugin.name, 'plugin'); }) .then(() => { return this.extensionStoreActionService.activateExtension(plugin.name, 'plugin'); }) .then(() => { this.isInstallingPlugin = false; this.plugins[pluginKey].isInstalled = true; return false; }) .catch((error) => { this.isInstallingPlugin = false; this.installationError = true; if (error.response?.data?.errors) { this.pluginError = error.response.data.errors.pop(); } return true; }); }, getInstalledPlugins() { const pluginNames = Object.values(this.plugins).map(plugin => plugin.name); const pluginCriteria = new Criteria(1, 5); pluginCriteria .addFilter( Criteria.equalsAny('plugin.name', pluginNames), ); this.pluginRepository.search(pluginCriteria) .then((result) => { if (result.total < 1) { return; } result.forEach((plugin) => { if (!plugin.active || plugin.installedAt === null) { return; } const key = this.findPluginKeyByName(plugin.name); this.plugins[key].isInstalled = true; }); }); }, findPluginKeyByName(name) { const [pluginKey] = Object.entries(this.plugins).find(([key, state]) => { if (state.name === name) { return key; } return ''; }); return pluginKey; },
}, });
eventserver.py
#!/usr/bin/env python # -*- coding: utf-8 -*- __author__ = 'Benjamin Milde' import flask import redis import os import json import bs4 import bridge import codecs import datetime from werkzeug.serving import WSGIRequestHandler base_path = os.getcwd() + '/' print "base_path:",base_path app = flask.Flask(__name__) app.secret_key = 'asdf' app._static_folder = base_path app._static_files_root_folder_path = base_path red = redis.StrictRedis() long_poll_timeout = 0.5 long_poll_timeout_burst = 0.08 ambient_server_channel = 'ambient' relevant_event_generator_channel = 'ambient_transcript_only' return_string_ok = "ok" kc = bridge.KeywordClient() session_outfile = None #Send event to the event stream def event_stream(): print "New connection to event_stream!" pubsub = red.pubsub() pubsub.subscribe(ambient_server_channel) for message in pubsub.listen(): print 'New message:', message yield 'data: %s\n\n' % message['data'] #Event stream end point for the browser, connection is left open. Must be used with threaded Flask. @app.route('/stream') def stream(): return flask.Response(event_stream(), mimetype="text/event-stream") #Traditional long polling. This is the fall back, if a browser does not support server side events. TODO: test and handle disconnects @app.route('/stream_poll') def poll(): pubsub = red.pubsub() pubsub.subscribe(ambient_server_channel) message = pubsub.get_message(timeout=long_poll_timeout) while(message != None): yield message message = pubsub.get_message(timeout=long_poll_timeout_burst) @app.route('/closed', methods=['POST']) def closed(): received_json = flask.request.json print "closed called" print received_json data = {'handle':'closed', 'entry_id':received_json['entry_id']} red.publish(relevant_event_generator_channel, json.dumps(data)) session_outfile.write('closed ' + json.dumps(received_json) + '\n') return return_string_ok @app.route('/starred', methods=['POST']) def starred(): received_json = flask.request.json print "starred called" print received_json session_outfile.write('starred ' + json.dumps(received_json) + '\n') return return_string_ok @app.route('/unstarred', methods=['POST']) def unstarred(): received_json = flask.request.json print "unstarred called" print received_json session_outfile.write('unstarred ' + json.dumps(received_json) + '\n') return return_string_ok @app.route('/viewing', methods=['POST']) def viewing(): received_json = flask.request.json print "viewing called" print received_json session_outfile.write('viewing ' + json.dumps(received_json) + '\n') return return_string_ok @app.route('/viewingClosed', methods=['POST']) def viewingClosed(): received_json = flask.request.json print "viewingClosed called" print received_json session_outfile.write('viewingClosed ' + json.dumps(received_json) + '\n') return return_string_ok @app.route('/reset', methods=['GET']) def
(): print "Reset called from browser" #reset local timer in keyword client kc.resetTimer() data = {'handle':'reset'} red.publish(relevant_event_generator_channel, json.dumps(data)) new_session_outfile() return return_string_ok #These are now replaced by using redis and message passing. Do we still need them? @app.route('/addUtterance', methods=['POST']) @app.route('/replaceLastUtterance', methods=['POST']) @app.route('/addRelevantEntry', methods=['POST']) @app.route('/delRelevantEntry', methods=['POST']) @app.route('/reset', methods=['POST']) def generate_event(): received_json = flask.request.json print received_json red.publish(ambient_server_channel, json.dumps(received_json)) return return_string_ok # This is for the flow player, which can send its subtitles # They look like this: "<p>Guten Abend</p><br/><p>aus dem Gasometer in Berlin.</p><br/>" # We use beautiful Soup to strip all elements (see: http://stackoverflow.com/questions/1936466/beautifulsoup-grab-visible-webpage-text) @app.route('/addSubtitle', methods=['POST']) def add_subtitle(): received_json = flask.request.json print received_json soup = bs4.BeautifulSoup(received_json['text']) text = u' '.join(soup.findAll(text=True)) kc.addUtterance(utterance='',speaker='TV') kc.replaceLastUtterance(new_utterance=text,old_utterance='',speaker='TV') kc.completeUtterance(utterance=text,speaker='TV') return return_string_ok # TODO #@app.route('/reset_topics', methods=['POST']) #These should ideally be served with a real web server, but for developping purposes, serving static files with Flask is also ok: #START static files @app.route('/') def root(): print 'root called' return app.send_static_file('index.html') @app.route('/flow_player') def flow_player(): print 'flow player called' return app.send_static_file('flow_player.html') @app.route('/foo.mp4') def foo1(): return app.send_static_file('foo.mp4') @app.route('/foo.vtt') def foo2(): return app.send_static_file('foo.vtt') @app.route('/css/<path:path>') def send_css(path): return flask.send_from_directory(base_path+'css', path) @app.route('/js/<path:path>') def send_js(path): return flask.send_from_directory(base_path+'js', path) @app.route('/pics/<path:path>') def send_pics(path): return flask.send_from_directory(base_path+'pics', path) @app.route('/fonts/<path:path>') def send_fonts(path): return flask.send_from_directory(base_path+'fonts', path) @app.route('/test_videos/<path:path>') def test_videos(path): print 'Sending test video:', path return flask.send_from_directory(base_path+'test_videos', path) #END static files def new_session_outfile(): global session_outfile if session_outfile is not None: session_outfile.close() session_outfile = codecs.open('sessions/' + str(datetime.datetime.now()).replace('-',' ').replace(':','_').replace(' ','_') + '.txt','w','utf-8') if __name__ == '__main__': new_session_outfile() app.debug = True WSGIRequestHandler.protocol_version = "HTTP/1.1" app.run(threaded=True)
reset
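Since every UI update in the event server above travels through redis pub/sub, a quick way to exercise the /stream endpoint is to publish into the same channel from a separate process. A minimal sketch: the channel name 'ambient' is taken from ambient_server_channel in the code above, while the payload keys are illustrative placeholders only.

# Hedged smoke test: publish into the channel the event server relays to
# /stream. The 'ambient' channel matches the server above; the payload
# shape is an illustrative assumption.
import json

import redis

red = redis.StrictRedis()
event = {'handle': 'addUtterance', 'utterance': 'hello', 'speaker': 'TV'}
red.publish('ambient', json.dumps(event))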
test_test_utils.py
from __future__ import print_function, division
from scantree.test_utils import assert_dir_entry_equal from scantree import DirEntryReplacement class MockStat(object): def __init__(self, st_ino=None): self.st_ino = st_ino class TestAssertDirEntryEqual(object): def get_mock_dir_entry(self): de = DirEntryReplacement(path='/path/to/mock', name='mock') de._is_dir = True de._is_file = False de._is_symlink = False de._stat_sym = MockStat(1) de._stat_nosym = MockStat(0) return de def test_equal(self): de = self.get_mock_dir_entry() assert_dir_entry_equal(de, de) @pytest.mark.parametrize( 'kwargs', [ {'path': 'other/path'}, {'name': 'other_name'}, {'_is_dir': False}, {'_is_file': True}, {'_is_symlink': True}, {'_stat_sym': MockStat(11)}, {'_stat_nosym': MockStat(22)}, ] ) def test_not_equal(self, kwargs): de = self.get_mock_dir_entry() de_different = attr.evolve(de) for k, v in kwargs.items(): setattr(de_different, k, v) with pytest.raises(AssertionError): assert_dir_entry_equal(de, de_different)
import pytest import attr
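Outside the mocked setting, assert_dir_entry_equal is meant to compare a DirEntryReplacement against a real os.scandir() entry. A hedged sketch of that round trip follows; the from_path constructor is an assumption about scantree's public API and does not appear in the test above.

# Hedged round-trip sketch: build a replacement for a real directory entry
# and assert the two expose identical DirEntry semantics. from_path is an
# assumed scantree constructor, not taken from the test above.
import os

from scantree import DirEntryReplacement
from scantree.test_utils import assert_dir_entry_equal

real_entry = next(os.scandir('.'))
replacement = DirEntryReplacement.from_path(real_entry.path)
assert_dir_entry_equal(replacement, real_entry)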
test_joins.py
from sqlalchemy.testing import eq_, assert_raises, assert_raises_message import operator from sqlalchemy import * from sqlalchemy import exc as sa_exc, util from sqlalchemy.sql import compiler, table, column from sqlalchemy.engine import default from sqlalchemy.orm import * from sqlalchemy.orm import attributes from sqlalchemy.testing import eq_ import sqlalchemy as sa from sqlalchemy import testing from sqlalchemy.testing import AssertsCompiledSQL, engines from sqlalchemy.testing.schema import Column from test.orm import _fixtures from sqlalchemy.testing import fixtures from sqlalchemy.orm.util import join, outerjoin, with_parent class QueryTest(_fixtures.FixtureTest): run_setup_mappers = 'once' run_inserts = 'once' run_deletes = None @classmethod def setup_mappers(cls): Node, composite_pk_table, users, Keyword, items, Dingaling, \ order_items, item_keywords, Item, User, dingalings, \ Address, keywords, CompositePk, nodes, Order, orders, \ addresses = cls.classes.Node, \ cls.tables.composite_pk_table, cls.tables.users, \ cls.classes.Keyword, cls.tables.items, \ cls.classes.Dingaling, cls.tables.order_items, \ cls.tables.item_keywords, cls.classes.Item, \ cls.classes.User, cls.tables.dingalings, \ cls.classes.Address, cls.tables.keywords, \ cls.classes.CompositePk, cls.tables.nodes, \ cls.classes.Order, cls.tables.orders, cls.tables.addresses mapper(User, users, properties={ 'addresses': relationship(Address, backref='user', order_by=addresses.c.id), # o2m, m2o 'orders': relationship(Order, backref='user', order_by=orders.c.id) }) mapper(Address, addresses, properties={ # o2o 'dingaling': relationship(Dingaling, uselist=False, backref="address") }) mapper(Dingaling, dingalings) mapper(Order, orders, properties={ # m2m 'items': relationship(Item, secondary=order_items, order_by=items.c.id), 'address': relationship(Address), # m2o }) mapper(Item, items, properties={ 'keywords': relationship(Keyword, secondary=item_keywords) # m2m }) mapper(Keyword, keywords) mapper(Node, nodes, properties={ 'children': relationship(Node, backref=backref( 'parent', remote_side=[nodes.c.id])) }) mapper(CompositePk, composite_pk_table) configure_mappers() class InheritedJoinTest(fixtures.MappedTest, AssertsCompiledSQL): run_setup_mappers = 'once' @classmethod def define_tables(cls, metadata): Table('companies', metadata, Column('company_id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', String(50))) Table('people', metadata, Column('person_id', Integer, primary_key=True, test_needs_autoincrement=True), Column('company_id', Integer, ForeignKey('companies.company_id')), Column('name', String(50)), Column('type', String(30))) Table('engineers', metadata, Column('person_id', Integer, ForeignKey( 'people.person_id'), primary_key=True), Column('status', String(30)), Column('engineer_name', String(50)), Column('primary_language', String(50))) Table('machines', metadata, Column('machine_id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', String(50)), Column('engineer_id', Integer, ForeignKey('engineers.person_id'))) Table('managers', metadata, Column('person_id', Integer, ForeignKey( 'people.person_id'), primary_key=True), Column('status', String(30)), Column('manager_name', String(50))) Table('boss', metadata, Column('boss_id', Integer, ForeignKey( 'managers.person_id'), primary_key=True), Column('golf_swing', String(30)), ) Table('paperwork', metadata, Column('paperwork_id', Integer, primary_key=True, test_needs_autoincrement=True), Column('description', 
String(50)), Column('person_id', Integer, ForeignKey('people.person_id'))) @classmethod def setup_classes(cls): paperwork, people, companies, boss, managers, machines, engineers = ( cls.tables.paperwork, cls.tables.people, cls.tables.companies, cls.tables.boss, cls.tables.managers, cls.tables.machines, cls.tables.engineers) class Company(cls.Comparable): pass class Person(cls.Comparable): pass class Engineer(Person): pass class Manager(Person): pass class Boss(Manager): pass class Machine(cls.Comparable): pass class Paperwork(cls.Comparable): pass mapper(Company, companies, properties={ 'employees': relationship(Person, order_by=people.c.person_id) }) mapper(Machine, machines) mapper(Person, people, polymorphic_on=people.c.type, polymorphic_identity='person', properties={ 'paperwork': relationship(Paperwork, order_by=paperwork.c.paperwork_id) }) mapper(Engineer, engineers, inherits=Person, polymorphic_identity='engineer', properties={'machines': relationship( Machine, order_by=machines.c.machine_id)}) mapper(Manager, managers, inherits=Person, polymorphic_identity='manager') mapper(Boss, boss, inherits=Manager, polymorphic_identity='boss') mapper(Paperwork, paperwork) def test_single_prop(self): Company = self.classes.Company sess = create_session() self.assert_compile( sess.query(Company).join(Company.employees), "SELECT companies.company_id AS companies_company_id, " "companies.name AS companies_name " "FROM companies JOIN people " "ON companies.company_id = people.company_id", use_default_dialect=True) def test_force_via_select_from(self): Company, Engineer = self.classes.Company, self.classes.Engineer sess = create_session() self.assert_compile( sess.query(Company) .filter(Company.company_id == Engineer.company_id) .filter(Engineer.primary_language == 'java'), "SELECT companies.company_id AS companies_company_id, " "companies.name AS companies_name " "FROM companies, people, engineers " "WHERE companies.company_id = people.company_id " "AND engineers.primary_language " "= :primary_language_1", use_default_dialect=True) self.assert_compile( sess.query(Company).select_from(Company, Engineer) .filter(Company.company_id == Engineer.company_id) .filter(Engineer.primary_language == 'java'), "SELECT companies.company_id AS companies_company_id, " "companies.name AS companies_name " "FROM companies, people JOIN engineers " "ON people.person_id = engineers.person_id " "WHERE companies.company_id = people.company_id " "AND engineers.primary_language =" " :primary_language_1", use_default_dialect=True) def test_single_prop_of_type(self): Company, Engineer = self.classes.Company, self.classes.Engineer sess = create_session() self.assert_compile( sess.query(Company).join(Company.employees.of_type(Engineer)), "SELECT companies.company_id AS companies_company_id, " "companies.name AS companies_name " "FROM companies JOIN " "(people JOIN engineers " "ON people.person_id = engineers.person_id) " "ON companies.company_id = people.company_id", use_default_dialect=True) def test_prop_with_polymorphic_1(self): Person, Manager, Paperwork = (self.classes.Person, self.classes.Manager, self.classes.Paperwork) sess = create_session() self.assert_compile( sess.query(Person).with_polymorphic(Manager). 
order_by(Person.person_id).join('paperwork') .filter(Paperwork.description.like('%review%')), "SELECT people.person_id AS people_person_id, people.company_id AS" " people_company_id, " "people.name AS people_name, people.type AS people_type, " "managers.person_id AS managers_person_id, " "managers.status AS managers_status, managers.manager_name AS " "managers_manager_name FROM people " "LEFT OUTER JOIN managers " "ON people.person_id = managers.person_id " "JOIN paperwork " "ON people.person_id = paperwork.person_id " "WHERE paperwork.description LIKE :description_1 " "ORDER BY people.person_id", use_default_dialect=True) def test_prop_with_polymorphic_2(self): Person, Manager, Paperwork = (self.classes.Person, self.classes.Manager, self.classes.Paperwork) sess = create_session() self.assert_compile( sess.query(Person).with_polymorphic(Manager). order_by(Person.person_id).join('paperwork', aliased=True) .filter(Paperwork.description.like('%review%')), "SELECT people.person_id AS people_person_id, " "people.company_id AS people_company_id, " "people.name AS people_name, people.type AS people_type, " "managers.person_id AS managers_person_id, " "managers.status AS managers_status, " "managers.manager_name AS managers_manager_name " "FROM people LEFT OUTER JOIN managers " "ON people.person_id = managers.person_id " "JOIN paperwork AS paperwork_1 " "ON people.person_id = paperwork_1.person_id " "WHERE paperwork_1.description " "LIKE :description_1 ORDER BY people.person_id", use_default_dialect=True) def test_explicit_polymorphic_join_one(self): Company, Engineer = self.classes.Company, self.classes.Engineer sess = create_session() self.assert_compile( sess.query(Company).join(Engineer) .filter(Engineer.engineer_name == 'vlad'), "SELECT companies.company_id AS companies_company_id, " "companies.name AS companies_name " "FROM companies JOIN (people JOIN engineers " "ON people.person_id = engineers.person_id) " "ON " "companies.company_id = people.company_id " "WHERE engineers.engineer_name = :engineer_name_1", use_default_dialect=True) def test_explicit_polymorphic_join_two(self): Company, Engineer = self.classes.Company, self.classes.Engineer sess = create_session() self.assert_compile( sess.query(Company) .join(Engineer, Company.company_id == Engineer.company_id) .filter(Engineer.engineer_name == 'vlad'), "SELECT companies.company_id AS companies_company_id, " "companies.name AS companies_name " "FROM companies JOIN " "(people JOIN engineers " "ON people.person_id = engineers.person_id) " "ON " "companies.company_id = people.company_id " "WHERE engineers.engineer_name = :engineer_name_1", use_default_dialect=True) def test_multiple_adaption(self): """test that multiple filter() adapters get chained together " and work correctly within a multiple-entry join().""" people, Company, Machine, engineers, machines, Engineer = ( self.tables.people, self.classes.Company, self.classes.Machine, self.tables.engineers, self.tables.machines, self.classes.Engineer) sess = create_session() self.assert_compile( sess.query(Company) .join(people.join(engineers), Company.employees) .filter(Engineer.name == 'dilbert'), "SELECT companies.company_id AS companies_company_id, " "companies.name AS companies_name " "FROM companies JOIN (people " "JOIN engineers ON people.person_id = " "engineers.person_id) ON companies.company_id = " "people.company_id WHERE people.name = :name_1", use_default_dialect=True ) mach_alias = machines.select() self.assert_compile( sess.query(Company).join(people.join(engineers), 
Company.employees) .join(mach_alias, Engineer.machines, from_joinpoint=True). filter(Engineer.name == 'dilbert').filter(Machine.name == 'foo'), "SELECT companies.company_id AS companies_company_id, " "companies.name AS companies_name " "FROM companies JOIN (people " "JOIN engineers ON people.person_id = " "engineers.person_id) ON companies.company_id = " "people.company_id JOIN " "(SELECT machines.machine_id AS machine_id, " "machines.name AS name, " "machines.engineer_id AS engineer_id " "FROM machines) AS anon_1 " "ON engineers.person_id = anon_1.engineer_id " "WHERE people.name = :name_1 AND anon_1.name = :name_2", use_default_dialect=True ) def test_auto_aliasing_multi_link(self): # test [ticket:2903] sess = create_session() Company, Engineer, Manager, Boss = self.classes.Company, \ self.classes.Engineer, \ self.classes.Manager, self.classes.Boss q = sess.query(Company).\ join(Company.employees.of_type(Engineer)).\ join(Company.employees.of_type(Manager)).\ join(Company.employees.of_type(Boss)) self.assert_compile( q, "SELECT companies.company_id AS companies_company_id, " "companies.name AS companies_name FROM companies " "JOIN (people JOIN engineers " "ON people.person_id = engineers.person_id) " "ON companies.company_id = people.company_id " "JOIN (people AS people_1 JOIN managers AS managers_1 " "ON people_1.person_id = managers_1.person_id) " "ON companies.company_id = people_1.company_id " "JOIN (people AS people_2 JOIN managers AS managers_2 " "ON people_2.person_id = managers_2.person_id JOIN boss AS boss_1 " "ON managers_2.person_id = boss_1.boss_id) " "ON companies.company_id = people_2.company_id", use_default_dialect=True) class JoinOnSynonymTest(_fixtures.FixtureTest, AssertsCompiledSQL): __dialect__ = 'default' @classmethod def setup_mappers(cls): User = cls.classes.User Address = cls.classes.Address users, addresses = (cls.tables.users, cls.tables.addresses) mapper(User, users, properties={ 'addresses': relationship(Address), 'ad_syn': synonym("addresses") }) mapper(Address, addresses) def test_join_on_synonym(self): User = self.classes.User self.assert_compile( Session().query(User).join(User.ad_syn), "SELECT users.id AS users_id, users.name AS users_name " "FROM users JOIN addresses ON users.id = addresses.user_id" ) class JoinTest(QueryTest, AssertsCompiledSQL): __dialect__ = 'default' def test_single_name(self): User = self.classes.User sess = create_session() self.assert_compile( sess.query(User).join("orders"), "SELECT users.id AS users_id, users.name AS users_name " "FROM users JOIN orders ON users.id = orders.user_id" ) assert_raises( sa_exc.InvalidRequestError, sess.query(User).join, "user", ) self.assert_compile( sess.query(User).join("orders", "items"), "SELECT users.id AS users_id, users.name AS users_name FROM users " "JOIN orders ON users.id = orders.user_id " "JOIN order_items AS order_items_1 " "ON orders.id = order_items_1.order_id JOIN items " "ON items.id = order_items_1.item_id" ) # test overlapping paths. User->orders is used by both joins, but # rendered once. 
self.assert_compile( sess.query(User).join("orders", "items").join( "orders", "address"), "SELECT users.id AS users_id, users.name AS users_name FROM users " "JOIN orders " "ON users.id = orders.user_id " "JOIN order_items AS order_items_1 " "ON orders.id = order_items_1.order_id " "JOIN items ON items.id = order_items_1.item_id JOIN addresses " "ON addresses.id = orders.address_id") def test_invalid_kwarg_join(self): User = self.classes.User sess = create_session() assert_raises_message( TypeError, "unknown arguments: bar, foob", sess.query(User).join, "address", foob="bar", bar="bat" ) assert_raises_message( TypeError, "unknown arguments: bar, foob", sess.query(User).outerjoin, "address", foob="bar", bar="bat" ) def test_left_w_no_entity(self): User = self.classes.User Address = self.classes.Address sess = create_session() self.assert_compile( sess.query(User, literal_column('x'), ).join(Address), "SELECT users.id AS users_id, users.name AS users_name, x " "FROM users JOIN addresses ON users.id = addresses.user_id" ) self.assert_compile( sess.query(literal_column('x'), User).join(Address), "SELECT x, users.id AS users_id, users.name AS users_name " "FROM users JOIN addresses ON users.id = addresses.user_id" ) def test_left_is_none_and_query_has_no_entities(self): User = self.classes.User Address = self.classes.Address sess = create_session() assert_raises_message( sa_exc.InvalidRequestError, r"No entities to join from; please use select_from\(\) to " r"establish the left entity/selectable of this join", sess.query().join, Address ) def test_isouter_flag(self): User = self.classes.User self.assert_compile( create_session().query(User).join('orders', isouter=True), "SELECT users.id AS users_id, users.name AS users_name " "FROM users LEFT OUTER JOIN orders ON users.id = orders.user_id" ) def test_full_flag(self): User = self.classes.User self.assert_compile( create_session().query(User).outerjoin('orders', full=True), "SELECT users.id AS users_id, users.name AS users_name " "FROM users FULL OUTER JOIN orders ON users.id = orders.user_id" ) def test_multi_tuple_form(self): """test the 'tuple' form of join, now superseded by the two-element join() form. Not deprecating this style as of yet. 
""" Item, Order, User = (self.classes.Item, self.classes.Order, self.classes.User) sess = create_session() # assert_raises( # sa.exc.SADeprecationWarning, # sess.query(User).join, (Order, User.id==Order.user_id) # ) self.assert_compile( sess.query(User).join((Order, User.id == Order.user_id)), "SELECT users.id AS users_id, users.name AS users_name " "FROM users JOIN orders ON users.id = orders.user_id", ) self.assert_compile( sess.query(User).join( (Order, User.id == Order.user_id), (Item, Order.items)), "SELECT users.id AS users_id, users.name AS users_name " "FROM users JOIN orders ON users.id = orders.user_id " "JOIN order_items AS order_items_1 ON orders.id = " "order_items_1.order_id JOIN items ON items.id = " "order_items_1.item_id", ) # the old "backwards" form self.assert_compile( sess.query(User).join(("orders", Order)), "SELECT users.id AS users_id, users.name AS users_name " "FROM users JOIN orders ON users.id = orders.user_id", ) def test_single_prop_1(self): Item, Order, User, Address = (self.classes.Item, self.classes.Order, self.classes.User, self.classes.Address) sess = create_session() self.assert_compile( sess.query(User).join(User.orders), "SELECT users.id AS users_id, users.name AS users_name " "FROM users JOIN orders ON users.id = orders.user_id" ) def test_single_prop_2(self): Item, Order, User, Address = (self.classes.Item, self.classes.Order, self.classes.User, self.classes.Address) sess = create_session() self.assert_compile( sess.query(User).join(Order.user), "SELECT users.id AS users_id, users.name AS users_name " "FROM orders JOIN users ON users.id = orders.user_id" ) def test_single_prop_3(self): Item, Order, User, Address = (self.classes.Item, self.classes.Order, self.classes.User, self.classes.Address) sess = create_session() oalias1 = aliased(Order) self.assert_compile( sess.query(User).join(oalias1.user), "SELECT users.id AS users_id, users.name AS users_name " "FROM orders AS orders_1 JOIN users ON users.id = orders_1.user_id" ) def test_single_prop_4(self): Item, Order, User, Address = (self.classes.Item, self.classes.Order, self.classes.User, self.classes.Address) sess = create_session() oalias1 = aliased(Order) oalias2 = aliased(Order) # another nonsensical query. (from [ticket:1537]). 
# in this case, the contract of "left to right" is honored self.assert_compile( sess.query(User).join(oalias1.user).join(oalias2.user), "SELECT users.id AS users_id, users.name AS users_name " "FROM orders AS orders_1 JOIN users " "ON users.id = orders_1.user_id, " "orders AS orders_2 JOIN users ON users.id = orders_2.user_id") def test_single_prop_5(self): Item, Order, User, Address = (self.classes.Item, self.classes.Order, self.classes.User, self.classes.Address) sess = create_session() self.assert_compile( sess.query(User).join(User.orders, Order.items), "SELECT users.id AS users_id, users.name AS users_name FROM users " "JOIN orders ON users.id = orders.user_id " "JOIN order_items AS order_items_1 " "ON orders.id = order_items_1.order_id JOIN items " "ON items.id = order_items_1.item_id" ) def test_single_prop_6(self): Item, Order, User, Address = (self.classes.Item, self.classes.Order, self.classes.User, self.classes.Address) sess = create_session() ualias = aliased(User) self.assert_compile( sess.query(ualias).join(ualias.orders), "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name " "FROM users AS users_1 JOIN orders ON users_1.id = orders.user_id" ) def test_single_prop_7(self): Item, Order, User, Address = (self.classes.Item, self.classes.Order, self.classes.User, self.classes.Address) sess = create_session() # this query is somewhat nonsensical. the old system didn't render a # correct query for this. In this case its the most faithful to what # was asked - there's no linkage between User.orders and "oalias", # so two FROM elements are generated. oalias = aliased(Order) self.assert_compile( sess.query(User).join(User.orders, oalias.items), "SELECT users.id AS users_id, users.name AS users_name FROM users " "JOIN orders ON users.id = orders.user_id, " "orders AS orders_1 JOIN order_items AS order_items_1 " "ON orders_1.id = order_items_1.order_id " "JOIN items ON items.id = order_items_1.item_id") def test_single_prop_8(self): Item, Order, User, Address = (self.classes.Item, self.classes.Order, self.classes.User, self.classes.Address) sess = create_session() # same as before using an aliased() for User as well ualias = aliased(User) oalias = aliased(Order) self.assert_compile( sess.query(ualias).join(ualias.orders, oalias.items), "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name " "FROM users AS users_1 " "JOIN orders ON users_1.id = orders.user_id, " "orders AS orders_1 JOIN order_items AS order_items_1 " "ON orders_1.id = order_items_1.order_id " "JOIN items ON items.id = order_items_1.item_id") def test_single_prop_9(self): Item, Order, User, Address = (self.classes.Item, self.classes.Order, self.classes.User, self.classes.Address) sess = create_session() self.assert_compile( sess.query(User).filter(User.name == 'ed').from_self(). join(User.orders), "SELECT anon_1.users_id AS anon_1_users_id, " "anon_1.users_name AS anon_1_users_name " "FROM (SELECT users.id AS users_id, users.name AS users_name " "FROM users " "WHERE users.name = :name_1) AS anon_1 JOIN orders " "ON anon_1.users_id = orders.user_id" ) def test_single_prop_10(self): Item, Order, User, Address = (self.classes.Item, self.classes.Order, self.classes.User, self.classes.Address) sess = create_session() self.assert_compile( sess.query(User).join(User.addresses, aliased=True). 
filter(Address.email_address == 'foo'), "SELECT users.id AS users_id, users.name AS users_name " "FROM users JOIN addresses AS addresses_1 " "ON users.id = addresses_1.user_id " "WHERE addresses_1.email_address = :email_address_1" ) def test_single_prop_11(self): Item, Order, User, Address = (self.classes.Item, self.classes.Order, self.classes.User, self.classes.Address) sess = create_session() self.assert_compile( sess.query(User).join(User.orders, Order.items, aliased=True). filter(Item.id == 10), "SELECT users.id AS users_id, users.name AS users_name " "FROM users JOIN orders AS orders_1 " "ON users.id = orders_1.user_id " "JOIN order_items AS order_items_1 " "ON orders_1.id = order_items_1.order_id " "JOIN items AS items_1 ON items_1.id = order_items_1.item_id " "WHERE items_1.id = :id_1") def test_single_prop_12(self): Item, Order, User, Address = (self.classes.Item, self.classes.Order, self.classes.User, self.classes.Address) sess = create_session() oalias1 = aliased(Order) # test #1 for [ticket:1706] ualias = aliased(User) self.assert_compile( sess.query(ualias). join(oalias1, ualias.orders). join(Address, ualias.addresses), "SELECT users_1.id AS users_1_id, users_1.name AS " "users_1_name FROM users AS users_1 JOIN orders AS orders_1 " "ON users_1.id = orders_1.user_id JOIN addresses ON users_1.id " "= addresses.user_id" ) def test_single_prop_13(self): Item, Order, User, Address = (self.classes.Item, self.classes.Order, self.classes.User, self.classes.Address) sess = create_session() # test #2 for [ticket:1706] ualias = aliased(User) ualias2 = aliased(User) self.assert_compile( sess.query(ualias). join(Address, ualias.addresses). join(ualias2, Address.user). join(Order, ualias.orders), "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name " "FROM users " "AS users_1 JOIN addresses ON users_1.id = addresses.user_id " "JOIN users AS users_2 " "ON users_2.id = addresses.user_id JOIN orders " "ON users_1.id = orders.user_id" ) def test_overlapping_paths(self): User = self.classes.User for aliased in (True, False): # load a user who has an order that contains item id 3 and address # id 1 (order 3, owned by jack) result = create_session().query(User) \ .join('orders', 'items', aliased=aliased) \ .filter_by(id=3) \ .join('orders', 'address', aliased=aliased) \ .filter_by(id=1).all() assert [User(id=7, name='jack')] == result def test_overlapping_paths_multilevel(self): User = self.classes.User s = Session() q = s.query(User).\ join('orders').\ join('addresses').\ join('orders', 'items').\ join('addresses', 'dingaling') self.assert_compile( q, "SELECT users.id AS users_id, users.name AS users_name " "FROM users JOIN orders ON users.id = orders.user_id " "JOIN addresses ON users.id = addresses.user_id " "JOIN order_items AS order_items_1 ON orders.id = " "order_items_1.order_id " "JOIN items ON items.id = order_items_1.item_id " "JOIN dingalings ON addresses.id = dingalings.address_id" ) def test_overlapping_paths_outerjoin(self): User = self.classes.User result = create_session().query(User).outerjoin('orders', 'items') \ .filter_by(id=3).outerjoin('orders', 'address') \ .filter_by(id=1).all() assert [User(id=7, name='jack')] == result def test_raises_on_dupe_target_rel(self): User = self.classes.User assert_raises_message( sa.exc.SAWarning, "Pathed join target Order.items has already been joined to; " "skipping", lambda: create_session().query(User).outerjoin('orders', 'items'). 
outerjoin('orders', 'items') ) def test_from_joinpoint(self): Item, User, Order = (self.classes.Item, self.classes.User, self.classes.Order) sess = create_session() for oalias, ialias in [ (True, True), (False, False), (True, False), (False, True)]: eq_( sess.query(User).join('orders', aliased=oalias) .join('items', from_joinpoint=True, aliased=ialias) .filter(Item.description == 'item 4').all(), [User(name='jack')] ) # use middle criterion eq_( sess.query(User).join('orders', aliased=oalias) .filter(Order.user_id == 9) .join('items', from_joinpoint=True, aliased=ialias) .filter(Item.description == 'item 4').all(), [] ) orderalias = aliased(Order) itemalias = aliased(Item) eq_( sess.query(User).join(orderalias, 'orders') .join(itemalias, 'items', from_joinpoint=True) .filter(itemalias.description == 'item 4').all(), [User(name='jack')] ) eq_( sess.query(User).join(orderalias, 'orders') .join(itemalias, 'items', from_joinpoint=True) .filter(orderalias.user_id == 9) .filter(itemalias.description == 'item 4').all(), [] ) def test_join_nonmapped_column(self): """test that the search for a 'left' doesn't trip on non-mapped cols""" Order, User = self.classes.Order, self.classes.User sess = create_session() # intentionally join() with a non-existent "left" side self.assert_compile( sess.query(User.id, literal_column('foo')).join(Order.user), "SELECT users.id AS users_id, foo FROM " "orders JOIN users ON users.id = orders.user_id" ) def test_backwards_join(self): User, Address = self.classes.User, self.classes.Address # a more controversial feature. join from # User->Address, but the onclause is Address.user. sess = create_session() eq_( sess.query(User).join(Address.user) .filter(Address.email_address == '[email protected]').all(), [User(id=8, name='ed')] ) # its actually not so controversial if you view it in terms # of multiple entities. eq_( sess.query(User, Address).join(Address.user) .filter(Address.email_address == '[email protected]').all(), [(User(id=8, name='ed'), Address(email_address='[email protected]'))] ) # this was the controversial part. now, raise an error if the feature # is abused. # before the error raise was added, this would silently work..... 
assert_raises( sa_exc.InvalidRequestError, sess.query(User).join, Address, Address.user, ) # but this one would silently fail adalias = aliased(Address) assert_raises( sa_exc.InvalidRequestError, sess.query(User).join, adalias, Address.user, ) def test_multiple_with_aliases(self): Order, User = self.classes.Order, self.classes.User sess = create_session() ualias = aliased(User) oalias1 = aliased(Order) oalias2 = aliased(Order) self.assert_compile( sess.query(ualias).join(oalias1, ualias.orders) .join(oalias2, ualias.orders) .filter(or_(oalias1.user_id == 9, oalias2.user_id == 7)), "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name " "FROM users AS users_1 " "JOIN orders AS orders_1 ON users_1.id = orders_1.user_id " "JOIN orders AS orders_2 ON " "users_1.id = orders_2.user_id " "WHERE orders_1.user_id = :user_id_1 " "OR orders_2.user_id = :user_id_2", use_default_dialect=True) def test_select_from_orm_joins(self): User, Order = self.classes.User, self.classes.Order sess = create_session() ualias = aliased(User) oalias1 = aliased(Order) oalias2 = aliased(Order) self.assert_compile( join(User, oalias2, User.id == oalias2.user_id), "users JOIN orders AS orders_1 ON users.id = orders_1.user_id", use_default_dialect=True ) self.assert_compile( join(ualias, oalias1, ualias.orders), "users AS users_1 JOIN orders AS orders_1 " "ON users_1.id = orders_1.user_id", use_default_dialect=True) self.assert_compile( sess.query(ualias).select_from( join(ualias, oalias1, ualias.orders)), "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name " "FROM users AS users_1 " "JOIN orders AS orders_1 ON users_1.id = orders_1.user_id", use_default_dialect=True) self.assert_compile( sess.query(User, ualias).select_from( join(ualias, oalias1, ualias.orders)), "SELECT users.id AS users_id, users.name AS users_name, " "users_1.id AS users_1_id, " "users_1.name AS users_1_name FROM users, users AS users_1 " "JOIN orders AS orders_1 ON users_1.id = orders_1.user_id", use_default_dialect=True) # this fails (and we cant quite fix right now). if False: self.assert_compile( sess.query(User, ualias).join(oalias1, ualias.orders) .join(oalias2, User.id == oalias2.user_id) .filter(or_(oalias1.user_id == 9, oalias2.user_id == 7)), "SELECT users.id AS users_id, users.name AS users_name, " "users_1.id AS users_1_id, users_1.name AS " "users_1_name FROM users JOIN orders AS orders_2 " "ON users.id = orders_2.user_id, " "users AS users_1 JOIN orders AS orders_1 " "ON users_1.id = orders_1.user_id " "WHERE orders_1.user_id = :user_id_1 " "OR orders_2.user_id = :user_id_2", use_default_dialect=True) # this is the same thing using explicit orm.join() (which now offers # multiple again) self.assert_compile( sess.query(User, ualias).select_from( join(ualias, oalias1, ualias.orders), join(User, oalias2, User.id == oalias2.user_id),) .filter(or_(oalias1.user_id == 9, oalias2.user_id == 7)), "SELECT users.id AS users_id, users.name AS users_name, " "users_1.id AS users_1_id, users_1.name AS " "users_1_name FROM users AS users_1 JOIN orders AS orders_1 " "ON users_1.id = orders_1.user_id, " "users JOIN orders AS orders_2 ON users.id = orders_2.user_id " "WHERE orders_1.user_id = :user_id_1 " "OR orders_2.user_id = :user_id_2", use_default_dialect=True) def test_overlapping_backwards_joins(self): User, Order = self.classes.User, self.classes.Order sess = create_session() oalias1 = aliased(Order) oalias2 = aliased(Order) # this is invalid SQL - joins from orders_1/orders_2 to User twice. 
# but that is what was asked for so they get it ! self.assert_compile( sess.query(User).join(oalias1.user).join(oalias2.user), "SELECT users.id AS users_id, users.name AS users_name " "FROM orders AS orders_1 " "JOIN users ON users.id = orders_1.user_id, orders AS orders_2 " "JOIN users ON users.id = orders_2.user_id", use_default_dialect=True,) def test_replace_multiple_from_clause(self): """test adding joins onto multiple FROM clauses""" User, Order, Address = (self.classes.User, self.classes.Order, self.classes.Address) sess = create_session() self.assert_compile( sess.query(Address, User) .join(Address.dingaling).join(User.orders, Order.items), "SELECT addresses.id AS addresses_id, " "addresses.user_id AS addresses_user_id, " "addresses.email_address AS addresses_email_address, " "users.id AS users_id, " "users.name AS users_name FROM addresses JOIN dingalings " "ON addresses.id = dingalings.address_id, " "users JOIN orders ON users.id = orders.user_id " "JOIN order_items AS order_items_1 " "ON orders.id = order_items_1.order_id JOIN items " "ON items.id = order_items_1.item_id", use_default_dialect=True ) def test_invalid_join_entity_from_single_from_clause(self): Address, Item = ( self.classes.Address, self.classes.Item) sess = create_session() q = sess.query(Address).select_from(Address) assert_raises_message( sa.exc.InvalidRequestError, "Don't know how to join to .*Item.*; " "please use an ON clause to more clearly establish the " "left side of this join", q.join, Item ) def test_invalid_join_entity_from_no_from_clause(self): Address, Item = ( self.classes.Address, self.classes.Item) sess = create_session() q = sess.query(Address) assert_raises_message( sa.exc.InvalidRequestError, "Don't know how to join to .*Item.*; " "please use an ON clause to more clearly establish the " "left side of this join", q.join, Item ) def test_invalid_join_entity_from_multiple_from_clause(self): """test adding joins onto multiple FROM clauses where we still need to say there's nothing to JOIN from""" User, Address, Item = ( self.classes.User, self.classes.Address, self.classes.Item) sess = create_session() q = sess.query(Address, User).join(Address.dingaling).\ join(User.orders) assert_raises_message( sa.exc.InvalidRequestError, "Don't know how to join to .*Item.*; " "please use an ON clause to more clearly establish the " "left side of this join", q.join, Item ) def test_join_explicit_left_multiple_from_clause(self): """test adding joins onto multiple FROM clauses where it is ambiguous which FROM should be used when an ON clause is given""" User = self.classes.User sess = create_session() u1 = aliased(User) # in this case, two FROM objects, one # is users, the other is u1_alias. 
# User.addresses looks for the "users" table and can match # to both u1_alias and users if the match is not specific enough q = sess.query(User, u1).\ select_from(User, u1).\ join(User.addresses) self.assert_compile( q, "SELECT users.id AS users_id, users.name AS users_name, " "users_1.id AS users_1_id, users_1.name AS users_1_name " "FROM users AS users_1, " "users JOIN addresses ON users.id = addresses.user_id" ) q = sess.query(User, u1).\ select_from(User, u1).\ join(u1.addresses) self.assert_compile( q, "SELECT users.id AS users_id, users.name AS users_name, " "users_1.id AS users_1_id, users_1.name AS users_1_name " "FROM users, " "users AS users_1 JOIN addresses ON users_1.id = addresses.user_id" ) def test_join_explicit_left_multiple_adapted(self): """test adding joins onto multiple FROM clauses where it is ambiguous which FROM should be used when an ON clause is given""" User = self.classes.User sess = create_session() u1 = aliased(User) u2 = aliased(User) # in this case, two FROM objects, one # is users, the other is u1_alias. # User.addresses looks for the "users" table and can match # to both u1_alias and users if the match is not specific enough assert_raises_message( sa_exc.InvalidRequestError, "Can't identify which entity in which to assign the " "left side of this join.", sess.query(u1, u2).select_from(u1, u2).join, User.addresses ) # more specific ON clause self.assert_compile( sess.query(u1, u2).select_from(u1, u2).join(u2.addresses), "SELECT users_1.id AS users_1_id, users_1.name AS users_1_name, " "users_2.id AS users_2_id, users_2.name AS users_2_name " "FROM users AS users_1, " "users AS users_2 JOIN addresses ON users_2.id = addresses.user_id" ) def test_join_entity_from_multiple_from_clause(self): """test adding joins onto multiple FROM clauses where it is ambiguous which FROM should be used""" User, Order, Address, Dingaling = ( self.classes.User, self.classes.Order, self.classes.Address, self.classes.Dingaling) sess = create_session() q = sess.query(Address, User).join(Address.dingaling).\ join(User.orders) a1 = aliased(Address) assert_raises_message( sa.exc.InvalidRequestError, "Can't determine which FROM clause to join from, there are " "multiple FROMS which can join to this entity. 
" "Try adding an explicit ON clause to help resolve the ambiguity.", q.join, a1 ) # to resolve, add an ON clause # the user->orders join is chosen to join to a1 self.assert_compile( q.join(a1, Order.address_id == a1.id), "SELECT addresses.id AS addresses_id, " "addresses.user_id AS addresses_user_id, " "addresses.email_address AS addresses_email_address, " "users.id AS users_id, users.name AS users_name " "FROM addresses JOIN dingalings " "ON addresses.id = dingalings.address_id, " "users JOIN orders " "ON users.id = orders.user_id " "JOIN addresses AS addresses_1 " "ON orders.address_id = addresses_1.id" ) # the address->dingalings join is chosen to join to a1 self.assert_compile( q.join(a1, Dingaling.address_id == a1.id), "SELECT addresses.id AS addresses_id, " "addresses.user_id AS addresses_user_id, " "addresses.email_address AS addresses_email_address, " "users.id AS users_id, users.name AS users_name " "FROM addresses JOIN dingalings " "ON addresses.id = dingalings.address_id " "JOIN addresses AS addresses_1 " "ON dingalings.address_id = addresses_1.id, " "users JOIN orders ON users.id = orders.user_id" ) def test_join_entity_from_multiple_entities(self): """test adding joins onto multiple FROM clauses where it is ambiguous which FROM should be used""" Order, Address, Dingaling = ( self.classes.Order, self.classes.Address, self.classes.Dingaling) sess = create_session() q = sess.query(Order, Dingaling) a1 = aliased(Address) assert_raises_message( sa.exc.InvalidRequestError, "Can't determine which FROM clause to join from, there are " "multiple FROMS which can join to this entity. " "Try adding an explicit ON clause to help resolve the ambiguity.", q.join, a1 ) # to resolve, add an ON clause # Order is chosen to join to a1 self.assert_compile( q.join(a1, Order.address_id == a1.id), "SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, " "orders.address_id AS orders_address_id, " "orders.description AS orders_description, " "orders.isopen AS orders_isopen, dingalings.id AS dingalings_id, " "dingalings.address_id AS dingalings_address_id, " "dingalings.data AS dingalings_data " "FROM dingalings, orders " "JOIN addresses AS addresses_1 " "ON orders.address_id = addresses_1.id" ) # Dingaling is chosen to join to a1 self.assert_compile( q.join(a1, Dingaling.address_id == a1.id), "SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, " "orders.address_id AS orders_address_id, " "orders.description AS orders_description, " "orders.isopen AS orders_isopen, dingalings.id AS dingalings_id, " "dingalings.address_id AS dingalings_address_id, " "dingalings.data AS dingalings_data " "FROM orders, dingalings JOIN addresses AS addresses_1 " "ON dingalings.address_id = addresses_1.id" ) def test_multiple_adaption(self): Item, Order, User = (self.classes.Item, self.classes.Order, self.classes.User) sess = create_session() self.assert_compile( sess.query(User).join(User.orders, Order.items, aliased=True) .filter(Order.id == 7).filter(Item.id == 8), "SELECT users.id AS users_id, users.name AS users_name FROM users " "JOIN orders AS orders_1 " "ON users.id = orders_1.user_id JOIN order_items AS order_items_1 " "ON orders_1.id = order_items_1.order_id " "JOIN items AS items_1 ON items_1.id = order_items_1.item_id " "WHERE orders_1.id = :id_1 AND items_1.id = :id_2", use_default_dialect=True ) def test_onclause_conditional_adaption(self): Item, Order, orders, order_items, User = (self.classes.Item, self.classes.Order, self.tables.orders, self.tables.order_items, self.classes.User) 
sess = create_session() # this is now a very weird test, nobody should really # be using the aliased flag in this way. self.assert_compile( sess.query(User).join(User.orders, aliased=True). join(Item, and_(Order.id == order_items.c.order_id, order_items.c.item_id == Item.id), from_joinpoint=True, aliased=True), "SELECT users.id AS users_id, users.name AS users_name FROM users " "JOIN orders AS orders_1 ON users.id = orders_1.user_id " "JOIN items AS items_1 " "ON orders_1.id = order_items.order_id " "AND order_items.item_id = items_1.id", use_default_dialect=True ) oalias = orders.select() self.assert_compile( sess.query(User).join(oalias, User.orders) .join(Item, and_( Order.id == order_items.c.order_id, order_items.c.item_id == Item.id), from_joinpoint=True), "SELECT users.id AS users_id, users.name AS users_name " "FROM users JOIN " "(SELECT orders.id AS id, orders.user_id AS user_id, " "orders.address_id AS address_id, orders.description " "AS description, orders.isopen AS isopen FROM orders) AS anon_1 " "ON users.id = anon_1.user_id JOIN items " "ON anon_1.id = order_items.order_id " "AND order_items.item_id = items.id", use_default_dialect=True) # query.join(<stuff>, aliased=True).join(target, sql_expression) # or: query.join(path_to_some_joined_table_mapper).join(target, # sql_expression) def test_pure_expression_error(self): addresses, users = self.tables.addresses, self.tables.users sess = create_session() self.assert_compile( sess.query(users).join(addresses), "SELECT users.id AS users_id, users.name AS users_name " "FROM users JOIN addresses ON users.id = addresses.user_id" ) def test_orderby_arg_bug(self): User, users, Order = (self.classes.User, self.tables.users, self.classes.Order) sess = create_session() # no arg error result = sess.query(User).join('orders', aliased=True) \ .order_by(Order.id).reset_joinpoint().order_by(users.c.id).all() def test_no_onclause(self): Item, User, Order = (self.classes.Item, self.classes.User, self.classes.Order) sess = create_session() eq_( sess.query(User).select_from(join(User, Order) .join(Item, Order.items)) .filter(Item.description == 'item 4').all(), [User(name='jack')] ) eq_( sess.query(User.name).select_from(join(User, Order) .join(Item, Order.items)) .filter(Item.description == 'item 4').all(), [('jack',)] ) eq_( sess.query(User).join(Order).join(Item, Order.items) .filter(Item.description == 'item 4').all(), [User(name='jack')] ) def test_clause_onclause(self): Item, Order, users, order_items, User = (self.classes.Item, self.classes.Order, self.tables.users, self.tables.order_items, self.classes.User) sess = create_session() eq_( sess.query(User).join(Order, User.id == Order.user_id) .join(order_items, Order.id == order_items.c.order_id) .join(Item, order_items.c.item_id == Item.id) .filter(Item.description == 'item 4').all(), [User(name='jack')] ) eq_( sess.query(User.name).join(Order, User.id == Order.user_id) .join(order_items, Order.id == order_items.c.order_id) .join(Item, order_items.c.item_id == Item.id) .filter(Item.description == 'item 4').all(), [('jack',)] ) ualias = aliased(User) eq_( sess.query(ualias.name).join(Order, ualias.id == Order.user_id) .join(order_items, Order.id == order_items.c.order_id) .join(Item, order_items.c.item_id == Item.id) .filter(Item.description == 'item 4').all(), [('jack',)] ) # explicit onclause with from_self(), means # the onclause must be aliased against the query's custom # FROM object eq_( sess.query(User).order_by(User.id).offset(2) .from_self() .join(Order, User.id == Order.user_id) 
            .all(),
            [User(name='fred')]
        )

        # same with an explicit select_from()
        eq_(
            sess.query(User).select_entity_from(select([users])
                                                .order_by(User.id)
                                                .offset(2).alias())
            .join(Order, User.id == Order.user_id).all(),
            [User(name='fred')]
        )

    def test_aliased_classes(self):
        User, Address = self.classes.User, self.classes.Address

        sess = create_session()

        (user7, user8, user9, user10) = sess.query(User).all()
        (address1, address2, address3, address4, address5) = sess \
            .query(Address).all()
        expected = [(user7, address1), (user8, address2),
                    (user8, address3), (user8, address4),
                    (user9, address5), (user10, None)]

        q = sess.query(User)
        AdAlias = aliased(Address)
        q = q.add_entity(AdAlias).select_from(outerjoin(User, AdAlias))
        result = q.order_by(User.id, AdAlias.id).all()
        eq_(result, expected)

        sess.expunge_all()

        q = sess.query(User).add_entity(AdAlias)
        result = q.select_from(outerjoin(User, AdAlias)) \
            .filter(AdAlias.email_address == '[email protected]').all()
        eq_(result, [(user8, address3)])

        result = q.select_from(outerjoin(User, AdAlias, 'addresses')) \
            .filter(AdAlias.email_address == '[email protected]').all()
        eq_(result, [(user8, address3)])

        result = q.select_from(
            outerjoin(User, AdAlias, User.id == AdAlias.user_id)).filter(
            AdAlias.email_address == '[email protected]').all()
        eq_(result, [(user8, address3)])

        # this is the first test where we are joining "backwards" - from
        # AdAlias to User even though
        # the query is against User
        q = sess.query(User, AdAlias)
        result = q.join(AdAlias.user) \
            .filter(User.name == 'ed').order_by(User.id, AdAlias.id)
        eq_(result.all(),
            [(user8, address2), (user8, address3), (user8, address4), ])

        q = sess.query(User, AdAlias).select_from(
            join(AdAlias, User, AdAlias.user)).filter(User.name == 'ed')
        result = q.order_by(User.id, AdAlias.id)
        eq_(result.all(),
            [(user8, address2), (user8, address3), (user8, address4), ])

    def test_expression_onclauses(self):
        Order, User = self.classes.Order, self.classes.User

        sess = create_session()

        subq = sess.query(User).subquery()

        self.assert_compile(
            sess.query(User).join(subq, User.name == subq.c.name),
            "SELECT users.id AS users_id, users.name AS users_name "
            "FROM users JOIN (SELECT users.id AS id, users.name "
            "AS name FROM users) AS anon_1 ON users.name = anon_1.name",
            use_default_dialect=True
        )

        subq = sess.query(Order).subquery()
        self.assert_compile(
            sess.query(User).join(subq, User.id == subq.c.user_id),
            "SELECT users.id AS users_id, users.name AS users_name FROM "
            "users JOIN (SELECT orders.id AS id, orders.user_id AS user_id, "
            "orders.address_id AS address_id, orders.description AS "
            "description, orders.isopen AS isopen FROM orders) AS "
            "anon_1 ON users.id = anon_1.user_id",
            use_default_dialect=True
        )

        self.assert_compile(
            sess.query(User).join(Order, User.id == Order.user_id),
            "SELECT users.id AS users_id, users.name AS users_name "
            "FROM users JOIN orders ON users.id = orders.user_id",
            use_default_dialect=True
        )

    def test_implicit_joins_from_aliases(self):
        Item, User, Order = (self.classes.Item,
                             self.classes.User,
                             self.classes.Order)

        sess = create_session()
        OrderAlias = aliased(Order)

        eq_(sess.query(OrderAlias).join('items')
            .filter_by(description='item 3').order_by(OrderAlias.id).all(),
            [
                Order(address_id=1, description='order 1', isopen=0,
                      user_id=7, id=1),
                Order(address_id=4, description='order 2', isopen=0,
                      user_id=9, id=2),
                Order(address_id=1, description='order 3', isopen=1,
                      user_id=7, id=3)
            ])

        eq_(sess.query(User, OrderAlias, Item.description).
            join(OrderAlias, 'orders').join('items', from_joinpoint=True).
filter_by(description='item 3').order_by(User.id, OrderAlias.id). all(), [(User(name='jack', id=7), Order(address_id=1, description='order 1', isopen=0, user_id=7, id=1), 'item 3'), (User(name='jack', id=7), Order(address_id=1, description='order 3', isopen=1, user_id=7, id=3), 'item 3'), (User(name='fred', id=9), Order(address_id=4, description='order 2', isopen=0, user_id=9, id=2), 'item 3')]) def test_aliased_classes_m2m(self): Item, Order = self.classes.Item, self.classes.Order sess = create_session() (order1, order2, order3, order4, order5) = sess.query(Order).all() (item1, item2, item3, item4, item5) = sess.query(Item).all() expected = [ (order1, item1), (order1, item2), (order1, item3), (order2, item1), (order2, item2), (order2, item3), (order3, item3), (order3, item4), (order3, item5), (order4, item1), (order4, item5), (order5, item5), ] q = sess.query(Order) q = q.add_entity(Item).select_from( join(Order, Item, 'items')).order_by(Order.id, Item.id) result = q.all() eq_(result, expected) IAlias = aliased(Item) q = sess.query(Order, IAlias).select_from( join(Order, IAlias, 'items')) \ .filter(IAlias.description == 'item 3') result = q.all() eq_(result, [ (order1, item3), (order2, item3), (order3, item3), ]) def test_joins_from_adapted_entities(self): User = self.classes.User # test for #1853 session = create_session() first = session.query(User) second = session.query(User) unioned = first.union(second) subquery = session.query(User.id).subquery() join = subquery, subquery.c.id == User.id joined = unioned.outerjoin(*join) self.assert_compile(joined, 'SELECT anon_1.users_id AS ' 'anon_1_users_id, anon_1.users_name AS ' 'anon_1_users_name FROM (SELECT users.id ' 'AS users_id, users.name AS users_name ' 'FROM users UNION SELECT users.id AS ' 'users_id, users.name AS users_name FROM ' 'users) AS anon_1 LEFT OUTER JOIN (SELECT ' 'users.id AS id FROM users) AS anon_2 ON ' 'anon_2.id = anon_1.users_id', use_default_dialect=True) first = session.query(User.id) second = session.query(User.id) unioned = first.union(second) subquery = session.query(User.id).subquery() join = subquery, subquery.c.id == User.id joined = unioned.outerjoin(*join) self.assert_compile(joined, 'SELECT anon_1.users_id AS anon_1_users_id ' 'FROM (SELECT users.id AS users_id FROM ' 'users UNION SELECT users.id AS users_id ' 'FROM users) AS anon_1 LEFT OUTER JOIN ' '(SELECT users.id AS id FROM users) AS ' 'anon_2 ON anon_2.id = anon_1.users_id', use_default_dialect=True) def test_joins_from_adapted_entities_isouter(self): User = self.classes.User # test for #1853 session = create_session() first = session.query(User) second = session.query(User) unioned = first.union(second) subquery = session.query(User.id).subquery() join = subquery, subquery.c.id == User.id joined = unioned.join(*join, isouter=True) self.assert_compile(joined, 'SELECT anon_1.users_id AS ' 'anon_1_users_id, anon_1.users_name AS ' 'anon_1_users_name FROM (SELECT users.id ' 'AS users_id, users.name AS users_name ' 'FROM users UNION SELECT users.id AS ' 'users_id, users.name AS users_name FROM ' 'users) AS anon_1 LEFT OUTER JOIN (SELECT ' 'users.id AS id FROM users) AS anon_2 ON ' 'anon_2.id = anon_1.users_id', use_default_dialect=True) first = session.query(User.id) second = session.query(User.id) unioned = first.union(second) subquery = session.query(User.id).subquery() join = subquery, subquery.c.id == User.id joined = unioned.join(*join, isouter=True) self.assert_compile(joined, 'SELECT anon_1.users_id AS anon_1_users_id ' 'FROM (SELECT users.id AS 
users_id FROM ' 'users UNION SELECT users.id AS users_id ' 'FROM users) AS anon_1 LEFT OUTER JOIN ' '(SELECT users.id AS id FROM users) AS ' 'anon_2 ON anon_2.id = anon_1.users_id', use_default_dialect=True) def test_reset_joinpoint(self): User = self.classes.User for aliased in (True, False): # load a user who has an order that contains item id 3 and address # id 1 (order 3, owned by jack) result = create_session().query(User) \ .join('orders', 'items', aliased=aliased) \ .filter_by(id=3).reset_joinpoint() \ .join('orders', 'address', aliased=aliased) \ .filter_by(id=1).all() assert [User(id=7, name='jack')] == result result = create_session().query(User) \ .join('orders', 'items', aliased=aliased, isouter=True) \ .filter_by(id=3).reset_joinpoint() \ .join('orders', 'address', aliased=aliased, isouter=True) \ .filter_by(id=1).all() assert [User(id=7, name='jack')] == result result = create_session().query(User).outerjoin( 'orders', 'items', aliased=aliased).filter_by( id=3).reset_joinpoint().outerjoin( 'orders', 'address', aliased=aliased).filter_by( id=1).all() assert [User(id=7, name='jack')] == result def test_overlap_with_aliases(self): orders, User, users = (self.tables.orders, self.classes.User, self.tables.users) oalias = orders.alias('oalias') result = create_session().query(User).select_from(users.join(oalias)) \ .filter(oalias.c.description.in_( ["order 1", "order 2", "order 3"])) \ .join('orders', 'items').order_by(User.id).all() assert [User(id=7, name='jack'), User(id=9, name='fred')] == result result = create_session().query(User).select_from(users.join(oalias)) \ .filter(oalias.c.description.in_( ["order 1", "order 2", "order 3"])) \ .join('orders', 'items').filter_by(id=4).all() assert [User(id=7, name='jack')] == result def test_aliased(self): """test automatic generation of aliased joins.""" Item, Order, User, Address = (self.classes.Item, self.classes.Order, self.classes.User, self.classes.Address) sess = create_session() # test a basic aliasized path q = sess.query(User).join('addresses', aliased=True).filter_by( email_address='[email protected]') assert [User(id=7)] == q.all() q = sess.query(User).join('addresses', aliased=True).filter( Address.email_address == '[email protected]') assert [User(id=7)] == q.all() q = sess.query(User).join('addresses', aliased=True).filter(or_( Address.email_address == '[email protected]', Address.email_address == '[email protected]')) assert [User(id=7), User(id=9)] == q.all() # test two aliasized paths, one to 'orders' and the other to # 'orders','items'. one row is returned because user 7 has order 3 and # also has order 1 which has item 1 # this tests a o2m join and a m2m join. q = sess.query(User).join('orders', aliased=True) \ .filter(Order.description == "order 3") \ .join('orders', 'items', aliased=True) \ .filter(Item.description == "item 1") assert q.count() == 1 assert [User(id=7)] == q.all() # test the control version - same joins but not aliased. rows are not # returned because order 3 does not have item 1 q = sess.query(User).join('orders').filter( Order.description == "order 3").join( 'orders', 'items').filter( Item.description == "item 1") assert [] == q.all() assert q.count() == 0 # the left half of the join condition of the any() is aliased. 
q = sess.query(User).join('orders', aliased=True).filter( Order.items.any(Item.description == 'item 4')) assert [User(id=7)] == q.all() # test that aliasing gets reset when join() is called q = sess.query(User).join('orders', aliased=True) \ .filter(Order.description == "order 3") \ .join('orders', aliased=True) \ .filter(Order.description == "order 5") assert q.count() == 1 assert [User(id=7)] == q.all() def test_aliased_order_by(self): User = self.classes.User sess = create_session() ualias = aliased(User) eq_( sess.query(User, ualias).filter(User.id > ualias.id) .order_by(desc(ualias.id), User.name).all(), [ (User(id=10, name='chuck'), User(id=9, name='fred')), (User(id=10, name='chuck'), User(id=8, name='ed')), (User(id=9, name='fred'), User(id=8, name='ed')), (User(id=10, name='chuck'), User(id=7, name='jack')), (User(id=8, name='ed'), User(id=7, name='jack')), (User(id=9, name='fred'), User(id=7, name='jack')) ] ) def test_plain_table(self): addresses, User = self.tables.addresses, self.classes.User sess = create_session() eq_( sess.query(User.name) .join(addresses, User.id == addresses.c.user_id) .order_by(User.id).all(), [('jack',), ('ed',), ('ed',), ('ed',), ('fred',)] ) def test_no_joinpoint_expr(self): User, users = self.classes.User, self.tables.users sess = create_session() # these are consistent regardless of # select_from() being present. assert_raises_message( sa_exc.InvalidRequestError, "Don't know how to join to .*User.* please use an ON clause to ", sess.query(users.c.id).join, User ) assert_raises_message( sa_exc.InvalidRequestError, "Don't know how to join to .*User.* please use an ON clause to ", sess.query(users.c.id).select_from(users).join, User ) def test_on_clause_no_right_side(self): User = self.classes.User Address = self.classes.Address sess = create_session() assert_raises_message( sa_exc.ArgumentError, "Expected mapped entity or selectable/table as join target", sess.query(User).join, User.id == Address.user_id ) def test_select_from(self): """Test that the left edge of the join can be set reliably with select_from().""" Item, Order, User = (self.classes.Item, self.classes.Order, self.classes.User) sess = create_session() self.assert_compile( sess.query(Item.id).select_from(User) .join(User.orders).join(Order.items), "SELECT items.id AS items_id FROM users JOIN orders ON " "users.id = orders.user_id JOIN order_items AS order_items_1 " "ON orders.id = order_items_1.order_id JOIN items ON items.id = " "order_items_1.item_id", use_default_dialect=True ) # here, the join really wants to add a second FROM clause # for "Item". but select_from disallows that self.assert_compile( sess.query(Item.id).select_from(User) .join(Item, User.id == Item.id), "SELECT items.id AS items_id FROM users JOIN items " "ON users.id = items.id", use_default_dialect=True) def test_from_self_resets_joinpaths(self): """test a join from from_self() doesn't confuse joins inside the subquery with the outside. 
""" Item, Keyword = self.classes.Item, self.classes.Keyword sess = create_session() self.assert_compile( sess.query(Item).join(Item.keywords).from_self(Keyword) .join(Item.keywords), "SELECT keywords.id AS keywords_id, " "keywords.name AS keywords_name " "FROM (SELECT items.id AS items_id, " "items.description AS items_description " "FROM items JOIN item_keywords AS item_keywords_1 ON items.id = " "item_keywords_1.item_id JOIN keywords " "ON keywords.id = item_keywords_1.keyword_id) " "AS anon_1 JOIN item_keywords AS item_keywords_2 ON " "anon_1.items_id = item_keywords_2.item_id " "JOIN keywords ON " "keywords.id = item_keywords_2.keyword_id", use_default_dialect=True) class JoinFromSelectableTest(fixtures.MappedTest, AssertsCompiledSQL): __dialect__ = 'default' run_setup_mappers = 'once' @classmethod def define_tables(cls, metadata): Table('table1', metadata, Column('id', Integer, primary_key=True)) Table('table2', metadata, Column('id', Integer, primary_key=True), Column('t1_id', Integer)) @classmethod def setup_classes(cls): table1, table2 = cls.tables.table1, cls.tables.table2 class T1(cls.Comparable): pass class T2(cls.Comparable): pass mapper(T1, table1) mapper(T2, table2) def test_select_mapped_to_mapped_explicit_left(self): T1, T2 = self.classes.T1, self.classes.T2 sess = Session() subq = sess.query(T2.t1_id, func.count(T2.id).label('count')).\ group_by(T2.t1_id).subquery() self.assert_compile( sess.query(subq.c.count, T1.id) .select_from(subq).join(T1, subq.c.t1_id == T1.id), "SELECT anon_1.count AS anon_1_count, table1.id AS table1_id " "FROM (SELECT table2.t1_id AS t1_id, " "count(table2.id) AS count FROM table2 " "GROUP BY table2.t1_id) AS anon_1 JOIN table1 " "ON anon_1.t1_id = table1.id" ) def test_select_mapped_to_mapped_implicit_left(self): T1, T2 = self.classes.T1, self.classes.T2 sess = Session() subq = sess.query(T2.t1_id, func.count(T2.id).label('count')).\ group_by(T2.t1_id).subquery() self.assert_compile( sess.query(subq.c.count, T1.id).join(T1, subq.c.t1_id == T1.id), "SELECT anon_1.count AS anon_1_count, table1.id AS table1_id " "FROM (SELECT table2.t1_id AS t1_id, " "count(table2.id) AS count FROM table2 " "GROUP BY table2.t1_id) AS anon_1 JOIN table1 " "ON anon_1.t1_id = table1.id" ) def test_select_mapped_to_select_explicit_left(self): T1, T2 = self.classes.T1, self.classes.T2 sess = Session() subq = sess.query(T2.t1_id, func.count(T2.id).label('count')).\ group_by(T2.t1_id).subquery() self.assert_compile( sess.query(subq.c.count, T1.id).select_from(T1) .join(subq, subq.c.t1_id == T1.id), "SELECT anon_1.count AS anon_1_count, table1.id AS table1_id " "FROM table1 JOIN (SELECT table2.t1_id AS t1_id, " "count(table2.id) AS count FROM table2 GROUP BY table2.t1_id) " "AS anon_1 ON anon_1.t1_id = table1.id" ) def test_select_mapped_to_select_implicit_left(self): T1, T2 = self.classes.T1, self.classes.T2 sess = Session() subq = sess.query(T2.t1_id, func.count(T2.id).label('count')).\ group_by(T2.t1_id).subquery() # without select_from self.assert_compile( sess.query(subq.c.count, T1.id).join(subq, subq.c.t1_id == T1.id), "SELECT anon_1.count AS anon_1_count, table1.id AS table1_id " "FROM table1 JOIN " "(SELECT table2.t1_id AS t1_id, count(table2.id) AS count " "FROM table2 GROUP BY table2.t1_id) " "AS anon_1 ON anon_1.t1_id = table1.id" ) # with select_from, same query self.assert_compile( sess.query(subq.c.count, T1.id).select_from(T1). 
join(subq, subq.c.t1_id == T1.id), "SELECT anon_1.count AS anon_1_count, table1.id AS table1_id " "FROM table1 JOIN " "(SELECT table2.t1_id AS t1_id, count(table2.id) AS count " "FROM table2 GROUP BY table2.t1_id) " "AS anon_1 ON anon_1.t1_id = table1.id" ) def test_mapped_select_to_mapped_implicit_left(self): T1, T2 = self.classes.T1, self.classes.T2 sess = Session() subq = sess.query(T2.t1_id, func.count(T2.id).label('count')).\ group_by(T2.t1_id).subquery() # without select_from self.assert_compile( sess.query(T1.id, subq.c.count). join(T1, subq.c.t1_id == T1.id), "SELECT table1.id AS table1_id, anon_1.count AS anon_1_count " "FROM (SELECT table2.t1_id AS t1_id, count(table2.id) AS count " "FROM table2 GROUP BY table2.t1_id) AS anon_1 " "JOIN table1 ON anon_1.t1_id = table1.id" ) # with select_from, same query self.assert_compile( sess.query(T1.id, subq.c.count).select_from(subq). join(T1, subq.c.t1_id == T1.id), "SELECT table1.id AS table1_id, anon_1.count AS anon_1_count " "FROM (SELECT table2.t1_id AS t1_id, count(table2.id) AS count " "FROM table2 GROUP BY table2.t1_id) AS anon_1 " "JOIN table1 ON anon_1.t1_id = table1.id" ) def test_mapped_select_to_mapped_explicit_left(self): T1, T2 = self.classes.T1, self.classes.T2 sess = Session() subq = sess.query(T2.t1_id, func.count(T2.id).label('count')).\ group_by(T2.t1_id).subquery() self.assert_compile( sess.query(T1.id, subq.c.count).select_from(subq) .join(T1, subq.c.t1_id == T1.id), "SELECT table1.id AS table1_id, anon_1.count AS anon_1_count " "FROM (SELECT table2.t1_id AS t1_id, count(table2.id) AS count " "FROM table2 GROUP BY table2.t1_id) AS anon_1 JOIN table1 " "ON anon_1.t1_id = table1.id" ) def test_mapped_select_to_select_explicit_left(self): T1, T2 = self.classes.T1, self.classes.T2 sess = Session() subq = sess.query(T2.t1_id, func.count(T2.id).label('count')).\ group_by(T2.t1_id).subquery() self.assert_compile( sess.query(T1.id, subq.c.count).select_from(T1) .join(subq, subq.c.t1_id == T1.id), "SELECT table1.id AS table1_id, anon_1.count AS anon_1_count " "FROM table1 JOIN (SELECT table2.t1_id AS t1_id, " "count(table2.id) AS count " "FROM table2 GROUP BY table2.t1_id) AS anon_1 " "ON anon_1.t1_id = table1.id") def test_mapped_select_to_select_implicit_left(self): T1, T2 = self.classes.T1, self.classes.T2 sess = Session() subq = sess.query(T2.t1_id, func.count(T2.id).label('count')).\ group_by(T2.t1_id).subquery() self.assert_compile( sess.query(T1.id, subq.c.count).join(subq, subq.c.t1_id == T1.id), "SELECT table1.id AS table1_id, anon_1.count AS anon_1_count " "FROM table1 JOIN (SELECT table2.t1_id AS t1_id, " "count(table2.id) AS count " "FROM table2 GROUP BY table2.t1_id) AS anon_1 " "ON anon_1.t1_id = table1.id") class MultiplePathTest(fixtures.MappedTest, AssertsCompiledSQL): @classmethod def define_tables(cls, metadata): t1 = Table('t1', metadata, Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(30))) t2 = Table('t2', metadata, Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(30))) t1t2_1 = Table('t1t2_1', metadata, Column('t1id', Integer, ForeignKey('t1.id')), Column('t2id', Integer, ForeignKey('t2.id'))) t1t2_2 = Table('t1t2_2', metadata, Column('t1id', Integer, ForeignKey('t1.id')), Column('t2id', Integer, ForeignKey('t2.id'))) def test_basic(self): t2, t1t2_1, t1t2_2, t1 = (self.tables.t2, self.tables.t1t2_1, self.tables.t1t2_2, self.tables.t1) class T1(object): pass class T2(object): pass mapper(T1, t1, properties={ 
't2s_1': relationship(T2, secondary=t1t2_1), 't2s_2': relationship(T2, secondary=t1t2_2), }) mapper(T2, t2) q = create_session().query(T1).join('t2s_1') \ .filter(t2.c.id == 5).reset_joinpoint().join('t2s_2') self.assert_compile( q, "SELECT t1.id AS t1_id, t1.data AS t1_data FROM t1 " "JOIN t1t2_1 AS t1t2_1_1 " "ON t1.id = t1t2_1_1.t1id JOIN t2 ON t2.id = t1t2_1_1.t2id " "JOIN t1t2_2 AS t1t2_2_1 " "ON t1.id = t1t2_2_1.t1id JOIN t2 ON t2.id = t1t2_2_1.t2id " "WHERE t2.id = :id_1", use_default_dialect=True) class SelfRefMixedTest(fixtures.MappedTest, AssertsCompiledSQL): run_setup_mappers = 'once' __dialect__ = default.DefaultDialect() @classmethod def define_tables(cls, metadata): nodes = Table('nodes', metadata, Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('parent_id', Integer, ForeignKey('nodes.id'))) sub_table = Table('sub_table', metadata, Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('node_id', Integer, ForeignKey('nodes.id'))) assoc_table = Table('assoc_table', metadata, Column('left_id', Integer, ForeignKey('nodes.id')), Column('right_id', Integer, ForeignKey('nodes.id'))) @classmethod def setup_classes(cls): nodes, assoc_table, sub_table = (cls.tables.nodes, cls.tables.assoc_table, cls.tables.sub_table) class Node(cls.Comparable): pass class Sub(cls.Comparable): pass mapper(Node, nodes, properties={ 'children': relationship(Node, lazy='select', join_depth=3, backref=backref( 'parent', remote_side=[nodes.c.id]) ), 'subs': relationship(Sub), 'assoc': relationship( Node, secondary=assoc_table, primaryjoin=nodes.c.id == assoc_table.c.left_id, secondaryjoin=nodes.c.id == assoc_table.c.right_id) }) mapper(Sub, sub_table) def test_o2m_aliased_plus_o2m(self): Node, Sub = self.classes.Node, self.classes.Sub sess = create_session() n1 = aliased(Node) self.assert_compile( sess.query(Node).join(n1, Node.children).join(Sub, n1.subs), "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id " "FROM nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id " "JOIN sub_table ON nodes_1.id = sub_table.node_id" ) self.assert_compile( sess.query(Node).join(n1, Node.children).join(Sub, Node.subs), "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id " "FROM nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id " "JOIN sub_table ON nodes.id = sub_table.node_id" ) def test_m2m_aliased_plus_o2m(self): Node, Sub = self.classes.Node, self.classes.Sub sess = create_session() n1 = aliased(Node) self.assert_compile( sess.query(Node).join(n1, Node.assoc).join(Sub, n1.subs), "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id " "FROM nodes JOIN assoc_table AS assoc_table_1 ON nodes.id = " "assoc_table_1.left_id JOIN nodes AS nodes_1 ON nodes_1.id = " "assoc_table_1.right_id JOIN sub_table " "ON nodes_1.id = sub_table.node_id", ) self.assert_compile( sess.query(Node).join(n1, Node.assoc).join(Sub, Node.subs), "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id " "FROM nodes JOIN assoc_table AS assoc_table_1 ON nodes.id = " "assoc_table_1.left_id JOIN nodes AS nodes_1 ON nodes_1.id = " "assoc_table_1.right_id JOIN sub_table " "ON nodes.id = sub_table.node_id", ) class CreateJoinsTest(fixtures.ORMTest, AssertsCompiledSQL): __dialect__ = 'default' def _inherits_fixture(self): m = MetaData() base = Table('base', m, Column('id', Integer, primary_key=True)) a = Table('a', m, Column('id', Integer, ForeignKey('base.id'), primary_key=True), Column('b_id', Integer, ForeignKey('b.id'))) b = Table('b', m, 
Column('id', Integer, ForeignKey('base.id'), primary_key=True), Column('c_id', Integer, ForeignKey('c.id'))) c = Table('c', m, Column('id', Integer, ForeignKey('base.id'), primary_key=True)) class Base(object): pass class A(Base): pass class B(Base): pass class C(Base): pass mapper(Base, base) mapper(A, a, inherits=Base, properties={ 'b': relationship(B, primaryjoin=a.c.b_id == b.c.id)}) mapper(B, b, inherits=Base, properties={ 'c': relationship(C, primaryjoin=b.c.c_id == c.c.id)}) mapper(C, c, inherits=Base) return A, B, C, Base def test_double_level_aliased_exists(self): A, B, C, Base = self._inherits_fixture() s = Session() self.assert_compile( s.query(A).filter(A.b.has(B.c.has(C.id == 5))), "SELECT a.id AS a_id, base.id AS base_id, a.b_id AS a_b_id " "FROM base JOIN a ON base.id = a.id WHERE " "EXISTS (SELECT 1 FROM (SELECT base.id AS base_id, b.id AS " "b_id, b.c_id AS b_c_id FROM base JOIN b ON base.id = b.id) " "AS anon_1 WHERE a.b_id = anon_1.b_id AND (EXISTS " "(SELECT 1 FROM (SELECT base.id AS base_id, c.id AS c_id " "FROM base JOIN c ON base.id = c.id) AS anon_2 " "WHERE anon_1.b_c_id = anon_2.c_id AND anon_2.c_id = :id_1" ")))" ) class JoinToNonPolyAliasesTest(fixtures.MappedTest, AssertsCompiledSQL): """test joins to an aliased selectable and that we can refer to that aliased selectable in filter criteria. Basically testing that the aliasing Query applies to with_polymorphic targets doesn't leak into non-polymorphic mappers. """ __dialect__ = 'default' run_create_tables = None run_deletes = None @classmethod def define_tables(cls, metadata): Table("parent", metadata, Column('id', Integer, primary_key=True), Column('data', String(50))) Table("child", metadata, Column('id', Integer, primary_key=True), Column('parent_id', Integer, ForeignKey('parent.id')), Column('data', String(50))) @classmethod def setup_mappers(cls): parent, child = cls.tables.parent, cls.tables.child class Parent(cls.Comparable): pass class Child(cls.Comparable): pass mp = mapper(Parent, parent) mapper(Child, child) derived = select([child]).alias() npc = mapper(Child, derived, non_primary=True) cls.npc = npc cls.derived = derived mp.add_property("npc", relationship(npc)) def test_join_parent_child(self): Parent = self.classes.Parent npc = self.npc sess = Session() self.assert_compile( sess.query(Parent).join(Parent.npc) .filter(self.derived.c.data == 'x'), "SELECT parent.id AS parent_id, parent.data AS parent_data " "FROM parent JOIN (SELECT child.id AS id, " "child.parent_id AS parent_id, " "child.data AS data " "FROM child) AS anon_1 ON parent.id = anon_1.parent_id " "WHERE anon_1.data = :data_1") def test_join_parent_child_select_from(self): Parent = self.classes.Parent npc = self.npc sess = Session() self.assert_compile( sess.query(npc).select_from(Parent).join(Parent.npc) .filter(self.derived.c.data == 'x'), "SELECT anon_1.id AS anon_1_id, anon_1.parent_id " "AS anon_1_parent_id, anon_1.data AS anon_1_data " "FROM parent JOIN (SELECT child.id AS id, child.parent_id AS " "parent_id, child.data AS data FROM child) AS anon_1 ON " "parent.id = anon_1.parent_id WHERE anon_1.data = :data_1" ) def test_join_select_parent_child(self): Parent = self.classes.Parent npc = self.npc sess = Session() self.assert_compile( sess.query(Parent, npc).join(Parent.npc) .filter(self.derived.c.data == 'x'), "SELECT parent.id AS parent_id, parent.data AS parent_data, " "anon_1.id AS anon_1_id, anon_1.parent_id AS anon_1_parent_id, " "anon_1.data AS anon_1_data FROM parent JOIN " "(SELECT child.id AS id, child.parent_id AS 
parent_id, " "child.data AS data FROM child) AS anon_1 ON parent.id = " "anon_1.parent_id WHERE anon_1.data = :data_1" ) class SelfReferentialTest(fixtures.MappedTest, AssertsCompiledSQL): run_setup_mappers = 'once' run_inserts = 'once' run_deletes = None @classmethod def define_tables(cls, metadata): Table('nodes', metadata, Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('parent_id', Integer, ForeignKey('nodes.id')), Column('data', String(30))) @classmethod def setup_classes(cls): class Node(cls.Comparable): def append(self, node):
@classmethod def setup_mappers(cls): Node, nodes = cls.classes.Node, cls.tables.nodes mapper(Node, nodes, properties={ 'children': relationship(Node, lazy='select', join_depth=3, backref=backref( 'parent', remote_side=[nodes.c.id]) ), }) @classmethod def insert_data(cls): Node = cls.classes.Node sess = create_session() n1 = Node(data='n1') n1.append(Node(data='n11')) n1.append(Node(data='n12')) n1.append(Node(data='n13')) n1.children[1].append(Node(data='n121')) n1.children[1].append(Node(data='n122')) n1.children[1].append(Node(data='n123')) sess.add(n1) sess.flush() sess.close() def test_join_1(self): Node = self.classes.Node sess = create_session() node = sess.query(Node) \ .join('children', aliased=True).filter_by(data='n122').first() assert node.data == 'n12' def test_join_2(self): Node = self.classes.Node sess = create_session() ret = sess.query(Node.data) \ .join(Node.children, aliased=True).filter_by(data='n122').all() assert ret == [('n12',)] def test_join_3(self): Node = self.classes.Node sess = create_session() node = sess.query(Node) \ .join('children', 'children', aliased=True) \ .filter_by(data='n122').first() assert node.data == 'n1' def test_join_4(self): Node = self.classes.Node sess = create_session() node = sess.query(Node) \ .filter_by(data='n122').join('parent', aliased=True) \ .filter_by(data='n12') \ .join('parent', aliased=True, from_joinpoint=True) \ .filter_by(data='n1').first() assert node.data == 'n122' def test_string_or_prop_aliased(self): """test that join('foo') behaves the same as join(Cls.foo) in a self referential scenario. """ Node = self.classes.Node sess = create_session() nalias = aliased(Node, sess.query(Node).filter_by(data='n1').subquery()) q1 = sess.query(nalias).join(nalias.children, aliased=True).\ join(Node.children, from_joinpoint=True) q2 = sess.query(nalias).join(nalias.children, aliased=True).\ join("children", from_joinpoint=True) for q in (q1, q2): self.assert_compile( q, "SELECT anon_1.id AS anon_1_id, anon_1.parent_id AS " "anon_1_parent_id, anon_1.data AS anon_1_data FROM " "(SELECT nodes.id AS id, nodes.parent_id AS parent_id, " "nodes.data AS data FROM nodes WHERE nodes.data = :data_1) " "AS anon_1 JOIN nodes AS nodes_1 ON anon_1.id = " "nodes_1.parent_id JOIN nodes ON nodes_1.id = nodes.parent_id", use_default_dialect=True ) q1 = sess.query(Node).join(nalias.children, aliased=True).\ join(Node.children, aliased=True, from_joinpoint=True).\ join(Node.children, from_joinpoint=True) q2 = sess.query(Node).join(nalias.children, aliased=True).\ join("children", aliased=True, from_joinpoint=True).\ join("children", from_joinpoint=True) for q in (q1, q2): self.assert_compile( q, "SELECT nodes.id AS nodes_id, nodes.parent_id AS " "nodes_parent_id, nodes.data AS nodes_data FROM (SELECT " "nodes.id AS id, nodes.parent_id AS parent_id, nodes.data " "AS data FROM nodes WHERE nodes.data = :data_1) AS anon_1 " "JOIN nodes AS nodes_1 ON anon_1.id = nodes_1.parent_id " "JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id " "JOIN nodes ON nodes_2.id = nodes.parent_id", use_default_dialect=True ) def test_from_self_inside_excludes_outside(self): """test the propagation of aliased() from inside to outside on a from_self().. 
""" Node = self.classes.Node sess = create_session() n1 = aliased(Node) # n1 is not inside the from_self(), so all cols must be maintained # on the outside self.assert_compile( sess.query(Node).filter(Node.data == 'n122') .from_self(n1, Node.id), "SELECT nodes_1.id AS nodes_1_id, " "nodes_1.parent_id AS nodes_1_parent_id, " "nodes_1.data AS nodes_1_data, anon_1.nodes_id AS anon_1_nodes_id " "FROM nodes AS nodes_1, (SELECT nodes.id AS nodes_id, " "nodes.parent_id AS nodes_parent_id, " "nodes.data AS nodes_data FROM " "nodes WHERE nodes.data = :data_1) AS anon_1", use_default_dialect=True) parent = aliased(Node) grandparent = aliased(Node) q = sess.query(Node, parent, grandparent).\ join(parent, Node.parent).\ join(grandparent, parent.parent).\ filter(Node.data == 'n122').filter(parent.data == 'n12').\ filter(grandparent.data == 'n1').from_self().limit(1) # parent, grandparent *are* inside the from_self(), so they # should get aliased to the outside. self.assert_compile( q, "SELECT anon_1.nodes_id AS anon_1_nodes_id, " "anon_1.nodes_parent_id AS anon_1_nodes_parent_id, " "anon_1.nodes_data AS anon_1_nodes_data, " "anon_1.nodes_1_id AS anon_1_nodes_1_id, " "anon_1.nodes_1_parent_id AS anon_1_nodes_1_parent_id, " "anon_1.nodes_1_data AS anon_1_nodes_1_data, " "anon_1.nodes_2_id AS anon_1_nodes_2_id, " "anon_1.nodes_2_parent_id AS anon_1_nodes_2_parent_id, " "anon_1.nodes_2_data AS anon_1_nodes_2_data " "FROM (SELECT nodes.id AS nodes_id, nodes.parent_id " "AS nodes_parent_id, nodes.data AS nodes_data, " "nodes_1.id AS nodes_1_id, " "nodes_1.parent_id AS nodes_1_parent_id, " "nodes_1.data AS nodes_1_data, nodes_2.id AS nodes_2_id, " "nodes_2.parent_id AS nodes_2_parent_id, nodes_2.data AS " "nodes_2_data FROM nodes JOIN nodes AS nodes_1 ON " "nodes_1.id = nodes.parent_id JOIN nodes AS nodes_2 " "ON nodes_2.id = nodes_1.parent_id " "WHERE nodes.data = :data_1 AND nodes_1.data = :data_2 AND " "nodes_2.data = :data_3) AS anon_1 LIMIT :param_1", {'param_1': 1}, use_default_dialect=True) def test_explicit_join_1(self): Node = self.classes.Node n1 = aliased(Node) n2 = aliased(Node) self.assert_compile( join(Node, n1, 'children').join(n2, 'children'), "nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id " "JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id", use_default_dialect=True ) def test_explicit_join_2(self): Node = self.classes.Node n1 = aliased(Node) n2 = aliased(Node) self.assert_compile( join(Node, n1, Node.children).join(n2, n1.children), "nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id " "JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id", use_default_dialect=True ) def test_explicit_join_3(self): Node = self.classes.Node n1 = aliased(Node) n2 = aliased(Node) # the join_to_left=False here is unfortunate. the default on this # flag should be False. 
self.assert_compile( join(Node, n1, Node.children) .join(n2, Node.children, join_to_left=False), "nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id " "JOIN nodes AS nodes_2 ON nodes.id = nodes_2.parent_id", use_default_dialect=True ) def test_explicit_join_4(self): Node = self.classes.Node sess = create_session() n1 = aliased(Node) n2 = aliased(Node) self.assert_compile( sess.query(Node).join(n1, Node.children).join(n2, n1.children), "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, " "nodes.data AS nodes_data FROM nodes JOIN nodes AS nodes_1 " "ON nodes.id = nodes_1.parent_id " "JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id", use_default_dialect=True) def test_explicit_join_5(self): Node = self.classes.Node sess = create_session() n1 = aliased(Node) n2 = aliased(Node) self.assert_compile( sess.query(Node).join(n1, Node.children).join(n2, Node.children), "SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, " "nodes.data AS nodes_data FROM nodes JOIN nodes AS nodes_1 " "ON nodes.id = nodes_1.parent_id " "JOIN nodes AS nodes_2 ON nodes.id = nodes_2.parent_id", use_default_dialect=True) def test_explicit_join_6(self): Node = self.classes.Node sess = create_session() n1 = aliased(Node) node = sess.query(Node).select_from(join(Node, n1, 'children')).\ filter(n1.data == 'n122').first() assert node.data == 'n12' def test_explicit_join_7(self): Node = self.classes.Node sess = create_session() n1 = aliased(Node) n2 = aliased(Node) node = sess.query(Node).select_from( join(Node, n1, 'children').join(n2, 'children')).\ filter(n2.data == 'n122').first() assert node.data == 'n1' def test_explicit_join_8(self): Node = self.classes.Node sess = create_session() n1 = aliased(Node) n2 = aliased(Node) # mix explicit and named onclauses node = sess.query(Node).select_from( join(Node, n1, Node.id == n1.parent_id).join(n2, 'children')).\ filter(n2.data == 'n122').first() assert node.data == 'n1' def test_explicit_join_9(self): Node = self.classes.Node sess = create_session() n1 = aliased(Node) n2 = aliased(Node) node = sess.query(Node).select_from( join(Node, n1, 'parent').join(n2, 'parent')).filter( and_(Node.data == 'n122', n1.data == 'n12', n2.data == 'n1')) \ .first() assert node.data == 'n122' def test_explicit_join_10(self): Node = self.classes.Node sess = create_session() n1 = aliased(Node) n2 = aliased(Node) eq_( list(sess.query(Node).select_from(join(Node, n1, 'parent') .join(n2, 'parent')). filter(and_(Node.data == 'n122', n1.data == 'n12', n2.data == 'n1')).values(Node.data, n1.data, n2.data)), [('n122', 'n12', 'n1')]) def test_join_to_nonaliased(self): Node = self.classes.Node sess = create_session() n1 = aliased(Node) # using 'n1.parent' implicitly joins to unaliased Node eq_(sess.query(n1).join(n1.parent).filter(Node.data == 'n1').all(), [Node(parent_id=1, data='n11', id=2), Node(parent_id=1, data='n12', id=3), Node(parent_id=1, data='n13', id=4)]) # explicit (new syntax) eq_(sess.query(n1).join(Node, n1.parent).filter(Node.data == 'n1').all(), [Node(parent_id=1, data='n11', id=2), Node(parent_id=1, data='n12', id=3), Node(parent_id=1, data='n13', id=4)]) def test_multiple_explicit_entities_one(self): Node = self.classes.Node sess = create_session() parent = aliased(Node) grandparent = aliased(Node) eq_( sess.query(Node, parent, grandparent). join(parent, Node.parent). join(grandparent, parent.parent). filter(Node.data == 'n122').filter(parent.data == 'n12'). 
filter(grandparent.data == 'n1').first(), (Node(data='n122'), Node(data='n12'), Node(data='n1')) ) def test_multiple_explicit_entities_two(self): Node = self.classes.Node sess = create_session() parent = aliased(Node) grandparent = aliased(Node) eq_( sess.query(Node, parent, grandparent). join(parent, Node.parent). join(grandparent, parent.parent). filter(Node.data == 'n122').filter(parent.data == 'n12'). filter(grandparent.data == 'n1').from_self().first(), (Node(data='n122'), Node(data='n12'), Node(data='n1')) ) def test_multiple_explicit_entities_three(self): Node = self.classes.Node sess = create_session() parent = aliased(Node) grandparent = aliased(Node) # same, change order around eq_( sess.query(parent, grandparent, Node). join(parent, Node.parent). join(grandparent, parent.parent). filter(Node.data == 'n122').filter(parent.data == 'n12'). filter(grandparent.data == 'n1').from_self().first(), (Node(data='n12'), Node(data='n1'), Node(data='n122')) ) def test_multiple_explicit_entities_four(self): Node = self.classes.Node sess = create_session() parent = aliased(Node) grandparent = aliased(Node) eq_( sess.query(Node, parent, grandparent). join(parent, Node.parent). join(grandparent, parent.parent). filter(Node.data == 'n122').filter(parent.data == 'n12'). filter(grandparent.data == 'n1'). options(joinedload(Node.children)).first(), (Node(data='n122'), Node(data='n12'), Node(data='n1')) ) def test_multiple_explicit_entities_five(self): Node = self.classes.Node sess = create_session() parent = aliased(Node) grandparent = aliased(Node) eq_( sess.query(Node, parent, grandparent). join(parent, Node.parent). join(grandparent, parent.parent). filter(Node.data == 'n122').filter(parent.data == 'n12'). filter(grandparent.data == 'n1').from_self(). options(joinedload(Node.children)).first(), (Node(data='n122'), Node(data='n12'), Node(data='n1')) ) def test_any(self): Node = self.classes.Node sess = create_session() eq_(sess.query(Node).filter(Node.children.any(Node.data == 'n1')) .all(), []) eq_(sess.query(Node) .filter(Node.children.any(Node.data == 'n12')).all(), [Node(data='n1')]) eq_(sess.query(Node).filter(~Node.children.any()).order_by(Node.id) .all(), [Node(data='n11'), Node(data='n13'), Node(data='n121'), Node(data='n122'), Node(data='n123'), ]) def test_has(self): Node = self.classes.Node sess = create_session() eq_(sess.query(Node).filter(Node.parent.has(Node.data == 'n12')) .order_by(Node.id).all(), [Node(data='n121'), Node(data='n122'), Node(data='n123')]) eq_(sess.query(Node).filter(Node.parent.has(Node.data == 'n122')) .all(), []) eq_(sess.query(Node).filter( ~Node.parent.has()).all(), [Node(data='n1')]) def test_contains(self): Node = self.classes.Node sess = create_session() n122 = sess.query(Node).filter(Node.data == 'n122').one() eq_(sess.query(Node).filter(Node.children.contains(n122)).all(), [Node(data='n12')]) n13 = sess.query(Node).filter(Node.data == 'n13').one() eq_(sess.query(Node).filter(Node.children.contains(n13)).all(), [Node(data='n1')]) def test_eq_ne(self): Node = self.classes.Node sess = create_session() n12 = sess.query(Node).filter(Node.data == 'n12').one() eq_(sess.query(Node).filter(Node.parent == n12).all(), [Node(data='n121'), Node(data='n122'), Node(data='n123')]) eq_(sess.query(Node).filter(Node.parent != n12).all(), [Node(data='n1'), Node(data='n11'), Node(data='n12'), Node(data='n13')]) class SelfReferentialM2MTest(fixtures.MappedTest): run_setup_mappers = 'once' run_inserts = 'once' run_deletes = None @classmethod def define_tables(cls, metadata): 
nodes = Table('nodes', metadata, Column('id', Integer, primary_key=True, test_needs_autoincrement=True), Column('data', String(30))) node_to_nodes = Table('node_to_nodes', metadata, Column('left_node_id', Integer, ForeignKey( 'nodes.id'), primary_key=True), Column('right_node_id', Integer, ForeignKey( 'nodes.id'), primary_key=True)) @classmethod def setup_classes(cls): class Node(cls.Comparable): pass @classmethod def insert_data(cls): Node, nodes, node_to_nodes = (cls.classes.Node, cls.tables.nodes, cls.tables.node_to_nodes) mapper(Node, nodes, properties={ 'children': relationship( Node, lazy='select', secondary=node_to_nodes, primaryjoin=nodes.c.id == node_to_nodes.c.left_node_id, secondaryjoin=nodes.c.id == node_to_nodes.c.right_node_id) }) sess = create_session() n1 = Node(data='n1') n2 = Node(data='n2') n3 = Node(data='n3') n4 = Node(data='n4') n5 = Node(data='n5') n6 = Node(data='n6') n7 = Node(data='n7') n1.children = [n2, n3, n4] n2.children = [n3, n6, n7] n3.children = [n5, n4] sess.add(n1) sess.add(n2) sess.add(n3) sess.add(n4) sess.flush() sess.close() def test_any(self): Node = self.classes.Node sess = create_session() eq_(sess.query(Node).filter(Node.children.any(Node.data == 'n3')) .order_by(Node.data).all(), [Node(data='n1'), Node(data='n2')]) def test_contains(self): Node = self.classes.Node sess = create_session() n4 = sess.query(Node).filter_by(data='n4').one() eq_(sess.query(Node).filter(Node.children.contains(n4)) .order_by(Node.data).all(), [Node(data='n1'), Node(data='n3')]) eq_(sess.query(Node).filter(not_(Node.children.contains(n4))) .order_by(Node.data).all(), [Node(data='n2'), Node(data='n4'), Node(data='n5'), Node(data='n6'), Node(data='n7')]) def test_explicit_join(self): Node = self.classes.Node sess = create_session() n1 = aliased(Node) eq_(sess.query(Node).select_from(join(Node, n1, 'children')) .filter(n1.data.in_(['n3', 'n7'])).order_by(Node.id).all(), [Node(data='n1'), Node(data='n2')]) class AliasFromCorrectLeftTest( fixtures.DeclarativeMappedTest, AssertsCompiledSQL): run_create_tables = None __dialect__ = 'default' @classmethod def setup_classes(cls): Base = cls.DeclarativeBasic class Object(Base): __tablename__ = 'object' type = Column(String(30)) __mapper_args__ = { 'polymorphic_identity': 'object', 'polymorphic_on': type } id = Column(Integer, primary_key=True) name = Column(String(256)) class A(Object): __tablename__ = 'a' __mapper_args__ = {'polymorphic_identity': 'a'} id = Column(Integer, ForeignKey('object.id'), primary_key=True) b_list = relationship( 'B', secondary='a_b_association', backref='a_list' ) class B(Object): __tablename__ = 'b' __mapper_args__ = {'polymorphic_identity': 'b'} id = Column(Integer, ForeignKey('object.id'), primary_key=True) class ABAssociation(Base): __tablename__ = 'a_b_association' a_id = Column(Integer, ForeignKey('a.id'), primary_key=True) b_id = Column(Integer, ForeignKey('b.id'), primary_key=True) class X(Base): __tablename__ = 'x' id = Column(Integer, primary_key=True) name = Column(String(30)) obj_id = Column(Integer, ForeignKey('object.id')) obj = relationship('Object', backref='x_list') def test_join_prop_to_string(self): A, B, X = self.classes("A", "B", "X") s = Session() q = s.query(B).\ join(B.a_list, 'x_list').filter(X.name == 'x1') self.assert_compile( q, "SELECT object.type AS object_type, b.id AS b_id, " "object.id AS object_id, object.name AS object_name " "FROM object JOIN b ON object.id = b.id " "JOIN a_b_association AS a_b_association_1 " "ON b.id = a_b_association_1.b_id " "JOIN (" "object AS 
object_1 " "JOIN a AS a_1 ON object_1.id = a_1.id" ") ON a_1.id = a_b_association_1.a_id " "JOIN x ON object_1.id = x.obj_id WHERE x.name = :name_1" ) def test_join_prop_to_prop(self): A, B, X = self.classes("A", "B", "X") s = Session() # B -> A, but both are Object. So when we say A.x_list, make sure # we pick the correct right side q = s.query(B).\ join(B.a_list, A.x_list).filter(X.name == 'x1') self.assert_compile( q, "SELECT object.type AS object_type, b.id AS b_id, " "object.id AS object_id, object.name AS object_name " "FROM object JOIN b ON object.id = b.id " "JOIN a_b_association AS a_b_association_1 " "ON b.id = a_b_association_1.b_id " "JOIN (" "object AS object_1 " "JOIN a AS a_1 ON object_1.id = a_1.id" ") ON a_1.id = a_b_association_1.a_id " "JOIN x ON object_1.id = x.obj_id WHERE x.name = :name_1" ) class JoinLateralTest(fixtures.MappedTest, AssertsCompiledSQL): __dialect__ = default.DefaultDialect(supports_native_boolean=True) run_setup_bind = None run_setup_mappers = 'once' run_create_tables = None @classmethod def define_tables(cls, metadata): Table('people', metadata, Column('people_id', Integer, primary_key=True), Column('age', Integer), Column('name', String(30))) Table('bookcases', metadata, Column('bookcase_id', Integer, primary_key=True), Column( 'bookcase_owner_id', Integer, ForeignKey('people.people_id')), Column('bookcase_shelves', Integer), Column('bookcase_width', Integer)) Table('books', metadata, Column('book_id', Integer, primary_key=True), Column( 'bookcase_id', Integer, ForeignKey('bookcases.bookcase_id')), Column('book_owner_id', Integer, ForeignKey('people.people_id')), Column('book_weight', Integer)) @classmethod def setup_classes(cls): people, bookcases, books = cls.tables('people', 'bookcases', 'books') class Person(cls.Comparable): pass class Bookcase(cls.Comparable): pass class Book(cls.Comparable): pass mapper(Person, people) mapper(Bookcase, bookcases, properties={ 'owner': relationship(Person), 'books': relationship(Book) }) mapper(Book, books) def test_select_subquery(self): Person, Book = self.classes("Person", "Book") s = Session() subq = s.query(Book.book_id).correlate(Person).filter( Person.people_id == Book.book_owner_id ).subquery().lateral() stmt = s.query(Person, subq.c.book_id).join( subq, true() ) self.assert_compile( stmt, "SELECT people.people_id AS people_people_id, " "people.age AS people_age, people.name AS people_name, " "anon_1.book_id AS anon_1_book_id " "FROM people JOIN LATERAL " "(SELECT books.book_id AS book_id FROM books " "WHERE people.people_id = books.book_owner_id) AS anon_1 ON true" ) # sef == select_entity_from def test_select_subquery_sef_implicit_correlate(self): Person, Book = self.classes("Person", "Book") s = Session() stmt = s.query(Person).subquery() subq = s.query(Book.book_id).filter( Person.people_id == Book.book_owner_id ).subquery().lateral() stmt = s.query(Person, subq.c.book_id).select_entity_from(stmt).join( subq, true() ) self.assert_compile( stmt, "SELECT anon_1.people_id AS anon_1_people_id, " "anon_1.age AS anon_1_age, anon_1.name AS anon_1_name, " "anon_2.book_id AS anon_2_book_id " "FROM " "(SELECT people.people_id AS people_id, people.age AS age, " "people.name AS name FROM people) AS anon_1 " "JOIN LATERAL " "(SELECT books.book_id AS book_id FROM books " "WHERE anon_1.people_id = books.book_owner_id) AS anon_2 ON true" ) def test_select_subquery_sef_implicit_correlate_coreonly(self): Person, Book = self.classes("Person", "Book") s = Session() stmt = s.query(Person).subquery() subq = 
select([Book.book_id]).where( Person.people_id == Book.book_owner_id ).lateral() stmt = s.query(Person, subq.c.book_id).select_entity_from(stmt).join( subq, true() ) self.assert_compile( stmt, "SELECT anon_1.people_id AS anon_1_people_id, " "anon_1.age AS anon_1_age, anon_1.name AS anon_1_name, " "anon_2.book_id AS anon_2_book_id " "FROM " "(SELECT people.people_id AS people_id, people.age AS age, " "people.name AS name FROM people) AS anon_1 " "JOIN LATERAL " "(SELECT books.book_id AS book_id FROM books " "WHERE anon_1.people_id = books.book_owner_id) AS anon_2 ON true" ) def test_select_subquery_sef_explicit_correlate_coreonly(self): Person, Book = self.classes("Person", "Book") s = Session() stmt = s.query(Person).subquery() subq = select([Book.book_id]).correlate(Person).where( Person.people_id == Book.book_owner_id ).lateral() stmt = s.query(Person, subq.c.book_id).select_entity_from(stmt).join( subq, true() ) self.assert_compile( stmt, "SELECT anon_1.people_id AS anon_1_people_id, " "anon_1.age AS anon_1_age, anon_1.name AS anon_1_name, " "anon_2.book_id AS anon_2_book_id " "FROM " "(SELECT people.people_id AS people_id, people.age AS age, " "people.name AS name FROM people) AS anon_1 " "JOIN LATERAL " "(SELECT books.book_id AS book_id FROM books " "WHERE anon_1.people_id = books.book_owner_id) AS anon_2 ON true" ) def test_select_subquery_sef_explicit_correlate(self): Person, Book = self.classes("Person", "Book") s = Session() stmt = s.query(Person).subquery() subq = s.query(Book.book_id).correlate(Person).filter( Person.people_id == Book.book_owner_id ).subquery().lateral() stmt = s.query(Person, subq.c.book_id).select_entity_from(stmt).join( subq, true() ) self.assert_compile( stmt, "SELECT anon_1.people_id AS anon_1_people_id, " "anon_1.age AS anon_1_age, anon_1.name AS anon_1_name, " "anon_2.book_id AS anon_2_book_id " "FROM " "(SELECT people.people_id AS people_id, people.age AS age, " "people.name AS name FROM people) AS anon_1 " "JOIN LATERAL " "(SELECT books.book_id AS book_id FROM books " "WHERE anon_1.people_id = books.book_owner_id) AS anon_2 ON true" ) def test_from_function(self): Bookcase = self.classes.Bookcase s = Session() srf = lateral(func.generate_series(1, Bookcase.bookcase_shelves)) self.assert_compile( s.query(Bookcase).join(srf, true()), "SELECT bookcases.bookcase_id AS bookcases_bookcase_id, " "bookcases.bookcase_owner_id AS bookcases_bookcase_owner_id, " "bookcases.bookcase_shelves AS bookcases_bookcase_shelves, " "bookcases.bookcase_width AS bookcases_bookcase_width " "FROM bookcases JOIN " "LATERAL generate_series(:generate_series_1, " "bookcases.bookcase_shelves) AS anon_1 ON true" ) def test_from_function_select_entity_from(self): Bookcase = self.classes.Bookcase s = Session() subq = s.query(Bookcase).subquery() srf = lateral(func.generate_series(1, Bookcase.bookcase_shelves)) self.assert_compile( s.query(Bookcase).select_entity_from(subq).join(srf, true()), "SELECT anon_1.bookcase_id AS anon_1_bookcase_id, " "anon_1.bookcase_owner_id AS anon_1_bookcase_owner_id, " "anon_1.bookcase_shelves AS anon_1_bookcase_shelves, " "anon_1.bookcase_width AS anon_1_bookcase_width " "FROM (SELECT bookcases.bookcase_id AS bookcase_id, " "bookcases.bookcase_owner_id AS bookcase_owner_id, " "bookcases.bookcase_shelves AS bookcase_shelves, " "bookcases.bookcase_width AS bookcase_width FROM bookcases) " "AS anon_1 " "JOIN LATERAL " "generate_series(:generate_series_1, anon_1.bookcase_shelves) " "AS anon_2 ON true" )
self.children.append(node)
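# --- illustrative sketch, not part of the original test suite ---
# A minimal, standalone example of the aliased() self-referential join
# pattern that the Node tests above exercise. The model, engine URL and
# filter value here are hypothetical; assumes a SQLAlchemy 1.x-era API
# with an in-memory SQLite database.
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, aliased, relationship

Base = declarative_base()

class Node(Base):
    __tablename__ = 'nodes'
    id = Column(Integer, primary_key=True)
    parent_id = Column(Integer, ForeignKey('nodes.id'))
    data = Column(String(30))
    # self-referential one-to-many: parent -> children
    children = relationship('Node')

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
sess = Session(engine)

# join the entity to an alias of itself; the filter criteria must be
# expressed against the alias, not against Node
child = aliased(Node)
q = sess.query(Node).join(child, Node.children).filter(child.data == 'n122')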
time.ts
export const getTodayFirstTimestamp = function(): Date {
    const date = new Date();
    date.setHours(0);
    date.setMinutes(0);
    date.setSeconds(0);
    // zero milliseconds as well so the value is exactly 00:00:00.000
    date.setMilliseconds(0);
    return date;
    date.setHours(23);
    date.setMinutes(59);
    date.setSeconds(59);
    // push milliseconds to the end of the day as well (23:59:59.999)
    date.setMilliseconds(999);
    return date;
};
};

export const getTodayLastTimestamp = function(): Date {
    const date = new Date();
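// --- illustrative sketch, not from the original file ---
// Hypothetical usage of the two helpers above: bounding "today" for a
// membership check. Assumes local-time semantics, as in the helpers.
const dayStart = getTodayFirstTimestamp();
const dayEnd = getTodayLastTimestamp();

const isToday = (d: Date): boolean =>
    d.getTime() >= dayStart.getTime() && d.getTime() <= dayEnd.getTime();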
javascript1.js
// One tick of a four-step progress indicator: mark the current step
// complete and activate the next one. As written it runs once on load;
// re-running it (e.g. from a click handler) advances the stepper.
let step = 'step1';

const step1 = document.getElementById('step1');
const step2 = document.getElementById('step2');
const step3 = document.getElementById('step3');
const step4 = document.getElementById('step4');

if (step === 'step1') {
    step = 'step2';
    console.log(step1);
    step1.classList.remove("is-active2");
    step1.classList.add("is-complete2");
    step2.classList.add("is-active2");
} else if (step === 'step2') {
    step = 'step3';
    step2.classList.remove("is-active2");
    step2.classList.add("is-complete2");
    step3.classList.add("is-active2");
} else if (step === 'step3') {
    step = 'step4';
    step3.classList.remove("is-active2");
    step3.classList.add("is-complete2");
    step4.classList.add("is-active2");
} else if (step === 'step4') {
    step = 'complete';
    step4.classList.remove("is-active2");
    step4.classList.add("is-complete2");
} else if (step === 'complete') {
    step = 'step1';
    step4.classList.remove("is-complete2");
    step3.classList.remove("is-complete2");
    step2.classList.remove("is-complete2");
    step1.classList.remove("is-complete2");
    step1.classList.add("is-active2");
}
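// --- illustrative sketch, not from the original file ---
// The branch chain above repeats the same remove/add choreography per
// step; a hypothetical data-driven version, assuming the same
// #step1..#step4 elements plus a #next button, keeps the position in a
// single index instead of a string state.
const stepEls = ['step1', 'step2', 'step3', 'step4']
    .map(id => document.getElementById(id));
let current = 0;

document.getElementById('next').addEventListener('click', () => {
    if (current < stepEls.length) {
        // mark the current step complete and activate the next one
        stepEls[current].classList.remove('is-active2');
        stepEls[current].classList.add('is-complete2');
        current += 1;
        if (current < stepEls.length) {
            stepEls[current].classList.add('is-active2');
        }
    } else {
        // wrap around: clear completion marks and restart at step 1
        stepEls.forEach(el => el.classList.remove('is-complete2'));
        current = 0;
        stepEls[0].classList.add('is-active2');
    }
});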
fs_test.go
package fs_test import ( "testing" . "github.com/alexdreptu/sysinfo/fs" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "golang.org/x/sys/unix" ) // test values const ( bsize int64 = 4096 blocks uint64 = 14261319 bavail uint64 = 1313231 bfree uint64 = 2618397 ) const ( totalSpaceInKibibytes float64 = 57045276.000000 totalSpaceInMebibytes float64 = 55708.277344 totalSpaceInGibibytes float64 = 54.402615 freeSpaceInKibibytes float64 = 5252924.000000 freeSpaceInMebibytes float64 = 5129.808594 freeSpaceInGibibytes float64 = 5.009579 usedSpaceInKibibytes float64 = 46571688.000000 usedSpaceInMebibytes float64 = 45480.164062 usedSpaceInGibibytes float64 = 44.414223 ) const delta = 0.01 // mock function func statfs(path string, buf *unix.Statfs_t) error {
buf.Blocks = blocks buf.Bavail = bavail buf.Bfree = bfree return nil } func TestFSNew(t *testing.T) { _, err := New("") require.Error(t, err) _, err = New("/") require.NoError(t, err) } func TestFS(t *testing.T) { fs := &FS{Path: ""} require.Error(t, fs.Fetch()) fs = &FS{Path: "/"} fs.F = statfs require.NoError(t, fs.Fetch()) assert.InDelta(t, totalSpaceInKibibytes, fs.TotalSpaceInKibibytes(), delta) assert.InDelta(t, totalSpaceInMebibytes, fs.TotalSpaceInMebibytes(), delta) assert.InDelta(t, totalSpaceInGibibytes, fs.TotalSpaceInGibibytes(), delta) assert.InDelta(t, freeSpaceInKibibytes, fs.FreeSpaceInKibibytes(), delta) assert.InDelta(t, freeSpaceInMebibytes, fs.FreeSpaceInMebibytes(), delta) assert.InDelta(t, freeSpaceInGibibytes, fs.FreeSpaceInGibibytes(), delta) assert.InDelta(t, usedSpaceInKibibytes, fs.UsedSpaceInKibibytes(), delta) assert.InDelta(t, usedSpaceInMebibytes, fs.UsedSpaceInMebibytes(), delta) assert.InDelta(t, usedSpaceInGibibytes, fs.UsedSpaceInGibibytes(), delta) }
buf.Bsize = bsize
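// --- illustrative sketch, not from the original file ---
// The expected constants in the test above follow from conventional
// statfs arithmetic (an assumption about the fs package, but the
// numbers reproduce exactly): total = Blocks*Bsize,
// free = Bavail*Bsize, used = (Blocks-Bfree)*Bsize, each divided by
// 1024 for KiB.
package main

import "fmt"

func main() {
	const bsize int64 = 4096
	const blocks, bavail, bfree uint64 = 14261319, 1313231, 2618397

	total := float64(blocks*uint64(bsize)) / 1024          // 57045276.0 KiB
	free := float64(bavail*uint64(bsize)) / 1024           // 5252924.0 KiB
	used := float64((blocks-bfree)*uint64(bsize)) / 1024   // 46571688.0 KiB
	fmt.Println(total, free, used)
}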
settings.rs
use std::str::FromStr; use clap::ArgMatches; use config::{Config, ConfigError, File, FileFormat, Source}; use crossbeam::channel::{bounded, unbounded, Receiver, Sender}; use serde::Deserialize; use tantivy::merge_policy::*; pub const VERSION: &str = env!("CARGO_PKG_VERSION"); pub const HEADER: &str = r#" ______ __ _ ____ __ /_ __/__ ___ / / (_) / __/__ ___ _________/ / / / / _ \(_-</ _ \/ / _\ \/ -_) _ `/ __/ __/ _ \ /_/ \___/___/_//_/_/ /___/\__/\_,_/_/ \__/_//_/ Such Relevance, Much Index, Many Search, Wow "#; pub const RPC_HEADER: &str = r#" ______ __ _ ___ ___ _____ /_ __/__ ___ / / (_) / _ \/ _ \/ ___/ / / / _ \(_-</ _ \/ / / , _/ ___/ /__ /_/ \___/___/_//_/_/ /_/|_/_/ \___/ Such coordination, Much consensus, Many RPC, Wow "#; #[derive(PartialEq)] pub enum MergePolicyType { Log, NoMerge, } #[derive(Deserialize, Clone, Debug)] pub struct ConfigMergePolicy { kind: String, min_merge_size: Option<usize>, min_layer_size: Option<u32>, level_log_size: Option<f64>, } impl ConfigMergePolicy { pub fn get_kind(&self) -> MergePolicyType { match self.kind.to_ascii_lowercase().as_ref() { "log" => MergePolicyType::Log, "nomerge" => MergePolicyType::NoMerge, _ => panic!("Unknown Merge Typed Defined"), } } } #[derive(Deserialize, Clone, Debug)] pub struct Experimental { #[serde(default = "Settings::default_consul_addr")] pub consul_addr: String, #[serde(default = "Settings::default_cluster_name")] pub cluster_name: String, #[serde(default = "Settings::default_master")] pub master: bool, #[serde(default = "Settings::default_nodes")] pub nodes: Vec<String>, } impl Default for Experimental { fn default() -> Self { Self { consul_addr: Settings::default_consul_addr(), cluster_name: Settings::default_cluster_name(), master: Settings::default_master(), nodes: Settings::default_nodes(), } } } #[derive(Deserialize, Clone, Debug)] pub struct Settings { #[serde(default = "Settings::default_host")] pub host: String, #[serde(default = "Settings::default_port")] pub port: u16, #[serde(default = "Settings::default_path")] pub path: String, #[serde(default = "Settings::default_place_addr")] pub place_addr: String, #[serde(default = "Settings::default_level")] pub log_level: String, #[serde(default = "Settings::default_writer_memory")] pub writer_memory: usize, #[serde(default = "Settings::default_json_parsing_threads")] pub json_parsing_threads: usize, #[serde(default = "Settings::default_auto_commit_duration")] pub auto_commit_duration: u64, #[serde(default = "Settings::default_bulk_buffer_size")] pub bulk_buffer_size: usize, #[serde(default = "Settings::default_merge_policy")] pub merge_policy: ConfigMergePolicy, #[serde(default = "Settings::default_experimental")] pub experimental: bool, #[serde(default = "Experimental::default")] pub experimental_features: Experimental, } impl Default for Settings { fn default() -> Self { Self { host: Settings::default_host(), port: Settings::default_port(), path: Settings::default_path(), place_addr: Settings::default_place_addr(), log_level: Settings::default_level(), writer_memory: Settings::default_writer_memory(), json_parsing_threads: Settings::default_json_parsing_threads(), auto_commit_duration: Settings::default_auto_commit_duration(), bulk_buffer_size: Settings::default_bulk_buffer_size(), merge_policy: Settings::default_merge_policy(), experimental: Settings::default_experimental(), experimental_features: Experimental::default(), } } } impl FromStr for Settings { type Err = ConfigError; fn from_str(cfg: &str) -> Result<Self, ConfigError> { 
Self::from_config(File::from_str(cfg, FileFormat::Toml)) } } impl Settings { pub fn new(path: &str) -> Result<Self, ConfigError> { Self::from_config(File::with_name(path)) } pub fn from_args(args: &ArgMatches) -> Self { let exper = Experimental { consul_addr: args.value_of("consul-addr").unwrap().to_string(), cluster_name: args.value_of("cluster-name").unwrap().to_string(), master: args.value_of("master").unwrap().parse().unwrap(), nodes: args.values_of("nodes").unwrap().map(ToString::to_string).collect(), }; Self { host: args.value_of("host").unwrap().to_string(), port: args.value_of("port").unwrap().parse().expect("Invalid port given."), path: args.value_of("path").unwrap().to_string(), log_level: args.value_of("level").unwrap().to_string(), experimental: args.is_present("experimental"), experimental_features: exper, ..Default::default() } } pub fn from_config<T: Source + Send + Sync + 'static>(c: T) -> Result<Self, ConfigError> { let mut cfg = Config::new(); match cfg.merge(c) { Ok(_) => {} Err(e) => panic!("Problem with config file: {}", e), }; cfg.try_into() } pub fn default_pretty() -> bool { false } pub fn default_result_limit() -> usize { 100 } pub fn default_host() -> String { "0.0.0.0".to_string() } pub fn default_path() -> String { "data/".to_string() } pub fn default_port() -> u16 { 8080 } pub fn default_place_addr() -> String { "0.0.0.0:8082".to_string() } pub fn default_level() -> String { "info".to_string() } pub fn default_writer_memory() -> usize { 200_000_000 } pub fn default_json_parsing_threads() -> usize { 4 } pub fn default_bulk_buffer_size() -> usize { 10000 } pub fn default_auto_commit_duration() -> u64 { 10 } pub fn default_merge_policy() -> ConfigMergePolicy { ConfigMergePolicy { kind: "log".to_string(), min_merge_size: None, min_layer_size: None, level_log_size: None, } } pub fn default_consul_addr() -> String { "127.0.0.1:8500".to_string() } pub fn default_cluster_name() -> String { "kitsune".to_string() } pub fn default_master() -> bool { false } pub fn default_nodes() -> Vec<String> { Vec::new() } pub fn default_experimental() -> bool { false } pub fn get_channel<T>(&self) -> (Sender<T>, Receiver<T>) { if self.bulk_buffer_size == 0 { unbounded::<T>() } else { bounded::<T>(self.bulk_buffer_size) } } pub fn get_nodes(&self) -> Vec<String> { self.experimental_features.nodes.clone() } pub fn get_merge_policy(&self) -> Box<MergePolicy> { match self.merge_policy.get_kind() { MergePolicyType::Log => { let mut mp = LogMergePolicy::default(); if let Some(v) = self.merge_policy.level_log_size { mp.set_level_log_size(v); } if let Some(v) = self.merge_policy.min_layer_size { mp.set_min_layer_size(v); } if let Some(v) = self.merge_policy.min_merge_size { mp.set_min_merge_size(v); } Box::new(mp) } MergePolicyType::NoMerge => Box::new(NoMergePolicy::default()), } } } #[cfg(test)] mod tests { use super::*; #[test] fn valid_default_config() { let default = Settings::from_str("").unwrap(); assert_eq!(default.host, "0.0.0.0"); assert_eq!(default.port, 8080); assert_eq!(default.path, "data/"); assert_eq!(default.writer_memory, 200_000_000); assert_eq!(default.log_level, "info"); assert_eq!(default.json_parsing_threads, 4); assert_eq!(default.bulk_buffer_size, 10000); assert_eq!(default.merge_policy.kind, "log"); assert_eq!(default.merge_policy.level_log_size, None); assert_eq!(default.merge_policy.min_layer_size, None); assert_eq!(default.merge_policy.min_merge_size, None); assert_eq!(default.experimental, false); assert_eq!(default.experimental_features.master, false); } #[test] 
fn valid_merge_policy() { let cfg = r#" [merge_policy] kind = "log" level_log_size = 10.5 min_layer_size = 20 min_merge_size = 30"#; let config = Settings::from_str(cfg).unwrap(); assert_eq!(config.merge_policy.level_log_size.unwrap(), 10.5); assert_eq!(config.merge_policy.min_layer_size.unwrap(), 20); assert_eq!(config.merge_policy.min_merge_size.unwrap(), 30); } #[test] fn valid_no_merge_policy() { let cfg = r#" [merge_policy] kind = "nomerge""#; let config = Settings::from_str(cfg).unwrap(); assert!(config.merge_policy.get_kind() == MergePolicyType::NoMerge); assert_eq!(config.merge_policy.kind, "nomerge"); assert_eq!(config.merge_policy.level_log_size, None); assert_eq!(config.merge_policy.min_layer_size, None); assert_eq!(config.merge_policy.min_merge_size, None); } #[test] #[should_panic] fn
() { Settings::new("asdf/casdf").unwrap(); } #[test] #[should_panic] fn bad_merge_type() { let cfg = r#" [merge_policy] kind = "asdf1234""#; let config = Settings::from_str(cfg).unwrap(); config.get_merge_policy(); } }
bad_config_file
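A minimal usage sketch for the Settings type above; the TOML keys and method names come from settings.rs, while the concrete values and in-crate context are illustrative, not part of the original file:

use std::str::FromStr;

fn main() {
    // Every field omitted here falls back to the serde default declared on Settings.
    let cfg = r#"
host = "127.0.0.1"
port = 9090

[merge_policy]
kind = "nomerge"
"#;
    let settings = Settings::from_str(cfg).expect("config should parse");
    assert_eq!(settings.host, "127.0.0.1");
    assert_eq!(settings.port, 9090);
    // Resolves to a boxed NoMergePolicy via get_merge_policy().
    let _policy = settings.get_merge_policy();
}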
lock_helper.go
package util import ( "fmt" "github.com/golang/glog" goutil "github.com/turbonomic/kubeturbo/pkg/util" "time" ) // a lock for bare pods to avoid concurrent contention of actions on the same pod. // details of its purpose can be found at: https://github.com/turbonomic/kubeturbo/issues/104 type LockHelper struct { //for the expirationMap emap *ExpirationMap key string version int64 callback expireCallBack //stop Renewing stop chan struct{} isRenewing bool } func
(podkey string, emap *ExpirationMap) (*LockHelper, error) { p := &LockHelper{ key: podkey, emap: emap, stop: make(chan struct{}), isRenewing: false, } if emap.GetTTL() < time.Second*2 { err := fmt.Errorf("TTL of concurrent control map should be larger than 2 seconds") glog.Error(err) return nil, err } return p, nil } func (h *LockHelper) Setkey(key string) { h.key = key } // the input of this callback will be h.key. func (h *LockHelper) AcquireLock(callback expireCallBack) bool { if callback == nil { callback = func(obj interface{}) { h.lockCallBack() } } version, flag := h.emap.Add(h.key, h.key, callback) if !flag { glog.V(3).Infof("Failed to get lock for [%s]", h.key) return false } glog.V(4).Infof("Got lock for [%s]", h.key) h.version = version return true } func (h *LockHelper) Trylock(timeout, interval time.Duration) error { err := goutil.RetryDuring(1000, timeout, interval, func() error { if !h.AcquireLock(nil) { return fmt.Errorf("TryLater") } return nil }) if err != nil { return err } return nil } func (h *LockHelper) ReleaseLock() { h.emap.Del(h.key, h.version) h.StopRenew() glog.V(4).Infof("Released lock for [%s]", h.key) } func (h *LockHelper) lockCallBack() { // do nothing } func (h *LockHelper) RenewLock() bool { return h.emap.Touch(h.key, h.version) } func (h *LockHelper) KeepRenewLock() { ttl := h.emap.GetTTL() interval := ttl / 2 if interval < time.Second { interval = time.Second } h.isRenewing = true go func() { for { select { case <-h.stop: glog.V(3).Infof("LockHelper stops renewing lock.") return default: if !h.RenewLock() { return } time.Sleep(interval) glog.V(4).Infof("LockHelper renewed lock.") } } }() } func (h *LockHelper) StopRenew() { if h.isRenewing { h.isRenewing = false close(h.stop) } }
NewLockHelper
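The renewal loop in KeepRenewLock above is a general lease pattern; here is an illustrative re-sketch in Rust (all names are hypothetical stand-ins, and the `renew` closure plays the role of ExpirationMap.Touch):

use std::sync::mpsc::{Receiver, TryRecvError};
use std::thread;
use std::time::Duration;

// Renew a lease every ttl/2 (at least one second) until a stop signal
// arrives, the sender side is dropped, or a renewal fails.
fn keep_renewing(
    ttl: Duration,
    stop: Receiver<()>,
    renew: impl Fn() -> bool + Send + 'static,
) -> thread::JoinHandle<()> {
    let interval = std::cmp::max(ttl / 2, Duration::from_secs(1));
    thread::spawn(move || loop {
        match stop.try_recv() {
            Ok(()) | Err(TryRecvError::Disconnected) => return, // stop requested
            Err(TryRecvError::Empty) => {}
        }
        if !renew() {
            return; // lost the lease; stop renewing
        }
        thread::sleep(interval);
    })
}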
norace_test.go
// +build !race /* * Copyright 2021 ByteDance Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package decoder import ( `testing` `unsafe` `runtime` `github.com/bytedance/sonic/internal/rt` ) var referred = false
func TestStringReferring(t *testing.T) { str := []byte(`{"A":"0","B":"1"}`) sp := *(**byte)(unsafe.Pointer(&str)) println("malloc *byte ", sp) runtime.SetFinalizer(sp, func(sp *byte){ referred = false println("*byte ", sp, " got free") }) runtime.GC() println("first GC") var obj struct{ A string B string } dc := NewDecoder(rt.Mem2Str(str)) dc.CopyString() referred = true if err := dc.Decode(&obj); err != nil { t.Fatal(err) } runtime.GC() println("second GC") if referred { t.Fatal("*byte is being referred") } str2 := []byte(`{"A":"0","B":"1"}`) sp2 := *(**byte)(unsafe.Pointer(&str2)) println("malloc *byte ", sp2) runtime.SetFinalizer(sp2, func(sp *byte){ referred = false println("*byte ", sp, " got free") }) runtime.GC() println("first GC") var obj2 interface{} dc2 := NewDecoder(rt.Mem2Str(str2)) dc2.UseNumber() dc2.CopyString() referred = true if err := dc2.Decode(&obj2); err != nil { t.Fatal(err) } runtime.GC() println("second GC") if referred { t.Fatal("*byte is being referred") } runtime.KeepAlive(&obj) runtime.KeepAlive(&obj2) } func TestDecoderErrorStackOverflower(t *testing.T) { src := `{"a":[]}` N := _MaxStack for i:=0; i<N; i++ { var obj map[string]string err := NewDecoder(src).Decode(&obj) if err == nil { t.Fatal(err) } } }
rintf32.rs
// Copyright Kani Contributors // SPDX-License-Identifier: Apache-2.0 OR MIT // Checks that `rintf32` returns the nearest integer to the argument. The // default rounding mode is rounding half to even, which is described here: // https://en.wikipedia.org/wiki/Rounding#Round_half_to_even // // `rintf32` works like `nearbyintf32`, but it may raise an inexact // floating-point exception which isn't supported in Rust: // https://github.com/rust-lang/rust/issues/10186 // So in practice, `rintf32` and `nearbyintf32` work the same way. #![feature(core_intrinsics)] use std::intrinsics::rintf32; #[kani::proof] fn test_one() { let one = 1.0; let result = unsafe { rintf32(one) }; assert!(result == 1.0); } #[kani::proof] fn test_one_frac() { let one_frac = 1.9; let result = unsafe { rintf32(one_frac) }; assert!(result == 2.0); } #[kani::proof] fn test_half_down()
#[kani::proof] fn test_half_up() { let one_frac = 3.5; let result = unsafe { rintf32(one_frac) }; assert!(result == 4.0); } #[kani::proof] fn test_conc() { let conc = -42.6; let result = unsafe { rintf32(conc) }; assert!(result == -43.0); } #[kani::proof] fn test_conc_sci() { let conc = 5.4e-2; let result = unsafe { rintf32(conc) }; assert!(result == 0.0); } #[kani::proof] fn test_towards_nearest() { let x: f32 = kani::any(); kani::assume(!x.is_nan()); kani::assume(!x.is_infinite()); let result = unsafe { rintf32(x) }; let frac = x.fract().abs(); if x.is_sign_positive() { if frac > 0.5 { assert!(result > x); } else if frac < 0.5 { assert!(result <= x); } else { // This would fail if conversion checks were on let integer = x as i64; if integer % 2 == 0 { assert!(result < x); } else { assert!(result > x); } } } else { if frac > 0.5 { assert!(result < x); } else if frac < 0.5 { assert!(result >= x); } else { // This would fail if conversion checks were on let integer = x as i64; if integer % 2 == 0 { assert!(result > x); } else { assert!(result < x); } } } } #[kani::proof] fn test_diff_half_one() { let x: f32 = kani::any(); kani::assume(!x.is_nan()); kani::assume(!x.is_infinite()); let result = unsafe { rintf32(x) }; let diff = (x - result).abs(); assert!(diff <= 0.5); assert!(diff >= 0.0); }
{ let one_frac = 2.5; let result = unsafe { rintf32(one_frac) }; assert!(result == 2.0); }
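For reference, the same round-half-to-even behavior is reachable without intrinsics; a standalone sketch assuming Rust 1.77+, where f32::round_ties_even is stable:

fn main() {
    assert_eq!(1.9_f32.round_ties_even(), 2.0); // non-ties round to the nearest integer
    assert_eq!(2.5_f32.round_ties_even(), 2.0); // ties round to the even neighbor
    assert_eq!(3.5_f32.round_ties_even(), 4.0);
    assert_eq!((-42.6_f32).round_ties_even(), -43.0);
}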
clusterclient.go
/* Copyright 2018 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package clusterclient import ( "fmt" "io/ioutil" "os" "os/exec" "strings" "time" apiv1 "k8s.io/api/core/v1" apierrors "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" tcmd "k8s.io/client-go/tools/clientcmd" "sigs.k8s.io/cluster-api/cmd/clusterctl/clientcmd" clusterv1 "sigs.k8s.io/cluster-api/pkg/apis/cluster/v1alpha1" "sigs.k8s.io/cluster-api/pkg/client/clientset_generated/clientset" "sigs.k8s.io/cluster-api/pkg/util" "github.com/golang/glog" ) const ( apiServerPort = 443 retryIntervalKubectlApply = 10 * time.Second retryIntervalResourceReady = 10 * time.Second retryIntervalResourceDelete = 10 * time.Second timeoutKubectlApply = 15 * time.Minute timeoutResourceReady = 15 * time.Minute timeoutMachineReady = 30 * time.Minute timeoutResourceDelete = 15 * time.Minute ) // Provides interaction with a cluster type Client interface { GetContextNamespace() string Apply(string) error Delete(string) error WaitForClusterV1alpha1Ready() error GetClusterObjectsInNamespace(string) ([]*clusterv1.Cluster, error) GetClusterObject(string, string) (*clusterv1.Cluster, error) GetMachineDeploymentObjects() ([]*clusterv1.MachineDeployment, error) GetMachineDeploymentObjectsInNamespace(string) ([]*clusterv1.MachineDeployment, error) GetMachineSetObjects() ([]*clusterv1.MachineSet, error) GetMachineSetObjectsInNamespace(string) ([]*clusterv1.MachineSet, error) GetMachineObjects() ([]*clusterv1.Machine, error) GetMachineObjectsInNamespace(ns string) ([]*clusterv1.Machine, error) CreateClusterObject(*clusterv1.Cluster) error CreateMachineDeploymentObjects([]*clusterv1.MachineDeployment, string) error CreateMachineSetObjects([]*clusterv1.MachineSet, string) error CreateMachineObjects([]*clusterv1.Machine, string) error DeleteClusterObjectsInNamespace(string) error DeleteClusterObjects() error DeleteMachineDeploymentObjectsInNamespace(string) error DeleteMachineDeploymentObjects() error DeleteMachineSetObjectsInNamespace(string) error DeleteMachineSetObjects() error DeleteMachineObjectsInNamespace(string) error DeleteMachineObjects() error UpdateClusterObjectEndpoint(string, string, string) error EnsureNamespace(string) error DeleteNamespace(string) error Close() error } type client struct { clientSet clientset.Interface kubeconfigFile string configOverrides tcmd.ConfigOverrides closeFn func() error } // New creates and returns a Client, the kubeconfig argument is expected to be the string representation // of a valid kubeconfig.
func New(kubeconfig string) (*client, error) { f, err := createTempFile(kubeconfig) if err != nil { return nil, err } defer ifErrRemove(&err, f) c, err := NewFromDefaultSearchPath(f, clientcmd.NewConfigOverrides()) if err != nil { return nil, err } c.closeFn = c.removeKubeconfigFile return c, nil } func (c *client) removeKubeconfigFile() error { return os.Remove(c.kubeconfigFile) } func (c *client) EnsureNamespace(namespaceName string) error { clientset, err := clientcmd.NewCoreClientSetForDefaultSearchPath(c.kubeconfigFile, clientcmd.NewConfigOverrides()) if err != nil { return fmt.Errorf("error creating core clientset: %v", err) } namespace := apiv1.Namespace{ ObjectMeta: metav1.ObjectMeta{ Name: namespaceName, }, } _, err = clientset.CoreV1().Namespaces().Create(&namespace) if err != nil && !apierrors.IsAlreadyExists(err) { return err } return nil } func (c *client) DeleteNamespace(namespaceName string) error { if namespaceName == apiv1.NamespaceDefault { return nil } clientset, err := clientcmd.NewCoreClientSetForDefaultSearchPath(c.kubeconfigFile, clientcmd.NewConfigOverrides()) if err != nil { return fmt.Errorf("error creating core clientset: %v", err) } err = clientset.CoreV1().Namespaces().Delete(namespaceName, &metav1.DeleteOptions{}) if err != nil && !apierrors.IsNotFound(err) { return err } return nil } // NewFromDefaultSearchPath creates and returns a Client. The kubeconfigFile argument is expected to be the path to a // valid kubeconfig file. func NewFromDefaultSearchPath(kubeconfigFile string, overrides tcmd.ConfigOverrides) (*client, error) { c, err := clientcmd.NewClusterApiClientForDefaultSearchPath(kubeconfigFile, overrides) if err != nil { return nil, err } return &client{ kubeconfigFile: kubeconfigFile, clientSet: c, configOverrides: overrides, }, nil } // Close frees resources associated with the cluster client func (c *client) Close() error { if c.closeFn != nil { return c.closeFn() } return nil } func (c *client) Delete(manifest string) error { return c.kubectlDelete(manifest) } func (c *client) Apply(manifest string) error { return c.waitForKubectlApply(manifest) } func (c *client) GetContextNamespace() string { if c.configOverrides.Context.Namespace == "" { return apiv1.NamespaceDefault } return c.configOverrides.Context.Namespace } func (c *client) GetClusterObject(name, ns string) (*clusterv1.Cluster, error) { clustersInNamespace, err := c.GetClusterObjectsInNamespace(ns) if err != nil { return nil, err } var cluster *clusterv1.Cluster for _, nc := range clustersInNamespace { if nc.Name == name { cluster = nc break } } return cluster, nil } func (c *client) GetClusterObjectsInNamespace(namespace string) ([]*clusterv1.Cluster, error) { clusters := []*clusterv1.Cluster{} clusterlist, err := c.clientSet.ClusterV1alpha1().Clusters(namespace).List(metav1.ListOptions{}) if err != nil { return nil, fmt.Errorf("error listing cluster objects in namespace %q: %v", namespace, err) } for i := 0; i < len(clusterlist.Items); i++ { clusters = append(clusters, &clusterlist.Items[i]) } return clusters, nil } func (c *client) GetMachineDeploymentObjectsInNamespace(namespace string) ([]*clusterv1.MachineDeployment, error) { machineDeploymentList, err := c.clientSet.ClusterV1alpha1().MachineDeployments(namespace).List(metav1.ListOptions{}) if err != nil { return nil, fmt.Errorf("error listing machine deployment objects in namespace %q: %v", namespace, err) } var machineDeployments []*clusterv1.MachineDeployment for i := 0; i < len(machineDeploymentList.Items); i++ { 
machineDeployments = append(machineDeployments, &machineDeploymentList.Items[i]) } return machineDeployments, nil } // Deprecated API. Please do not extend or use. func (c *client) GetMachineDeploymentObjects() ([]*clusterv1.MachineDeployment, error) { glog.V(2).Info("GetMachineDeploymentObjects API is deprecated, use GetMachineDeploymentObjectsInNamespace instead") return c.GetMachineDeploymentObjectsInNamespace(apiv1.NamespaceDefault) } func (c *client) GetMachineSetObjectsInNamespace(namespace string) ([]*clusterv1.MachineSet, error) { machineSetList, err := c.clientSet.ClusterV1alpha1().MachineSets(namespace).List(metav1.ListOptions{}) if err != nil { return nil, fmt.Errorf("error listing machine set objects in namespace %q: %v", namespace, err) } var machineSets []*clusterv1.MachineSet for i := 0; i < len(machineSetList.Items); i++ { machineSets = append(machineSets, &machineSetList.Items[i]) } return machineSets, nil } // Deprecated API. Please do not extend or use. func (c *client) GetMachineSetObjects() ([]*clusterv1.MachineSet, error) { glog.V(2).Info("GetMachineSetObjects API is deprecated, use GetMachineSetObjectsInNamespace instead") return c.GetMachineSetObjectsInNamespace(apiv1.NamespaceDefault) } func (c *client) GetMachineObjectsInNamespace(namespace string) ([]*clusterv1.Machine, error) { machines := []*clusterv1.Machine{} machineslist, err := c.clientSet.ClusterV1alpha1().Machines(namespace).List(metav1.ListOptions{}) if err != nil { return nil, fmt.Errorf("error listing machine objects in namespace %q: %v", namespace, err) } for i := 0; i < len(machineslist.Items); i++ { machines = append(machines, &machineslist.Items[i]) } return machines, nil } // Deprecated API. Please do not extend or use. func (c *client) GetMachineObjects() ([]*clusterv1.Machine, error) { glog.V(2).Info("GetMachineObjects API is deprecated, use GetMachineObjectsInNamespace instead") return c.GetMachineObjectsInNamespace(apiv1.NamespaceDefault) } func (c *client) CreateClusterObject(cluster *clusterv1.Cluster) error { namespace := c.GetContextNamespace() if cluster.Namespace != "" { namespace = cluster.Namespace } _, err := c.clientSet.ClusterV1alpha1().Clusters(namespace).Create(cluster) if err != nil { return fmt.Errorf("error creating cluster in namespace %v: %v", namespace, err) } return err } func (c *client) CreateMachineDeploymentObjects(deployments []*clusterv1.MachineDeployment, namespace string) error { for _, deploy := range deployments { // TODO: Run in parallel https://github.com/kubernetes-sigs/cluster-api/issues/258 _, err := c.clientSet.ClusterV1alpha1().MachineDeployments(namespace).Create(deploy) if err != nil { return fmt.Errorf("error creating a machine deployment object in namespace %q: %v", namespace, err) } } return nil } func (c *client) CreateMachineSetObjects(machineSets []*clusterv1.MachineSet, namespace string) error { for _, ms := range machineSets { // TODO: Run in parallel https://github.com/kubernetes-sigs/cluster-api/issues/258 _, err := c.clientSet.ClusterV1alpha1().MachineSets(namespace).Create(ms) if err != nil { return fmt.Errorf("error creating a machine set object in namespace %q: %v", namespace, err) } } return nil } func (c *client) CreateMachineObjects(machines []*clusterv1.Machine, namespace string) error { for _, machine := range machines { // TODO: Run in parallel https://github.com/kubernetes-sigs/cluster-api/issues/258 createdMachine, err := c.clientSet.ClusterV1alpha1().Machines(namespace).Create(machine) if err != nil { return fmt.Errorf("error 
creating a machine object in namespace %v: %v", namespace, err) } err = waitForMachineReady(c.clientSet, createdMachine) if err != nil { return err } } return nil } // Deprecated API. Please do not extend or use. func (c *client) DeleteClusterObjects() error { glog.V(2).Info("DeleteClusterObjects API is deprecated, use DeleteClusterObjectsInNamespace instead") return c.DeleteClusterObjectsInNamespace(apiv1.NamespaceDefault) } func (c *client) DeleteClusterObjectsInNamespace(namespace string) error { err := c.clientSet.ClusterV1alpha1().Clusters(namespace).DeleteCollection(newDeleteOptions(), metav1.ListOptions{}) if err != nil { return fmt.Errorf("error deleting cluster objects in namespace %q: %v", namespace, err) } err = c.waitForClusterDelete(namespace) if err != nil { return fmt.Errorf("error waiting for cluster(s) deletion to complete in namespace %q: %v", namespace, err) } return nil } // Deprecated API. Please do not extend or use. func (c *client) DeleteMachineDeploymentObjects() error { glog.V(2).Info("DeleteMachineDeploymentObjects API is deprecated, use DeleteMachineDeploymentObjectsInNamespace instead") return c.DeleteMachineDeploymentObjectsInNamespace(apiv1.NamespaceDefault) } func (c *client) DeleteMachineDeploymentObjectsInNamespace(namespace string) error { err := c.clientSet.ClusterV1alpha1().MachineDeployments(namespace).DeleteCollection(newDeleteOptions(), metav1.ListOptions{}) if err != nil { return fmt.Errorf("error deleting machine deployment objects in namespace %q: %v", namespace, err) } err = c.waitForMachineDeploymentsDelete(namespace) if err != nil { return fmt.Errorf("error waiting for machine deployment(s) deletion to complete in namespace %q: %v", namespace, err) } return nil } // Deprecated API. Please do not extend or use. func (c *client) DeleteMachineSetObjects() error { glog.V(2).Info("DeleteMachineSetObjects API is deprecated, use DeleteMachineSetObjectsInNamespace instead") return c.DeleteMachineSetObjectsInNamespace(apiv1.NamespaceDefault) } func (c *client) DeleteMachineSetObjectsInNamespace(namespace string) error { err := c.clientSet.ClusterV1alpha1().MachineSets(namespace).DeleteCollection(newDeleteOptions(), metav1.ListOptions{}) if err != nil { return fmt.Errorf("error deleting machine set objects in namespace %q: %v", namespace, err) } err = c.waitForMachineSetsDelete(namespace) if err != nil { return fmt.Errorf("error waiting for machine set(s) deletion to complete in namespace %q: %v", namespace, err) } return nil } // Deprecated API. Please do not extend or use. 
func (c *client) DeleteMachineObjects() error { glog.V(2).Info("DeleteMachineObjects API is deprecated, use DeleteMachineObjectsInNamespace instead") return c.DeleteMachineObjectsInNamespace(apiv1.NamespaceDefault) } func (c *client) DeleteMachineObjectsInNamespace(namespace string) error { err := c.clientSet.ClusterV1alpha1().Machines(namespace).DeleteCollection(newDeleteOptions(), metav1.ListOptions{}) if err != nil { return fmt.Errorf("error deleting machine objects in namespace %q: %v", namespace, err) } err = c.waitForMachinesDelete(namespace) if err != nil { return fmt.Errorf("error waiting for machine(s) deletion to complete in namespace %q: %v", namespace, err) } return nil } func newDeleteOptions() *metav1.DeleteOptions { propagationPolicy := metav1.DeletePropagationForeground return &metav1.DeleteOptions{ PropagationPolicy: &propagationPolicy, } } // TODO: Test this function func (c *client) UpdateClusterObjectEndpoint(masterIP, clusterName, namespace string) error { cluster, err := c.GetClusterObject(clusterName, namespace) if err != nil { return err } cluster.Status.APIEndpoints = append(cluster.Status.APIEndpoints, clusterv1.APIEndpoint{ Host: masterIP, Port: apiServerPort, }) _, err = c.clientSet.ClusterV1alpha1().Clusters(namespace).UpdateStatus(cluster) return err } func (c *client) WaitForClusterV1alpha1Ready() error { return waitForClusterResourceReady(c.clientSet) } func (c *client) waitForClusterDelete(namespace string) error { return util.PollImmediate(retryIntervalResourceDelete, timeoutResourceDelete, func() (bool, error) { glog.V(2).Infof("Waiting for cluster objects to be deleted...") response, err := c.clientSet.ClusterV1alpha1().Clusters(namespace).List(metav1.ListOptions{}) if err != nil { return false, nil } if len(response.Items) > 0 { return false, nil } return true, nil }) } func (c *client) waitForMachineDeploymentsDelete(namespace string) error { return util.PollImmediate(retryIntervalResourceDelete, timeoutResourceDelete, func() (bool, error) { glog.V(2).Infof("Waiting for machine deployment objects to be deleted...") response, err := c.clientSet.ClusterV1alpha1().MachineDeployments(namespace).List(metav1.ListOptions{}) if err != nil { return false, nil } if len(response.Items) > 0 { return false, nil } return true, nil }) } func (c *client) waitForMachineSetsDelete(namespace string) error { return util.PollImmediate(retryIntervalResourceDelete, timeoutResourceDelete, func() (bool, error) { glog.V(2).Infof("Waiting for machine set objects to be deleted...") response, err := c.clientSet.ClusterV1alpha1().MachineSets(namespace).List(metav1.ListOptions{}) if err != nil { return false, nil } if len(response.Items) > 0 { return false, nil } return true, nil }) } func (c *client) waitForMachinesDelete(namespace string) error { return util.PollImmediate(retryIntervalResourceDelete, timeoutResourceDelete, func() (bool, error) { glog.V(2).Infof("Waiting for machine objects to be deleted...") response, err := c.clientSet.ClusterV1alpha1().Machines(namespace).List(metav1.ListOptions{}) if err != nil { return false, nil } if len(response.Items) > 0 { return false, nil } return true, nil }) } func (c *client) kubectlDelete(manifest string) error { return c.kubectlManifestCmd("delete", manifest) } func (c *client) kubectlApply(manifest string) error { return c.kubectlManifestCmd("apply", manifest) } func (c *client) kubectlManifestCmd(commandName, manifest string) error { cmd := exec.Command("kubectl", c.buildKubectlArgs(commandName)...) 
cmd.Stdin = strings.NewReader(manifest) out, err := cmd.CombinedOutput() if err != nil { return fmt.Errorf("couldn't kubectl %s: %v, output: %s", commandName, err, string(out)) } return nil } func (c *client) buildKubectlArgs(commandName string) []string { args := []string{commandName} if c.kubeconfigFile != "" { args = append(args, "--kubeconfig", c.kubeconfigFile) } if c.configOverrides.Context.Cluster != "" { args = append(args, "--cluster", c.configOverrides.Context.Cluster) } if c.configOverrides.Context.Namespace != "" { args = append(args, "--namespace", c.configOverrides.Context.Namespace) } if c.configOverrides.Context.AuthInfo != "" { args = append(args, "--user", c.configOverrides.Context.AuthInfo) } return append(args, "-f", "-") } func (c *client) waitForKubectlApply(manifest string) error { err := util.PollImmediate(retryIntervalKubectlApply, timeoutKubectlApply, func() (bool, error) { glog.V(2).Infof("Waiting for kubectl apply...") err := c.kubectlApply(manifest) if err != nil { if strings.Contains(err.Error(), "refused") { // Connection was refused, probably because the API server is not ready yet. glog.V(4).Infof("Waiting for kubectl apply... server not yet available: %v", err) return false, nil } if strings.Contains(err.Error(), "unable to recognize") { glog.V(4).Infof("Waiting for kubectl apply... api not yet available: %v", err) return false, nil } if strings.Contains(err.Error(), "namespaces \"default\" not found") { glog.V(4).Infof("Waiting for kubectl apply... default namespace not yet available: %v", err) return false, nil } return false, err } return true, nil }) return err } func waitForClusterResourceReady(cs clientset.Interface) error { deadline := time.Now().Add(timeoutResourceReady) err := util.PollImmediate(retryIntervalResourceReady, timeoutResourceReady, func() (bool, error) { glog.V(2).Info("Waiting for Cluster v1alpha1 resources to become available...") _, err := cs.Discovery().ServerResourcesForGroupVersion("cluster.k8s.io/v1alpha1") if err == nil { return true, nil } return false, nil }) if err != nil { return err } timeout := time.Until(deadline) return util.PollImmediate(retryIntervalResourceReady, timeout, func() (bool, error) { glog.V(2).Info("Waiting for Cluster v1alpha1 resources to be listable...") _, err := cs.ClusterV1alpha1().Clusters(apiv1.NamespaceDefault).List(metav1.ListOptions{}) if err == nil { return true, nil } return false, nil }) } func waitForMachineReady(cs clientset.Interface, machine *clusterv1.Machine) error { err := util.PollImmediate(retryIntervalResourceReady, timeoutMachineReady, func() (bool, error) { glog.V(2).Infof("Waiting for Machine %v to become ready...", machine.Name) m, err := cs.ClusterV1alpha1().Machines(machine.Namespace).Get(machine.Name, metav1.GetOptions{}) if err != nil { return false, nil } // TODO: update once machine controllers have a way to indicate a machine has been provisioned. https://github.com/kubernetes-sigs/cluster-api/issues/253 // Seeing a node cannot be purely relied upon because the provisioned master will not be registering with // the stack that provisions it.
ready := m.Status.NodeRef != nil || len(m.Annotations) > 0 return ready, nil }) return err } func createTempFile(contents string) (string, error) { f, err := ioutil.TempFile("", "") if err != nil { return "", err } defer ifErrRemove(&err, f.Name()) if err = f.Close(); err != nil { return "", err } err = ioutil.WriteFile(f.Name(), []byte(contents), 0644) if err != nil { return "", err } return f.Name(), nil } func ifErrRemove(pErr *error, path string)
{ if *pErr != nil { if err := os.Remove(path); err != nil { glog.Warningf("Error removing file '%s': %v", path, err) } } }
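The waitFor* helpers above all share one shape: check a condition immediately, then on an interval, until it holds or a timeout passes. An illustrative generic re-sketch in Rust (util.PollImmediate is the real Go implementation; this only shows the pattern):

use std::time::{Duration, Instant};

// Run `condition` right away, then every `interval`, until it returns
// Ok(true), returns an error, or `timeout` elapses.
fn poll_immediate<E>(
    interval: Duration,
    timeout: Duration,
    mut condition: impl FnMut() -> Result<bool, E>,
) -> Result<bool, E> {
    let deadline = Instant::now() + timeout;
    loop {
        if condition()? {
            return Ok(true);
        }
        if Instant::now() >= deadline {
            return Ok(false); // timed out before the condition held
        }
        std::thread::sleep(interval);
    }
}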
frame.rs
use std::convert::TryInto; use rand::RngCore; use tokio::io::{AsyncReadExt, AsyncWriteExt}; use super::split::{WebSocketReadHalf, WebSocketWriteHalf}; use super::FrameType; #[allow(unused_imports)] // for intra doc links use super::WebSocket; use crate::error::WebSocketError; const U16_MAX_MINUS_ONE: usize = (u16::MAX - 1) as usize; const U16_MAX: usize = u16::MAX as usize; const U64_MAX_MINUS_ONE: usize = (u64::MAX - 1) as usize; // https://tools.ietf.org/html/rfc6455#section-5.2 /// Data which is sent and received through the WebSocket connection. /// /// # Sending /// /// To send a Frame, you can construct it normally and use the [`WebSocket::send()`] method, /// or use the convenience methods for each frame type /// ([`send_text()`](WebSocket::send_text()), [`send_binary()`](WebSocket::send_binary()), /// [`close()`](WebSocket::close()), [`send_ping()`](WebSocket::send_ping()), /// and [`send_pong()`](WebSocket::send_pong())). /// /// # Receiving /// /// Frames can be received through the [`WebSocket::receive()`] method. /// To extract the underlying data from a received Frame, /// you can `match` or use the convenience methods—for example, for text frames, /// you can use the method [`as_text`](Frame::as_text()) to get an immutable reference /// to the data, [`as_text_mut`](Frame::as_text_mut()) to get a mutable reference to the data, /// or [`into_text`](Frame::into_text()) to get ownership of the data. /// /// # Fragmentation /// /// As per the WebSocket protocol, frames can actually be fragments in a larger message /// (see [https://tools.ietf.org/html/rfc6455#section-5.4](https://tools.ietf.org/html/rfc6455#section-5.4)). /// However, the maximum frame size allowed by the WebSocket protocol is larger /// than what can be stored in a `Vec`. Therefore, no strategy for splitting messages /// into Frames is provided by this library. /// /// If you would like to use fragmentation manually, this can be done by setting /// the `continuation` and `fin` flags on the `Text` and `Binary` variants. /// `continuation` signifies that the Frame is a Continuation frame in the message, /// and `fin` signifies that the Frame is the final frame in the message /// (see the above linked RFC for more details). /// /// For example, if the message contains only one Frame, the single frame /// should have `continuation` set to `false` and `fin` set to `true`. If the message /// contains more than one frame, the first frame should have `continuation` set to /// `false` and `fin` set to `false`, all other frames except the last frame should /// have `continuation` set to `true` and `fin` set to `false`, and the last frame should /// have `continuation` set to `true` and `fin` set to `true`. 
#[derive(Debug)] pub enum Frame { /// A Text frame Text { /// The payload for the Text frame payload: String, /// Whether the Text frame is a continuation frame in the message continuation: bool, /// Whether the Text frame is the final frame in the message fin: bool, }, /// A Binary frame Binary { /// The payload for the Binary frame payload: Vec<u8>, /// Whether the Binary frame is a continuation frame in the message continuation: bool, /// Whether the Binary frame is the final frame in the message fin: bool, }, /// A Close frame Close { /// The payload for the Close frame payload: Option<(u16, String)>, }, /// A Ping frame Ping { /// The payload for the Ping frame payload: Option<Vec<u8>>, }, /// A Pong frame Pong { /// The payload for the Pong frame payload: Option<Vec<u8>>, }, } impl Frame { /// Constructs a Text frame from the given payload. /// `continuation` will be `false` and `fin` will be `true`. /// This can be modified by chaining [`Frame::set_continuation()`] or [`Frame::set_fin()`]. pub fn text(payload: String) -> Self { Self::Text { payload, continuation: false, fin: true, } } /// Returns whether the frame is a Text frame. pub fn is_text(&self) -> bool { self.as_text().is_some() } /// Attempts to interpret the frame as a Text frame, /// returning a reference to the underlying data if it is, /// and None otherwise. pub fn as_text(&self) -> Option<(&String, &bool, &bool)> { match self { Self::Text { payload,
continuation, fin, } => Some((payload, continuation, fin)), _ => None, } } /// Attempts to interpret the frame as a Text frame, /// returning a mutable reference to the underlying data if it is, /// and None otherwise. pub fn as_text_mut(&mut self) -> Option<(&mut String, &mut bool, &mut bool)> { match self { Self::Text { payload, continuation, fin, } => Some((payload, continuation, fin)), _ => None, } } /// Attempts to interpret the frame as a Text frame, /// consuming and returning the underlying data if it is, /// and returning None otherwise. pub fn into_text(self) -> Option<(String, bool, bool)> { match self { Self::Text { payload, continuation, fin, } => Some((payload, continuation, fin)), _ => None, } } /// Constructs a Binary frame from the given payload. /// `continuation` will be `false` and `fin` will be `true`. /// This can be modified by chaining [`Frame::set_continuation()`] or [`Frame::set_fin()`]. pub fn binary(payload: Vec<u8>) -> Self { Self::Binary { payload, continuation: false, fin: true, } } /// Returns whether the frame is a Binary frame. pub fn is_binary(&self) -> bool { self.as_binary().is_some() } /// Attempts to interpret the frame as a Binary frame, /// returning a reference to the underlying data if it is, /// and None otherwise. pub fn as_binary(&self) -> Option<(&Vec<u8>, &bool, &bool)> { match self { Self::Binary { payload, continuation, fin, } => Some((payload, continuation, fin)), _ => None, } } /// Attempts to interpret the frame as a Binary frame, /// returning a mutable reference to the underlying data if it is, /// and None otherwise. pub fn as_binary_mut(&mut self) -> Option<(&mut Vec<u8>, &mut bool, &mut bool)> { match self { Self::Binary { payload, continuation, fin, } => Some((payload, continuation, fin)), _ => None, } } /// Attempts to interpret the frame as a Binary frame, /// consuming and returning the underlying data if it is, /// and returning None otherwise. pub fn into_binary(self) -> Option<(Vec<u8>, bool, bool)> { match self { Self::Binary { payload, continuation, fin, } => Some((payload, continuation, fin)), _ => None, } } /// Constructs a Close frame from the given payload. pub fn close(payload: Option<(u16, String)>) -> Self { Self::Close { payload } } /// Returns whether the frame is a Close frame. pub fn is_close(&self) -> bool { self.as_close().is_some() } /// Attempts to interpret the frame as a Close frame, /// returning a reference to the underlying data if it is, /// and None otherwise. pub fn as_close(&self) -> Option<&(u16, String)> { match self { Self::Close { payload } => payload.as_ref(), _ => None, } } /// Attempts to interpret the frame as a Close frame, /// returning a mutable reference to the underlying data if it is, /// and None otherwise. pub fn as_close_mut(&mut self) -> Option<&mut (u16, String)> { match self { Self::Close { payload } => payload.as_mut(), _ => None, } } /// Attempts to interpret the frame as a Close frame, /// consuming and returning the underlying data if it is, /// and returning None otherwise. pub fn into_close(self) -> Option<(u16, String)> { match self { Self::Close { payload } => payload, _ => None, } } /// Constructs a Ping frame from the given payload. pub fn ping(payload: Option<Vec<u8>>) -> Self { Self::Ping { payload } } /// Returns whether the frame is a Ping frame. pub fn is_ping(&self) -> bool { self.as_ping().is_some() } /// Attempts to interpret the frame as a Ping frame, /// returning a reference to the underlying data if it is, /// and None otherwise. 
pub fn as_ping(&self) -> Option<&Vec<u8>> { match self { Self::Ping { payload } => payload.as_ref(), _ => None, } } /// Attempts to interpret the frame as a Ping frame, /// returning a mutable reference to the underlying data if it is, /// and None otherwise. pub fn as_ping_mut(&mut self) -> Option<&mut Vec<u8>> { match self { Self::Ping { payload } => payload.as_mut(), _ => None, } } /// Attempts to interpret the frame as a Ping frame, /// consuming and returning the underlying data if it is, /// and returning None otherwise. pub fn into_ping(self) -> Option<Vec<u8>> { match self { Self::Ping { payload } => payload, _ => None, } } /// Constructs a Pong frame from the given payload. pub fn pong(payload: Option<Vec<u8>>) -> Self { Self::Pong { payload } } /// Returns whether the frame is a Pong frame. pub fn is_pong(&self) -> bool { self.as_pong().is_some() } /// Attempts to interpret the frame as a Pong frame, /// returning a reference to the underlying data if it is, /// and None otherwise. pub fn as_pong(&self) -> Option<&Vec<u8>> { match self { Self::Pong { payload } => payload.as_ref(), _ => None, } } /// Attempts to interpret the frame as a Pong frame, /// returning a mutable reference to the underlying data if it is, /// and None otherwise. pub fn as_pong_mut(&mut self) -> Option<&mut Vec<u8>> { match self { Self::Pong { payload } => payload.as_mut(), _ => None, } } /// Attempts to interpret the frame as a Pong frame, /// consuming and returning the underlying data if it is, /// and returning None otherwise. pub fn into_pong(self) -> Option<Vec<u8>> { match self { Self::Pong { payload } => payload, _ => None, } } /// Modifies the frame to set `continuation` to the desired value. /// If the frame is not a Text or Binary frame, no operation is performed. pub fn set_continuation(self, continuation: bool) -> Self { match self { Self::Text { payload, fin, .. } => Self::Text { payload, continuation, fin, }, Self::Binary { payload, fin, .. } => Self::Binary { payload, continuation, fin, }, _ => self, } } /// Modifies the frame to set `fin` to the desired value. /// If the frame is not a Text or Binary frame, no operation is performed. pub fn set_fin(self, fin: bool) -> Self { match self { Self::Text { payload, continuation, .. } => Self::Text { payload, continuation, fin, }, Self::Binary { payload, continuation, .. } => Self::Binary { payload, continuation, fin, }, _ => self, } } pub(super) async fn send( self, write_half: &mut WebSocketWriteHalf, ) -> Result<(), WebSocketError> { // calculate before moving payload out of self let is_control = self.is_control(); let opcode = self.opcode(); let fin = self.fin(); let mut payload = match self { // https://tools.ietf.org/html/rfc6455#section-5.6 Self::Text { payload, .. } => payload.into_bytes(), Self::Binary { payload, .. 
} => payload, // https://tools.ietf.org/html/rfc6455#section-5.5.1 Self::Close { payload: Some((status_code, reason)), } => { let mut payload = status_code.to_be_bytes().to_vec(); payload.append(&mut reason.into_bytes()); payload } Self::Close { payload: None } => Vec::new(), // https://tools.ietf.org/html/rfc6455#section-5.5.2 Self::Ping { payload } => payload.unwrap_or(Vec::new()), // https://tools.ietf.org/html/rfc6455#section-5.5.3 Self::Pong { payload } => payload.unwrap_or(Vec::new()), }; // control frame cannot be longer than 125 bytes: https://tools.ietf.org/html/rfc6455#section-5.5 if is_control && payload.len() > 125 { return Err(WebSocketError::ControlFrameTooLargeError); } // set payload len: https://tools.ietf.org/html/rfc6455#section-5.2 let mut raw_frame = Vec::with_capacity(payload.len() + 14); raw_frame.push(opcode + fin); let mut payload_len_data = match payload.len() { 0..=125 => (payload.len() as u8).to_be_bytes().to_vec(), 126..=U16_MAX_MINUS_ONE => { let mut payload_len_data = vec![126]; payload_len_data.extend_from_slice(&(payload.len() as u16).to_be_bytes()); payload_len_data } U16_MAX..=U64_MAX_MINUS_ONE => { let mut payload_len_data = vec![127]; payload_len_data.extend_from_slice(&(payload.len() as u64).to_be_bytes()); payload_len_data } _ => return Err(WebSocketError::PayloadTooLargeError), }; payload_len_data[0] += 0b10000000; // set masking bit: https://tools.ietf.org/html/rfc6455#section-5.3 raw_frame.append(&mut payload_len_data); // payload masking: https://tools.ietf.org/html/rfc6455#section-5.3 let mut masking_key = vec![0; 4]; write_half.rng.fill_bytes(&mut masking_key); for (i, byte) in payload.iter_mut().enumerate() { *byte = *byte ^ (masking_key[i % 4]); } raw_frame.append(&mut masking_key); raw_frame.append(&mut payload); write_half .stream .write_all(&raw_frame) .await .map_err(|e| WebSocketError::WriteError(e))?; write_half .stream .flush() .await .map_err(|e| WebSocketError::WriteError(e))?; Ok(()) } fn is_control(&self) -> bool { // control frames: https://tools.ietf.org/html/rfc6455#section-5.5 match self { Self::Text { .. } => false, Self::Binary { .. } => false, Self::Close { .. } => true, Self::Ping { .. } => true, Self::Pong { .. } => true, } } fn opcode(&self) -> u8 { // opcodes: https://tools.ietf.org/html/rfc6455#section-5.2 match self { Self::Text { continuation, .. } => { if *continuation { 0x0 } else { 0x1 } } Self::Binary { continuation, .. } => { if *continuation { 0x0 } else { 0x2 } } Self::Close { .. } => 0x8, Self::Ping { .. } => 0x9, Self::Pong { .. } => 0xA, } } fn fin(&self) -> u8 { // fin bit: https://tools.ietf.org/html/rfc6455#section-5.2 match self { Self::Text { fin, .. } => (*fin as u8) << 7, Self::Binary { fin, .. } => (*fin as u8) << 7, Self::Close { .. } => 0b10000000, Self::Ping { .. } => 0b10000000, Self::Pong { .. 
} => 0b10000000, } } pub(super) async fn read_from_websocket( read_half: &mut WebSocketReadHalf, ) -> Result<Self, WebSocketError> { // https://tools.ietf.org/html/rfc6455#section-5.2 let fin_and_opcode = read_half .stream .read_u8() .await .map_err(|e| WebSocketError::ReadError(e))?; let fin: bool = fin_and_opcode & 0b10000000_u8 != 0; let opcode = fin_and_opcode & 0b00001111_u8; let mask_and_payload_len_first_byte = read_half .stream .read_u8() .await .map_err(|e| WebSocketError::ReadError(e))?; let masked = mask_and_payload_len_first_byte & 0b10000000_u8 != 0; if masked { // server to client frames should not be masked return Err(WebSocketError::ReceivedMaskedFrameError); } let payload_len_first_byte = mask_and_payload_len_first_byte & 0b01111111_u8; let payload_len = match payload_len_first_byte { 0..=125 => payload_len_first_byte as usize, 126 => read_half .stream .read_u16() .await .map_err(|e| WebSocketError::ReadError(e))? as usize, 127 => read_half .stream .read_u64() .await .map_err(|e| WebSocketError::ReadError(e))? as usize, _ => unreachable!(), }; let mut payload = vec![0; payload_len]; read_half .stream .read_exact(&mut payload) .await .map_err(|e| WebSocketError::ReadError(e))?; match opcode { 0x0 => match read_half.last_frame_type { FrameType::Text => Ok(Self::Text { payload: String::from_utf8(payload) .map_err(|_e| WebSocketError::InvalidFrameError)?, continuation: true, fin, }), FrameType::Binary => Ok(Self::Binary { payload, continuation: true, fin, }), FrameType::Control => Err(WebSocketError::InvalidFrameError), }, 0x1 => Ok(Self::Text { payload: String::from_utf8(payload) .map_err(|_e| WebSocketError::InvalidFrameError)?, continuation: false, fin, }), 0x2 => Ok(Self::Binary { payload, continuation: false, fin, }), // reserved range 0x3..=0x7 => Err(WebSocketError::InvalidFrameError), 0x8 if payload_len == 0 => Ok(Self::Close { payload: None }), // if there is a payload it must have a u16 status code 0x8 if payload_len < 2 => Err(WebSocketError::InvalidFrameError), 0x8 => { let (status_code, reason) = payload.split_at(2); let status_code = u16::from_be_bytes( status_code .try_into() .map_err(|_e| WebSocketError::InvalidFrameError)?, ); Ok(Self::Close { payload: Some(( status_code, String::from_utf8(reason.to_vec()) .map_err(|_e| WebSocketError::InvalidFrameError)?, )), }) } 0x9 if payload_len == 0 => Ok(Self::Ping { payload: None }), 0x9 => Ok(Self::Ping { payload: Some(payload), }), 0xA if payload_len == 0 => Ok(Self::Pong { payload: None }), 0xA => Ok(Self::Pong { payload: Some(payload), }), // reserved range 0xB..=0xFF => Err(WebSocketError::InvalidFrameError), } } } impl From<String> for Frame { fn from(s: String) -> Self { Self::text(s) } } impl From<Vec<u8>> for Frame { fn from(v: Vec<u8>) -> Self { Self::binary(v) } }
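The fragmentation rules spelled out in the Frame doc comment, exercised with the constructors and setters defined above (the payload text is illustrative):

fn main() {
    // First fragment of a message: continuation=false, fin=false.
    let first = Frame::text("fragmented ".to_string()).set_fin(false);
    // Middle fragment: continuation=true, fin=false.
    let middle = Frame::text("message ".to_string())
        .set_continuation(true)
        .set_fin(false);
    // Final fragment: continuation=true, fin=true (fin defaults to true).
    let last = Frame::text("end".to_string()).set_continuation(true);
    assert!(first.is_text() && middle.is_text() && last.is_text());
}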
lasso.rs
use numpy::{PyArray, PyArray1, PyArray2}; use pyo3::prelude::*; use sparseglm::{ datasets::{csc_array::CSCArray, DenseDatasetView, SparseDataset}, estimators::hyperparams::LassoParams, estimators::traits::Fit, }; #[pyclass] pub struct
{ inner: LassoParams<f64>, } #[pymethods] impl LassoWrapper { #[new] fn new( alpha: f64, max_iterations: usize, max_epochs: usize, tolerance: f64, p0: usize, use_acceleration: bool, k: usize, verbose: bool, ) -> PyResult<Self> { let _estimator = LassoParams::new() .alpha(alpha) .max_iterations(max_iterations) .max_epochs(max_epochs) .tolerance(tolerance) .p0(p0) .use_acceleration(use_acceleration) .K(k) .verbose(verbose); Ok(LassoWrapper { inner: _estimator }) } unsafe fn fit<'py>( &mut self, py: Python<'py>, x: &PyArray2<f64>, y: &PyArray1<f64>, ) -> PyResult<&'py PyArray1<f64>> { let dataset = DenseDatasetView::from((x.as_array(), y.as_array())); let _estimator = self.inner.fit(&dataset).unwrap(); Ok(PyArray::from_array(py, &_estimator.coefficients())) } unsafe fn fit_sparse<'py>( &mut self, py: Python<'py>, data: &PyArray1<f64>, indices: &PyArray1<i32>, indptr: &PyArray1<i32>, y: &PyArray1<f64>, ) -> PyResult<&'py PyArray1<f64>> { let x = CSCArray::new(data.as_array(), indices.as_array(), indptr.as_array()); let dataset = SparseDataset::from((x, y.as_array())); let _estimator = self.inner.fit(&dataset).unwrap(); Ok(PyArray::from_array(py, &_estimator.coefficients())) } }
LassoWrapper
test_main.py
# -*- coding: utf-8 -*- import os import sys root_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) sys.path.insert(0, root_dir) import unittest class TestMain(unittest.TestCase): def test_main(self): # NOTE the blank index of the result is slightly different from the requirement. # If the requirement is confirmed to be very strict, then feel free to contact me, # and I'll write a manual one. from prints_a_multiplication_table_of_primes_numbers import PrimeTable count_1_output = """ | 2 ---+--- 2 | 4""".lstrip("\n") self.assertEqual(PrimeTable.output(1), count_1_output) count_10_output = """ | 2 3 5 7 11 13 17 19 23 29 ---+--------------------------------------------- 2 | 4 6 10 14 22 26 34 38 46 58 3 | 6 9 15 21 33 39 51 57 69 87 5 | 10 15 25 35 55 65 85 95 115 145 7 | 14 21 35 49 77 91 119 133 161 203 11 | 22 33 55 77 121 143 187 209 253 319 13 | 26 39 65 91 143 169 221 247 299 377 17 | 34 51 85 119 187 221 289 323 391 493 19 | 38 57 95 133 209 247 323 361 437 551 23 | 46 69 115 161 253 299 391 437 529 667 29 | 58 87 145 203 319 377 493 551 667 841""".lstrip("\n") self.assertEqual(PrimeTable.output(10), count_10_output) def
(self): from prints_a_multiplication_table_of_primes_numbers.prime_generator import PrimeGenerator pg = PrimeGenerator() self.assertEqual(pg.generate(1), [2]) self.assertEqual(pg.generate(2), [2, 3]) self.assertEqual(pg.generate(3), [2, 3, 5]) self.assertEqual(pg.generate(4), [2, 3, 5, 7]) self.assertEqual(pg.generate(5), [2, 3, 5, 7, 11]) self.assertEqual(pg.generate(6), [2, 3, 5, 7, 11, 13]) self.assertEqual(pg.generate(7), [2, 3, 5, 7, 11, 13, 17]) self.assertEqual(pg.generate(8), [2, 3, 5, 7, 11, 13, 17, 19]) self.assertEqual(pg.generate(9), [2, 3, 5, 7, 11, 13, 17, 19, 23]) self.assertEqual(pg.generate(10), [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]) def test_FibonacciGenerator(self): from prints_a_multiplication_table_of_primes_numbers.fibonacci_generator import FibonacciGenerator fg = FibonacciGenerator() self.assertEqual(fg.generate(1), [1]) self.assertEqual(fg.generate(2), [1, 1]) self.assertEqual(fg.generate(3), [1, 1, 2]) self.assertEqual(fg.generate(4), [1, 1, 2, 3]) self.assertEqual(fg.generate(5), [1, 1, 2, 3, 5]) self.assertEqual(fg.generate(6), [1, 1, 2, 3, 5, 8]) def test_NumLines(self): from prints_a_multiplication_table_of_primes_numbers.num_lines import NumLines self.assertEqual(NumLines.generate([2, 3, 5, 7, 11]), [ ["", 2, 3, 5, 7, 11], [2, 4, 6, 10, 14, 22], [3, 6, 9, 15, 21, 33], [5, 10, 15, 25, 35, 55], [7, 14, 21, 35, 49, 77], [11, 22, 33, 55, 77, 121]]) self.assertEqual(NumLines.generate([2]), [ ["", 2], [2, 4], ]) if __name__ == '__main__': unittest.main()
test_PrimeGenerator
termination.rs
use crate::error::*; pub(crate) fn register_handler<T>(handler: T) -> Result<()> where T: Fn() + Send + 'static,
{ ctrlc::set_handler(handler).wrap_error("termination", "failed to set termination handler") }
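A hypothetical in-crate caller of register_handler (the closure body and log message are illustrative; Result comes from crate::error as above):

fn main() -> Result<()> {
    register_handler(|| {
        eprintln!("received termination signal, shutting down");
    })
}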