<|file_name|>glusterfs.go<|end_file_name|><|fim▁begin|>/*
Copyright 2015 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package glusterfs

import (
	"fmt"
	"math"
	"os"
	"path"
	"runtime"
	"strconv"
	dstrings "strings"
	"sync"

	"github.com/golang/glog"
	gcli "github.com/heketi/heketi/client/api/go-client"
	gapi "github.com/heketi/heketi/pkg/glusterfs/api"
	"k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/api/errors"
	"k8s.io/apimachinery/pkg/api/resource"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/labels"
	"k8s.io/apimachinery/pkg/types"
	"k8s.io/apimachinery/pkg/util/sets"
	"k8s.io/apimachinery/pkg/util/uuid"
	clientset "k8s.io/client-go/kubernetes"
	v1helper "k8s.io/kubernetes/pkg/apis/core/v1/helper"
	"k8s.io/kubernetes/pkg/util/mount"
	"k8s.io/kubernetes/pkg/util/strings"
	"k8s.io/kubernetes/pkg/volume"
	volutil "k8s.io/kubernetes/pkg/volume/util"
)

// ProbeVolumePlugins is the primary entrypoint for volume plugins.
func ProbeVolumePlugins() []volume.VolumePlugin {
	return []volume.VolumePlugin{&glusterfsPlugin{host: nil, gidTable: make(map[string]*MinMaxAllocator)}}
}

type glusterfsPlugin struct {
	host         volume.VolumeHost
	gidTable     map[string]*MinMaxAllocator
	gidTableLock sync.Mutex
}

var _ volume.VolumePlugin = &glusterfsPlugin{}
var _ volume.PersistentVolumePlugin = &glusterfsPlugin{}
var _ volume.DeletableVolumePlugin = &glusterfsPlugin{}
var _ volume.ProvisionableVolumePlugin = &glusterfsPlugin{}
var _ volume.ExpandableVolumePlugin = &glusterfsPlugin{}
var _ volume.Provisioner = &glusterfsVolumeProvisioner{}
var _ volume.Deleter = &glusterfsVolumeDeleter{}

const (
	glusterfsPluginName            = "kubernetes.io/glusterfs"
	volPrefix                      = "vol_"
	dynamicEpSvcPrefix             = "glusterfs-dynamic-"
	replicaCount                   = 3
	durabilityType                 = "replicate"
	secretKeyName                  = "key" // key name used in secret
	gciLinuxGlusterMountBinaryPath = "/sbin/mount.glusterfs"
	defaultGidMin                  = 2000
	defaultGidMax                  = math.MaxInt32
	// absoluteGidMin/Max are currently the same as the
	// default values, but they play a different role and
	// could take a different value. Only thing we need is:
	// absGidMin <= defGidMin <= defGidMax <= absGidMax
	absoluteGidMin          = 2000
	absoluteGidMax          = math.MaxInt32
	linuxGlusterMountBinary = "mount.glusterfs"
	heketiAnn               = "heketi-dynamic-provisioner"
	glusterTypeAnn          = "gluster.org/type"
	glusterDescAnn          = "Gluster-Internal: Dynamically provisioned PV"
	heketiVolIDAnn          = "gluster.kubernetes.io/heketi-volume-id"
)

func (plugin *glusterfsPlugin) Init(host volume.VolumeHost) error {
	plugin.host = host
	return nil
}

func (plugin *glusterfsPlugin) GetPluginName() string {
	return glusterfsPluginName
}

func (plugin *glusterfsPlugin) GetVolumeName(spec *volume.Spec) (string, error) {
	volumeSource, _, err := getVolumeSource(spec)
	if err != nil {
		return "", err
	}

	return fmt.Sprintf(
		"%v:%v",
		volumeSource.EndpointsName,
		volumeSource.Path), nil
}

func (plugin *glusterfsPlugin) CanSupport(spec *volume.Spec) bool {
	return (spec.PersistentVolume != nil && spec.PersistentVolume.Spec.Glusterfs != nil) ||
		(spec.Volume != nil && spec.Volume.Glusterfs != nil)
}

func (plugin *glusterfsPlugin) RequiresRemount() bool {
	return false
}

func (plugin *glusterfsPlugin) SupportsMountOption() bool {
	return true
}

func (plugin *glusterfsPlugin) SupportsBulkVolumeVerification() bool {
	return false
}

func (plugin *glusterfsPlugin) RequiresFSResize() bool {
	return false
}

func (plugin *glusterfsPlugin) GetAccessModes() []v1.PersistentVolumeAccessMode {
	return []v1.PersistentVolumeAccessMode{
		v1.ReadWriteOnce,
		v1.ReadOnlyMany,
		v1.ReadWriteMany,
	}
}

func (plugin *glusterfsPlugin) NewMounter(spec *volume.Spec, pod *v1.Pod, _ volume.VolumeOptions) (volume.Mounter, error) {
	source, _, err := getVolumeSource(spec)
	if err != nil {
		glog.Errorf("failed to get gluster volumesource: %v", err)
		return nil, err
	}
	epName := source.EndpointsName
	// PVC/POD is in same namespace.
	podNs := pod.Namespace
	kubeClient := plugin.host.GetKubeClient()
	if kubeClient == nil {
		return nil, fmt.Errorf("failed to get kube client to initialize mounter")
	}
	ep, err := kubeClient.CoreV1().Endpoints(podNs).Get(epName, metav1.GetOptions{})
	if err != nil {
		glog.Errorf("failed to get endpoint %s: %v", epName, err)
		return nil, err
	}
	glog.V(4).Infof("glusterfs pv endpoint %v", ep)
	return plugin.newMounterInternal(spec, ep, pod, plugin.host.GetMounter(plugin.GetPluginName()))
}

func (plugin *glusterfsPlugin) newMounterInternal(spec *volume.Spec, ep *v1.Endpoints, pod *v1.Pod, mounter mount.Interface) (volume.Mounter, error) {
	source, readOnly, _ := getVolumeSource(spec)
	return &glusterfsMounter{
		glusterfs: &glusterfs{
			volName:         spec.Name(),
			mounter:         mounter,
			pod:             pod,
			plugin:          plugin,
			MetricsProvider: volume.NewMetricsStatFS(plugin.host.GetPodVolumeDir(pod.UID, strings.EscapeQualifiedNameForDisk(glusterfsPluginName), spec.Name())),
		},
		hosts:        ep,
		path:         source.Path,
		readOnly:     readOnly,
		mountOptions: volutil.MountOptionFromSpec(spec),
	}, nil
}

func (plugin *glusterfsPlugin) NewUnmounter(volName string, podUID types.UID) (volume.Unmounter, error) {
	return plugin.newUnmounterInternal(volName, podUID, plugin.host.GetMounter(plugin.GetPluginName()))
}

func (plugin *glusterfsPlugin) newUnmounterInternal(volName string, podUID types.UID, mounter mount.Interface) (volume.Unmounter, error) {
	return &glusterfsUnmounter{&glusterfs{
		volName:         volName,
		mounter:         mounter,
		pod:             &v1.Pod{ObjectMeta: metav1.ObjectMeta{UID: podUID}},
		plugin:          plugin,
		MetricsProvider: volume.NewMetricsStatFS(plugin.host.GetPodVolumeDir(podUID, strings.EscapeQualifiedNameForDisk(glusterfsPluginName), volName)),
	}}, nil
}

func (plugin *glusterfsPlugin) ConstructVolumeSpec(volumeName, mountPath string) (*volume.Spec, error) {
	// To reconstruct volume spec we need endpoint where fetching endpoint from mount
	// string looks to be impossible, so returning error.
	return nil, fmt.Errorf("impossible to reconstruct glusterfs volume spec from volume mountpath")
}

// Glusterfs volumes represent a bare host file or directory mount of an Glusterfs export.
type glusterfs struct {
	volName string
	pod     *v1.Pod
	mounter mount.Interface
	plugin  *glusterfsPlugin
	volume.MetricsProvider
}

type glusterfsMounter struct {
	*glusterfs
	hosts        *v1.Endpoints
	path         string
	readOnly     bool
	mountOptions []string
}

var _ volume.Mounter = &glusterfsMounter{}

func (b *glusterfsMounter) GetAttributes() volume.Attributes {
	return volume.Attributes{
		ReadOnly:        b.readOnly,
		Managed:         false,
		SupportsSELinux: false,
	}
}

// Checks prior to mount operations to verify that the required components (binaries, etc.)
// to mount the volume are available on the underlying node.
// If not, it returns an error
func (b *glusterfsMounter) CanMount() error {
	exe := b.plugin.host.GetExec(b.plugin.GetPluginName())
	switch runtime.GOOS {
	case "linux":
		if _, err := exe.Run("test", "-x", gciLinuxGlusterMountBinaryPath); err != nil {
			return fmt.Errorf("Required binary %s is missing", gciLinuxGlusterMountBinaryPath)
		}
	}
	return nil
}

// SetUp attaches the disk and bind mounts to the volume path.
func (b *glusterfsMounter) SetUp(fsGroup *int64) error {
	return b.SetUpAt(b.GetPath(), fsGroup)
}

func (b *glusterfsMounter) SetUpAt(dir string, fsGroup *int64) error {
	notMnt, err := b.mounter.IsLikelyNotMountPoint(dir)
	glog.V(4).Infof("mount setup: %s %v %v", dir, !notMnt, err)
	if err != nil && !os.IsNotExist(err) {
		return err
	}
	if !notMnt {
		return nil
	}
	if err := os.MkdirAll(dir, 0750); err != nil {
		return err
	}
	err = b.setUpAtInternal(dir)
	if err == nil {
		return nil
	}

	// Cleanup upon failure.
	volutil.UnmountPath(dir, b.mounter)
	return err
}

func (glusterfsVolume *glusterfs) GetPath() string {
	name := glusterfsPluginName
	return glusterfsVolume.plugin.host.GetPodVolumeDir(glusterfsVolume.pod.UID, strings.EscapeQualifiedNameForDisk(name), glusterfsVolume.volName)
}

type glusterfsUnmounter struct {
	*glusterfs
}

var _ volume.Unmounter = &glusterfsUnmounter{}

func (c *glusterfsUnmounter) TearDown() error {
	return c.TearDownAt(c.GetPath())
}

func (c *glusterfsUnmounter) TearDownAt(dir string) error {
	return volutil.UnmountPath(dir, c.mounter)
}

func (b *glusterfsMounter) setUpAtInternal(dir string) error {
	var errs error
	options := []string{}
	hasLogFile := false
	log := ""

	if b.readOnly {
		options = append(options, "ro")
	}

	// Check logfile has been provided by user, if provided, use that as the log file.
	for _, userOpt := range b.mountOptions {
		if dstrings.HasPrefix(userOpt, "log-file") {
			glog.V(4).Infof("log-file mount option has provided")
			hasLogFile = true
			break
		}
	}

	// If logfile has not been provided, create driver specific log file.
	if !hasLogFile {
		log = ""
		p := path.Join(b.glusterfs.plugin.host.GetPluginDir(glusterfsPluginName), b.glusterfs.volName)
		if err := os.MkdirAll(p, 0750); err != nil {
			return fmt.Errorf("failed to create directory %v: %v", p, err)
		}

		// adding log-level ERROR to remove noise
		// and more specific log path so each pod has
		// its own log based on PV + Pod
		log = path.Join(p, b.pod.Name+"-glusterfs.log")
	}

	// Use derived/provided log file in gluster fuse mount
	options = append(options, "log-file="+log)
	options = append(options, "log-level=ERROR")

	var addrlist []string
	if b.hosts == nil {
		return fmt.Errorf("glusterfs endpoint is nil in mounter")
	}
	addr := sets.String{}
	if b.hosts.Subsets != nil {
		for _, s := range b.hosts.Subsets {
			for _, a := range s.Addresses {
				if !addr.Has(a.IP) {
					addr.Insert(a.IP)
					addrlist = append(addrlist, a.IP)
				}
			}
		}
	}

	//Add backup-volfile-servers and auto_unmount options.
	options = append(options, "backup-volfile-servers="+dstrings.Join(addrlist[:], ":"))
	options = append(options, "auto_unmount")

	mountOptions := volutil.JoinMountOptions(b.mountOptions, options)

	// with `backup-volfile-servers` mount option in place, it is not required to
	// iterate over all the servers in the addrlist. A mount attempt with this option
	// will fetch all the servers mentioned in the backup-volfile-servers list.
	// Refer to backup-volfile-servers @ http://docs.gluster.org/en/latest/Administrator%20Guide/Setting%20Up%20Clients/
	if (len(addrlist) > 0) && (addrlist[0] != "") {
		ip := addrlist[0]
		errs = b.mounter.Mount(ip+":"+b.path, dir, "glusterfs", mountOptions)
		if errs == nil {
			glog.Infof("successfully mounted directory %s", dir)
			return nil
		}
		if dstrings.Contains(errs.Error(), "Invalid option auto_unmount") ||
			dstrings.Contains(errs.Error(), "Invalid argument") {
			// Give a try without `auto_unmount` mount option, because
			// it could be that gluster fuse client is older version and
			// mount.glusterfs is unaware of `auto_unmount`.
			noAutoMountOptions := make([]string, 0, len(mountOptions))
			for _, opt := range mountOptions {
				if opt != "auto_unmount" {
					noAutoMountOptions = append(noAutoMountOptions, opt)
				}
			}
			errs = b.mounter.Mount(ip+":"+b.path, dir, "glusterfs", noAutoMountOptions)
			if errs == nil {
				glog.Infof("successfully mounted %s", dir)
				return nil
			}
		}
	} else {
		return fmt.Errorf("failed to execute mount command:[no valid ipaddress found in endpoint address list]")
	}

	// Failed mount scenario.
	// Since glusterfs does not return error text
	// it all goes in a log file, we will read the log file
	logErr := readGlusterLog(log, b.pod.Name)
	if logErr != nil {
		return fmt.Errorf("mount failed: %v the following error information was pulled from the glusterfs log to help diagnose this issue: %v", errs, logErr)
	}
	return fmt.Errorf("mount failed: %v", errs)
}

func getVolumeSource(spec *volume.Spec) (*v1.GlusterfsVolumeSource, bool, error) {
	if spec.Volume != nil && spec.Volume.Glusterfs != nil {
		return spec.Volume.Glusterfs, spec.Volume.Glusterfs.ReadOnly, nil
	} else if spec.PersistentVolume != nil && spec.PersistentVolume.Spec.Glusterfs != nil {
		return spec.PersistentVolume.Spec.Glusterfs, spec.ReadOnly, nil
	}

	return nil, false, fmt.Errorf("Spec does not reference a Glusterfs volume type")
}

func (plugin *glusterfsPlugin) NewProvisioner(options volume.VolumeOptions) (volume.Provisioner, error) {
	return plugin.newProvisionerInternal(options)
}

func (plugin *glusterfsPlugin) newProvisionerInternal(options volume.VolumeOptions) (volume.Provisioner, error) {
	return &glusterfsVolumeProvisioner{
		glusterfsMounter: &glusterfsMounter{
			glusterfs: &glusterfs{
				plugin: plugin,
			},
		},
		options: options,
	}, nil
}

type provisionerConfig struct {
	url                string
	user               string
	userKey            string
	secretNamespace    string
	secretName         string
	secretValue        string
	clusterID          string
	gidMin             int
	gidMax             int
	volumeType         gapi.VolumeDurabilityInfo
	volumeOptions      []string
	volumeNamePrefix   string
	thinPoolSnapFactor float32
}

type glusterfsVolumeProvisioner struct {
	*glusterfsMounter
	provisionerConfig
	options volume.VolumeOptions
}

func convertGid(gidString string) (int, error) {
	gid64, err := strconv.ParseInt(gidString, 10, 32)
	if err != nil {
		return 0, fmt.Errorf("failed to parse gid %v: %v", gidString, err)
	}

	if gid64 < 0 {
		return 0, fmt.Errorf("negative GIDs %v are not allowed", gidString)
	}

	// ParseInt returns a int64, but since we parsed only
	// for 32 bit, we can cast to int without loss:
	gid := int(gid64)
	return gid, nil
}

func convertVolumeParam(volumeString string) (int, error) {
	count, err := strconv.Atoi(volumeString)
	if err != nil {
		return 0, fmt.Errorf("failed to parse volumestring %q: %v", volumeString, err)
	}

	if count < 0 {
		return 0, fmt.Errorf("negative values are not allowed")
	}
	return count, nil
}

func (plugin *glusterfsPlugin) NewDeleter(spec *volume.Spec) (volume.Deleter, error) {
	return plugin.newDeleterInternal(spec)
}

func (plugin *glusterfsPlugin) newDeleterInternal(spec *volume.Spec) (volume.Deleter, error) {
	if spec.PersistentVolume != nil && spec.PersistentVolume.Spec.Glusterfs == nil {
		return nil, fmt.Errorf("spec.PersistentVolume.Spec.Glusterfs is nil")
	}
	return &glusterfsVolumeDeleter{
		glusterfsMounter: &glusterfsMounter{
			glusterfs: &glusterfs{
				volName: spec.Name(),
				plugin:  plugin,
			},
			path: spec.PersistentVolume.Spec.Glusterfs.Path,
		},
		spec: spec.PersistentVolume,
	}, nil
}

type glusterfsVolumeDeleter struct {
	*glusterfsMounter
	provisionerConfig
	spec *v1.PersistentVolume
}

func (d *glusterfsVolumeDeleter) GetPath() string {
	name := glusterfsPluginName
	return d.plugin.host.GetPodVolumeDir(d.glusterfsMounter.glusterfs.pod.UID, strings.EscapeQualifiedNameForDisk(name), d.glusterfsMounter.glusterfs.volName)
}

// Traverse the PVs, fetching all the GIDs from those
// in a given storage class, and mark them in the table.
func (plugin *glusterfsPlugin) collectGids(className string, gidTable *MinMaxAllocator) error {
	kubeClient := plugin.host.GetKubeClient()
	if kubeClient == nil {
		return fmt.Errorf("failed to get kube client when collecting gids")
	}
	pvList, err := kubeClient.CoreV1().PersistentVolumes().List(metav1.ListOptions{LabelSelector: labels.Everything().String()})
	if err != nil {
		glog.Error("failed to get existing persistent volumes")
		return err
	}

	for _, pv := range pvList.Items {
		if v1helper.GetPersistentVolumeClass(&pv) != className {
			continue
		}

		pvName := pv.ObjectMeta.Name

		gidStr, ok := pv.Annotations[volutil.VolumeGidAnnotationKey]
		if !ok {
			glog.Warningf("no GID found in pv %v", pvName)
			continue
		}

		gid, err := convertGid(gidStr)
		if err != nil {
			glog.Errorf("failed to parse gid %s: %v", gidStr, err)
			continue
		}

		_, err = gidTable.Allocate(gid)
		if err == ErrConflict {
			glog.Warningf("GID %v found in pv %v was already allocated", gid, pvName)
		} else if err != nil {
			glog.Errorf("failed to store gid %v found in pv %v: %v", gid, pvName, err)
			return err
		}
	}

	return nil
}

// Return the gid table for a storage class.
// - If this is the first time, fill it with all the gids
//   used in PVs of this storage class by traversing the PVs.
// - Adapt the range of the table to the current range of the SC.
func (plugin *glusterfsPlugin) getGidTable(className string, min int, max int) (*MinMaxAllocator, error) {
	plugin.gidTableLock.Lock()
	gidTable, ok := plugin.gidTable[className]
	plugin.gidTableLock.Unlock()

	if ok {
		err := gidTable.SetRange(min, max)
		if err != nil {
			return nil, err
		}

		return gidTable, nil
	}

	// create a new table and fill it
	newGidTable, err := NewMinMaxAllocator(0, absoluteGidMax)
	if err != nil {
		return nil, err
	}

	// collect gids with the full range
	err = plugin.collectGids(className, newGidTable)
	if err != nil {
		return nil, err
	}

	// and only reduce the range afterwards
	err = newGidTable.SetRange(min, max)
	if err != nil {
		return nil, err
	}

	// if in the meantime a table appeared, use it
	plugin.gidTableLock.Lock()
	defer plugin.gidTableLock.Unlock()

	gidTable, ok = plugin.gidTable[className]
	if ok {
		err = gidTable.SetRange(min, max)
		if err != nil {
			return nil, err
		}

		return gidTable, nil
	}

	plugin.gidTable[className] = newGidTable

	return newGidTable, nil
}

func (d *glusterfsVolumeDeleter) getGid() (int, bool, error) {
	gidStr, ok := d.spec.Annotations[volutil.VolumeGidAnnotationKey]
	if !ok {
		return 0, false, nil
	}

	gid, err := convertGid(gidStr)

	return gid, true, err
}

func (d *glusterfsVolumeDeleter) Delete() error {
	glog.V(2).Infof("delete volume %s", d.glusterfsMounter.path)

	volumeName := d.glusterfsMounter.path
	volumeID, err := getVolumeID(d.spec, volumeName)
	if err != nil {
		return fmt.Errorf("failed to get volumeID: %v", err)
	}

	class, err := volutil.GetClassForVolume(d.plugin.host.GetKubeClient(), d.spec)
	if err != nil {
		return err
	}

	cfg, err := parseClassParameters(class.Parameters, d.plugin.host.GetKubeClient())
	if err != nil {
		return err
	}
	d.provisionerConfig = *cfg

	glog.V(4).Infof("deleting volume %q", volumeID)

	gid, exists, err := d.getGid()
	if err != nil {
		glog.Error(err)
	} else if exists {
		gidTable, err := d.plugin.getGidTable(class.Name, cfg.gidMin, cfg.gidMax)
		if err != nil {
			return fmt.Errorf("failed to get gidTable: %v", err)
		}

		err = gidTable.Release(gid)
		if err != nil {
			return fmt.Errorf("failed to release gid %v: %v", gid, err)
		}
	}

	cli := gcli.NewClient(d.url, d.user, d.secretValue)
	if cli == nil {
		glog.Errorf("failed to create glusterfs REST client")
		return fmt.Errorf("failed to create glusterfs REST client, REST server authentication failed")
	}
	err = cli.VolumeDelete(volumeID)
	if err != nil {
		glog.Errorf("failed to delete volume %s: %v", volumeName, err)
		return err
	}
	glog.V(2).Infof("volume %s deleted successfully", volumeName)

	//Deleter takes endpoint and namespace from pv spec.
	pvSpec := d.spec.Spec
	var dynamicEndpoint, dynamicNamespace string
	if pvSpec.ClaimRef == nil {
		glog.Errorf("ClaimRef is nil")
		return fmt.Errorf("ClaimRef is nil")
	}
	if pvSpec.ClaimRef.Namespace == "" {
		glog.Errorf("namespace is nil")
		return fmt.Errorf("namespace is nil")
	}
	dynamicNamespace = pvSpec.ClaimRef.Namespace
	if pvSpec.Glusterfs.EndpointsName != "" {
		dynamicEndpoint = pvSpec.Glusterfs.EndpointsName
	}
	glog.V(3).Infof("dynamic namespace and endpoint %v/%v", dynamicNamespace, dynamicEndpoint)
	err = d.deleteEndpointService(dynamicNamespace, dynamicEndpoint)
	if err != nil {
		glog.Errorf("failed to delete endpoint/service %v/%v: %v", dynamicNamespace, dynamicEndpoint, err)
	} else {
		glog.V(1).Infof("endpoint %v/%v is deleted successfully ", dynamicNamespace, dynamicEndpoint)
	}
	return nil
}

func (p *glusterfsVolumeProvisioner) Provision(selectedNode *v1.Node, allowedTopologies []v1.TopologySelectorTerm) (*v1.PersistentVolume, error) {
	if !volutil.AccessModesContainedInAll(p.plugin.GetAccessModes(), p.options.PVC.Spec.AccessModes) {
		return nil, fmt.Errorf("invalid AccessModes %v: only AccessModes %v are supported", p.options.PVC.Spec.AccessModes, p.plugin.GetAccessModes())
	}

	if p.options.PVC.Spec.Selector != nil {
		glog.V(4).Infof("not able to parse your claim Selector")
		return nil, fmt.Errorf("not able to parse your claim Selector")
	}

	if volutil.CheckPersistentVolumeClaimModeBlock(p.options.PVC) {
		return nil, fmt.Errorf("%s does not support block volume provisioning", p.plugin.GetPluginName())
	}

	glog.V(4).Infof("Provision VolumeOptions %v", p.options)
	scName := v1helper.GetPersistentVolumeClaimClass(p.options.PVC)
	cfg, err := parseClassParameters(p.options.Parameters, p.plugin.host.GetKubeClient())
	if err != nil {
		return nil, err
	}
	p.provisionerConfig = *cfg

	gidTable, err := p.plugin.getGidTable(scName, cfg.gidMin, cfg.gidMax)
	if err != nil {
		return nil, fmt.Errorf("failed to get gidTable: %v", err)
	}

	gid, _, err := gidTable.AllocateNext()
	if err != nil {
		glog.Errorf("failed to reserve GID from table: %v", err)
		return nil, fmt.Errorf("failed to reserve GID from table: %v", err)
	}

	glog.V(2).Infof("Allocated GID %d for PVC %s", gid, p.options.PVC.Name)

	glusterfs, sizeGiB, volID, err := p.CreateVolume(gid)
	if err != nil {
		if releaseErr := gidTable.Release(gid); releaseErr != nil {
			glog.Errorf("error when releasing GID in storageclass %s: %v", scName, releaseErr)
		}

		glog.Errorf("failed to create volume: %v", err)
		return nil, fmt.Errorf("failed to create volume: %v", err)
	}
	mode := v1.PersistentVolumeFilesystem
	pv := new(v1.PersistentVolume)
	pv.Spec.PersistentVolumeSource.Glusterfs = glusterfs
	pv.Spec.PersistentVolumeReclaimPolicy = p.options.PersistentVolumeReclaimPolicy
	pv.Spec.AccessModes = p.options.PVC.Spec.AccessModes
	pv.Spec.VolumeMode = &mode
	if len(pv.Spec.AccessModes) == 0 {
		pv.Spec.AccessModes = p.plugin.GetAccessModes()
	}
	pv.Spec.MountOptions = p.options.MountOptions

	gidStr := strconv.FormatInt(int64(gid), 10)

	pv.Annotations = map[string]string{
		volutil.VolumeGidAnnotationKey:        gidStr,
		volutil.VolumeDynamicallyCreatedByKey: heketiAnn,
		glusterTypeAnn:                        "file",
		"Description":                         glusterDescAnn,
		heketiVolIDAnn:                        volID,
	}

	pv.Spec.Capacity = v1.ResourceList{
		v1.ResourceName(v1.ResourceStorage): resource.MustParse(fmt.Sprintf("%dGi", sizeGiB)),
	}
	return pv, nil
}

func (p *glusterfsVolumeProvisioner) CreateVolume(gid int) (r *v1.GlusterfsVolumeSource, size int, volID string, err error) {
	var clusterIDs []string
	customVolumeName := ""
	capacity := p.options.PVC.Spec.Resources.Requests[v1.ResourceName(v1.ResourceStorage)]

	// GlusterFS/heketi creates volumes in units of GiB.
	sz, err := volutil.RoundUpToGiBInt(capacity)
	if err != nil {
		return nil, 0, "", err
	}
	glog.V(2).Infof("create volume of size %dGiB", sz)

	if p.url == "" {
		glog.Errorf("REST server endpoint is empty")
		return nil, 0, "", fmt.Errorf("failed to create glusterfs REST client, REST URL is empty")
	}
	cli := gcli.NewClient(p.url, p.user, p.secretValue)
	if cli == nil {
		glog.Errorf("failed to create glusterfs REST client")
		return nil, 0, "", fmt.Errorf("failed to create glusterfs REST client, REST server authentication failed")
	}
	if p.provisionerConfig.clusterID != "" {
		clusterIDs = dstrings.Split(p.clusterID, ",")
		glog.V(4).Infof("provided clusterIDs %v", clusterIDs)
	}

	if p.provisionerConfig.volumeNamePrefix != "" {
		customVolumeName = fmt.Sprintf("%s_%s_%s_%s", p.provisionerConfig.volumeNamePrefix, p.options.PVC.Namespace, p.options.PVC.Name, uuid.NewUUID())
	}

	gid64 := int64(gid)
	snaps := struct {
		Enable bool    `json:"enable"`
		Factor float32 `json:"factor"`
	}{
		true,
		p.provisionerConfig.thinPoolSnapFactor,
	}

	volumeReq := &gapi.VolumeCreateRequest{Size: sz, Name: customVolumeName, Clusters: clusterIDs, Gid: gid64, Durability: p.volumeType, GlusterVolumeOptions: p.volumeOptions, Snapshot: snaps}

	volume, err := cli.VolumeCreate(volumeReq)
	if err != nil {
		glog.Errorf("failed to create volume: %v", err)
		return nil, 0, "", fmt.Errorf("failed to create volume: %v", err)
	}
	glog.V(1).Infof("volume with size %d and name %s created", volume.Size, volume.Name)
	volID = volume.Id

	dynamicHostIps, err := getClusterNodes(cli, volume.Cluster)
	if err != nil {
		glog.Errorf("failed to get cluster nodes for volume %s: %v", volume, err)
		return nil, 0, "", fmt.Errorf("failed to get cluster nodes for volume %s: %v", volume, err)
	}

	// The 'endpointname' is created in form of 'glusterfs-dynamic-<claimname>'.
	// createEndpointService() checks for this 'endpoint' existence in PVC's namespace and
	// If not found, it create an endpoint and service using the IPs we dynamically picked at time
	// of volume creation.
	epServiceName := dynamicEpSvcPrefix + p.options.PVC.Name
	epNamespace := p.options.PVC.Namespace
	endpoint, service, err := p.createEndpointService(epNamespace, epServiceName, dynamicHostIps, p.options.PVC.Name)
	if err != nil {
		glog.Errorf("failed to create endpoint/service %v/%v: %v", epNamespace, epServiceName, err)
		deleteErr := cli.VolumeDelete(volume.Id)
		if deleteErr != nil {
			glog.Errorf("failed to delete volume: %v, manual deletion of the volume required", deleteErr)
		}
		return nil, 0, "", fmt.Errorf("failed to create endpoint/service %v/%v: %v", epNamespace, epServiceName, err)
	}
	glog.V(3).Infof("dynamic endpoint %v and service %v ", endpoint, service)
	return &v1.GlusterfsVolumeSource{
		EndpointsName: endpoint.Name,
		Path:          volume.Name,
		ReadOnly:      false,
	}, sz, volID, nil
}

func (p *glusterfsVolumeProvisioner) createEndpointService(namespace string, epServiceName string, hostips []string, pvcname string) (endpoint *v1.Endpoints, service *v1.Service, err error) {
	addrlist := make([]v1.EndpointAddress, len(hostips))
	for i, v := range hostips {
		addrlist[i].IP = v
	}
	endpoint = &v1.Endpoints{
		ObjectMeta: metav1.ObjectMeta{
			Namespace: namespace,
			Name:      epServiceName,
			Labels: map[string]string{
				"gluster.kubernetes.io/provisioned-for-pvc": pvcname,
			},
		},
		Subsets: []v1.EndpointSubset{{
			Addresses: addrlist,
			Ports:     []v1.EndpointPort{{Port: 1, Protocol: "TCP"}},
		}},
	}
	kubeClient := p.plugin.host.GetKubeClient()
	if kubeClient == nil {
		return nil, nil, fmt.Errorf("failed to get kube client when creating endpoint service")
	}
	_, err = kubeClient.CoreV1().Endpoints(namespace).Create(endpoint)
	if err != nil && errors.IsAlreadyExists(err) {
		glog.V(1).Infof("endpoint %s already exist in namespace %s", endpoint, namespace)
		err = nil<|fim▁hole|>
		return nil, nil, fmt.Errorf("failed to create endpoint: %v", err)
	}
	service = &v1.Service{
		ObjectMeta: metav1.ObjectMeta{
			Name:      epServiceName,
			Namespace: namespace,
			Labels: map[string]string{
				"gluster.kubernetes.io/provisioned-for-pvc": pvcname,
			},
		},
		Spec: v1.ServiceSpec{
			Ports: []v1.ServicePort{
				{Protocol: "TCP", Port: 1}}}}
	_, err = kubeClient.CoreV1().Services(namespace).Create(service)
	if err != nil && errors.IsAlreadyExists(err) {
		glog.V(1).Infof("service %s already exist in namespace %s", service, namespace)
		err = nil
	}
	if err != nil {
		glog.Errorf("failed to create service: %v", err)
		return nil, nil, fmt.Errorf("error creating service: %v", err)
	}
	return endpoint, service, nil
}

func (d *glusterfsVolumeDeleter) deleteEndpointService(namespace string, epServiceName string) (err error) {
	kubeClient := d.plugin.host.GetKubeClient()
	if kubeClient == nil {
		return fmt.Errorf("failed to get kube client when deleting endpoint service")
	}
	err = kubeClient.CoreV1().Services(namespace).Delete(epServiceName, nil)
	if err != nil {
		glog.Errorf("failed to delete service %s/%s: %v", namespace, epServiceName, err)
		return fmt.Errorf("failed to delete service %s/%s: %v", namespace, epServiceName, err)
	}
	glog.V(1).Infof("service/endpoint: %s/%s deleted successfully", namespace, epServiceName)
	return nil
}

// parseSecret finds a given Secret instance and reads user password from it.
func parseSecret(namespace, secretName string, kubeClient clientset.Interface) (string, error) {
	secretMap, err := volutil.GetSecretForPV(namespace, secretName, glusterfsPluginName, kubeClient)
	if err != nil {
		glog.Errorf("failed to get secret: %s/%s: %v", namespace, secretName, err)
		return "", fmt.Errorf("failed to get secret %s/%s: %v", namespace, secretName, err)
	}
	if len(secretMap) == 0 {
		return "", fmt.Errorf("empty secret map")
	}
	secret := ""
	for k, v := range secretMap {
		if k == secretKeyName {
			return v, nil
		}
		secret = v
	}

	// If not found, the last secret in the map wins as done before
	return secret, nil
}

// getClusterNodes() returns the cluster nodes of a given cluster
func getClusterNodes(cli *gcli.Client, cluster string) (dynamicHostIps []string, err error) {
	clusterinfo, err := cli.ClusterInfo(cluster)
	if err != nil {
		glog.Errorf("failed to get cluster details: %v", err)
		return nil, fmt.Errorf("failed to get cluster details: %v", err)
	}

	// For the dynamically provisioned volume, we gather the list of node IPs
	// of the cluster on which provisioned volume belongs to, as there can be multiple
	// clusters.
	for _, node := range clusterinfo.Nodes {
		nodeInfo, err := cli.NodeInfo(string(node))
		if err != nil {
			glog.Errorf("failed to get host ipaddress: %v", err)
			return nil, fmt.Errorf("failed to get host ipaddress: %v", err)
		}
		ipaddr := dstrings.Join(nodeInfo.NodeAddRequest.Hostnames.Storage, "")
		dynamicHostIps = append(dynamicHostIps, ipaddr)
	}
	glog.V(3).Infof("host list :%v", dynamicHostIps)
	if len(dynamicHostIps) == 0 {
		glog.Errorf("no hosts found: %v", err)
		return nil, fmt.Errorf("no hosts found: %v", err)
	}
	return dynamicHostIps, nil
}

// parseClassParameters parses StorageClass parameters.
func parseClassParameters(params map[string]string, kubeClient clientset.Interface) (*provisionerConfig, error) {
	var cfg provisionerConfig
	var err error
	cfg.gidMin = defaultGidMin
	cfg.gidMax = defaultGidMax

	authEnabled := true
	parseVolumeType := ""
	parseVolumeOptions := ""
	parseVolumeNamePrefix := ""
	parseThinPoolSnapFactor := ""

	//thin pool snap factor default to 1.0
	cfg.thinPoolSnapFactor = float32(1.0)

	for k, v := range params {
		switch dstrings.ToLower(k) {
		case "resturl":
			cfg.url = v
		case "restuser":
			cfg.user = v
		case "restuserkey":
			cfg.userKey = v
		case "secretname":
			cfg.secretName = v
		case "secretnamespace":
			cfg.secretNamespace = v
		case "clusterid":
			if len(v) != 0 {
				cfg.clusterID = v
			}
		case "restauthenabled":
			authEnabled = dstrings.ToLower(v) == "true"
		case "gidmin":
			parseGidMin, err := convertGid(v)
			if err != nil {
				return nil, fmt.Errorf("invalid gidMin value %q for volume plugin %s", k, glusterfsPluginName)
			}
			if parseGidMin < absoluteGidMin {
				return nil, fmt.Errorf("gidMin must be >= %v", absoluteGidMin)
			}
			if parseGidMin > absoluteGidMax {
				return nil, fmt.Errorf("gidMin must be <= %v", absoluteGidMax)
			}
			cfg.gidMin = parseGidMin
		case "gidmax":
			parseGidMax, err := convertGid(v)
			if err != nil {
				return nil, fmt.Errorf("invalid gidMax value %q for volume plugin %s", k, glusterfsPluginName)
			}
			if parseGidMax < absoluteGidMin {
				return nil, fmt.Errorf("gidMax must be >= %v", absoluteGidMin)
			}
			if parseGidMax > absoluteGidMax {
				return nil, fmt.Errorf("gidMax must be <= %v", absoluteGidMax)
			}
			cfg.gidMax = parseGidMax
		case "volumetype":
			parseVolumeType = v
		case "volumeoptions":
			if len(v) != 0 {
				parseVolumeOptions = v
			}
		case "volumenameprefix":
			if len(v) != 0 {
				parseVolumeNamePrefix = v
			}
		case "snapfactor":
			if len(v) != 0 {
				parseThinPoolSnapFactor = v
			}
		default:
			return nil, fmt.Errorf("invalid option %q for volume plugin %s", k, glusterfsPluginName)
		}
	}

	if len(cfg.url) == 0 {
		return nil, fmt.Errorf("StorageClass for provisioner %s must contain 'resturl' parameter", glusterfsPluginName)
	}

	if len(parseVolumeType) == 0 {
		cfg.volumeType = gapi.VolumeDurabilityInfo{Type: gapi.DurabilityReplicate, Replicate: gapi.ReplicaDurability{Replica: replicaCount}}
	} else {
		parseVolumeTypeInfo := dstrings.Split(parseVolumeType, ":")

		switch parseVolumeTypeInfo[0] {
		case "replicate":
			if len(parseVolumeTypeInfo) >= 2 {
				newReplicaCount, err := convertVolumeParam(parseVolumeTypeInfo[1])
				if err != nil {
					return nil, fmt.Errorf("error parsing volumeType %q: %s", parseVolumeTypeInfo[1], err)
				}
				cfg.volumeType = gapi.VolumeDurabilityInfo{Type: gapi.DurabilityReplicate, Replicate: gapi.ReplicaDurability{Replica: newReplicaCount}}
			} else {
				cfg.volumeType = gapi.VolumeDurabilityInfo{Type: gapi.DurabilityReplicate, Replicate: gapi.ReplicaDurability{Replica: replicaCount}}
			}
		case "disperse":
			if len(parseVolumeTypeInfo) >= 3 {
				newDisperseData, err := convertVolumeParam(parseVolumeTypeInfo[1])
				if err != nil {
					return nil, fmt.Errorf("error parsing volumeType %q: %s", parseVolumeTypeInfo[1], err)
				}
				newDisperseRedundancy, err := convertVolumeParam(parseVolumeTypeInfo[2])
				if err != nil {
					return nil, fmt.Errorf("error parsing volumeType %q: %s", parseVolumeTypeInfo[2], err)
				}
				cfg.volumeType = gapi.VolumeDurabilityInfo{Type: gapi.DurabilityEC, Disperse: gapi.DisperseDurability{Data: newDisperseData, Redundancy: newDisperseRedundancy}}
			} else {
				return nil, fmt.Errorf("StorageClass for provisioner %q must have data:redundancy count set for disperse volumes in storage class option '%s'", glusterfsPluginName, "volumetype")
			}
		case "none":
			cfg.volumeType = gapi.VolumeDurabilityInfo{Type: gapi.DurabilityDistributeOnly}
		default:
			return nil, fmt.Errorf("error parsing value for option 'volumetype' for volume plugin %s", glusterfsPluginName)
		}
	}

	if !authEnabled {
		cfg.user = ""
		cfg.secretName = ""
		cfg.secretNamespace = ""
		cfg.userKey = ""
		cfg.secretValue = ""
	}

	if len(cfg.secretName) != 0 || len(cfg.secretNamespace) != 0 {
		// secretName + Namespace has precedence over userKey
		if len(cfg.secretName) != 0 && len(cfg.secretNamespace) != 0 {
			cfg.secretValue, err = parseSecret(cfg.secretNamespace, cfg.secretName, kubeClient)
			if err != nil {
				return nil, err
			}
		} else {
			return nil, fmt.Errorf("StorageClass for provisioner %q must have secretNamespace and secretName either both set or both empty", glusterfsPluginName)
		}
	} else {
		cfg.secretValue = cfg.userKey
	}

	if cfg.gidMin > cfg.gidMax {
		return nil, fmt.Errorf("StorageClass for provisioner %q must have gidMax value >= gidMin", glusterfsPluginName)
	}

	if len(parseVolumeOptions) != 0 {
		volOptions := dstrings.Split(parseVolumeOptions, ",")
		if len(volOptions) == 0 {
			return nil, fmt.Errorf("StorageClass for provisioner %q must have valid (for e.g., 'client.ssl on') volume option", glusterfsPluginName)
		}
		cfg.volumeOptions = volOptions
	}

	if len(parseVolumeNamePrefix) != 0 {
		if dstrings.Contains(parseVolumeNamePrefix, "_") {
			return nil, fmt.Errorf("Storageclass parameter 'volumenameprefix' should not contain '_' in its value")
		}
		cfg.volumeNamePrefix = parseVolumeNamePrefix
	}

	if len(parseThinPoolSnapFactor) != 0 {
		thinPoolSnapFactor, err := strconv.ParseFloat(parseThinPoolSnapFactor, 32)
		if err != nil {
			return nil, fmt.Errorf("failed to convert snapfactor %v to float: %v", parseThinPoolSnapFactor, err)
		}
		if thinPoolSnapFactor < 1.0 || thinPoolSnapFactor > 100.0 {
			return nil, fmt.Errorf("invalid snapshot factor %v, the value must be between 1 to 100", thinPoolSnapFactor)
		}
		cfg.thinPoolSnapFactor = float32(thinPoolSnapFactor)
	}
	return &cfg, nil
}

// getVolumeID returns volumeID from the PV or volumename.
func getVolumeID(pv *v1.PersistentVolume, volumeName string) (string, error) {
	volumeID := ""

	// Get volID from pvspec if available, else fill it from volumename.
	if pv != nil {
		if pv.Annotations[heketiVolIDAnn] != "" {
			volumeID = pv.Annotations[heketiVolIDAnn]
		} else {
			volumeID = dstrings.TrimPrefix(volumeName, volPrefix)
		}
	} else {
		return volumeID, fmt.Errorf("provided PV spec is nil")
	}
	if volumeID == "" {
		return volumeID, fmt.Errorf("volume ID is empty")
	}
	return volumeID, nil
}

func (plugin *glusterfsPlugin) ExpandVolumeDevice(spec *volume.Spec, newSize resource.Quantity, oldSize resource.Quantity) (resource.Quantity, error) {
	pvSpec := spec.PersistentVolume.Spec
	volumeName := pvSpec.Glusterfs.Path
	glog.V(2).Infof("Received request to expand volume %s", volumeName)
	volumeID, err := getVolumeID(spec.PersistentVolume, volumeName)
	if err != nil {
		return oldSize, fmt.Errorf("failed to get volumeID for volume %s: %v", volumeName, err)
	}

	//Get details of StorageClass.
	class, err := volutil.GetClassForVolume(plugin.host.GetKubeClient(), spec.PersistentVolume)
	if err != nil {
		return oldSize, err
	}
	cfg, err := parseClassParameters(class.Parameters, plugin.host.GetKubeClient())
	if err != nil {
		return oldSize, err
	}

	glog.V(4).Infof("expanding volume: %q", volumeID)

	//Create REST server connection
	cli := gcli.NewClient(cfg.url, cfg.user, cfg.secretValue)
	if cli == nil {
		glog.Errorf("failed to create glusterfs REST client")
		return oldSize, fmt.Errorf("failed to create glusterfs REST client, REST server authentication failed")
	}

	// Find out delta size
	expansionSize := (newSize.Value() - oldSize.Value())
	expansionSizeGiB := int(volutil.RoundUpSize(expansionSize, volutil.GIB))

	// Find out requested Size
	requestGiB := volutil.RoundUpToGiB(newSize)

	//Check the existing volume size
	currentVolumeInfo, err := cli.VolumeInfo(volumeID)
	if err != nil {
		glog.Errorf("error when fetching details of volume %s: %v", volumeName, err)
		return oldSize, err
	}

	if int64(currentVolumeInfo.Size) >= requestGiB {
		return newSize, nil
	}

	// Make volume expansion request
	volumeExpandReq := &gapi.VolumeExpandRequest{Size: expansionSizeGiB}

	// Expand the volume
	volumeInfoRes, err := cli.VolumeExpand(volumeID, volumeExpandReq)
	if err != nil {
		glog.Errorf("failed to expand volume %s: %v", volumeName, err)
		return oldSize, err
	}

	glog.V(2).Infof("volume %s expanded to new size %d successfully", volumeName, volumeInfoRes.Size)
	newVolumeSize := resource.MustParse(fmt.Sprintf("%dGi", volumeInfoRes.Size))
	return newVolumeSize, nil
}<|fim▁end|>
	}
	if err != nil {
		glog.Errorf("failed to create endpoint: %v", err)
<|file_name|>day5.go<|end_file_name|><|fim▁begin|>package main

import (
	"crypto/md5"
	"flag"
	"fmt"
	"log"
	"os"
	"runtime/pprof"
	"strconv"
	"sync"
)

var prefix = []byte{0x6f, 0x6a, 0x76, 0x74, 0x70, 0x75, 0x76, 0x67}
var cpuprofile = flag.String("cpuprofile", "", "write cpu profile to file")

func find(l int) {
	cg := gen(1000000000)
	c1 := hash(cg)
	c2 := hash(cg)
	c3 := hash(cg)
	c4 := hash(cg)
	c5 := hash(cg)
	c6 := hash(cg)
	c7 := hash(cg)
	c8 := hash(cg)
	c := merge(c1, c2, c3, c4, c5, c6, c7, c8)

	var p1 uint32
	var p2 uint32
	var p1s uint32
	var p2m uint32
	p1s = 32
	p2m = 0xff

	for hash := range c {
		six := uint32(hash[2] & 0xf)
		seven := uint32(hash[3] >> 4)
		if p1s != 0 {
			p1s -= 4
			p1 |= six << p1s
		}
		if p2m&(1<<six) != 0x00 {
			p2m ^= 1 << six
			p2 |= seven << ((7 - six) << 2)
		}
		fmt.Printf("part one: %08x part two: %08x\r", p1, p2)
		if p1s == 0 && p2m == 0 {
			break
		}
	}
	fmt.Printf("part one: %08x part two: %08x\n", p1, p2)
}

func gen(max int64) <-chan int64 {
	out := make(chan int64)
	go func() {
		var i int64
		for i = 0; i < max; i++ {
			out <- i
		}
		close(out)
	}()
	return out
}

func hash(in <-chan int64) <-chan []byte {
	out := make(chan []byte)
	go func() {
		for i := range in {
			m := md5.Sum(strconv.AppendInt(prefix, i, 10))
			if m[0] == 0x00 && m[1] == 0x00 && (m[2]&0xf0 == 0x00) {
				out <- m[:]
			}
		}
		close(out)
	}()
	return out
}

func main() {
	flag.Parse()
	if *cpuprofile != "" {
		f, err := os.Create(*cpuprofile)
		if err != nil {
			log.Fatal(err)
		}
		pprof.StartCPUProfile(f)
		defer pprof.StopCPUProfile()
	}
	find(5)
}

func merge(cs ...<-chan []byte) <-chan []byte {<|fim▁hole|>
	// copies values from c to out until c is closed, then calls wg.Done.
	output := func(c <-chan []byte) {
		for n := range c {
			out <- n
		}
		wg.Done()
	}
	wg.Add(len(cs))
	for _, c := range cs {
		go output(c)
	}

	// Start a goroutine to close out once all the output goroutines are
	// done. This must start after the wg.Add call.
	go func() {
		wg.Wait()
		close(out)
	}()
	return out
}<|fim▁end|>
	var wg sync.WaitGroup
	out := make(chan []byte, 2)

	// Start an output goroutine for each input channel in cs. output
<|file_name|>bayesian.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 Johannes Köster.
// Licensed under the MIT license (http://opensource.org/licenses/MIT)
// This file may not be copied, modified, or distributed
// except according to those terms.

//! Utilities for Bayesian statistics.

use itertools::Itertools;
use ordered_float::OrderedFloat;

use stats::LogProb;

/// For each of the hypothesis tests given as posterior error probabilities
/// (PEPs, i.e. the posterior probability of the null hypothesis), estimate the FDR
/// for the case that all null hypotheses with at most this PEP are rejected.
/// FDR is calculated as presented by Müller, Parmigiani, and Rice,
/// "FDR and Bayesian Multiple Comparisons Rules" (July 2006).
/// Johns Hopkin's University, Dept. of Biostatistics Working Papers. Working Paper 115.
///
/// # Returns
///<|fim▁hole|>
        (0..peps.len()).sorted_by(|&i, &j| OrderedFloat(*peps[i]).cmp(&OrderedFloat(*peps[j])));

    // estimate FDR
    let mut expected_fdr = vec![LogProb::ln_zero(); peps.len()];
    for (i, expected_fp) in LogProb::ln_cumsum_exp(sorted_idx.iter().map(|&i| peps[i])).enumerate() {
        let fdr = LogProb(*expected_fp - ((i + 1) as f64).ln());
        expected_fdr[i] = if fdr <= LogProb::ln_one() {
            fdr
        } else {
            LogProb::ln_one()
        };
    }

    expected_fdr
}

#[cfg(test)]
mod tests {
    use super::*;
    use stats::LogProb;

    #[test]
    fn test_expected_fdr() {
        let peps = [
            LogProb(0.1f64.ln()),
            LogProb::ln_zero(),
            LogProb(0.25f64.ln()),
        ];
        let fdrs = expected_fdr(&peps);
        println!("{:?}", fdrs);
        assert_relative_eq!(*fdrs[1], *LogProb::ln_zero());
        assert_relative_eq!(*fdrs[0], *LogProb(0.05f64.ln()));
        assert_relative_eq!(*fdrs[2], *LogProb((0.35 / 3.0f64).ln()));
    }
}<|fim▁end|>
/// A vector of expected FDRs in the same order as the given PEPs.
pub fn expected_fdr(peps: &[LogProb]) -> Vec<LogProb> {
    // sort indices
    let sorted_idx =
<|file_name|>analyzer.py<|end_file_name|><|fim▁begin|># Copyright (C) 2019 Fassio Blatter
from stopeight import analyzer
version=analyzer.version

from stopeight.util.editor.data import ScribbleData

<|fim▁hole|>
    from stopeight.matrix import Vectors
    from stopeight.analyzer import legal_segments
    return legal_segments(Vectors(data)).__array__().view(ScribbleData)
legal_segments.__annotations__ = {'data':ScribbleData,'return':ScribbleData}<|fim▁end|>
def legal_segments(data):
<|file_name|>cell-does-not-clone.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::cell::Cell;

#[derive(Copy)]
struct Foo {
    x: int
}

impl Clone for Foo {
    fn clone(&self) -> Foo {
        // Using Cell in any way should never cause clone() to be<|fim▁hole|>
        // invoked -- after all, that would permit evil user code to
        // abuse `Cell` and trigger crashes.
        panic!();
    }
}

pub fn main() {
    let x = Cell::new(Foo { x: 22 });
    let _y = x.get();
    let _z = x.clone();
}<|fim▁end|>
<|file_name|>TestRbacDeprecated.java<|end_file_name|><|fim▁begin|>/*
 * Copyright (c) 2010-2017 Evolveum
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.evolveum.midpoint.model.intest.rbac;

import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.annotation.DirtiesContext.ClassMode;
import org.springframework.test.context.ContextConfiguration;

import java.io.File;

/**
 * @author semancik
 *
 */
@ContextConfiguration(locations = {"classpath:ctx-model-intest-test-main.xml"})
@DirtiesContext(classMode = ClassMode.AFTER_CLASS)
public class TestRbacDeprecated extends TestRbac {

    protected static final File ROLE_GOVERNOR_DEPRECATED_FILE = new File(TEST_DIR, "role-governor-deprecated.xml");
    protected static final File ROLE_CANNIBAL_DEPRECATED_FILE = new File(TEST_DIR, "role-cannibal-deprecated.xml");

    @Override
    protected File getRoleGovernorFile() {<|fim▁hole|>
        return ROLE_GOVERNOR_DEPRECATED_FILE;
    }

    @Override
    protected File getRoleCannibalFile() {
        return ROLE_CANNIBAL_DEPRECATED_FILE;
    }

    @Override
    protected boolean testMultiplicityConstraintsForNonDefaultRelations() {
        return false;
    }
}<|fim▁end|>
<|file_name|>libaio.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import os
from autotest.client import utils

version = 1


def setup(tarball, topdir):
    srcdir = os.path.join(topdir, 'src')
    utils.extract_tarball_to_dir(tarball, srcdir)
    os.chdir(srcdir)
    utils.system('patch -p1 < ../00_arches.patch')
    utils.make()
    utils.make('prefix=%s install' % topdir)
    os.chdir(topdir)

# old source was
# http://www.kernel.org/pub/linux/kernel/people/bcrl/aio/libaio-0.3.92.tar.bz2<|fim▁hole|>
pwd = os.getcwd()
tarball = os.path.join(pwd, 'libaio_0.3.106.orig.tar.gz')
utils.update_version(pwd+'/src', False, version, setup, tarball, pwd)<|fim▁end|>
# now grabbing from debian
# http://ftp.debian.org/debian/pool/main/liba/libaio/libaio_0.3.106.orig.tar.gz
# architecture patch from here
# http://git.hadrons.org/?p=debian/pkgs/libaio.git;a=tree;f=debian/patches
<|file_name|>client.js<|end_file_name|><|fim▁begin|>"use strict";

module.exports = {
  delegates: {
    im_too_lazy: function() {
      $C("button", {
        name: "my first button"
      }, function(btn) {
        $("#button_holder").empty(); // extra<|fim▁hole|>
        btn.prependTo($("#button_holder")); // alright, this is technically extra
        btn.$el.hide();
        btn.$el.fadeIn();
      });
    }
  }
};<|fim▁end|>
<|file_name|>FormCreateRequest.java<|end_file_name|><|fim▁begin|>/**
 * Copyright 2017, 2018, 2019, 2020 Stephen Powis https://github.com/Crim/pardot-java-client
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
 * documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
 * persons to whom the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
 * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
 * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

package com.darksci.pardot.api.request.form;

import com.darksci.pardot.api.request.BaseRequest;

/**
 * For creating new Forms using Pardot's API.
 */
public class FormCreateRequest extends BaseRequest<FormCreateRequest> {

    @Override
    public String getApiEndpoint() {
        return "form/do/create";
    }

    /**
     * Define the name of the form.
     * @param name The name of the form.
     * @return FormCreateRequest builder.
     */
    public FormCreateRequest withName(final String name) {
        setParam("name", name);
        return this;
    }

    /**
     * Associate form with a campaign.
     * @param campaignId Id of campaign to associate with form.
     * @return FormCreateRequest builder.
     */
    public FormCreateRequest withCampaignId(final Long campaignId) {
        setParam("campaign_id", campaignId);
        return this;
    }

    /**
     * Associate form with a layout template.
     * @param layoutTemplateId Id of layout template to associate with form.
     * @return FormCreateRequest builder.
     */
    public FormCreateRequest withLayoutTemplateId(final Long layoutTemplateId) {
        setParam("layout_template_id", layoutTemplateId);<|fim▁hole|>
        return this;
    }

    /**
     * Associate form with a folder.
     * @param folderId Id of folder to associate with form.
     * @return FormCreateRequest builder.
     */
    public FormCreateRequest withFolderId(final Long folderId) {
        setParam("folder_id", folderId);
        return this;
    }
}<|fim▁end|>
<|file_name|>action_log.py<|end_file_name|><|fim▁begin|># Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from nailgun.db import db
from nailgun.db.sqlalchemy import models
from nailgun import consts
from nailgun.objects import NailgunCollection
from nailgun.objects import NailgunObject
from nailgun.objects.serializers.action_log import ActionLogSerializer

<|fim▁hole|>
    #: SQLAlchemy model for ActionLog
    model = models.ActionLog

    #: Serializer for ActionLog
    serializer = ActionLogSerializer

    #: JSON schema for ActionLog
    schema = {
        "$schema": "http://json-schema.org/draft-04/schema#",
        "title": "ActionLog",
        "description": "Serialized ActionLog object",
        "type": "object",
        "properties": {
            "id": {"type": "number"},
            "actor_id": {"type": ["string", "null"]},
            "action_group": {"type": "string"},
            "action_name": {"type": "string"},
            "action_type": {
                "type": "string",
                "enum": list(consts.ACTION_TYPES)
            },
            "start_timestamp": {"type": "string"},
            "end_timestamp": {"type": "string"},
            "additional_info": {"type": "object"},
            "is_sent": {"type": "boolean"},
            "cluster_id": {"type": ["number", "null"]},
            "task_uuid": {"type": ["string", "null"]}
        }
    }

    @classmethod
    def update(cls, instance, data):
        """Form additional info for further instance update.

        Extend corresponding method of the parent class.

        Side effects:
        overrides already present keys of additional_info attribute
        of instance if this attribute is present in data argument

        :param instance: instance of ActionLog class that is processed
        :param data: dictionary containing keyword arguments for entity to be
        updated
        :return: returned by parent class method value
        """
        if data.get('additional_info'):
            add_info = dict(instance.additional_info)
            add_info.update(data['additional_info'])
            data['additional_info'] = add_info

        return super(ActionLog, cls).update(instance, data)

    @classmethod
    def get_by_kwargs(cls, **kwargs):
        """Get action_log entry by set of attributes values.

        :return: - matching instance of action_log entity
        """
        instance = db().query(models.ActionLog)\
            .filter_by(**kwargs)\
            .first()

        return instance


class ActionLogCollection(NailgunCollection):

    single = ActionLog<|fim▁end|>
class ActionLog(NailgunObject):
<|file_name|>destroy.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import click

from aeriscloud.cli.helpers import standard_options, Command


@click.command(cls=Command)
@standard_options(start_prompt=False)
def cli(box):<|fim▁hole|>
if __name__ == '__main__':
    cli()<|fim▁end|>
    """
    Destroy a box
    """
    box.destroy()
<|file_name|>test_analytic_dimension.py<|end_file_name|><|fim▁begin|># Copyright 2019 Ecosoft Co., Ltd (http://ecosoft.co.th/)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html)

from odoo.exceptions import ValidationError

from odoo.addons.analytic_tag_dimension.tests.test_analytic_dimension import (
    TestAnalyticDimensionBase,
)


class TestAnalyticDimensionCase(TestAnalyticDimensionBase):
    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.account_obj = cls.env["account.account"]
        cls.model_obj = cls.env["ir.model"]
        cls.field_obj = cls.env["ir.model.fields"]
        cls.invoice = cls.env["account.move"].create(
            {"journal_id": cls.journal.id, "partner_id": cls.partner.id}
        )<|fim▁hole|>
            "name": "A",
            "line_ids": [
                # use sequence as record identifier
                (0, 0, {"value": "percent", "sequence": 1001}),
                (0, 0, {"value": "balance", "sequence": 1002}),
            ],
        }
        cls.payterm_a = cls.env["account.payment.term"].create(vals)

    def test_invoice_line_dimension_required(self):
        """If dimension is marked as required, I expect error on save
        if the required dimension is not selected
        """
        self.dimension_1.required = True
        self.dimension_2.required = True
        values = {
            "name": "test",
            "price_unit": 1,
            "account_id": self.account.id,
            "move_id": self.invoice.id,
            "analytic_account_id": self.analytic_account.id,
            "analytic_tag_ids": [(6, 0, [self.analytic_tag_1a.id])],
        }
        invoice_line_obj = self.env["account.move.line"]
        # Error if missing required dimension
        with self.assertRaises(ValidationError):
            invoice_line_obj.create(values)
        self.invoice.invoice_line_ids.unlink()
        values["analytic_tag_ids"] = [
            (6, 0, [self.analytic_tag_1a.id, self.analytic_tag_2a.id])
        ]
        # Valid if all required dimension is filled
        line = invoice_line_obj.create(values)
        self.assertTrue(line.x_dimension_test_dim_1.id == self.analytic_tag_1a.id)
        self.assertTrue(line.x_dimension_test_dim_2.id == self.analytic_tag_2a.id)

    def test_invoice_line_dimension_by_sequence(self):
        """If dimension is by sequence, I expect,
        - No duplicated sequence
        - Selection allowed by sequence, i.e., Concept then Type
        """
        invoice_line_obj = self.env["account.move.line"]
        # Test no dimension with any sequence
        values = {
            "name": "test no sequence",
            "price_unit": 1,
            "account_id": self.account.id,
            "move_id": self.invoice.id,
            "analytic_account_id": self.analytic_account.id,
        }
        line = invoice_line_obj.create(values)
        res = line._compute_analytic_tags_domain()
        self.assertFalse(res["domain"]["analytic_tag_ids"])
        # Now, user will see tags in sequence 1) Type 2) Concept
        self.dimension_1.write({"required": False, "by_sequence": True, "sequence": 1})
        with self.assertRaises(ValidationError):
            self.dimension_2.write(
                {"required": False, "by_sequence": True, "sequence": 1}
            )
        self.dimension_2.write({"required": False, "by_sequence": True, "sequence": 2})
        # Now, user will see tags in sequence 1) Type 2) Concept
        values = {
            "name": "test sequence",
            "price_unit": 1,
            "account_id": self.account.id,
            "move_id": self.invoice.id,
            "analytic_account_id": self.analytic_account.id,
        }
        line = invoice_line_obj.create(values)
        # First selection, dimension 1 tag shouldn't be in the domain
        res = line._compute_analytic_tags_domain()
        tag_ids = res["domain"]["analytic_tag_ids"][0][2]
        self.assertNotIn(self.analytic_tag_2a.id, tag_ids)
        # Select a dimension 1 tag
        line.analytic_tag_ids += self.analytic_tag_1a
        res = line._compute_analytic_tags_domain()
        tag_ids = res["domain"]["analytic_tag_ids"][0][2]
        # Test that all dimension 1 tags are not in list
        type_tag_ids = [self.analytic_tag_1a.id, self.analytic_tag_1b.id]
        for type_tag_id in type_tag_ids:
            self.assertNotIn(type_tag_id, tag_ids)

    def test_zz_invoice_line_dimension_ref_model_with_filter(self):
        """
        For dimension tags created by ref model with by_sequence and filtered,
        We expected that,
        - If user select A, user can only select payment term line 1001, 1002
        Note: We use payment term and payment term line for testing purposes,
        although it does not make sense in real life #"""
        # It should be executed the last one for avoiding side effects
        # as not everything is undone in this removal
        # Clear all dimension
        self.tag_obj.search([]).unlink()
        self.dimension_obj.search([]).unlink()
        # Create new dimension, using reference model
        pt = self.model_obj.search([("model", "=", "account.payment.term")])
        pt_dimension = self.dimension_obj.create(
            {
                "name": "Payment Term",
                "code": "payterm",
                "by_sequence": True,
                "sequence": 1,
            }
        )
        pt_dimension.create_analytic_tags()
        # Test create without model
        pt_dimension.ref_model_id = pt
        pt_dimension.create_analytic_tags()
        ptl = self.model_obj.search([("model", "=", "account.payment.term.line")])
        # Payment term line will be filtered with payment_id
        ptl_dimension = self.dimension_obj.create(
            {
                "name": "Payment Term Line",
                "code": "payterm_line",
                "ref_model_id": ptl.id,
                "by_sequence": True,
                "sequence": 2,
            }
        )
        filter_field = self.field_obj.search(
            [("model_id", "=", ptl.id), ("name", "=", "payment_id")]
        )
        ptl_dimension.filtered_field_ids += filter_field
        ptl_dimension.create_analytic_tags()
        values = {
            "name": "test",
            "price_unit": 1,
            "account_id": self.account.id,
            "move_id": self.invoice.id,
            "analytic_account_id": self.analytic_account.id,
        }
        invoice_line_obj = self.env["account.move.line"]
        line = invoice_line_obj.create(values)
        tag = self.tag_obj.search([("name", "=", "A")])
        line.analytic_tag_ids += tag
        res = line._compute_analytic_tags_domain()
        # Test whether this will list only 2 tags of payment term line 1001, 1002
        tag_ids = res["domain"]["analytic_tag_ids"][0][2]
        tags = self.tag_obj.search([("id", "in", tag_ids)])
        sequences = [x.sequence for x in tags.mapped("resource_ref")]
        self.assertEqual({1001, 1002}, set(sequences))<|fim▁end|>
        # Mock data for testing model dimension, by_sequence with fitered
        vals = {
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # Copyright © 2012 - 2015 Michal Čihař <[email protected]> # # This file is part of Weblate <http://weblate.org/> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # import os import sys import binascii from smtplib import SMTPException from django.db import models from django.dispatch import receiver from django.conf import settings from django.contrib.auth.signals import user_logged_in from django.db.models.signals import post_save, post_migrate from django.utils.translation import ugettext_lazy as _ from django.contrib.auth.models import Group, User, Permission from django.utils import translation as django_translation from django.template.loader import render_to_string from django.core.mail import EmailMultiAlternatives, get_connection from django.utils.translation import LANGUAGE_SESSION_KEY from social.apps.django_app.default.models import UserSocialAuth from weblate.lang.models import Language from weblate.trans.site import get_site_url, get_site_domain from weblate.accounts.avatar import get_user_display from weblate.trans.util import report_error from weblate.trans.signals import user_pre_delete from weblate import VERSION from weblate.logger import LOGGER from weblate.appsettings import ANONYMOUS_USER_NAME, SITE_TITLE def send_mails(mails): """Sends multiple mails in single connection.""" try: connection = get_connection() connection.send_messages(mails) except SMTPException as error: LOGGER.error('Failed to send email: %s', error) report_error(error, sys.exc_info()) def get_author_name(user, email=True): """Returns formatted author name with email.""" # Get full name from database full_name = user.first_name # Use username if full name is empty if full_name == '': full_name = user.username # Add email if we are asked for it if not email: return full_name return '%s <%s>' % (full_name, user.email) def notify_merge_failure(subproject, error, status): ''' Notification on merge failure. ''' subscriptions = Profile.objects.subscribed_merge_failure( subproject.project, ) users = set() mails = [] for subscription in subscriptions: mails.append( subscription.notify_merge_failure(subproject, error, status) ) users.add(subscription.user_id) for owner in subproject.project.owners.all(): mails.append( owner.profile.notify_merge_failure( subproject, error, status ) ) # Notify admins mails.append( get_notification_email( 'en', 'ADMINS', 'merge_failure', subproject, { 'subproject': subproject, 'status': status, 'error': error, } ) ) send_mails(mails) def notify_new_string(translation): ''' Notification on new string to translate. 
''' mails = [] subscriptions = Profile.objects.subscribed_new_string( translation.subproject.project, translation.language ) for subscription in subscriptions: mails.append( subscription.notify_new_string(translation) ) send_mails(mails) def notify_new_language(subproject, language, user): ''' Notify subscribed users about new language requests ''' mails = [] subscriptions = Profile.objects.subscribed_new_language( subproject.project, user ) users = set() for subscription in subscriptions: mails.append( subscription.notify_new_language(subproject, language, user) ) users.add(subscription.user_id) for owner in subproject.project.owners.all(): mails.append( owner.profile.notify_new_language( subproject, language, user ) ) # Notify admins mails.append( get_notification_email( 'en', 'ADMINS', 'new_language', subproject, { 'language': language, 'user': user, }, user=user, ) ) send_mails(mails) def notify_new_translation(unit, oldunit, user): ''' Notify subscribed users about new translation ''' mails = [] subscriptions = Profile.objects.subscribed_any_translation( unit.translation.subproject.project, unit.translation.language, user ) for subscription in subscriptions: mails.append( subscription.notify_any_translation(unit, oldunit) ) send_mails(mails) def notify_new_contributor(unit, user): ''' Notify about new contributor. ''' mails = [] subscriptions = Profile.objects.subscribed_new_contributor( unit.translation.subproject.project, unit.translation.language, user ) for subscription in subscriptions: mails.append( subscription.notify_new_contributor( unit.translation, user ) ) send_mails(mails) def notify_new_suggestion(unit, suggestion, user): ''' Notify about new suggestion. ''' mails = [] subscriptions = Profile.objects.subscribed_new_suggestion( unit.translation.subproject.project, unit.translation.language, user ) for subscription in subscriptions: mails.append( subscription.notify_new_suggestion( unit.translation, suggestion, unit ) ) send_mails(mails) def notify_new_comment(unit, comment, user, report_source_bugs): ''' Notify about new comment. ''' mails = [] subscriptions = Profile.objects.subscribed_new_comment( unit.translation.subproject.project, comment.language, user ) for subscription in subscriptions: mails.append( subscription.notify_new_comment(unit, comment, user) ) # Notify upstream if comment.language is None and report_source_bugs != '': send_notification_email( 'en', report_source_bugs, 'new_comment', unit.translation, { 'unit': unit, 'comment': comment, 'subproject': unit.translation.subproject, }, user=user, ) send_mails(mails) def get_notification_email(language, email, notification, translation_obj=None, context=None, headers=None, user=None, info=None): ''' Renders notification email. 
''' cur_language = django_translation.get_language() context = context or {} headers = headers or {} references = None if 'unit' in context: unit = context['unit'] references = '{0}/{1}/{2}/{3}'.format( unit.translation.subproject.project.slug, unit.translation.subproject.slug, unit.translation.language.code, unit.id ) if references is not None: references = '<{0}@{1}>'.format(references, get_site_domain()) headers['In-Reply-To'] = references headers['References'] = references try: if info is None: info = translation_obj.__unicode__() LOGGER.info( 'sending notification %s on %s to %s', notification, info, email ) # Load user language if language is not None: django_translation.activate(language) # Template name context['subject_template'] = 'mail/{}_subject.txt'.format( notification ) # Adjust context context['current_site_url'] = get_site_url() if translation_obj is not None: context['translation'] = translation_obj context['translation_url'] = get_site_url( translation_obj.get_absolute_url() ) context['site_title'] = SITE_TITLE # Render subject subject = render_to_string( context['subject_template'], context ).strip() # Render body body = render_to_string( 'mail/{}.txt'.format(notification), context ) html_body = render_to_string( 'mail/{}.html'.format(notification), context ) # Define headers headers['Auto-Submitted'] = 'auto-generated' headers['X-AutoGenerated'] = 'yes' headers['Precedence'] = 'bulk' headers['X-Mailer'] = 'Weblate {}'.format(VERSION) # Reply to header if user is not None: headers['Reply-To'] = user.email # List of recipients if email == 'ADMINS': emails = [a[1] for a in settings.ADMINS] else: emails = [email] # Create message email = EmailMultiAlternatives( settings.EMAIL_SUBJECT_PREFIX + subject, body, to=emails, headers=headers, ) email.attach_alternative( html_body, 'text/html' ) # Return the mail return email finally: django_translation.activate(cur_language) def send_notification_email(language, email, notification, translation_obj=None, context=None, headers=None, user=None, info=None): ''' Renders and sends notification email. ''' email = get_notification_email( language, email, notification, translation_obj, context, headers, user, info ) send_mails([email]) class VerifiedEmail(models.Model): ''' Storage for verified emails from auth backends. ''' social = models.ForeignKey(UserSocialAuth) email = models.EmailField(max_length=254) def __unicode__(self): return u'{0} - {1}'.format( self.social.user.username, self.email ) class ProfileManager(models.Manager): ''' Manager providing shortcuts for subscription queries. 
''' # pylint: disable=W0232 def subscribed_any_translation(self, project, language, user): return self.filter( subscribe_any_translation=True, subscriptions=project, languages=language ).exclude( user=user ) def subscribed_new_language(self, project, user): return self.filter( subscribe_new_language=True, subscriptions=project, ).exclude( user=user ) def subscribed_new_string(self, project, language): return self.filter( subscribe_new_string=True, subscriptions=project, languages=language ) def subscribed_new_suggestion(self, project, language, user): ret = self.filter( subscribe_new_suggestion=True, subscriptions=project, languages=language ) # We don't want to filter out anonymous user if user is not None and user.is_authenticated(): ret = ret.exclude(user=user) return ret def subscribed_new_contributor(self, project, language, user): return self.filter( subscribe_new_contributor=True, subscriptions=project, languages=language ).exclude( user=user ) def subscribed_new_comment(self, project, language, user): ret = self.filter( subscribe_new_comment=True, subscriptions=project ).exclude( user=user ) # Source comments go to every subscriber if language is not None: ret = ret.filter(languages=language) return ret def subscribed_merge_failure(self, project): return self.filter(subscribe_merge_failure=True, subscriptions=project) class Profile(models.Model): ''' User profiles storage. ''' user = models.OneToOneField(User, unique=True, editable=False) language = models.CharField( verbose_name=_(u"Interface Language"), max_length=10, choices=settings.LANGUAGES ) languages = models.ManyToManyField( Language, verbose_name=_('Translated languages'), blank=True, help_text=_('Choose languages to which you can translate.') ) secondary_languages = models.ManyToManyField( Language, verbose_name=_('Secondary languages'), related_name='secondary_profile_set', blank=True, ) suggested = models.IntegerField(default=0, db_index=True) translated = models.IntegerField(default=0, db_index=True) hide_completed = models.BooleanField( verbose_name=_('Hide completed translations on dashboard'), default=False ) secondary_in_zen = models.BooleanField( verbose_name=_('Show secondary translations in zen mode'), default=True ) hide_source_secondary = models.BooleanField( verbose_name=_('Hide source if there is secondary language'), default=False ) subscriptions = models.ManyToManyField( 'trans.Project', verbose_name=_('Subscribed projects'), blank=True, ) subscribe_any_translation = models.BooleanField( verbose_name=_('Notification on any translation'), default=False ) subscribe_new_string = models.BooleanField( verbose_name=_('Notification on new string to translate'), default=False ) subscribe_new_suggestion = models.BooleanField( verbose_name=_('Notification on new suggestion'), default=False ) subscribe_new_contributor = models.BooleanField( verbose_name=_('Notification on new contributor'), default=False ) subscribe_new_comment = models.BooleanField( verbose_name=_('Notification on new comment'), default=False ) subscribe_merge_failure = models.BooleanField( verbose_name=_('Notification on merge failure'), default=False ) subscribe_new_language = models.BooleanField( verbose_name=_('Notification on new language request'), default=False ) SUBSCRIPTION_FIELDS = ( 'subscribe_any_translation', 'subscribe_new_string', 'subscribe_new_suggestion', 'subscribe_new_contributor', 'subscribe_new_comment', 'subscribe_merge_failure', 'subscribe_new_language', ) objects = ProfileManager() def __unicode__(self): return 
self.user.username def get_user_display(self): return get_user_display(self.user) def get_user_display_link(self): return get_user_display(self.user, True, True) def get_user_name(self): return get_user_display(self.user, False) @models.permalink def get_absolute_url(self): return ('user_page', (), { 'user': self.user.username }) @property def last_change(self): ''' Returns date of last change user has done in Weblate. ''' try: return self.user.change_set.all()[0].timestamp except IndexError: return None def notify_user(self, notification, translation_obj, context=None, headers=None, user=None): ''' Wrapper for sending notifications to user. ''' if context is None: context = {} if headers is None: headers = {} # Check whether user is still allowed to access this project if not translation_obj.has_acl(self.user): return # Generate notification return get_notification_email( self.language, self.user.email, notification, translation_obj,<|fim▁hole|> context, headers, user=user ) def notify_any_translation(self, unit, oldunit): ''' Sends notification on translation. ''' if oldunit.translated: template = 'changed_translation' else: template = 'new_translation' return self.notify_user( template, unit.translation, { 'unit': unit, 'oldunit': oldunit, } ) def notify_new_language(self, subproject, language, user): ''' Sends notification on new language request. ''' return self.notify_user( 'new_language', subproject, { 'language': language, 'user': user, }, user=user ) def notify_new_string(self, translation): ''' Sends notification on new strings to translate. ''' return self.notify_user( 'new_string', translation, ) def notify_new_suggestion(self, translation, suggestion, unit): ''' Sends notification on new suggestion. ''' return self.notify_user( 'new_suggestion', translation, { 'suggestion': suggestion, 'unit': unit, } ) def notify_new_contributor(self, translation, user): ''' Sends notification on new contributor. ''' return self.notify_user( 'new_contributor', translation, { 'user': user, } ) def notify_new_comment(self, unit, comment, user): ''' Sends notification about new comment. ''' return self.notify_user( 'new_comment', unit.translation, { 'unit': unit, 'comment': comment, 'subproject': unit.translation.subproject, }, user=user, ) def notify_merge_failure(self, subproject, error, status): ''' Sends notification on merge failure. ''' return self.notify_user( 'merge_failure', subproject, { 'subproject': subproject, 'error': error, 'status': status, } ) @property def full_name(self): ''' Returns user's full name. ''' return self.user.first_name def set_lang(request, profile): """ Sets session language based on user preferences. """ request.session[LANGUAGE_SESSION_KEY] = profile.language @receiver(user_logged_in) def post_login_handler(sender, request, user, **kwargs): ''' Signal handler for setting user language and migrating profile if needed. 
''' # Warning about setting password if (getattr(user, 'backend', '').endswith('.EmailAuth') and not user.has_usable_password()): request.session['show_set_password'] = True # Ensure user has a profile profile = Profile.objects.get_or_create(user=user)[0] # Migrate django-registration based verification to python-social-auth if (user.has_usable_password() and not user.social_auth.filter(provider='email').exists()): social = user.social_auth.create( provider='email', uid=user.email, ) VerifiedEmail.objects.create( social=social, email=user.email, ) # Set language for session based on preferences set_lang(request, profile) def create_groups(update): ''' Creates standard groups and gives them permissions. ''' guest_group, created = Group.objects.get_or_create(name='Guests') if created or update: guest_group.permissions.add( Permission.objects.get(codename='can_see_git_repository'), Permission.objects.get(codename='add_suggestion'), ) group, created = Group.objects.get_or_create(name='Users') if created or update: group.permissions.add( Permission.objects.get(codename='upload_translation'), Permission.objects.get(codename='overwrite_translation'), Permission.objects.get(codename='save_translation'), Permission.objects.get(codename='save_template'), Permission.objects.get(codename='accept_suggestion'), Permission.objects.get(codename='delete_suggestion'), Permission.objects.get(codename='vote_suggestion'), Permission.objects.get(codename='ignore_check'), Permission.objects.get(codename='upload_dictionary'), Permission.objects.get(codename='add_dictionary'), Permission.objects.get(codename='change_dictionary'), Permission.objects.get(codename='delete_dictionary'), Permission.objects.get(codename='lock_translation'), Permission.objects.get(codename='can_see_git_repository'), Permission.objects.get(codename='add_comment'), Permission.objects.get(codename='add_suggestion'), Permission.objects.get(codename='use_mt'), ) owner_permissions = ( Permission.objects.get(codename='author_translation'), Permission.objects.get(codename='upload_translation'), Permission.objects.get(codename='overwrite_translation'), Permission.objects.get(codename='commit_translation'), Permission.objects.get(codename='update_translation'), Permission.objects.get(codename='push_translation'), Permission.objects.get(codename='automatic_translation'), Permission.objects.get(codename='save_translation'), Permission.objects.get(codename='save_template'), Permission.objects.get(codename='accept_suggestion'), Permission.objects.get(codename='vote_suggestion'), Permission.objects.get(codename='override_suggestion'), Permission.objects.get(codename='delete_comment'), Permission.objects.get(codename='delete_suggestion'), Permission.objects.get(codename='ignore_check'), Permission.objects.get(codename='upload_dictionary'), Permission.objects.get(codename='add_dictionary'), Permission.objects.get(codename='change_dictionary'), Permission.objects.get(codename='delete_dictionary'), Permission.objects.get(codename='lock_subproject'), Permission.objects.get(codename='reset_translation'), Permission.objects.get(codename='lock_translation'), Permission.objects.get(codename='can_see_git_repository'), Permission.objects.get(codename='add_comment'), Permission.objects.get(codename='delete_comment'), Permission.objects.get(codename='add_suggestion'), Permission.objects.get(codename='use_mt'), Permission.objects.get(codename='edit_priority'), Permission.objects.get(codename='edit_flags'), Permission.objects.get(codename='manage_acl'), 
Permission.objects.get(codename='download_changes'), Permission.objects.get(codename='view_reports'), ) group, created = Group.objects.get_or_create(name='Managers') if created or update: group.permissions.add(*owner_permissions) group, created = Group.objects.get_or_create(name='Owners') if created or update: group.permissions.add(*owner_permissions) created = True try: anon_user = User.objects.get( username=ANONYMOUS_USER_NAME, ) created = False if anon_user.is_active: raise ValueError( 'Anonymous user ({}) already exists and enabled, ' 'please change ANONYMOUS_USER_NAME setting.'.format( ANONYMOUS_USER_NAME, ) ) except User.DoesNotExist: anon_user = User.objects.create( username=ANONYMOUS_USER_NAME, is_active=False, ) if created or update: anon_user.set_unusable_password() anon_user.groups.clear() anon_user.groups.add(guest_group) def move_users(): ''' Moves users to default group. ''' group = Group.objects.get(name='Users') for user in User.objects.all(): user.groups.add(group) def remove_user(user): ''' Removes user account. ''' # Send signal (to commit any pending changes) user_pre_delete.send(instance=user, sender=user.__class__) # Change username user.username = 'deleted-{0}'.format(user.pk) while User.objects.filter(username=user.username).exists(): user.username = 'deleted-{0}-{1}'.format( user.pk, binascii.b2a_hex(os.urandom(5)) ) # Remove user information user.first_name = 'Deleted User' user.last_name = '' user.email = '[email protected]' # Disable the user user.is_active = False user.set_unusable_password() user.save() # Remove all social auth associations user.social_auth.all().delete() @receiver(post_migrate) def sync_create_groups(sender, **kwargs): ''' Create groups on syncdb. ''' if sender.label == 'accounts': create_groups(False) @receiver(post_save, sender=User) def create_profile_callback(sender, instance, created=False, **kwargs): ''' Automatically adds user to Users group. ''' if created: # Add user to Users group if it exists try: group = Group.objects.get(name='Users') instance.groups.add(group) except Group.DoesNotExist: pass<|fim▁end|>
<|file_name|>TMXTiledMapArtifactFixerTest.java<|end_file_name|><|fim▁begin|>package org.anddev.andengine.entity.layer.tiled.tmx;

import static junit.framework.Assert.assertEquals;

import java.io.File;

import org.junit.Test;

/**
 * @author Nicolas Gramlich
 * @since 21:21:51 - 28.07.2010
 */
public class TMXTiledMapArtifactFixerTest {
	// ===========================================================
	// Constants
	// ===========================================================

	// ===========================================================
	// Fields
	// ===========================================================

	// ===========================================================
	// Constructors
	// ===========================================================

	// ===========================================================
	// Getter & Setter
	// ===========================================================

	// ===========================================================
	// Methods for/from SuperClass/Interfaces
	// ===========================================================

	// ===========================================================
	// Methods
	// ===========================================================

	@Test
	public void testDetermineTileColumnCount() throws Exception {
		assertEquals(8, TMXTiledMapArtifactFixer.determineCount(265, 32, 1, 1));
		assertEquals(6, TMXTiledMapArtifactFixer.determineCount(199, 32, 1, 1));
	}

	@Test
	public void testGenerateOutputFile() throws Exception {
		assertEquals("C:\\Test\\fixed_bla.png", TMXTiledMapArtifactFixer.generateOutputFile(new File("C:\\Test\\bla.png")).toString());<|fim▁hole|>
	// ===========================================================
	// Inner and Anonymous Classes
	// ===========================================================
}<|fim▁end|>
}
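Both assertions in testDetermineTileColumnCount above are consistent with the usual tileset column formula for a texture with a margin and inter-tile spacing. A short worked check (one formula that matches both fixtures; not necessarily the library's exact implementation):

    # With margin m and spacing s: count = (size - 2*m + s) // (tile_size + s)
    def determine_count(size, tile_size, margin, spacing):
        return (size - 2 * margin + spacing) // (tile_size + spacing)

    assert determine_count(265, 32, 1, 1) == 8   # (265 - 2 + 1) // 33
    assert determine_count(199, 32, 1, 1) == 6   # (199 - 2 + 1) // 33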
<|file_name|>mtl.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 GFX Developers // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Parsing and writing of a .mtl file as defined in the //! [full spec](http://paulbourke.net/dataformats/mtl/). use std::{ borrow::Cow, fmt, io::{self, BufRead, BufReader, Error, Read, Write}, path::Path, str::FromStr, sync::Arc, }; /// The model of an a single Material as defined in the .mtl spec. #[derive(Debug, Clone, PartialEq)] pub struct Material { pub name: String, // Material color and illumination pub ka: Option<[f32; 3]>, pub kd: Option<[f32; 3]>, pub ks: Option<[f32; 3]>, pub ke: Option<[f32; 3]>, pub km: Option<f32>, pub tf: Option<[f32; 3]>, pub ns: Option<f32>, pub ni: Option<f32>, pub tr: Option<f32>, pub d: Option<f32>, pub illum: Option<i32>, // Texture and reflection maps pub map_ka: Option<String>, pub map_kd: Option<String>,<|fim▁hole|> pub map_d: Option<String>, pub map_bump: Option<String>, pub map_refl: Option<String>, } impl Material { pub fn new(name: String) -> Self { Material { name, ka: None, kd: None, ks: None, ke: None, km: None, ns: None, ni: None, tr: None, tf: None, d: None, map_ka: None, map_kd: None, map_ks: None, map_ke: None, map_ns: None, map_d: None, map_bump: None, map_refl: None, illum: None, } } } /// Indicates type of a missing value #[derive(Debug)] pub enum MtlMissingType { /// i32 I32, /// f32 F32, /// String String, } impl fmt::Display for MtlMissingType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { MtlMissingType::I32 => write!(f, "i32"), MtlMissingType::F32 => write!(f, "f32"), MtlMissingType::String => write!(f, "String"), } } } /// Errors parsing or loading a .mtl file. #[derive(Debug)] pub enum MtlError { Io(io::Error), /// Given instruction was not in .mtl spec. InvalidInstruction(String), /// Attempted to parse value, but failed. InvalidValue(String), /// `newmtl` issued, but no name provided. MissingMaterialName, /// Instruction requires a value, but that value was not provided. 
MissingValue(MtlMissingType), } impl std::error::Error for MtlError { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { match self { MtlError::Io(err) => Some(err), _ => None, } } } impl fmt::Display for MtlError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { MtlError::Io(err) => write!(f, "I/O error loading a .mtl file: {}", err), MtlError::InvalidInstruction(instruction) => write!(f, "Unsupported mtl instruction: {}", instruction), MtlError::InvalidValue(val) => write!(f, "Attempted to parse the value '{}' but failed.", val), MtlError::MissingMaterialName => write!(f, "newmtl issued, but no name provided."), MtlError::MissingValue(ty) => write!(f, "Instruction is missing a value of type '{}'", ty), } } } impl From<io::Error> for MtlError { fn from(e: Error) -> Self { Self::Io(e) } } impl<'a> From<Material> for Cow<'a, Material> { #[inline] fn from(s: Material) -> Cow<'a, Material> { Cow::Owned(s) } } struct Parser<I>(I); impl<'a, I: Iterator<Item = &'a str>> Parser<I> { fn get_vec(&mut self) -> Result<[f32; 3], MtlError> { let (x, y, z) = match (self.0.next(), self.0.next(), self.0.next()) { (Some(x), Some(y), Some(z)) => (x, y, z), other => { return Err(MtlError::InvalidValue(format!("{:?}", other))); } }; match (x.parse::<f32>(), y.parse::<f32>(), z.parse::<f32>()) { (Ok(x), Ok(y), Ok(z)) => Ok([x, y, z]), other => Err(MtlError::InvalidValue(format!("{:?}", other))), } } fn get_i32(&mut self) -> Result<i32, MtlError> { match self.0.next() { Some(v) => FromStr::from_str(v).map_err(|_| MtlError::InvalidValue(v.to_string())), None => Err(MtlError::MissingValue(MtlMissingType::I32)), } } fn get_f32(&mut self) -> Result<f32, MtlError> { match self.0.next() { Some(v) => FromStr::from_str(v).map_err(|_| MtlError::InvalidValue(v.to_string())), None => Err(MtlError::MissingValue(MtlMissingType::F32)), } } fn into_string(mut self) -> Result<String, MtlError> { match self.0.next() { Some(v) => { // See note on mtllib parsing in obj.rs for why this is needed/works Ok(self.0.fold(v.to_string(), |mut existing, next| { existing.push(' '); existing.push_str(next); existing })) } None => Err(MtlError::MissingValue(MtlMissingType::String)), } } } /// The data represented by the `mtllib` command. /// /// The material name is replaced by the actual material data when the material libraries are /// laoded if a match is found. #[derive(Debug, Clone, PartialEq)] pub struct Mtl { /// Name of the .mtl file. pub filename: String, /// A list of loaded materials. /// /// The individual materials are wrapped into an `Arc` to facilitate referencing this data /// where these materials are assigned in the `.obj` file. pub materials: Vec<Arc<Material>>, } impl Mtl { /// Construct a new empty mtl lib with the given file name. pub fn new(filename: String) -> Self { Mtl { filename, materials: Vec::new(), } } /// Load the mtl library from the input buffer generated by the given closure. /// /// This function overwrites the contents of this library if it has already been loaded. pub fn reload_with<R, F>(&mut self, obj_dir: impl AsRef<Path>, mut resolve: F) -> Result<&mut Self, MtlError> where R: BufRead, F: FnMut(&Path, &str) -> io::Result<R>, { self.reload(resolve(obj_dir.as_ref(), &self.filename)?) } /// Load the mtl library from the given input buffer. /// /// This function overwrites the contents of this library if it has already been loaded. 
pub fn reload(&mut self, input: impl Read) -> Result<&mut Self, MtlError> { self.materials.clear(); let input = BufReader::new(input); let mut material = None; for line in input.lines() { let mut parser = match line { Ok(ref line) => Parser(line.split_whitespace().filter(|s| !s.is_empty())), Err(err) => return Err(MtlError::Io(err)), }; match parser.0.next() { Some("newmtl") => { self.materials.extend(material.take().map(Arc::new)); material = Some(Material::new( parser .0 .next() .ok_or_else(|| MtlError::MissingMaterialName)? .to_string(), )); } Some("Ka") => { if let Some(ref mut m) = material { m.ka = Some(parser.get_vec()?); } } Some("Kd") => { if let Some(ref mut m) = material { m.kd = Some(parser.get_vec()?); } } Some("Ks") => { if let Some(ref mut m) = material { m.ks = Some(parser.get_vec()?); } } Some("Ke") => { if let Some(ref mut m) = material { m.ke = Some(parser.get_vec()?); } } Some("Ns") => { if let Some(ref mut m) = material { m.ns = Some(parser.get_f32()?); } } Some("Ni") => { if let Some(ref mut m) = material { m.ni = Some(parser.get_f32()?); } } Some("Km") => { if let Some(ref mut m) = material { m.km = Some(parser.get_f32()?); } } Some("d") => { if let Some(ref mut m) = material { m.d = Some(parser.get_f32()?); } } Some("Tr") => { if let Some(ref mut m) = material { m.tr = Some(parser.get_f32()?); } } Some("Tf") => { if let Some(ref mut m) = material { m.tf = Some(parser.get_vec()?); } } Some("illum") => { if let Some(ref mut m) = material { m.illum = Some(parser.get_i32()?); } } Some("map_Ka") => { if let Some(ref mut m) = material { m.map_ka = Some(parser.into_string()?); } } Some("map_Kd") => { if let Some(ref mut m) = material { m.map_kd = Some(parser.into_string()?); } } Some("map_Ks") => { if let Some(ref mut m) = material { m.map_ks = Some(parser.into_string()?); } } Some("map_d") => { if let Some(ref mut m) = material { m.map_d = Some(parser.into_string()?); } } Some("map_refl") | Some("refl") => { if let Some(ref mut m) = material { m.map_refl = Some(parser.into_string()?); } } Some("map_bump") | Some("map_Bump") | Some("bump") => { if let Some(ref mut m) = material { m.map_bump = Some(parser.into_string()?); } } Some(other) => { if !other.starts_with('#') { return Err(MtlError::InvalidInstruction(other.to_string())); } } None => {} } } if let Some(material) = material { self.materials.push(Arc::new(material)); } Ok(self) } pub fn write_to_buf(&self, out: &mut impl Write) -> Result<(), io::Error> { for mtl in &self.materials { writeln!(out, "newmtl {}", mtl.name)?; if let Some([ka0, ka1, ka2]) = mtl.ka { writeln!(out, "Ka {} {} {}", ka0, ka1, ka2)?; } if let Some([kd0, kd1, kd2]) = mtl.kd { writeln!(out, "Kd {} {} {}", kd0, kd1, kd2)?; } if let Some([ks0, ks1, ks2]) = mtl.ks { writeln!(out, "Ks {} {} {}", ks0, ks1, ks2)?; } if let Some([ke0, ke1, ke2]) = mtl.ke { writeln!(out, "Ke {} {} {}", ke0, ke1, ke2)?; } if let Some(ns) = mtl.ns { writeln!(out, "Ns {}", ns)?; } if let Some(ns) = mtl.ns { writeln!(out, "Ns {}", ns)?; } if let Some(ni) = mtl.ni { writeln!(out, "Ni {}", ni)?; } if let Some(km) = mtl.km { writeln!(out, "Km {}", km)?; } if let Some(d) = mtl.d { writeln!(out, "d {}", d)?; } if let Some(tr) = mtl.tr { writeln!(out, "Tr {}", tr)?; } if let Some([tf0, tf1, tf2]) = mtl.tf { writeln!(out, "Tf {} {} {}", tf0, tf1, tf2)?; } if let Some(illum) = mtl.illum { writeln!(out, "illum {}", illum)?; } if let Some(map_ka) = &mtl.map_ka { writeln!(out, "map_Ka {}", map_ka)?; } if let Some(map_kd) = &mtl.map_kd { writeln!(out, "map_Kd {}", map_kd)?; } if let 
Some(map_ks) = &mtl.map_ks {
                writeln!(out, "map_Ks {}", map_ks)?;
            }
            if let Some(map_d) = &mtl.map_d {
                writeln!(out, "map_d {}", map_d)?;
            }
            if let Some(map_refl) = &mtl.map_refl {
                writeln!(out, "refl {}", map_refl)?;
            }
            if let Some(map_bump) = &mtl.map_bump {
                writeln!(out, "bump {}", map_bump)?;
            }
        }
        Ok(())
    }
}<|fim▁end|>
    pub map_ks: Option<String>,
    pub map_ke: Option<String>,
    pub map_ns: Option<String>,
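The Rust `reload` method above walks the .mtl grammar keyword by keyword: `newmtl` opens a material, and subsequent color, scalar, and map statements fill it in. A simplified Python sketch of the same dispatch (illustration only, omitting the error handling and `Arc` wrapping of the real parser):

    def parse_mtl(text):
        materials, current = [], None
        for raw_line in text.splitlines():
            parts = raw_line.split()
            if not parts or parts[0].startswith("#"):
                continue  # skip blanks and comments
            key, args = parts[0], parts[1:]
            if key == "newmtl":
                current = {"name": args[0]}
                materials.append(current)
            elif current is None:
                continue  # statements before any newmtl are ignored here
            elif key in ("Ka", "Kd", "Ks", "Ke", "Tf"):
                current[key] = [float(v) for v in args[:3]]
            elif key in ("Ns", "Ni", "Km", "d", "Tr"):
                current[key] = float(args[0])
            elif key == "illum":
                current[key] = int(args[0])
            elif key.startswith("map_") or key in ("refl", "bump"):
                current[key] = " ".join(args)  # paths may contain spaces
        return materials

    mats = parse_mtl("newmtl shiny\nKd 0.8 0.8 0.8\nNs 96.0\nillum 2\n")
    assert mats == [{"name": "shiny", "Kd": [0.8, 0.8, 0.8], "Ns": 96.0, "illum": 2}]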
<|file_name|>selectionConfig.js<|end_file_name|><|fim▁begin|>/*jshint unused:false */
var dojoConfig = {
  async: 1,<|fim▁hole|>
    layers: {}
  },
  packages: [
    { name: 'oe_dojo', location: '..' }
  ]
};<|fim▁end|>
  cacheBust: 0,
  'routing-map': {
    pathPrefix: '',
<|file_name|>raw.rs<|end_file_name|><|fim▁begin|>use std::borrow::Cow; use std::fmt; use http::buf::MemSlice; /// A raw header value. #[derive(Clone, PartialEq, Eq)] pub struct Raw(Lines); impl Raw { /// Returns the amount of lines. #[inline] pub fn len(&self) -> usize { match self.0 { Lines::One(..) => 1, Lines::Many(ref lines) => lines.len() } } /// Returns the line if there is only 1. #[inline] pub fn one(&self) -> Option<&[u8]> { match self.0 { Lines::One(ref line) => Some(line.as_ref()), Lines::Many(ref lines) if lines.len() == 1 => Some(lines[0].as_ref()), _ => None } } /// Iterate the lines of raw bytes. #[inline] pub fn iter(&self) -> RawLines { RawLines { inner: &self.0, pos: 0, } } /// Append a line to this `Raw` header value. pub fn push(&mut self, val: &[u8]) { self.push_line(maybe_literal(val.into())); } fn push_line(&mut self, line: Line) { let lines = ::std::mem::replace(&mut self.0, Lines::Many(Vec::new())); match lines { Lines::One(one) => { self.0 = Lines::Many(vec![one, line]); } Lines::Many(mut lines) => { lines.push(line); self.0 = Lines::Many(lines); } } } } #[derive(Debug, Clone, PartialEq, Eq)] enum Lines { One(Line), Many(Vec<Line>), } #[derive(Debug, Clone, PartialEq, Eq)] enum Line { Static(&'static [u8]), Owned(Vec<u8>), Shared(MemSlice), } fn eq<A: AsRef<[u8]>, B: AsRef<[u8]>>(a: &[A], b: &[B]) -> bool { if a.len() != b.len() { false } else { for (a, b) in a.iter().zip(b.iter()) { if a.as_ref() != b.as_ref() { return false } } true } } impl PartialEq<[Vec<u8>]> for Raw { fn eq(&self, bytes: &[Vec<u8>]) -> bool { match self.0 { Lines::One(ref line) => eq(&[line], bytes), Lines::Many(ref lines) => eq(lines, bytes) } } } impl PartialEq<[u8]> for Raw { fn eq(&self, bytes: &[u8]) -> bool { match self.0 { Lines::One(ref line) => line.as_ref() == bytes, Lines::Many(..) => false } } } impl PartialEq<str> for Raw { fn eq(&self, s: &str) -> bool { match self.0 { Lines::One(ref line) => line.as_ref() == s.as_bytes(), Lines::Many(..) 
=> false } } } impl From<Vec<Vec<u8>>> for Raw { #[inline] fn from(val: Vec<Vec<u8>>) -> Raw { Raw(Lines::Many( val.into_iter() .map(|vec| maybe_literal(vec.into())) .collect() )) } } impl From<String> for Raw { #[inline] fn from(val: String) -> Raw { let vec: Vec<u8> = val.into(); vec.into() } } impl From<Vec<u8>> for Raw { #[inline] fn from(val: Vec<u8>) -> Raw { Raw(Lines::One(Line::from(val))) } } impl From<&'static str> for Raw { fn from(val: &'static str) -> Raw { Raw(Lines::One(Line::Static(val.as_bytes()))) } } impl From<&'static [u8]> for Raw { fn from(val: &'static [u8]) -> Raw { Raw(Lines::One(Line::Static(val))) } } impl From<MemSlice> for Raw { #[inline] fn from(val: MemSlice) -> Raw { Raw(Lines::One(Line::Shared(val))) } } impl From<Vec<u8>> for Line { #[inline] fn from(val: Vec<u8>) -> Line { Line::Owned(val) } } impl From<MemSlice> for Line { #[inline] fn from(val: MemSlice) -> Line { Line::Shared(val) } } impl AsRef<[u8]> for Line { fn as_ref(&self) -> &[u8] { match *self { Line::Static(ref s) => s, Line::Owned(ref v) => v.as_ref(), Line::Shared(ref m) => m.as_ref(), } } } pub fn parsed(val: MemSlice) -> Raw { Raw(Lines::One(From::from(val))) } pub fn push(raw: &mut Raw, val: MemSlice) { raw.push_line(Line::from(val)); } impl fmt::Debug for Raw { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.0 { Lines::One(ref line) => fmt::Debug::fmt(&[line], f), Lines::Many(ref lines) => fmt::Debug::fmt(lines, f) } } } impl ::std::ops::Index<usize> for Raw { type Output = [u8]; fn index(&self, idx: usize) -> &[u8] { match self.0 { Lines::One(ref line) => if idx == 0 { line.as_ref() } else { panic!("index out of bounds: {}", idx) }, Lines::Many(ref lines) => lines[idx].as_ref() } } } macro_rules! literals { ($($len:expr => $($value:expr),+;)+) => ( fn maybe_literal<'a>(s: Cow<'a, [u8]>) -> Line { match s.len() { $($len => { $( if s.as_ref() == $value { return Line::Static($value); } )+ })+ _ => () } Line::from(s.into_owned()) } #[test]<|fim▁hole|> fn test_literal_lens() { $( $({ let s = $value; assert!(s.len() == $len, "{:?} has len of {}, listed as {}", s, s.len(), $len); })+ )+ } ); } literals! { 1 => b"*", b"0"; 3 => b"*/*"; 4 => b"gzip"; 5 => b"close"; 7 => b"chunked"; 10 => b"keep-alive"; } impl<'a> IntoIterator for &'a Raw { type IntoIter = RawLines<'a>; type Item = &'a [u8]; fn into_iter(self) -> RawLines<'a> { self.iter() } } #[derive(Debug)] pub struct RawLines<'a> { inner: &'a Lines, pos: usize, } impl<'a> Iterator for RawLines<'a> { type Item = &'a [u8]; #[inline] fn next(&mut self) -> Option<&'a [u8]> { let current_pos = self.pos; self.pos += 1; match *self.inner { Lines::One(ref line) => { if current_pos == 0 { Some(line.as_ref()) } else { None } } Lines::Many(ref lines) => lines.get(current_pos).map(|l| l.as_ref()), } } }<|fim▁end|>
<|file_name|>test_config_flow.py<|end_file_name|><|fim▁begin|>"""Test the Balboa Spa Client config flow.""" from unittest.mock import patch from homeassistant import config_entries, data_entry_flow from homeassistant.components.balboa.const import CONF_SYNC_TIME, DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import ( RESULT_TYPE_ABORT, RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM, ) from . import BalboaMock from tests.common import MockConfigEntry TEST_DATA = { CONF_HOST: "1.1.1.1", } TEST_ID = "FakeBalboa" async def test_form(hass: HomeAssistant) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["errors"] == {} with patch( "homeassistant.components.balboa.config_flow.BalboaSpaWifi.connect", new=BalboaMock.connect, ), patch( "homeassistant.components.balboa.config_flow.BalboaSpaWifi.disconnect", new=BalboaMock.disconnect, ), patch( "homeassistant.components.balboa.config_flow.BalboaSpaWifi.listen", new=BalboaMock.listen, ), patch( "homeassistant.components.balboa.config_flow.BalboaSpaWifi.send_mod_ident_req", new=BalboaMock.send_mod_ident_req, ), patch( "homeassistant.components.balboa.config_flow.BalboaSpaWifi.send_panel_req", new=BalboaMock.send_panel_req, ), patch( "homeassistant.components.balboa.config_flow.BalboaSpaWifi.spa_configured", new=BalboaMock.spa_configured, ), patch( "homeassistant.components.balboa.async_setup_entry", return_value=True, ) as mock_setup_entry: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_DATA, ) await hass.async_block_till_done() assert result2["type"] == RESULT_TYPE_CREATE_ENTRY assert result2["data"] == TEST_DATA assert len(mock_setup_entry.mock_calls) == 1 async def test_form_cannot_connect(hass: HomeAssistant) -> None: """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch( "homeassistant.components.balboa.config_flow.BalboaSpaWifi.connect", new=BalboaMock.broken_connect, ), patch( "homeassistant.components.balboa.config_flow.BalboaSpaWifi.disconnect", new=BalboaMock.disconnect, ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_DATA, ) assert result2["type"] == RESULT_TYPE_FORM assert result2["errors"] == {"base": "cannot_connect"} async def test_unknown_error(hass: HomeAssistant) -> None: """Test we handle unknown error.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch( "homeassistant.components.balboa.config_flow.BalboaSpaWifi.connect", side_effect=Exception, ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_DATA, ) assert result2["type"] == RESULT_TYPE_FORM assert result2["errors"] == {"base": "unknown"} async def test_already_configured(hass: HomeAssistant) -> None: """Test when provided credentials are already configured.""" MockConfigEntry(domain=DOMAIN, data=TEST_DATA, unique_id=TEST_ID).add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == SOURCE_USER <|fim▁hole|> ), patch( 
"homeassistant.components.balboa.config_flow.BalboaSpaWifi.disconnect", new=BalboaMock.disconnect, ), patch( "homeassistant.components.balboa.async_setup_entry", return_value=True, ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], TEST_DATA, ) await hass.async_block_till_done() assert result2["type"] == RESULT_TYPE_ABORT assert result2["reason"] == "already_configured" async def test_options_flow(hass): """Test specifying non default settings using options flow.""" config_entry = MockConfigEntry(domain=DOMAIN, data=TEST_DATA, unique_id=TEST_ID) config_entry.add_to_hass(hass) # Rather than mocking out 15 or so functions, we just need to mock # the entire library, otherwise it will get stuck in a listener and # the various loops in pybalboa. with patch( "homeassistant.components.balboa.config_flow.BalboaSpaWifi", new=BalboaMock, ), patch( "homeassistant.components.balboa.BalboaSpaWifi", new=BalboaMock, ): assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] == data_entry_flow.RESULT_TYPE_FORM assert result["step_id"] == "init" result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={CONF_SYNC_TIME: True}, ) assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert config_entry.options == {CONF_SYNC_TIME: True}<|fim▁end|>
with patch( "homeassistant.components.balboa.config_flow.BalboaSpaWifi.connect", new=BalboaMock.connect,
<|file_name|>Permission.java<|end_file_name|><|fim▁begin|>package com.app.annotation.aspect;

/**
 * Created by baixiaokang on 17/1/31.
 */<|fim▁hole|>
<|file_name|>visit.py<|end_file_name|><|fim▁begin|># # Copyright (C) 2014 Jonathan Finlay <[email protected]> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # """ The patient visit module ======================== Implements the classes: * Visit: Main visit module * ConsultingRoom: Consultings room module """ from openerp.osv import osv, fields class Visit(osv.osv): """ The visit module """ _name = 'visit' _description = 'The visit module' _states = [ ('draft', 'Draft'),<|fim▁hole|> ('confirmed', 'Confirmed'), ('canceled', 'Canceled'), ('assisted', 'Assisted'), ] def _default_room(self, cr, uid, id, context=None): consulroom_obj = self.pool.get('consulting.room') room = consulroom_obj.search(cr, uid, [('default', '=', '1')]) if room: return room[0] return 1 def check_duration(self, cr, uid, id, context=None): """ Check the consistency of the visit duration :param cr: :param uid: :param id: :param context: :return: """ return {} def onchange_consulting_room(self, cr, uid, id, consulting_room, context=None): """ :param cr: :param uid: :param id: :param starts: :param consulting_room: :param context: :return: """ if consulting_room: consulroom_obj = self.pool.get('consulting.room') duration = consulroom_obj.browse(cr, uid, consulting_room, context=context)[0].duration else: duration = 0.0 vals = { 'value': { 'duration': duration, } } return vals _columns = { 'name': fields.char('Identifier'), 'starts': fields.datetime('Start date'), 'duration': fields.float('Duration', help='Duration in minutes'), 'patient_id': fields.many2one('patient', 'Patient'), 'consultingroom_id': fields.many2one('consulting.room', 'Consulting room'), 'state': fields.selection(_states, 'State') } _defaults = { 'consultingroom_id': _default_room, } class ConsultingRoom(osv.osv): """ Consulting rooms """ _name = 'consulting.room' _description = 'Consulting rooms configuration module' _columns = { 'name': fields.char('Name'), 'duration': fields.float('Standard duration', help='Visit standard duration time in minutes'), 'price': fields.float('Price', help='Standard consultation fee'), 'address': fields.text('Address'), 'default': fields.boolean('Default', help='Set as default consulting room'), }<|fim▁end|>
<|file_name|>courseware_index.py<|end_file_name|><|fim▁begin|>""" Code to allow module store to interface with courseware index """ from __future__ import absolute_import from abc import ABCMeta, abstractmethod from datetime import timedelta import logging import re from six import add_metaclass from django.conf import settings from django.utils.translation import ugettext_lazy, ugettext as _ from django.core.urlresolvers import resolve from contentstore.course_group_config import GroupConfiguration from course_modes.models import CourseMode from eventtracking import tracker from openedx.core.lib.courses import course_image_url from search.search_engine_base import SearchEngine from xmodule.annotator_mixin import html_to_text from xmodule.modulestore import ModuleStoreEnum from xmodule.library_tools import normalize_key_for_search # REINDEX_AGE is the default amount of time that we look back for changes # that might have happened. If we are provided with a time at which the # indexing is triggered, then we know it is safe to only index items # recently changed at that time. This is the time period that represents # how far back from the trigger point to look back in order to index REINDEX_AGE = timedelta(0, 60) # 60 seconds log = logging.getLogger('edx.modulestore') def strip_html_content_to_text(html_content): """ Gets only the textual part for html content - useful for building text to be searched """ # Removing HTML-encoded non-breaking space characters text_content = re.sub(r"(\s|&nbsp;|//)+", " ", html_to_text(html_content)) # Removing HTML CDATA text_content = re.sub(r"<!\[CDATA\[.*\]\]>", "", text_content) # Removing HTML comments text_content = re.sub(r"<!--.*-->", "", text_content) return text_content def indexing_is_enabled(): """ Checks to see if the indexing feature is enabled """ return settings.FEATURES.get('ENABLE_COURSEWARE_INDEX', False) class SearchIndexingError(Exception): """ Indicates some error(s) occured during indexing """ def __init__(self, message, error_list): super(SearchIndexingError, self).__init__(message) self.error_list = error_list @add_metaclass(ABCMeta) class SearchIndexerBase(object): """ Base class to perform indexing for courseware or library search from different modulestores """ __metaclass__ = ABCMeta INDEX_NAME = None DOCUMENT_TYPE = None ENABLE_INDEXING_KEY = None INDEX_EVENT = { 'name': None, 'category': None } @classmethod def indexing_is_enabled(cls): """ Checks to see if the indexing feature is enabled """ return settings.FEATURES.get(cls.ENABLE_INDEXING_KEY, False) @classmethod @abstractmethod def normalize_structure_key(cls, structure_key): """ Normalizes structure key for use in indexing """ @classmethod @abstractmethod def _fetch_top_level(cls, modulestore, structure_key): """ Fetch the item from the modulestore location """ @classmethod @abstractmethod def _get_location_info(cls, normalized_structure_key): """ Builds location info dictionary """ @classmethod def _id_modifier(cls, usage_id): """ Modifies usage_id to submit to index """ return usage_id @classmethod def remove_deleted_items(cls, searcher, structure_key, exclude_items): """ remove any item that is present in the search index that is not present in updated list of indexed items as we find items we can shorten the set of items to keep """ response = searcher.search( doc_type=cls.DOCUMENT_TYPE, field_dictionary=cls._get_location_info(structure_key), exclude_dictionary={"id": list(exclude_items)} ) result_ids = [result["data"]["id"] for result in response["results"]] 
searcher.remove(cls.DOCUMENT_TYPE, result_ids) @classmethod def index(cls, modulestore, structure_key, triggered_at=None, reindex_age=REINDEX_AGE): """ Process course for indexing Arguments: modulestore - modulestore object to use for operations structure_key (CourseKey|LibraryKey) - course or library identifier triggered_at (datetime) - provides time at which indexing was triggered; useful for index updates - only things changed recently from that date (within REINDEX_AGE above ^^) will have their index updated, others skip updating their index but are still walked through in order to identify which items may need to be removed from the index If None, then a full reindex takes place Returns: Number of items that have been added to the index """ error_list = [] searcher = SearchEngine.get_search_engine(cls.INDEX_NAME) if not searcher: return structure_key = cls.normalize_structure_key(structure_key) location_info = cls._get_location_info(structure_key) # Wrap counter in dictionary - otherwise we seem to lose scope inside the embedded function `prepare_item_index` indexed_count = { "count": 0 } # indexed_items is a list of all the items that we wish to remain in the # index, whether or not we are planning to actually update their index. # This is used in order to build a query to remove those items not in this # list - those are ready to be destroyed indexed_items = set() # items_index is a list of all the items index dictionaries. # it is used to collect all indexes and index them using bulk API, # instead of per item index API call. items_index = [] def get_item_location(item): """ Gets the version agnostic item location """ return item.location.version_agnostic().replace(branch=None) def prepare_item_index(item, skip_index=False, groups_usage_info=None): """ Add this item to the items_index and indexed_items list Arguments: item - item to add to index, its children will be processed recursively skip_index - simply walk the children in the tree, the content change is older than the REINDEX_AGE window and would have been already indexed. 
This should really only be passed from the recursive child calls when this method has determined that it is safe to do so Returns: item_content_groups - content groups assigned to indexed item """ is_indexable = hasattr(item, "index_dictionary") item_index_dictionary = item.index_dictionary() if is_indexable else None # if it's not indexable and it does not have children, then ignore if not item_index_dictionary and not item.has_children: return item_content_groups = None if item.category == "split_test": split_partition = item.get_selected_partition() for split_test_child in item.get_children(): if split_partition: for group in split_partition.groups: group_id = unicode(group.id) child_location = item.group_id_to_child.get(group_id, None) if child_location == split_test_child.location: groups_usage_info.update({ unicode(get_item_location(split_test_child)): [group_id], }) for component in split_test_child.get_children(): groups_usage_info.update({ unicode(get_item_location(component)): [group_id] }) if groups_usage_info: item_location = get_item_location(item) item_content_groups = groups_usage_info.get(unicode(item_location), None) item_id = unicode(cls._id_modifier(item.scope_ids.usage_id)) indexed_items.add(item_id) if item.has_children: # determine if it's okay to skip adding the children herein based upon how recently any may have changed skip_child_index = skip_index or \ (triggered_at is not None and (triggered_at - item.subtree_edited_on) > reindex_age) children_groups_usage = [] for child_item in item.get_children(): if modulestore.has_published_version(child_item): children_groups_usage.append( prepare_item_index( child_item, skip_index=skip_child_index, groups_usage_info=groups_usage_info ) ) if None in children_groups_usage: item_content_groups = None if skip_index or not item_index_dictionary: return item_index = {} # if it has something to add to the index, then add it try: item_index.update(location_info) item_index.update(item_index_dictionary) item_index['id'] = item_id if item.start: item_index['start_date'] = item.start item_index['content_groups'] = item_content_groups if item_content_groups else None item_index.update(cls.supplemental_fields(item)) items_index.append(item_index) indexed_count["count"] += 1 return item_content_groups except Exception as err: # pylint: disable=broad-except # broad exception so that index operation does not fail on one item of many log.warning('Could not index item: %s - %r', item.location, err) error_list.append(_('Could not index item: {}').format(item.location)) try: with modulestore.branch_setting(ModuleStoreEnum.RevisionOption.published_only): structure = cls._fetch_top_level(modulestore, structure_key) groups_usage_info = cls.fetch_group_usage(modulestore, structure) # First perform any additional indexing from the structure object cls.supplemental_index_information(modulestore, structure) # Now index the content for item in structure.get_children(): prepare_item_index(item, groups_usage_info=groups_usage_info) searcher.index(cls.DOCUMENT_TYPE, items_index) cls.remove_deleted_items(searcher, structure_key, indexed_items) except Exception as err: # pylint: disable=broad-except # broad exception so that index operation does not prevent the rest of the application from working log.exception( "Indexing error encountered, courseware index may be out of date %s - %r", structure_key, err ) error_list.append(_('General indexing error occurred')) if error_list: raise SearchIndexingError('Error(s) present during indexing', error_list) return 
indexed_count["count"] @classmethod def _do_reindex(cls, modulestore, structure_key): """ (Re)index all content within the given structure (course or library), tracking the fact that a full reindex has taken place """ indexed_count = cls.index(modulestore, structure_key) if indexed_count: cls._track_index_request(cls.INDEX_EVENT['name'], cls.INDEX_EVENT['category'], indexed_count) return indexed_count @classmethod def _track_index_request(cls, event_name, category, indexed_count): """Track content index requests. Arguments: event_name (str): Name of the event to be logged. category (str): category of indexed items indexed_count (int): number of indexed items Returns: None """ data = { "indexed_count": indexed_count, 'category': category, } tracker.emit( event_name, data ) @classmethod def fetch_group_usage(cls, modulestore, structure): # pylint: disable=unused-argument """ Base implementation of fetch group usage on course/library. """ return None @classmethod def supplemental_index_information(cls, modulestore, structure): """ Perform any supplemental indexing given that the structure object has already been loaded. Base implementation performs no operation. Arguments: modulestore - modulestore object used during the indexing operation structure - structure object loaded during the indexing job Returns: None """ pass @classmethod def supplemental_fields(cls, item): # pylint: disable=unused-argument """ Any supplemental fields that get added to the index for the specified item. Base implementation returns an empty dictionary """ return {} class CoursewareSearchIndexer(SearchIndexerBase): """ Class to perform indexing for courseware search from different modulestores """ INDEX_NAME = "courseware_index" DOCUMENT_TYPE = "courseware_content" ENABLE_INDEXING_KEY = 'ENABLE_COURSEWARE_INDEX' INDEX_EVENT = { 'name': 'edx.course.index.reindexed', 'category': 'courseware_index' } UNNAMED_MODULE_NAME = ugettext_lazy("(Unnamed)") @classmethod def normalize_structure_key(cls, structure_key): """ Normalizes structure key for use in indexing """ return structure_key @classmethod def _fetch_top_level(cls, modulestore, structure_key): """ Fetch the item from the modulestore location """ return modulestore.get_course(structure_key, depth=None) @classmethod def _get_location_info(cls, normalized_structure_key): """ Builds location info dictionary """ return {"course": unicode(normalized_structure_key), "org": normalized_structure_key.org} @classmethod def do_course_reindex(cls, modulestore, course_key): """ (Re)index all content within the given course, tracking the fact that a full reindex has taken place """ return cls._do_reindex(modulestore, course_key) @classmethod def fetch_group_usage(cls, modulestore, structure): groups_usage_dict = {} groups_usage_info = GroupConfiguration.get_content_groups_usage_info(modulestore, structure).items() groups_usage_info.extend( GroupConfiguration.get_content_groups_items_usage_info( modulestore, structure ).items() ) if groups_usage_info: for name, group in groups_usage_info: for module in group: view, args, kwargs = resolve(module['url']) # pylint: disable=unused-variable usage_key_string = unicode(kwargs['usage_key_string']) if groups_usage_dict.get(usage_key_string, None): groups_usage_dict[usage_key_string].append(name) else: groups_usage_dict[usage_key_string] = [name] return groups_usage_dict @classmethod def supplemental_index_information(cls, modulestore, structure): """ Perform additional indexing from loaded structure object """ 
CourseAboutSearchIndexer.index_about_information(modulestore, structure) @classmethod def supplemental_fields(cls, item): """ Add location path to the item object Once we've established the path of names, the first name is the course name, and the next 3 names are the navigable path within the edx application. Notice that we stop at that level because a full path to deep children would be confusing. """ location_path = [] parent = item while parent is not None: path_component_name = parent.display_name if not path_component_name: path_component_name = unicode(cls.UNNAMED_MODULE_NAME) location_path.append(path_component_name) parent = parent.get_parent() location_path.reverse() return { "course_name": location_path[0], "location": location_path[1:4] } class LibrarySearchIndexer(SearchIndexerBase): """ Base class to perform indexing for library search from different modulestores """ INDEX_NAME = "library_index" DOCUMENT_TYPE = "library_content" ENABLE_INDEXING_KEY = 'ENABLE_LIBRARY_INDEX' INDEX_EVENT = { 'name': 'edx.library.index.reindexed', 'category': 'library_index' } @classmethod def normalize_structure_key(cls, structure_key): """ Normalizes structure key for use in indexing """ return normalize_key_for_search(structure_key) @classmethod def _fetch_top_level(cls, modulestore, structure_key): """ Fetch the item from the modulestore location """ return modulestore.get_library(structure_key, depth=None) @classmethod def _get_location_info(cls, normalized_structure_key): """ Builds location info dictionary """ return {"library": unicode(normalized_structure_key)} @classmethod def _id_modifier(cls, usage_id): """ Modifies usage_id to submit to index """ return usage_id.replace(library_key=(usage_id.library_key.replace(version_guid=None, branch=None))) @classmethod def do_library_reindex(cls, modulestore, library_key): """ (Re)index all content within the given library, tracking the fact that a full reindex has taken place """ return cls._do_reindex(modulestore, library_key) class AboutInfo(object): """ About info structure to contain 1) Property name to use 2) Where to add in the index (using flags above) 3) Where to source the properties value """ # Bitwise Flags for where to index the information # # ANALYSE - states that the property text contains content that we wish to be able to find matched within # e.g. "joe" should yield a result for "I'd like to drink a cup of joe" # # PROPERTY - states that the property text should be a property of the indexed document, to be returned with the # results: search matches will only be made on exact string matches # e.g. "joe" will only match on "joe" # # We are using bitwise flags because one may want to add the property to EITHER or BOTH parts of the index # e.g. 
university name is desired to be analysed, so that a search on "Oxford" will match # property values "University of Oxford" and "Oxford Brookes University", # but it is also a useful property, because within a (future) filtered search a user # may have chosen to filter courses from "University of Oxford" # # see https://wiki.python.org/moin/BitwiseOperators for information about bitwise shift operator used below # ANALYSE = 1 << 0 # Add the information to the analysed content of the index PROPERTY = 1 << 1 # Add the information as a property of the object being indexed (not analysed) def __init__(self, property_name, index_flags, source_from): self.property_name = property_name self.index_flags = index_flags self.source_from = source_from def get_value(self, **kwargs): """ get the value for this piece of information, using the correct source """ return self.source_from(self, **kwargs) def from_about_dictionary(self, **kwargs): """ gets the value from the kwargs provided 'about_dictionary' """ about_dictionary = kwargs.get('about_dictionary', None) if not about_dictionary: raise ValueError("Context dictionary does not contain expected argument 'about_dictionary'") return about_dictionary.get(self.property_name, None) def from_course_property(self, **kwargs): """ gets the value from the kwargs provided 'course' """ course = kwargs.get('course', None) if not course: raise ValueError("Context dictionary does not contain expected argument 'course'") return getattr(course, self.property_name, None) def from_course_mode(self, **kwargs): """ fetches the available course modes from the CourseMode model """ course = kwargs.get('course', None) if not course: raise ValueError("Context dictionary does not contain expected argument 'course'") return [mode.slug for mode in CourseMode.modes_for_course(course.id)] # Source location options - either from the course or the about info FROM_ABOUT_INFO = from_about_dictionary FROM_COURSE_PROPERTY = from_course_property FROM_COURSE_MODE = from_course_mode class CourseAboutSearchIndexer(object): """ Class to perform indexing of about information from course object """ DISCOVERY_DOCUMENT_TYPE = "course_info" INDEX_NAME = CoursewareSearchIndexer.INDEX_NAME # List of properties to add to the index - each item in the list is an instance of AboutInfo object ABOUT_INFORMATION_TO_INCLUDE = [ AboutInfo("advertised_start", AboutInfo.PROPERTY, AboutInfo.FROM_COURSE_PROPERTY), AboutInfo("announcement", AboutInfo.PROPERTY, AboutInfo.FROM_ABOUT_INFO), AboutInfo("start", AboutInfo.PROPERTY, AboutInfo.FROM_COURSE_PROPERTY), AboutInfo("end", AboutInfo.PROPERTY, AboutInfo.FROM_COURSE_PROPERTY), AboutInfo("effort", AboutInfo.PROPERTY, AboutInfo.FROM_ABOUT_INFO), AboutInfo("display_name", AboutInfo.ANALYSE, AboutInfo.FROM_COURSE_PROPERTY), AboutInfo("overview", AboutInfo.ANALYSE, AboutInfo.FROM_ABOUT_INFO), AboutInfo("title", AboutInfo.ANALYSE | AboutInfo.PROPERTY, AboutInfo.FROM_ABOUT_INFO), AboutInfo("university", AboutInfo.ANALYSE | AboutInfo.PROPERTY, AboutInfo.FROM_ABOUT_INFO), AboutInfo("number", AboutInfo.ANALYSE | AboutInfo.PROPERTY, AboutInfo.FROM_COURSE_PROPERTY), AboutInfo("short_description", AboutInfo.ANALYSE, AboutInfo.FROM_ABOUT_INFO), AboutInfo("description", AboutInfo.ANALYSE, AboutInfo.FROM_ABOUT_INFO), AboutInfo("key_dates", AboutInfo.ANALYSE, AboutInfo.FROM_ABOUT_INFO), AboutInfo("video", AboutInfo.ANALYSE, AboutInfo.FROM_ABOUT_INFO), AboutInfo("course_staff_short", AboutInfo.ANALYSE, AboutInfo.FROM_ABOUT_INFO), AboutInfo("course_staff_extended", 
AboutInfo.ANALYSE, AboutInfo.FROM_ABOUT_INFO), AboutInfo("requirements", AboutInfo.ANALYSE, AboutInfo.FROM_ABOUT_INFO), AboutInfo("syllabus", AboutInfo.ANALYSE, AboutInfo.FROM_ABOUT_INFO), AboutInfo("textbook", AboutInfo.ANALYSE, AboutInfo.FROM_ABOUT_INFO), AboutInfo("faq", AboutInfo.ANALYSE, AboutInfo.FROM_ABOUT_INFO), AboutInfo("more_info", AboutInfo.ANALYSE, AboutInfo.FROM_ABOUT_INFO), AboutInfo("ocw_links", AboutInfo.ANALYSE, AboutInfo.FROM_ABOUT_INFO), AboutInfo("enrollment_start", AboutInfo.PROPERTY, AboutInfo.FROM_COURSE_PROPERTY), AboutInfo("enrollment_end", AboutInfo.PROPERTY, AboutInfo.FROM_COURSE_PROPERTY), AboutInfo("org", AboutInfo.PROPERTY, AboutInfo.FROM_COURSE_PROPERTY), AboutInfo("modes", AboutInfo.PROPERTY, AboutInfo.FROM_COURSE_MODE), AboutInfo("language", AboutInfo.PROPERTY, AboutInfo.FROM_COURSE_PROPERTY), ] @classmethod def index_about_information(cls, modulestore, course): """ Add the given course to the course discovery index Arguments: modulestore - modulestore object to use for operations<|fim▁hole|> if not searcher: return course_id = unicode(course.id) course_info = { 'id': course_id, 'course': course_id, 'content': {}, 'image_url': course_image_url(course), } # load data for all of the 'about' modules for this course into a dictionary about_dictionary = { item.location.name: item.data for item in modulestore.get_items(course.id, qualifiers={"category": "about"}) } about_context = { "course": course, "about_dictionary": about_dictionary, } for about_information in cls.ABOUT_INFORMATION_TO_INCLUDE: # Broad exception handler so that a single bad property does not scupper the collection of others try: section_content = about_information.get_value(**about_context) except: # pylint: disable=bare-except section_content = None log.warning( "Course discovery could not collect property %s for course %s", about_information.property_name, course_id, exc_info=True, ) if section_content: if about_information.index_flags & AboutInfo.ANALYSE: analyse_content = section_content if isinstance(section_content, basestring): analyse_content = strip_html_content_to_text(section_content) course_info['content'][about_information.property_name] = analyse_content if about_information.index_flags & AboutInfo.PROPERTY: course_info[about_information.property_name] = section_content # Broad exception handler to protect around and report problems with indexing try: searcher.index(cls.DISCOVERY_DOCUMENT_TYPE, [course_info]) except: # pylint: disable=bare-except log.exception( "Course discovery indexing error encountered, course discovery index may be out of date %s", course_id, ) raise log.debug( "Successfully added %s course to the course discovery index", course_id ) @classmethod def _get_location_info(cls, normalized_structure_key): """ Builds location info dictionary """ return {"course": unicode(normalized_structure_key), "org": normalized_structure_key.org} @classmethod def remove_deleted_items(cls, structure_key): """ Remove item from Course About Search_index """ searcher = SearchEngine.get_search_engine(cls.INDEX_NAME) if not searcher: return response = searcher.search( doc_type=cls.DISCOVERY_DOCUMENT_TYPE, field_dictionary=cls._get_location_info(structure_key) ) result_ids = [result["data"]["id"] for result in response["results"]] searcher.remove(cls.DISCOVERY_DOCUMENT_TYPE, result_ids)<|fim▁end|>
course - course object from which to take properties, locate about information """ searcher = SearchEngine.get_search_engine(cls.INDEX_NAME)
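The AboutInfo flags defined in the prompt above form a small bitwise routing table: ANALYSE = 1 << 0 sends a value into the tokenised full-text content, PROPERTY = 1 << 1 stores it as an exact-match field, and ANALYSE | PROPERTY does both. A minimal Python sketch of that routing; route_field and doc are illustrative names that do not appear in the dataset row:

ANALYSE = 1 << 0   # 0b01: searchable, analysed text
PROPERTY = 1 << 1  # 0b10: exact-match document property

def route_field(name, value, flags, doc):
    # doc mirrors the course_info shape in the sample: a 'content'
    # sub-dict for analysed text plus top-level properties.
    if flags & ANALYSE:
        doc['content'][name] = value
    if flags & PROPERTY:
        doc[name] = value

doc = {'content': {}}
route_field('title', 'Intro to Search', ANALYSE | PROPERTY, doc)
# 'title' is now both analysed content and an exact-match property.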
<|file_name|>NoteSuppressedWarnings.java<|end_file_name|><|fim▁begin|>/* * FindBugs - Find bugs in Java programs * Copyright (C) 2003,2004 University of Maryland * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package edu.umd.cs.findbugs.detect; import java.util.*; import edu.umd.cs.findbugs.*; import edu.umd.cs.findbugs.ba.AnalysisContext; import edu.umd.cs.findbugs.ba.ClassContext; import edu.umd.cs.findbugs.visitclass.AnnotationVisitor; import org.apache.bcel.Repository; import org.apache.bcel.classfile.*; import edu.umd.cs.findbugs.visitclass.Constants2; import static edu.umd.cs.findbugs.visitclass.Constants2.*; public class NoteSuppressedWarnings extends AnnotationVisitor implements Detector, Constants2 { private static Set<String> packages = new HashSet<String>(); private SuppressionMatcher suppressionMatcher; private BugReporter bugReporter; private AnalysisContext analysisContext; private NoteSuppressedWarnings recursiveDetector; public NoteSuppressedWarnings(BugReporter bugReporter) { this(bugReporter, false); } public NoteSuppressedWarnings(BugReporter bugReporter, boolean recursive) { if (!recursive) { DelegatingBugReporter b = (DelegatingBugReporter) bugReporter; BugReporter origBugReporter = b.getRealBugReporter(); suppressionMatcher = new SuppressionMatcher(); BugReporter filterBugReporter = new FilterBugReporter(origBugReporter, suppressionMatcher, false); b.setRealBugReporter(filterBugReporter); recursiveDetector = new NoteSuppressedWarnings(bugReporter,true); recursiveDetector.suppressionMatcher = suppressionMatcher; } this.bugReporter = bugReporter; } public void setAnalysisContext(AnalysisContext analysisContext) { this.analysisContext = analysisContext; } public void visitClassContext(ClassContext classContext) { classContext.getJavaClass().accept(this); } public void visit(JavaClass obj) { if (recursiveDetector == null) return; try { if (getClassName().endsWith("package-info")) return; String packageName = getPackageName().replace('/', '.'); if (!packages.add(packageName)) return; String packageInfo = "package-info"; if (packageName.length() > 0) packageInfo = packageName + "." 
+ packageInfo; JavaClass packageInfoClass = Repository.lookupClass(packageInfo); recursiveDetector.visitJavaClass(packageInfoClass); } catch (ClassNotFoundException e) { // ignore } } public void visitAnnotation(String annotationClass, Map<String, Object> map, boolean runtimeVisible) { if (!annotationClass.endsWith("SuppressWarnings")) return; Object value = map.get("value"); if (value == null || !(value instanceof Object[])) { suppressWarning(null); return; } Object [] suppressedWarnings = (Object[]) value; if (suppressedWarnings.length == 0) suppressWarning(null);<|fim▁hole|> private void suppressWarning(String pattern) { String className = getDottedClassName(); ClassAnnotation clazz = new ClassAnnotation(getDottedClassName()); if (className.endsWith("package-info") && recursiveDetector == null) suppressionMatcher.addPackageSuppressor( new PackageWarningSuppressor(pattern, getPackageName().replace('/', '.'))); else if (visitingMethod()) suppressionMatcher.addSuppressor( new MethodWarningSuppressor(pattern, clazz, MethodAnnotation.fromVisitedMethod(this))); else if (visitingField()) suppressionMatcher.addSuppressor( new FieldWarningSuppressor(pattern, clazz, FieldAnnotation.fromVisitedField(this))); else suppressionMatcher.addSuppressor( new ClassWarningSuppressor(pattern, clazz)); } public void report() { } }<|fim▁end|>
else for(int i = 0; i < suppressedWarnings.length; i++) suppressWarning((String)suppressedWarnings[i]); }
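The completion above loops over the annotation's value array, while the prompt handles the degenerate cases (no value, empty array) by calling suppressWarning(null). The underlying pattern is "normalize an optional scalar-or-array into a list of patterns"; a hedged Python sketch of the same normalisation, names illustrative:

def normalize_patterns(value):
    # value may be None, one string, or a sequence of strings, mirroring
    # @SuppressWarnings("x") versus @SuppressWarnings({"x", "y"}).
    if value is None:
        return [None]              # suppress everything, like suppressWarning(null)
    if isinstance(value, str):
        return [value]
    return list(value) or [None]   # an empty array also means suppress-all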
<|file_name|>manager.js<|end_file_name|><|fim▁begin|>/* Copyright (c) 2004-2008, The Dojo Foundation All Rights Reserved. Available via Academic Free License >= 2.1 OR the modified BSD license. see: http://dojotoolkit.org/license for details<|fim▁hole|> if(!dojo._hasResource["dijit._base.manager"]){ dojo._hasResource["dijit._base.manager"]=true; dojo.provide("dijit._base.manager"); dojo.declare("dijit.WidgetSet",null,{constructor:function(){ this._hash={}; },add:function(_1){ if(this._hash[_1.id]){ throw new Error("Tried to register widget with id=="+_1.id+" but that id is already registered"); } this._hash[_1.id]=_1; },remove:function(id){ delete this._hash[id]; },forEach:function(_3){ for(var id in this._hash){ _3(this._hash[id]); } },filter:function(_5){ var _6=new dijit.WidgetSet(); this.forEach(function(_7){ if(_5(_7)){ _6.add(_7); } }); return _6; },byId:function(id){ return this._hash[id]; },byClass:function(_9){ return this.filter(function(_a){ return _a.declaredClass==_9; }); }}); dijit.registry=new dijit.WidgetSet(); dijit._widgetTypeCtr={}; dijit.getUniqueId=function(_b){ var id; do{ id=_b+"_"+(_b in dijit._widgetTypeCtr?++dijit._widgetTypeCtr[_b]:dijit._widgetTypeCtr[_b]=0); }while(dijit.byId(id)); return id; }; if(dojo.isIE){ dojo.addOnWindowUnload(function(){ dijit.registry.forEach(function(_d){ _d.destroy(); }); }); } dijit.byId=function(id){ return (dojo.isString(id))?dijit.registry.byId(id):id; }; dijit.byNode=function(_f){ return dijit.registry.byId(_f.getAttribute("widgetId")); }; dijit.getEnclosingWidget=function(_10){ while(_10){ if(_10.getAttribute&&_10.getAttribute("widgetId")){ return dijit.registry.byId(_10.getAttribute("widgetId")); } _10=_10.parentNode; } return null; }; dijit._tabElements={area:true,button:true,input:true,object:true,select:true,textarea:true}; dijit._isElementShown=function(_11){ var _12=dojo.style(_11); return (_12.visibility!="hidden")&&(_12.visibility!="collapsed")&&(_12.display!="none")&&(dojo.attr(_11,"type")!="hidden"); }; dijit.isTabNavigable=function(_13){ if(dojo.hasAttr(_13,"disabled")){ return false; } var _14=dojo.hasAttr(_13,"tabindex"); var _15=dojo.attr(_13,"tabindex"); if(_14&&_15>=0){ return true; } var _16=_13.nodeName.toLowerCase(); if(((_16=="a"&&dojo.hasAttr(_13,"href"))||dijit._tabElements[_16])&&(!_14||_15>=0)){ return true; } return false; }; dijit._getTabNavigable=function(_17){ var _18,_19,_1a,_1b,_1c,_1d; var _1e=function(_1f){ dojo.query("> *",_1f).forEach(function(_20){ var _21=dijit._isElementShown(_20); if(_21&&dijit.isTabNavigable(_20)){ var _22=dojo.attr(_20,"tabindex"); if(!dojo.hasAttr(_20,"tabindex")||_22==0){ if(!_18){ _18=_20; } _19=_20; }else{ if(_22>0){ if(!_1a||_22<_1b){ _1b=_22; _1a=_20; } if(!_1c||_22>=_1d){ _1d=_22; _1c=_20; } } } } if(_21&&_20.nodeName.toUpperCase()!="SELECT"){ _1e(_20); } }); }; if(dijit._isElementShown(_17)){ _1e(_17); } return {first:_18,last:_19,lowest:_1a,highest:_1c}; }; dijit.getFirstInTabbingOrder=function(_23){ var _24=dijit._getTabNavigable(dojo.byId(_23)); return _24.lowest?_24.lowest:_24.first; }; dijit.getLastInTabbingOrder=function(_25){ var _26=dijit._getTabNavigable(dojo.byId(_25)); return _26.last?_26.last:_26.highest; }; dijit.defaultDuration=dojo.config["defaultDuration"]||200; }<|fim▁end|>
*/
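manager.js in the row above is essentially a hash-backed widget registry (WidgetSet) plus tab-order scanning. The registry idea in a few lines of Python, purely as a stand-in sketch; Widget objects with id and declared_class attributes are assumed:

class WidgetSet:
    def __init__(self):
        self._hash = {}

    def add(self, widget):
        # same duplicate-id guard the dojo version throws on
        if widget.id in self._hash:
            raise ValueError('id %s already registered' % widget.id)
        self._hash[widget.id] = widget

    def remove(self, widget_id):
        self._hash.pop(widget_id, None)

    def by_class(self, declared_class):
        return [w for w in self._hash.values()
                if w.declared_class == declared_class]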
<|file_name|>admin.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # This technical data was produced for the U. S. Government under Contract No. W15P7T-13-C-F600, and # is subject to the Rights in Technical Data-Noncommercial Items clause at DFARS 252.227-7013 (FEB 2012) from reversion.admin import VersionAdmin from django.contrib.gis import admin from .models import Feedback,Topic @admin.register(Feedback) class FeedbackAdmin(VersionAdmin, admin.ModelAdmin):<|fim▁hole|> ordering = ['topic'] @admin.register(Topic) class TopicAdmin(VersionAdmin, admin.ModelAdmin): model = Topic list_display = ['name'] save_as = True ordering = ['name']<|fim▁end|>
model = Feedback list_display = ['name', 'email', 'topic', 'message'] save_as = True
<|file_name|>feature-gate-prelude_import.rs<|end_file_name|><|fim▁begin|>#[prelude_import] //~ ERROR `#[prelude_import]` is for use by rustc only use std::prelude::v1::*; <|fim▁hole|><|fim▁end|>
fn main() {}
<|file_name|>setup.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from setuptools import setup from setuptools import find_packages version = '0.1' shortdesc = "Klarna Payment for bda.plone.shop" setup( name='bda.plone.klarnapayment', version=version, description=shortdesc, classifiers=[ 'Environment :: Web Environment', 'License :: OSI Approved :: GNU General Public License (GPL)', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', ], author='Espen Moe-Nilssen', author_email='[email protected]', license='GNU General Public Licence', packages=find_packages('src'), package_dir = {'': 'src'}, namespace_packages=['bda', 'bda.plone'], include_package_data=True, zip_safe=False, install_requires=[ 'setuptools', 'Plone', 'bda.plone.shop', 'klarnacheckout', ], extras_require={ 'test': [ 'plone.app.testing', ] }, entry_points=""" [z3c.autoinclude.plugin] target = plone """, )<|fim▁end|>
import os
<|file_name|>saluki.js<|end_file_name|><|fim▁begin|>'use strict'; function _asyncToGenerator(fn) { return function () { var gen = fn.apply(this, arguments); return new Promise(function (resolve, reject) { function step(key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { return Promise.resolve(value).then(function (value) { step("next", value); }, function (err) { step("throw", err); }); } } return step("next"); }); }; } /** * * saluki api 查看 * * Created by joe on 16/12/26. */ var services = require('../grpc/index').services(); var consul = require('../grpc/consul'); module.exports = function () { var opts = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; <|fim▁hole|> if (url.endsWith('/saluki')) { ctx.body = JSON.stringify(consul.getALL()); return; } yield next(); }); function saluki(_x2, _x3) { return _ref.apply(this, arguments); } return saluki; }(); };<|fim▁end|>
return function () { var _ref = _asyncToGenerator(function* (ctx, next) { var url = ctx.url; ctx.services = services;
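The saluki completion wires a Koa-style middleware: if the request URL ends in /saluki it answers directly from the consul registry and returns, otherwise it awaits the rest of the chain. The same control flow in a Python sketch; consul_get_all is a hypothetical stand-in for the sample's consul.getALL():

import json

def consul_get_all():
    # hypothetical stand-in for consul.getALL() in the sample
    return {'services': []}

async def saluki(ctx, next_handler):
    if ctx.url.endswith('/saluki'):
        ctx.body = json.dumps(consul_get_all())
        return            # short-circuit: later middleware never runs
    await next_handler()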
<|file_name|>surface.py<|end_file_name|><|fim▁begin|>import numpy as np class Surface(object): def __init__(self, image, edge_points3d, edge_points2d): """ Constructor for a surface defined by a texture image and 4 boundary points. Choose the first point as the origin of the surface's coordinate system. :param image: image array :param edge_points3d: array of 3d coordinates of 4 corner points in clockwise direction :param edge_points2d: array of 2d coordinates of 4 corner points in clockwise direction """ assert len(edge_points3d) == 4 and len(edge_points2d) == 4 self.image = image self.edge_points3d = edge_points3d self.edge_points2d = np.float32(edge_points2d) # This is required for using cv2's getPerspectiveTransform self.normal = self._get_normal_vector() def top_left_corner3d(self): return self.edge_points3d[0] def top_right_corner3d(self): return self.edge_points3d[1] def bottom_right_corner3d(self): return self.edge_points3d[2]<|fim▁hole|> def distance_to_point(self, point): point_to_surface = point - self.top_left_corner3d() distance_to_surface = self.normal.dot(point_to_surface) return distance_to_surface def _get_normal_vector(self): """ :return: the normal vector of the surface. It determined the front side of the surface and it's not necessarily a unit vector """ p0 = self.edge_points3d[0] p1 = self.edge_points3d[1] p3 = self.edge_points3d[3] v1 = p3 - p0 v2 = p1 - p0 normal = np.cross(v1, v2) norm = np.linalg.norm(normal) return normal / norm class Polyhedron(object): def __init__(self, surfaces): self.surfaces = surfaces class Space(object): def __init__(self, models=None): self.models = models or [] def add_model(self, model): assert isinstance(model, Polyhedron) self.models.append(model) class Line2D(object): def __init__(self, point1, point2): """ Using the line equation a*x + b*y + c = 0 with b >= 0 :param point1: starting point :param point2: ending point :return: a Line object """ assert len(point1) == 2 and len(point2) == 2 self.a = point2[1] - point1[1] self.b = point1[0] - point2[0] self.c = point1[1] * point2[0] - point1[0] * point2[1] if self.b < 0: self.a = -self.a self.b = -self.b self.c = -self.c def is_point_on_left(self, point): return self.a * point[0] + self.b * point[1] + self.c > 0 def is_point_on_right(self, point): return self.a * point[0] + self.b * point[1] + self.c < 0 def is_point_on_line(self, point): return self.a * point[0] + self.b * point[1] + self.c == 0 def get_y_from_x(self, x): if self.b == 0: return 0.0 return 1.0 * (-self.c - self.a * x) / self.b def get_x_from_y(self, y): if self.a == 0: return 0.0 return 1.0 * (-self.c - self.b * y) / self.a<|fim▁end|>
def bottom_left_corner3d(self): return self.edge_points3d[3]
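surface.py computes the plane normal as the normalised cross product of two boundary edges (v1 = p3 - p0, v2 = p1 - p0) and the signed distance of a point as normal . (point - origin). A small numeric check under the same conventions:

import numpy as np

p0 = np.array([0.0, 0.0, 0.0])
p1 = np.array([1.0, 0.0, 0.0])
p3 = np.array([0.0, 1.0, 0.0])
v1, v2 = p3 - p0, p1 - p0
normal = np.cross(v1, v2)              # (0, 0, -1) for this corner ordering
normal = normal / np.linalg.norm(normal)

point = np.array([0.5, 0.5, 2.0])
distance = normal.dot(point - p0)      # -2.0; the sign encodes the side of the plane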
<|file_name|>fuzzer.cpp<|end_file_name|><|fim▁begin|>/* * The ManaPlus Client * Copyright (C) 2013-2015 The ManaPlus Developers * * This file is part of The ManaPlus Client. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ #include "utils/fuzzer.h" #ifdef USE_FUZZER #include "client.h" #include "logger.h" #include "settings.h" #include "utils/stringutils.h" #include "debug.h" namespace { Logger *fuzz = nullptr; int fuzzRand = 50; } // namespace void Fuzzer::init()<|fim▁hole|> fuzz->log("Srand: %u", sr); srand(sr); } bool Fuzzer::conditionTerminate(const char *const name) { if ((rand() % 100) <= fuzzRand) { fuzz->log("deleted: %s", name); return true; } fuzz->log("passed: %s", name); return false; } #endif<|fim▁end|>
{ fuzz = new Logger; fuzz->setLogFile(settings.localDataDir + "/fuzzer.log"); unsigned int sr = time(nullptr);
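The fuzzer's conditionTerminate fires with roughly fuzzRand percent probability per call (rand() % 100 <= fuzzRand) and logs the srand seed first, so a failing run can be replayed. The same idea in a Python sketch:

import random
import time

FUZZ_RAND = 50             # approximate percentage of calls that inject a failure
seed = int(time.time())    # logged so the exact run is reproducible
random.seed(seed)

def condition_terminate(name):
    if random.randrange(100) <= FUZZ_RAND:
        print('deleted: %s (seed=%d)' % (name, seed))
        return True
    return False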
<|file_name|>merkleTree.js<|end_file_name|><|fim▁begin|>import {keccak256, bufferToHex} from "ethereumjs-util" export default class MerkleTree { constructor(elements) { // Filter empty strings and hash elements this.elements = elements.filter(el => el).map(el => keccak256(el)) // Deduplicate elements this.elements = this.bufDedup(this.elements) // Sort elements this.elements.sort(Buffer.compare) // Create layers this.layers = this.getLayers(this.elements) }<|fim▁hole|> if (elements.length === 0) { return [[""]] } const layers = [] layers.push(elements) // Get next layer until we reach the root while (layers[layers.length - 1].length > 1) { layers.push(this.getNextLayer(layers[layers.length - 1])) } return layers } getNextLayer(elements) { return elements.reduce((layer, el, idx, arr) => { if (idx % 2 === 0) { // Hash the current element with its pair element layer.push(this.combinedHash(el, arr[idx + 1])) } return layer }, []) } combinedHash(first, second) { if (!first) { return second } if (!second) { return first } return keccak256(this.sortAndConcat(first, second)) } getRoot() { return this.layers[this.layers.length - 1][0] } getHexRoot() { return bufferToHex(this.getRoot()) } getProof(el) { let idx = this.bufIndexOf(el, this.elements) if (idx === -1) { throw new Error("Element does not exist in Merkle tree") } return this.layers.reduce((proof, layer) => { const pairElement = this.getPairElement(idx, layer) if (pairElement) { proof.push(pairElement) } idx = Math.floor(idx / 2) return proof }, []) } getHexProof(el) { const proof = this.getProof(el) return this.bufArrToHexArr(proof) } getPairElement(idx, layer) { const pairIdx = idx % 2 === 0 ? idx + 1 : idx - 1 if (pairIdx < layer.length) { return layer[pairIdx] } else { return null } } bufIndexOf(el, arr) { let hash // Convert element to 32 byte hash if it is not one already if (el.length !== 32 || !Buffer.isBuffer(el)) { hash = keccak256(el) } else { hash = el } for (let i = 0; i < arr.length; i++) { if (hash.equals(arr[i])) { return i } } return -1 } bufDedup(elements) { return elements.filter((el, idx) => { return this.bufIndexOf(el, elements) === idx }) } bufArrToHexArr(arr) { if (arr.some(el => !Buffer.isBuffer(el))) { throw new Error("Array is not an array of buffers") } return arr.map(el => "0x" + el.toString("hex")) } sortAndConcat(...args) { return Buffer.concat([...args].sort(Buffer.compare)) } }<|fim▁end|>
getLayers(elements) {
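getNextLayer above folds the current layer pairwise until a single root remains, and combinedHash passes a lone odd element up unchanged. A hedged Python sketch of the same reduction, substituting sha256 for the sample's keccak256:

import hashlib

def combined_hash(a, b):
    if a is None:
        return b
    if b is None:
        return a                   # odd node is carried up unchanged
    return hashlib.sha256(min(a, b) + max(a, b)).digest()   # sort-and-concat

def get_layers(leaves):
    layers = [leaves]
    while len(layers[-1]) > 1:
        prev = layers[-1]
        layers.append([
            combined_hash(prev[i], prev[i + 1] if i + 1 < len(prev) else None)
            for i in range(0, len(prev), 2)
        ])
    return layers

leaves = sorted(hashlib.sha256(x).digest() for x in [b'a', b'b', b'c'])
root = get_layers(leaves)[-1][0]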
<|file_name|>test_script_delete.py<|end_file_name|><|fim▁begin|>import os import time import sys FOLDERPATH = sys.argv[1] #os.chdir(FOLDERPATH) walk = os.walk(FOLDERPATH) FSEVENT = "delete"<|fim▁hole|> FILEPATHPREFIX = item[0] + "\\" for song in item[2]: if song.endswith(".mp3"): FILEPATH = "%s%s" % (FILEPATHPREFIX, song) os.system('python script.py "' + song + '" "' + FILEPATH + '" "' + FSEVENT + '"')<|fim▁end|>
for item in walk:
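The walker above interpolates file names directly into a shell string via os.system, which breaks on names containing quotes; passing an argument list to subprocess avoids the shell entirely. A safer equivalent sketch of the same loop:

import os
import subprocess
import sys

folder = sys.argv[1]
for dirpath, _dirnames, filenames in os.walk(folder):
    for song in filenames:
        if song.endswith('.mp3'):
            path = os.path.join(dirpath, song)
            # argument list, no shell: quoting in names cannot break the command
            subprocess.run(['python', 'script.py', song, path, 'delete'])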
<|file_name|>redbox-factory.hpp<|end_file_name|><|fim▁begin|>void RedboxFactory::add (Addr addr, ActionType t) { switch (t) { case ActionType::RD8 : return add_read (addr, 1); case ActionType::RD16 : return add_read (addr, 2); case ActionType::RD32 : return add_read (addr, 4); case ActionType::RD64 : return add_read (addr, 8); case ActionType::WR8 : return add_write (addr, 1); case ActionType::WR16 : return add_write (addr, 2); case ActionType::WR32 : return add_write (addr, 4); case ActionType::WR64 : return add_write (addr, 8); default : SHOW (t, "d"); ASSERT (0); } } void RedboxFactory::add_read (Addr addr, unsigned size) { ASSERT (size); read_regions.emplace_back (addr, size); } void RedboxFactory::add_write (Addr addr, unsigned size) { ASSERT (size); write_regions.emplace_back (addr, size); } void RedboxFactory::clear () { read_regions.clear (); write_regions.clear (); } Redbox *RedboxFactory::create () { Redbox *b; // Special case: we return a null pointer if both memory pools are empty. // In this case the event will continue being labelled by a null pointer // rather than a pointer to a Redbox. This is useful during the data race // detection in DataRaceAnalysis::find_data_races(), because that way we // don't even need to look inside of the red box to see if it has events, the // event will straightfowardly be discarde for DR detection if (read_regions.empty() and write_regions.empty()) return nullptr; // allocate a new Redbox and keep the pointer to it, we are the container b = new Redbox (); boxes.push_back (b); // compress the lists of memory areas compress (read_regions); compress (write_regions); // copy them to the new redbox b->readpool = read_regions; b->writepool = write_regions; #ifdef CONFIG_DEBUG if (verb_debug) b->dump (); // this will assert that the memory pools are a sorted sequence of disjoint // memory areas b->readpool.assertt (); b->writepool.assertt (); #endif // restart the internal arrays read_regions.clear (); write_regions.clear (); ASSERT (empty()); return b; } void RedboxFactory::compress (MemoryPool::Container &regions) { unsigned i, j; size_t s; // nothing to do if we have no regions; code below assumes we have at least 1 if (regions.empty ()) return; // sort the memory regions by increasing value of lower bound struct compare { bool operator() (const MemoryRegion<Addr> &a, const MemoryRegion<Addr> &b) { return a.lower < b.lower; } } cmp; std::sort (regions.begin(), regions.end(), cmp); // compress regions s = regions.size (); breakme (); for (i = 0, j = 1; j < s; ++j) { ASSERT (i < j); ASSERT (regions[i].lower <= regions[j].lower); // if the next region's lower bound is below i's region upper bound, we can // extend i's range if (regions[i].upper >= regions[j].lower) { regions[i].upper = std::max (regions[i].upper, regions[j].upper); } else {<|fim▁hole|> { // copy j into i regions[i] = regions[j]; } } } DEBUG ("redbox-factory: compressed %zu regions into %u", regions.size(), i+1); regions.resize (i + 1); }<|fim▁end|>
// otherwise there is a gap between interval i and interval j, so we // need to create a new interval at offset i+1, only if i+1 != j ++i; if (i != j)
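compress() sorts the regions by lower bound and then merges in place: an overlap extends regions[i], a gap advances i, which is exactly what the completion's ++i does. The classic merge written out in Python, with a worked call:

def compress(regions):
    # regions: list of [lower, upper] pairs; merged in place after sorting
    regions.sort(key=lambda r: r[0])
    i = 0
    for j in range(1, len(regions)):
        if regions[i][1] >= regions[j][0]:          # overlap: extend interval i
            regions[i][1] = max(regions[i][1], regions[j][1])
        else:                                       # gap: start a new interval
            i += 1
            regions[i] = regions[j]
    del regions[i + 1:]

r = [[0, 5], [3, 9], [12, 14]]
compress(r)    # r is now [[0, 9], [12, 14]]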
<|file_name|>CinematicCamera.js<|end_file_name|><|fim▁begin|>import { Mesh, OrthographicCamera, PerspectiveCamera, PlaneGeometry, Scene, ShaderMaterial, UniformsUtils, WebGLRenderTarget } from 'three'; import { BokehShader } from '../shaders/BokehShader2.js'; import { BokehDepthShader } from '../shaders/BokehShader2.js'; class CinematicCamera extends PerspectiveCamera { <|fim▁hole|> super( fov, aspect, near, far ); this.type = 'CinematicCamera'; this.postprocessing = { enabled: true }; this.shaderSettings = { rings: 3, samples: 4 }; const depthShader = BokehDepthShader; this.materialDepth = new ShaderMaterial( { uniforms: depthShader.uniforms, vertexShader: depthShader.vertexShader, fragmentShader: depthShader.fragmentShader } ); this.materialDepth.uniforms[ 'mNear' ].value = near; this.materialDepth.uniforms[ 'mFar' ].value = far; // In case of cinematicCamera, having a default lens set is important this.setLens(); this.initPostProcessing(); } // providing fnumber and coc(Circle of Confusion) as extra arguments // In case of cinematicCamera, having a default lens set is important // if fnumber and coc are not provided, cinematicCamera tries to act as a basic PerspectiveCamera setLens( focalLength = 35, filmGauge = 35, fNumber = 8, coc = 0.019 ) { this.filmGauge = filmGauge; this.setFocalLength( focalLength ); this.fNumber = fNumber; this.coc = coc; // fNumber is focalLength by aperture this.aperture = focalLength / this.fNumber; // hyperFocal is required to calculate depthOfField when a lens tries to focus at a distance with given fNumber and focalLength this.hyperFocal = ( focalLength * focalLength ) / ( this.aperture * this.coc ); } linearize( depth ) { const zfar = this.far; const znear = this.near; return - zfar * znear / ( depth * ( zfar - znear ) - zfar ); } smoothstep( near, far, depth ) { const x = this.saturate( ( depth - near ) / ( far - near ) ); return x * x * ( 3 - 2 * x ); } saturate( x ) { return Math.max( 0, Math.min( 1, x ) ); } // function for focusing at a distance from the camera focusAt( focusDistance = 20 ) { const focalLength = this.getFocalLength(); // distance from the camera (normal to frustrum) to focus on this.focus = focusDistance; // the nearest point from the camera which is in focus (unused) this.nearPoint = ( this.hyperFocal * this.focus ) / ( this.hyperFocal + ( this.focus - focalLength ) ); // the farthest point from the camera which is in focus (unused) this.farPoint = ( this.hyperFocal * this.focus ) / ( this.hyperFocal - ( this.focus - focalLength ) ); // the gap or width of the space in which is everything is in focus (unused) this.depthOfField = this.farPoint - this.nearPoint; // Considering minimum distance of focus for a standard lens (unused) if ( this.depthOfField < 0 ) this.depthOfField = 0; this.sdistance = this.smoothstep( this.near, this.far, this.focus ); this.ldistance = this.linearize( 1 - this.sdistance ); this.postprocessing.bokeh_uniforms[ 'focalDepth' ].value = this.ldistance; } initPostProcessing() { if ( this.postprocessing.enabled ) { this.postprocessing.scene = new Scene(); this.postprocessing.camera = new OrthographicCamera( window.innerWidth / - 2, window.innerWidth / 2, window.innerHeight / 2, window.innerHeight / - 2, - 10000, 10000 ); this.postprocessing.scene.add( this.postprocessing.camera ); this.postprocessing.rtTextureDepth = new WebGLRenderTarget( window.innerWidth, window.innerHeight ); this.postprocessing.rtTextureColor = new WebGLRenderTarget( window.innerWidth, window.innerHeight ); const bokeh_shader = BokehShader; 
this.postprocessing.bokeh_uniforms = UniformsUtils.clone( bokeh_shader.uniforms ); this.postprocessing.bokeh_uniforms[ 'tColor' ].value = this.postprocessing.rtTextureColor.texture; this.postprocessing.bokeh_uniforms[ 'tDepth' ].value = this.postprocessing.rtTextureDepth.texture; this.postprocessing.bokeh_uniforms[ 'manualdof' ].value = 0; this.postprocessing.bokeh_uniforms[ 'shaderFocus' ].value = 0; this.postprocessing.bokeh_uniforms[ 'fstop' ].value = 2.8; this.postprocessing.bokeh_uniforms[ 'showFocus' ].value = 1; this.postprocessing.bokeh_uniforms[ 'focalDepth' ].value = 0.1; //console.log( this.postprocessing.bokeh_uniforms[ "focalDepth" ].value ); this.postprocessing.bokeh_uniforms[ 'znear' ].value = this.near; this.postprocessing.bokeh_uniforms[ 'zfar' ].value = this.near; this.postprocessing.bokeh_uniforms[ 'textureWidth' ].value = window.innerWidth; this.postprocessing.bokeh_uniforms[ 'textureHeight' ].value = window.innerHeight; this.postprocessing.materialBokeh = new ShaderMaterial( { uniforms: this.postprocessing.bokeh_uniforms, vertexShader: bokeh_shader.vertexShader, fragmentShader: bokeh_shader.fragmentShader, defines: { RINGS: this.shaderSettings.rings, SAMPLES: this.shaderSettings.samples, DEPTH_PACKING: 1 } } ); this.postprocessing.quad = new Mesh( new PlaneGeometry( window.innerWidth, window.innerHeight ), this.postprocessing.materialBokeh ); this.postprocessing.quad.position.z = - 500; this.postprocessing.scene.add( this.postprocessing.quad ); } } renderCinematic( scene, renderer ) { if ( this.postprocessing.enabled ) { const currentRenderTarget = renderer.getRenderTarget(); renderer.clear(); // Render scene into texture scene.overrideMaterial = null; renderer.setRenderTarget( this.postprocessing.rtTextureColor ); renderer.clear(); renderer.render( scene, this ); // Render depth into texture scene.overrideMaterial = this.materialDepth; renderer.setRenderTarget( this.postprocessing.rtTextureDepth ); renderer.clear(); renderer.render( scene, this ); // Render bokeh composite renderer.setRenderTarget( null ); renderer.render( this.postprocessing.scene, this.postprocessing.camera ); renderer.setRenderTarget( currentRenderTarget ); } } } export { CinematicCamera };<|fim▁end|>
constructor( fov, aspect, near, far ) {
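setLens and focusAt above implement standard thin-lens bookkeeping: aperture = f / N, hyperfocal H = f^2 / (aperture * coc), and for a focus distance s the in-focus band runs from H*s / (H + (s - f)) to H*s / (H - (s - f)). Plugging in the sample's defaults (f = 35, N = 8, coc = 0.019, all in the same scene units; s = 100 is a chosen example, not a value from the row):

f, N, coc = 35.0, 8.0, 0.019
aperture = f / N                    # 4.375
H = f * f / (aperture * coc)        # ~14736.8
s = 100.0                           # example focus distance
near = H * s / (H + (s - f))        # ~99.56
far = H * s / (H - (s - f))         # ~100.44
dof = far - near                    # ~0.88: a shallow in-focus band at f/8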
<|file_name|>Topic.java<|end_file_name|><|fim▁begin|><|fim▁hole|> * This file is part of Aspose.Words. The source code in this file * is only intended as a supplement to the documentation, and is provided * "as is", without warranty of any kind, either expressed or implied. */ package loadingandsaving.loadingandsavinghtml.splitintohtmlpages.java; /** * A simple class to hold a topic title and HTML file name together. */ class Topic { Topic(String title, String fileName) throws Exception { mTitle = title; mFileName = fileName; } String getTitle() throws Exception { return mTitle; } String getFileName() throws Exception { return mFileName; } private final String mTitle; private final String mFileName; }<|fim▁end|>
/* * Copyright 2001-2014 Aspose Pty Ltd. All Rights Reserved. *
<|file_name|>polygon_mask.cc<|end_file_name|><|fim▁begin|>/****************************************************************************** * Copyright 2017 The Apollo Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *****************************************************************************/ #include "modules/perception/obstacle/lidar/roi_filter/hdmap_roi_filter/polygon_mask.h" namespace apollo { namespace perception { void GetValidXRange(const typename PolygonScanConverter::Polygon& polygon, const Bitmap2D& bitmap, const PolygonScanConverter::DirectionMajor major_dir, const double major_dir_grid_size, Interval* valid_x_range) { Eigen::Vector2d polygon_min_pt, polygon_max_pt; polygon_min_pt.setConstant(std::numeric_limits<double>::max()); polygon_max_pt.setConstant(std::numeric_limits<double>::min()); for (const auto& point : polygon) { polygon_min_pt.x() = std::min(polygon_min_pt.x(), point.x()); polygon_min_pt.y() = std::min(polygon_min_pt.y(), point.y()); polygon_max_pt.x() = std::max(polygon_max_pt.x(), point.x()); polygon_max_pt.y() = std::max(polygon_max_pt.y(), point.y()); } const Eigen::Vector2d& bitmap_min_pt = bitmap.get_min_p(); const Eigen::Vector2d& bitmap_max_pt = bitmap.get_max_p(); valid_x_range->first = std::max(polygon_min_pt[major_dir], bitmap_min_pt[major_dir]); valid_x_range->second = std::min(polygon_max_pt[major_dir], bitmap_max_pt[major_dir]); // For numerical stability valid_x_range->first = (static_cast<int>((valid_x_range->first - bitmap_min_pt[major_dir]) / major_dir_grid_size) + 0.5) * major_dir_grid_size + bitmap_min_pt[major_dir]; } void DrawPolygonInBitmap(const typename PolygonScanConverter::Polygon& polygon, const double extend_dist, Bitmap2D* bitmap) { PolygonScanConverter::DirectionMajor major_dir = bitmap->get_dir_major(); PolygonScanConverter::DirectionMajor op_major_dir = bitmap->get_op_dir_major(); double major_dir_grid_size = bitmap->get_grid_size()[major_dir]; // 1. Get valid x range Interval valid_x_range; GetValidXRange(polygon, *bitmap, major_dir, major_dir_grid_size, &valid_x_range); // 2. Convert polygon to scan intervals(Most important) std::vector<std::vector<Interval>> scans_intervals; PolygonScanConverter polygon_scan_converter; polygon_scan_converter.Init(major_dir, valid_x_range, polygon, major_dir_grid_size); polygon_scan_converter.ConvertScans(&scans_intervals); // 3. 
Draw grids in bitmap based on scan intervals const Eigen::Vector2d& bitmap_min_pt = bitmap->get_min_p(); const Eigen::Vector2d& bitmap_max_pt = bitmap->get_max_p(); double x = valid_x_range.first; for (size_t i = 0; i < scans_intervals.size(); x += major_dir_grid_size, ++i) { for (const auto& scan_interval : scans_intervals[i]) { if (scan_interval.first > scan_interval.second) { AERROR << "scan interval is not valid: " << "scan_interval.first = " << scan_interval.first << ", " << "scan_interval.second = " << scan_interval.second << "."; } Interval valid_y_range; valid_y_range.first = std::max(bitmap_min_pt[op_major_dir], scan_interval.first - extend_dist); valid_y_range.second = std::min(bitmap_max_pt[op_major_dir], scan_interval.second + extend_dist); if (valid_y_range.first > valid_y_range.second) {<|fim▁hole|> } bitmap->Set(x, valid_y_range.first, valid_y_range.second); } } } void DrawPolygonInBitmap( const std::vector<typename PolygonScanConverter::Polygon>& polygons, const double extend_dist, Bitmap2D* bitmap) { for (const auto& polygon : polygons) { DrawPolygonInBitmap(polygon, extend_dist, bitmap); } } } // namespace perception } // namespace apollo<|fim▁end|>
continue;
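The completion's continue is the empty-intersection guard: each scan interval is clamped to the bitmap's extent (max of lower bounds, min of upper bounds), and an inverted range means the interval lies wholly outside, so the row is skipped. The same guard in a Python sketch:

def clamped(interval, bounds, extend=0.0):
    lo = max(bounds[0], interval[0] - extend)
    hi = min(bounds[1], interval[1] + extend)
    return (lo, hi) if lo <= hi else None    # None plays the role of 'continue'

for iv in [(2.0, 4.0), (9.5, 11.0)]:
    rng = clamped(iv, bounds=(0.0, 8.0))
    if rng is None:
        continue           # interval entirely outside the bitmap
    # the real code would call bitmap->Set(x, rng[0], rng[1]) here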
<|file_name|>JoinDirective.java<|end_file_name|><|fim▁begin|>/* * WANDORA * Knowledge Extraction, Management, and Publishing Application * http://wandora.org * * Copyright (C) 2004-2016 Wandora Team * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * * * * JoinDirective.java * * Created on 25. lokakuuta 2007, 11:38 * */ package org.wandora.query; import org.wandora.topicmap.*; import java.util.*; /** * @deprecated * * @author olli */ public class JoinDirective implements Directive { private Directive query; private Locator joinContext; private Directive joinQuery; /** Creates a new instance of JoinDirective */ public JoinDirective(Directive query,Directive joinQuery) { this(query,(Locator)null,joinQuery); } public JoinDirective(Directive query,Locator joinContext,Directive joinQuery) { this.query=query; this.joinContext=joinContext; this.joinQuery=joinQuery; } public JoinDirective(Directive query,String joinContext,Directive joinQuery) { this(query,new Locator(joinContext),joinQuery); } public ArrayList<ResultRow> query(QueryContext context) throws TopicMapException { return query(context,null,null); } public ArrayList<ResultRow> query(QueryContext context,FilterDirective filter,Object filterParam) throws TopicMapException { Topic contextTopic=context.getContextTopic(); TopicMap tm=contextTopic.getTopicMap(); ArrayList<ResultRow> inner=query.query(context); ArrayList<ResultRow> res=new ArrayList<ResultRow>(); ArrayList<ResultRow> cachedJoin=null; boolean useCache=!joinQuery.isContextSensitive(); for(ResultRow row : inner){ Topic t=null; if(joinContext!=null){ Locator c=row.getPlayer(joinContext); if(c==null) continue; t=tm.getTopic(c); } else t=context.getContextTopic(); if(t==null) continue; ArrayList<ResultRow> joinRes; if(!useCache || cachedJoin==null){ joinRes=joinQuery.query(context.makeNewWithTopic(t)); if(useCache) cachedJoin=joinRes;<|fim▁hole|> for(ResultRow joinRow : joinRes){ ResultRow joined=ResultRow.joinRows(row,joinRow); if(filter!=null && !filter.includeRow(joined, contextTopic, tm, filterParam)) continue; res.add(joined); } } return res; } public boolean isContextSensitive(){ return query.isContextSensitive(); // note joinQuery gets context from query so it's sensitivity is same // as that of query } }<|fim▁end|>
} else joinRes=cachedJoin;
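JoinDirective's useCache flag is the interesting part: when the joined sub-query is context-insensitive it is evaluated once and the cached rows are reused for every outer row, so the join costs one sub-query plus a plain merge instead of one sub-query per row. A hedged Python sketch of that shape, names illustrative:

def join(outer_rows, run_join_query, context_sensitive):
    cached = None
    result = []
    for row in outer_rows:
        if context_sensitive or cached is None:
            join_rows = run_join_query(row)     # per-row evaluation
            if not context_sensitive:
                cached = join_rows              # safe to reuse from here on
        else:
            join_rows = cached
        result.extend((row, j) for j in join_rows)
    return result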
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>VERSION = "0.1" <|fim▁hole|><|fim▁end|>
from trello.api import *
<|file_name|>macro-crate-def-only.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // aux-build:macro_crate_def_only.rs<|fim▁hole|> #![feature(phase)] #[phase(plugin)] extern crate macro_crate_def_only; pub fn main() { assert_eq!(5i, make_a_5!()); }<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ <DefineSource> @Date : Fri Nov 14 13:20:38 2014 \n @Author : Erwan Ledoux \n\n </DefineSource> An Eulerer """ #<DefineAugmentation> import ShareYourSystem as SYS BaseModuleStr="ShareYourSystem.Specials.Simulaters.Populater" DecorationModuleStr="ShareYourSystem.Standards.Classors.Classer" SYS.setSubModule(globals()) #</DefineAugmentation> #<ImportSpecificModules> import numpy as np #</ImportSpecificModules> #<DefineClass> @DecorationClass() class EulererClass(BaseClass): #Definition RepresentingKeyStrsList=[ 'EuleringPreFloatsArray', 'EuleringJacMethodStr', 'EuleringStepTimeFloat', 'EuleredPostFloatsArray', ] def default_init(self, _EuleringPreFloatsArray=None, _EuleringJacMethodStr="euler_null", _EuleringStepTimeFloat=0.1, _EuleredPostFloatsArray=None, **_KwargVariablesDict ): #Call the parent __init__ method BaseClass.__init__(self,**_KwargVariablesDict) def euler_null(self):<|fim▁hole|> return np.zeros( len(self.EuleringPreFloatsArray) ) def do_euler( self, **_KwargVariablesDict ): #debug ''' self.debug(('self.',self,[ 'EuleringJacMethodStr' ])) ''' #Do euler self.EuleredPostFloatsArray=self.EuleringPreFloatsArray+getattr( self,self.EuleringJacMethodStr)()*self.EuleringStepTimeFloat #</DefineClass><|fim▁end|>
#return
<|file_name|>error.py<|end_file_name|><|fim▁begin|># # Copyright (c) SAS Institute Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import logging import traceback from restlib import response from mint import logerror from mint import mint_error from mint.rest.api import models from mint.rest.modellib import converter log = logging.getLogger(__name__) class ErrorCallback(object): def __init__(self, controller): self.controller = controller def processException(self, request, excClass, exception, tb): message = '%s: %s' % (excClass.__name__, exception) if hasattr(exception, 'status'): status = exception.status else: status = 500 self.logError(request, excClass, exception, tb, doEmail=True) # Only send the traceback information if it's an unintentional # exception (i.e. a 500) if status == 500: tbString = 'Traceback:\n' + ''.join(traceback.format_tb(tb)) text = [message + '\n', tbString] else: tbString = None text = [message + '\n'] isFlash = 'HTTP_X_FLASH_VERSION' in request.headers or 'X-Wrap-Response-Codes' in request.headers if not getattr(request, 'contentType', None): request.contentType = 'text/xml' request.responseType = 'xml' if isFlash or request.contentType != 'text/plain': # for text/plain, just print out the traceback in the easiest to read # format. code = status if isFlash: # flash ignores all data sent with a non-200 error status = 200 error = models.Fault(code=code, message=message, traceback=tbString) text = converter.toText(request.responseType, error, self.controller, request) return response.Response(text, content_type=request.contentType, status=status) def logError(self, request, e_type, e_value, e_tb, doEmail=True): info = { 'uri' : request.thisUrl, 'path' : request.path, 'method' : request.method, 'headers_in' : request.headers, 'request_params' : request.GET, 'post_params' : request.POST, 'remote' : request.remote, } try: logerror.logErrorAndEmail(self.controller.cfg, e_type, e_value, e_tb, 'API call', info, doEmail=doEmail)<|fim▁hole|> except mint_error.MailError, err: log.error("Error sending mail: %s", str(err))<|fim▁end|>
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate hyper; extern crate rustc_serialize;<|fim▁hole|> pub use self::client::Client; pub use self::structs::{MarathonTask, ServiceDefinition}; mod client; mod structs; #[derive(RustcDecodable, RustcEncodable)] pub struct Apps { pub apps: Vec<ServiceDefinition>, } #[derive(RustcDecodable, RustcEncodable)] pub struct Tasks { pub tasks: Vec<MarathonTask>, } #[derive(RustcDecodable, RustcEncodable)] pub struct Leader { pub leader: String, }<|fim▁end|>
extern crate url;
<|file_name|>GraphQLSegment-test.js<|end_file_name|><|fim▁begin|>/** * Copyright (c) 2013-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * @emails oncall+relay */ 'use strict'; jest.dontMock('GraphQLSegment'); const GraphQLSegment = require('GraphQLSegment'); const RelayRecord = require('RelayRecord'); RelayRecord.getDataID.mockImplementation(function(data) { return data.__dataID__; }); var edges = [ { __dataID__:'edge1', node: {__dataID__: 'id1'}, cursor: 'cursor1', }, { __dataID__:'edge2', node: {__dataID__: 'id2'}, cursor: 'cursor2', }, { __dataID__:'edge3', node: {__dataID__: 'id3'}, cursor: 'cursor3', }, ]; var moreEdges = [ { __dataID__:'edge4', node: {__dataID__: 'id4'}, cursor: 'cursor4', }, { __dataID__:'edge5', node: {__dataID__: 'id5'}, cursor: 'cursor5', }, { __dataID__:'edge6', node: {__dataID__: 'id6'}, cursor: 'cursor6', }, ]; var lastEdges = [ { __dataID__:'edge98', node: {__dataID__: 'id98'}, cursor: 'cursor98', }, { __dataID__:'edge99', node: {__dataID__: 'id99'}, cursor: 'cursor99', }, { __dataID__:'edge100', node: {__dataID__: 'id100'}, cursor: 'cursor100', }, ]; var beforeLastEdges = [ { __dataID__:'edge95', node: {__dataID__: 'id95'}, cursor: 'cursor95', }, { __dataID__:'edge96', node: {__dataID__: 'id96'}, cursor: 'cursor96', }, { __dataID__:'edge97', node: {__dataID__: 'id97'}, cursor: 'cursor97', }, ]; var oneEdge = { __dataID__:'edgeOneEdge', node: {__dataID__: 'idOneEdge'}, cursor: 'cursorOneEdge', }; var anotherEdge = { __dataID__:'edgeAnotherEdge', node: {__dataID__: 'idAnotherEdge'}, cursor: 'cursorAnotherEdge', }; /** * Returns all valid ids and cursors. */ function getAllMetadata(segment) { return segment.getMetadataAfterCursor(segment.getLength(), null); } describe('GraphQLSegment', () => { var segment; var consoleWarn; beforeEach(() => { segment = new GraphQLSegment(); consoleWarn = console.warn; }); afterEach(() => { console.warn = consoleWarn; }); it('should add after', () => { // Initial add segment.addEdgesAfterCursor(edges, null); var metadata = getAllMetadata(segment); expect(metadata.edgeIDs).toEqual(['edge1', 'edge2', 'edge3']); expect(metadata.cursors).toEqual(['cursor1', 'cursor2', 'cursor3']); // Add more segment.addEdgesAfterCursor(moreEdges, 'cursor3'); metadata = getAllMetadata(segment); expect(metadata.edgeIDs).toEqual( ['edge1', 'edge2', 'edge3', 'edge4', 'edge5', 'edge6'] ); expect(metadata.cursors).toEqual( ['cursor1', 'cursor2', 'cursor3', 'cursor4', 'cursor5', 'cursor6'] ); }); it('should add before', () => { // Initial add segment.addEdgesBeforeCursor(lastEdges, null);<|fim▁hole|> expect(metadata.edgeIDs).toEqual(['edge98', 'edge99', 'edge100']); expect(metadata.cursors).toEqual(['cursor98', 'cursor99', 'cursor100']); // Add more segment.addEdgesBeforeCursor(beforeLastEdges, 'cursor98'); metadata = getAllMetadata(segment); expect(metadata.edgeIDs).toEqual( ['edge95', 'edge96', 'edge97', 'edge98', 'edge99', 'edge100'] ); expect(metadata.cursors).toEqual( ['cursor95', 'cursor96', 'cursor97', 'cursor98', 'cursor99', 'cursor100'] ); }); it('should handle repeated edges', () => { console.warn = jest.genMockFunction(); var repeatedEdges = edges.concat(edges.slice(0, 1)); // Attempting to add edges 1 2 3 1. 
segment.addEdgesAfterCursor(repeatedEdges, null); expect(console.warn.mock.calls.length).toBe(1); expect(console.warn).toBeCalledWith( 'Attempted to add an ID already in GraphQLSegment: %s', 'edge1' ); // Should have skipped the repeated ones. var metadata = getAllMetadata(segment); expect(metadata.edgeIDs).toEqual(['edge1', 'edge2', 'edge3']); expect(metadata.cursors).toEqual(['cursor1', 'cursor2', 'cursor3']); }); it('should prepend', () => { // Prepend on new segment segment.prependEdge(oneEdge); var metadata = getAllMetadata(segment); expect(metadata.edgeIDs).toEqual(['edgeOneEdge']); expect(metadata.cursors).toEqual(['cursorOneEdge']); // Prepend on segment that already has item segment.prependEdge(anotherEdge); metadata = getAllMetadata(segment); expect(metadata.edgeIDs).toEqual(['edgeAnotherEdge', 'edgeOneEdge']); expect(metadata.cursors).toEqual(['cursorAnotherEdge', 'cursorOneEdge']); }); it('should append', () => { // Append on new segment segment.appendEdge(oneEdge); var metadata = getAllMetadata(segment); expect(metadata.edgeIDs).toEqual(['edgeOneEdge']); expect(metadata.cursors).toEqual(['cursorOneEdge']); // Append on segment that already has item segment.appendEdge(anotherEdge); metadata = getAllMetadata(segment); expect(metadata.edgeIDs).toEqual(['edgeOneEdge', 'edgeAnotherEdge']); expect(metadata.cursors).toEqual(['cursorOneEdge', 'cursorAnotherEdge']); }); it('should retrieve metadata correctly', () => { var before = segment.getMetadataBeforeCursor( segment.getLength(), null ); var after = segment.getMetadataAfterCursor( segment.getLength(), null ); expect(before.edgeIDs).toEqual([]); expect(before.edgeIDs).toEqual(after.edgeIDs); expect(before.cursors).toEqual([]); expect(before.cursors).toEqual(after.cursors); segment.addEdgesAfterCursor(edges, null); before = segment.getMetadataBeforeCursor( segment.getLength(), null ); after = segment.getMetadataAfterCursor( segment.getLength(), null ); expect(before.edgeIDs).toEqual(['edge1', 'edge2', 'edge3']); expect(before.edgeIDs).toEqual(after.edgeIDs); expect(before.cursors).toEqual(['cursor1', 'cursor2', 'cursor3']); expect(before.cursors).toEqual(after.cursors); }); it('should remove', () => { segment.addEdgesAfterCursor(edges, null); // Remove the middle edge segment.removeEdge('edge2'); var metadata = getAllMetadata(segment); expect(metadata.edgeIDs).toEqual(['edge1', 'edge3']); expect(metadata.cursors).toEqual(['cursor1', 'cursor3']); }); it('should include removed edges in `getLength()` calculation', () => { expect(segment.getCount()).toBe(0); segment.addEdgesAfterCursor(edges, null); expect(segment.getLength()).toBe(3); segment.removeEdge('edge2'); expect(segment.getLength()).toBe(3); }); it('should exclude removed edges from `getCount()` calculation', () => { // with addEdgesAfterCursor expect(segment.getCount()).toBe(0); segment.addEdgesAfterCursor(edges, null); expect(segment.getCount()).toBe(3); segment.removeEdge('edge2'); expect(segment.getCount()).toBe(2); // with concatSegment var otherSegment = new GraphQLSegment(); otherSegment.addEdgesAfterCursor(edges.slice(0, 2), null); expect(otherSegment.getCount()).toBe(2); otherSegment.removeEdge('edge2'); expect(otherSegment.getCount()).toBe(1); segment.removeEdge('edge1'); otherSegment.concatSegment(segment, null); expect(otherSegment.getCount()).toBe(2); }); it('rolls back failed concatSegment operations', () => { console.warn = jest.genMockFunction(); segment.addEdgesAfterCursor(edges.slice(0, 2), null); expect(segment.getCount()).toBe(2); 
expect(segment.getLength()).toBe(2); var otherSegment = new GraphQLSegment(); otherSegment.addEdgesAfterCursor(edges.slice(1, 2), null); var concatResult = segment.concatSegment(otherSegment); expect(concatResult).toBe(false); expect(console.warn).toBeCalledWith( 'Attempt to concat an ID already in GraphQLSegment: %s', 'edge2' ); expect(segment.getCount()).toBe(2); expect(segment.getLength()).toBe(2); }); it('rolls back bumped edges from failed concatSegment operations', () => { console.warn = jest.genMockFunction(); segment.addEdgesAfterCursor(edges.slice(0, 2), null); expect(segment.__debug().idToIndices.edge2.length).toBe(1); var otherSegment = new GraphQLSegment(); var edge2 = edges.slice(1, 2); otherSegment.addEdgesAfterCursor(edge2, null); // bumping the edge otherSegment.removeEdge('edge2', 1001); otherSegment.addEdgesAfterCursor(edge2, null, 1001); var concatResult = segment.concatSegment(otherSegment); expect(concatResult).toBe(false); expect(console.warn).toBeCalledWith( 'Attempt to concat an ID already in GraphQLSegment: %s', 'edge2' ); // Make sure it rolled back the deleted edge from indices map expect(segment.__debug().idToIndices.edge2.length).toBe(1); }); it('should check for valid id in segment', () => { segment.addEdgesAfterCursor(edges, null); // Remove the middle edge segment.removeEdge('edge2'); // Never added expect(segment.containsEdgeWithID('edge0')).toBeFalsy(); // Added expect(segment.containsEdgeWithID('edge1')).toBeTruthy(); // Deleted expect(segment.containsEdgeWithID('edge2')).toBeFalsy(); }); it('should check for valid cursor in segment', () => { segment.addEdgesAfterCursor(edges, null); // Remove the middle edge segment.removeEdge('edge2'); // Never added expect(segment.containsEdgeWithCursor('cursor0')).toBeFalsy(); // Added expect(segment.containsEdgeWithCursor('cursor1')).toBeTruthy(); // Deleted expect(segment.containsEdgeWithCursor('cursor2')).toBeFalsy(); }); it('should get first and last cursor in segment', () => { // Returns undefined for empty segment expect(segment.getFirstCursor()).toBeUndefined(); expect(segment.getLastCursor()).toBeUndefined(); // Returns property for basic edges segment.addEdgesAfterCursor(edges, null); expect(segment.getFirstCursor()).toEqual('cursor1'); expect(segment.getLastCursor()).toEqual('cursor3'); // Skips over deleted edges segment.removeEdge('edge1'); segment.removeEdge('edge3'); expect(segment.getFirstCursor()).toEqual('cursor2'); expect(segment.getLastCursor()).toEqual('cursor2'); // Returns undefined when all edges are deleted segment.removeEdge('edge2'); expect(segment.getFirstCursor()).toBeUndefined(); expect(segment.getLastCursor()).toBeUndefined(); // Appends and prepends new edges segment.prependEdge(oneEdge); segment.appendEdge(anotherEdge); expect(segment.getFirstCursor()).toEqual('cursorOneEdge'); expect(segment.getLastCursor()).toEqual('cursorAnotherEdge'); // Returns null for null cursors segment = new GraphQLSegment(); segment.addEdgesAfterCursor( [{__dataID__: 'edgeid', cursor: null, node: {__dataID__: 'id'}}], null ); expect(segment.getFirstCursor()).toBeNull(); expect(segment.getLastCursor()).toBeNull(); }); it('should get first and last id in segment', () => { // Returns undefined for empty segment expect(segment.getFirstID()).toBeUndefined(); expect(segment.getLastID()).toBeUndefined(); // Returns property for basic edges segment.addEdgesAfterCursor(edges, null); expect(segment.getFirstID()).toEqual('edge1'); expect(segment.getLastID()).toEqual('edge3'); // Skips over deleted edges 
segment.removeEdge('edge1'); segment.removeEdge('edge3'); expect(segment.getFirstID()).toEqual('edge2'); expect(segment.getLastID()).toEqual('edge2'); // Returns undefined when all edges are deleted segment.removeEdge('edge2'); expect(segment.getFirstID()).toBeUndefined(); expect(segment.getLastID()).toBeUndefined(); // Appends and prepends new edges segment.prependEdge(oneEdge); segment.appendEdge(anotherEdge); expect(segment.getFirstID()).toEqual('edgeOneEdge'); expect(segment.getLastID()).toEqual('edgeAnotherEdge'); }); it('should concat segments', () => { segment.addEdgesAfterCursor(edges, null); var segment2 = new GraphQLSegment(); segment2.addEdgesAfterCursor(moreEdges, null); var concatenated = segment.concatSegment(segment2); expect(concatenated).toBe(true); var metadata = getAllMetadata(segment); expect(metadata.edgeIDs).toEqual( ['edge1', 'edge2', 'edge3', 'edge4', 'edge5', 'edge6'] ); expect(metadata.cursors).toEqual( ['cursor1', 'cursor2', 'cursor3', 'cursor4', 'cursor5', 'cursor6'] ); }); it('should concat with empty segments', () => { var segment2 = new GraphQLSegment(); segment2.addEdgesAfterCursor(edges, null); // Concatenating from an empty segment var concatenated = segment.concatSegment(segment2); expect(concatenated).toBe(true); var metadata = getAllMetadata(segment); expect(metadata.edgeIDs).toEqual(['edge1', 'edge2', 'edge3']); expect(metadata.cursors).toEqual(['cursor1', 'cursor2', 'cursor3']); var segment3 = new GraphQLSegment(); // Concatenating empty segment concatenated = segment.concatSegment(segment3); expect(concatenated).toBe(true); metadata = getAllMetadata(segment); expect(metadata.edgeIDs).toEqual(['edge1', 'edge2', 'edge3']); expect(metadata.cursors).toEqual(['cursor1', 'cursor2', 'cursor3']); }); it('should concat with deleted edges', () => { // Makes sure we update cursor and id to index map correctly // based on removal time. 
var edges345 = [ { __dataID__: 'edge3', node: {__dataID__: 'id3'}, cursor: 'cursor3', }, { __dataID__: 'edge4', node: {__dataID__: 'id4'}, cursor: 'cursor4', }, { __dataID__: 'edge5', node: {__dataID__: 'id5'}, cursor: 'cursor5', }, ]; // deleted edge in the original segment segment.addEdgesAfterCursor(edges, null); segment.removeEdge('edge3'); var segment2 = new GraphQLSegment(); segment2.addEdgesAfterCursor(edges345, null); var concatenated = segment.concatSegment(segment2); expect(concatenated).toBe(true); var metadata = getAllMetadata(segment); expect(metadata.edgeIDs).toEqual( ['edge1', 'edge2', 'edge3', 'edge4', 'edge5'] ); expect(metadata.cursors).toEqual( ['cursor1', 'cursor2', 'cursor3', 'cursor4', 'cursor5'] ); expect(segment.containsEdgeWithID('edge3')).toBe(true); expect(segment.containsEdgeWithCursor('cursor3')).toBe(true); // deleted edge in the input segment segment = new GraphQLSegment(); segment.addEdgesAfterCursor(edges, null); segment2 = new GraphQLSegment(); segment2.addEdgesAfterCursor(edges345, null); segment2.removeEdge('edge3'); concatenated = segment.concatSegment(segment2); expect(concatenated).toBe(true); metadata = getAllMetadata(segment); expect(metadata.edgeIDs).toEqual( ['edge1', 'edge2', 'edge3', 'edge4', 'edge5'] ); expect(metadata.cursors).toEqual( ['cursor1', 'cursor2', 'cursor3', 'cursor4', 'cursor5'] ); expect(segment.containsEdgeWithID('edge3')).toBe(true); expect(segment.containsEdgeWithCursor('cursor3')).toBe(true); }); it('should toJSON', () => { segment.addEdgesAfterCursor(edges, null); var actual = JSON.stringify(segment); expect(actual).toEqual('[{"0":{"edgeID":"edge1","cursor":"cursor1",' + '"deleted":false},"1":{"edgeID":"edge2","cursor":"cursor2",' + '"deleted":false},"2":{"edgeID":"edge3","cursor":"cursor3","deleted"' + ':false}},{"edge1":[0],"edge2":[1],"edge3":[2]},{"cursor1":0,' + '"cursor2":1,"cursor3":2},0,2,3]' ); segment = GraphQLSegment.fromJSON(JSON.parse(actual)); var metadata = getAllMetadata(segment); expect(metadata.edgeIDs).toEqual(['edge1', 'edge2', 'edge3']); expect(metadata.cursors).toEqual(['cursor1', 'cursor2', 'cursor3']); }); });<|fim▁end|>
var metadata = getAllMetadata(segment);
<|file_name|>avpr2rest.py<|end_file_name|><|fim▁begin|>import sys import json import os import re import argparse def get_file_locations(): parser = argparse.ArgumentParser() parser.add_argument('input', help='Input AVPR filename(s)', nargs='+') parser.add_argument('output', help='Output directory') args = parser.parse_args() return (args.input, args.output) def typename(typeobject): if isinstance(typeobject, list): union_names = [typename(item) for item in typeobject] return '|'.join(union_names) elif isinstance(typeobject, dict): if typeobject['type'] == 'array': return 'array<%s>' % typename(typeobject['items']) elif typeobject['type'] == 'map': return 'map<%s>' % typename(typeobject['values']) elif isinstance(typeobject, basestring): return typeobject raise ValueError def cleanup_doc(doc,indent=0): return '\n'.join([' '*indent + line for line in doc.split('\n')]) if __name__ == '__main__': avpr_filenames, rest_directory = get_file_locations() for avpr_filename in avpr_filenames: base_filename = os.path.basename(avpr_filename) name = os.path.splitext(base_filename)[0] rest_filename = os.path.join(rest_directory, name+'.rst') with open(avpr_filename,'r') as f: data = json.load(f) output = data['protocol'] + '\n' output += '*' * len(data['protocol']) + '\n\n' if 'doc' in data: output += cleanup_doc(data['doc']) + '\n\n' for message_name in data['messages']: message_def = data['messages'][message_name] doc = message_def['doc'] # process formal parameters ('request') request = message_def['request'] # collect the names param_names = [] for param in request: param_names.append(param['name']) response = message_def['response'] errors = message_def['errors'] output += " .. function:: %s(%s)\n\n" % (message_name, ', '.join(param_names)) for param in request: output += " :param %s: %s: %s\n" % (param['name'], param['type'], param['doc']) output += " :return type: %s\n" % response output += " :throws: %s\n\n" % ', '.join(errors) output += cleanup_doc(doc) output += "\n\n" for item in data['types']: output += '.. 
avro:%s:: %s\n\n' % (item['type'], item['name']) if item['type'] == 'record': for field in item['fields']: output += ' :field %s:\n' % field['name'] if 'doc' in field: output += cleanup_doc(field['doc'],indent=4) + '\n' output += ' :type %s: %s\n' % (field['name'], typename(field['type'])) output += '\n' if item['type'] == 'enum': output += ' :symbols: %s\n' % '|'.join(item['symbols']) if item['type'] == 'fixed': output += ' :size: %s\n' % item['size'] if 'doc' in item: output += cleanup_doc(item['doc'],indent=2) + '\n\n' with open(rest_filename,'w') as f: f.write(output) def get_file_locations(): parser = argparse.ArgumentParser() parser.add_argument('input', help='Input AVPR filename(s)', nargs='+') parser.add_argument('output', help='Output directory') args = parser.parse_args() return (args.input, args.output) def typename(typeobject): if isinstance(typeobject, list): union_names = [typename(item) for item in typeobject] return '|'.join(union_names) elif isinstance(typeobject, dict): if typeobject['type'] == 'array': return 'array<%s>' % typename(typeobject['items']) elif typeobject['type'] == 'map': return 'map<%s>' % typename(typeobject['values']) elif isinstance(typeobject, basestring): return typeobject raise ValueError if __name__ == '__main__': avpr_filenames, rest_directory = get_file_locations() for avpr_filename in avpr_filenames: base_filename = os.path.basename(avpr_filename) name = os.path.splitext(base_filename)[0] rest_filename = os.path.join(rest_directory, name+'.rst') with open(avpr_filename,'r') as f: data = json.load(f) output = data['protocol'] + '\n' output += '*' * len(data['protocol']) + '\n\n' if 'doc' in data: output += cleanup_doc(data['doc']) + '\n\n'<|fim▁hole|> doc = message_def['doc'] # process formal parameters ('request') request = message_def['request'] # collect the names param_names = [] for param in request: param_names.append(param['name']) response = message_def['response'] errors = message_def['errors'] output += " .. function:: %s(%s)\n\n" % (message_name, ', '.join(param_names)) for param in request: output += " :param %s: %s: %s\n" % (param['name'], param['type'], param['doc']) output += " :return type: %s\n" % response output += " :throws: %s\n\n" % ', '.join(errors) output += cleanup_doc(doc) output += "\n\n" for item in data['types']: output += '.. avro:%s:: %s\n\n' % (item['type'], item['name']) if item['type'] == 'record': for field in item['fields']: output += ' :field %s:\n' % field['name'] if 'doc' in field: output += cleanup_doc(field['doc'],indent=4) + '\n' output += ' :type %s: %s\n' % (field['name'], typename(field['type'])) output += '\n' if item['type'] == 'enum': output += ' :symbols: %s\n' % '|'.join(item['symbols']) if item['type'] == 'fixed': output += ' :size: %s\n' % item['size'] if 'doc' in item: output += cleanup_doc(item['doc'],indent=2) + '\n\n' with open(rest_filename,'w') as f: f.write(output)<|fim▁end|>
for message_name in data['messages']: message_def = data['messages'][message_name]
<|file_name|>fs-client.ts<|end_file_name|><|fim▁begin|>import fs from 'fs'; import mkdirp from 'mkdirp'; import path from 'path'; type FSWraps = | typeof fs.promises.readFile | typeof fs.promises.writeFile | typeof fs.promises.unlink | typeof fs.promises.readdir | typeof fs.promises.mkdir | typeof fs.promises.rmdir | typeof fs.promises.stat | typeof fs.promises.lstat | typeof fs.promises.readlink | typeof fs.promises.symlink; /** This is a client for isomorphic-git. {@link https://isomorphic-git.org/docs/en/fs} */ export const fsClient = (basePath: string) => { console.log(`[fsClient] Created in ${basePath}`); mkdirp.sync(basePath); const wrap = (fn: FSWraps) => async (filePath: string, ...args: any[]) => { const modifiedPath = path.join(basePath, path.normalize(filePath)); // @ts-expect-error -- TSCONVERSION return fn(modifiedPath, ...args); }; return { promises: { readFile: wrap(fs.promises.readFile),<|fim▁hole|> unlink: wrap(fs.promises.unlink), readdir: wrap(fs.promises.readdir), mkdir: wrap(fs.promises.mkdir), rmdir: wrap(fs.promises.rmdir), stat: wrap(fs.promises.stat), lstat: wrap(fs.promises.lstat), readlink: wrap(fs.promises.readlink), symlink: wrap(fs.promises.symlink), }, }; };<|fim▁end|>
writeFile: wrap(fs.promises.writeFile),
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// +--------------------------------------------------------------------------+ // | Copyright 2016 Matthew D. Steele <[email protected]> | // | | // | This file is part of System Syzygy. | // | | // | System Syzygy is free software: you can redistribute it and/or modify it | // | under the terms of the GNU General Public License as published by the | // | Free Software Foundation, either version 3 of the License, or (at your | // | option) any later version. | // | | // | System Syzygy is distributed in the hope that it will be useful, but | // | WITHOUT ANY WARRANTY; without even the implied warranty of | // | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | // | General Public License for details. | // | | // | You should have received a copy of the GNU General Public License along | // | with System Syzygy. If not, see <http://www.gnu.org/licenses/>. | // +--------------------------------------------------------------------------+ mod scenes; mod view; use self::view::View; use crate::gui::Window; use crate::modes::{run_puzzle, Mode};<|fim▁hole|>use crate::save::SaveData; // ========================================================================= // pub fn run_column_as_icy_em( window: &mut Window, save_data: &mut SaveData, ) -> Mode { let view = { let visible_rect = window.visible_rect(); View::new( &mut window.resources(), visible_rect, &save_data.game_mut().column_as_icy_em, ) }; run_puzzle(window, save_data, view) } // ========================================================================= //<|fim▁end|>
<|file_name|>client_test.go<|end_file_name|><|fim▁begin|>package wsclient import ( "github.com/cosminrentea/gobbler/testutil" "fmt" "strings" "testing" "time" "github.com/gorilla/websocket" "github.com/stretchr/testify/assert" ) var aNormalMessage = `/foo/bar,42,user01,phone01,{},,1420110000,0 Hello World` var aSendNotification = "#send" var anErrorNotification = "!error-send" func MockConnectionFactory(connectionMock *MockWSConnection) func(string, string) (WSConnection, error) { return func(url string, origin string) (WSConnection, error) { return connectionMock, nil } } func TestConnectErrorWithoutReconnection(t *testing.T) { a := assert.New(t) // given a client c := New("url", "origin", 1, false) // which raises an error on connect callCounter := 0 c.SetWSConnectionFactory(func(url string, origin string) (WSConnection, error) { a.Equal("url", url) a.Equal("origin", origin) callCounter++ return nil, fmt.Errorf("emulate connection error") }) // when we start err := c.Start() // then a.Error(err) a.Equal(1, callCounter) } func TestConnectErrorWithoutReconnectionUsingOpen(t *testing.T) { a := assert.New(t) c, err := Open("url", "origin", 1, false) // which raises an error on connect callCounter := 0 c.SetWSConnectionFactory(func(url string, origin string) (WSConnection, error) { a.Equal("url", url) a.Equal("origin", origin) callCounter++ return nil, fmt.Errorf("emulate connection error") }) a.Error(err) } func TestConnectErrorWithReconnection(t *testing.T) { ctrl, finish := testutil.NewMockCtrl(t) defer finish() a := assert.New(t) // given a client c := New("url", "origin", 1, true) // which raises an error twice and then allows to connect callCounter := 0 connMock := NewMockWSConnection(ctrl) connMock.EXPECT().ReadMessage().Do(func() { time.Sleep(time.Second) }) c.SetWSConnectionFactory(func(url string, origin string) (WSConnection, error) { a.Equal("url", url) a.Equal("origin", origin) if callCounter <= 2 { callCounter++ return nil, fmt.Errorf("emulate connection error") } return connMock, nil }) // when we start err := c.Start() // then we first get an error a.Error(err) a.False(c.IsConnected()) // when we wait for two iterations and 10ms buffer time to connect time.Sleep(time.Millisecond * 110) // then we are connected a.True(c.IsConnected()) a.Equal(3, callCounter) } func TestStopableClient(t *testing.T) { ctrl, finish := testutil.NewMockCtrl(t) defer finish() a := assert.New(t) // given a client c := New("url", "origin", 1, true) // with a closeable connection connMock := NewMockWSConnection(ctrl) close := make(chan bool, 1) connMock.EXPECT().ReadMessage(). Do(func() { <-close }). Return(0, []byte{}, fmt.Errorf("expected close error")) connMock.EXPECT().Close().Do(func() { close <- true }) c.SetWSConnectionFactory(MockConnectionFactory(connMock)) // when we start err := c.Start() // then we are connected a.NoError(err) a.True(c.IsConnected()) // when we close c.Close() time.Sleep(time.Millisecond * 1) // then the client returns a.False(c.IsConnected()) } func TestReceiveAMessage(t *testing.T) { ctrl, finish := testutil.NewMockCtrl(t) defer finish() a := assert.New(t) // given a client c := New("url", "origin", 10, false) // with a closeable connection connMock := NewMockWSConnection(ctrl) close := make(chan bool, 1) // normal message call1 := connMock.EXPECT().ReadMessage(). Return(4, []byte(aNormalMessage), nil) call2 := connMock.EXPECT().ReadMessage(). Return(4, []byte(aSendNotification), nil) call3 := connMock.EXPECT().ReadMessage().
Return(4, []byte("---"), nil) call4 := connMock.EXPECT().ReadMessage(). Return(4, []byte(anErrorNotification), nil) call5 := connMock.EXPECT().ReadMessage(). Do(func() { <-close }). Return(0, []byte{}, fmt.Errorf("expected close error")). AnyTimes() call5.After(call4) call4.After(call3) call3.After(call2) call2.After(call1) c.SetWSConnectionFactory(MockConnectionFactory(connMock)) connMock.EXPECT().Close().Do(func() { close <- true }) // when we start err := c.Start() a.NoError(err) a.True(c.IsConnected()) // than we receive the expected message select { case m := <-c.Messages(): a.Equal(aNormalMessage, string(m.Encode())) case <-time.After(time.Millisecond * 10): a.Fail("timeout while waiting for message") } // and we receive the notification select { case m := <-c.StatusMessages(): a.Equal(aSendNotification, string(m.Bytes())) case <-time.After(time.Millisecond * 10): a.Fail("timeout while waiting for message") } // parse error select { case m := <-c.Errors(): a.True(strings.HasPrefix(string(m.Bytes()), "!clientError ")) case <-time.After(time.Millisecond * 10): a.Fail("timeout while waiting for message") } // and we receive the error notification select { case m := <-c.Errors(): a.Equal(anErrorNotification, string(m.Bytes())) case <-time.After(time.Millisecond * 10): a.Fail("timeout while waiting for message") } c.Close() } func TestSendAMessage(t *testing.T) { ctrl, finish := testutil.NewMockCtrl(t) defer finish() // a := assert.New(t) // given a client c := New("url", "origin", 1, true) // when expects a message connMock := NewMockWSConnection(ctrl) connMock.EXPECT().WriteMessage(websocket.BinaryMessage, []byte("> /foo\n{}\nTest")) connMock.EXPECT(). ReadMessage(). Return(websocket.BinaryMessage, []byte(aNormalMessage), nil). Do(func() { time.Sleep(time.Millisecond * 50) }). AnyTimes() c.SetWSConnectionFactory(MockConnectionFactory(connMock)) c.Start() // then the expectation is meet by sending it c.Send("/foo", "Test", "{}") // stop client after 200ms time.AfterFunc(time.Millisecond*200, func() { c.Close() }) } func TestSendSubscribeMessage(t *testing.T) { ctrl, finish := testutil.NewMockCtrl(t) defer finish() // given a client c := New("url", "origin", 1, true) // when expects a message connMock := NewMockWSConnection(ctrl) connMock.EXPECT().WriteMessage(websocket.BinaryMessage, []byte("+ /foo")) connMock.EXPECT(). ReadMessage(). Return(websocket.BinaryMessage, []byte(aNormalMessage), nil). Do(func() { time.Sleep(time.Millisecond * 50) }). AnyTimes() c.SetWSConnectionFactory(MockConnectionFactory(connMock)) c.Start() c.Subscribe("/foo") <|fim▁hole|>} func TestSendUnSubscribeMessage(t *testing.T) { ctrl, finish := testutil.NewMockCtrl(t) defer finish() // given a client c := New("url", "origin", 1, true) // when expects a message connMock := NewMockWSConnection(ctrl) connMock.EXPECT().WriteMessage(websocket.BinaryMessage, []byte("- /foo")) connMock.EXPECT(). ReadMessage(). Return(websocket.BinaryMessage, []byte(aNormalMessage), nil). Do(func() { time.Sleep(time.Millisecond * 50) }). AnyTimes() c.SetWSConnectionFactory(MockConnectionFactory(connMock)) c.Start() c.Unsubscribe("/foo") // stop client after 200ms time.AfterFunc(time.Millisecond*200, func() { c.Close() }) }<|fim▁end|>
// stop client after 200ms time.AfterFunc(time.Millisecond*200, func() { c.Close() })
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict'; var _ = require('underscore'); var express = require('express'); var router = express.Router(); var ObjectId = require('mongoose').Types.ObjectId; var path = require('path'); /** * @apiDefine group Group based access * Resource access controlled by user's groups */ /** * @apiDefine access Authenticated user access only * User should sign in for the request */ /** * @api {get} / Get home page * @apiName GetHomePage<|fim▁hole|> * @apiGroup Home */ router.get('/', (req, res) => res.sendFile(path.join(__dirname, 'public', 'index.html'))); module.exports = router; module.exports.getCollection = function(req, res, model, condition) { let id = req.params.id; let cond = id ? { _id: new ObjectId(id) } : {}; condition = _.extend(cond, condition); model.find(condition, (err, result) => { if (err) { throw err; } res.send(result); }); };<|fim▁end|>
<|file_name|>test_feature_extractor.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|> class TestFeatureExtractor(object): """Tests for feature extractors. """ def test_context_size(self): f = FeatureExtractor() assert f.context_size == (0, 0) f.context_size = (1, 2) assert f.context_size == (1, 2) with pytest.raises(ValueError): f.context_size = (-1, 1) with pytest.raises(ValueError): f.context_size = (1, -1) assert f.context_size == (1, 2)<|fim▁end|>
import pytest from mmb_perceptron.feature_extractor import FeatureExtractor
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url from django.conf.urls import patterns<|fim▁hole|>from pyday_alarms import views app_name = 'pyday_alarms' urlpatterns = [ url(r'^alarms/$', views.AlarmView.as_view(), name='alarms'), ] '''urlpatterns += patterns('pyday_social_network.views', url(r'^list/$', 'list', name='list')) '''<|fim▁end|>
<|file_name|>bad-bang-ann.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Tests that a function with a ! annotation always actually fails fn bad_bang(i: usize) -> ! { //~ ERROR computation may converge in a function marked as diverging if i < 0us { } else { panic!(); } }<|fim▁hole|> fn main() { bad_bang(5us); }<|fim▁end|>
<|file_name|>tower_job_template.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # coding: utf-8 -*- # (c) 2017, Wayne Witzel III <[email protected]> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: tower_job_template author: "Wayne Witzel III (@wwitzel3)" version_added: "2.3" short_description: create, update, or destroy Ansible Tower job template. description: - Create, update, or destroy Ansible Tower job templates. See U(https://www.ansible.com/tower) for an overview. options: name: description: - Name to use for the job template. required: True description: description: - Description to use for the job template. job_type: description: - The job type to use for the job template. required: True choices: ["run", "check", "scan"] inventory: description: - Name of the inventory to use for the job template. project: description: - Name of the project to use for the job template. required: True playbook: description: - Path to the playbook to use for the job template within the project provided. required: True credential: description: - Name of the credential to use for the job template. version_added: 2.7 vault_credential: description: - Name of the vault credential to use for the job template. version_added: 2.7 forks: description: - The number of parallel or simultaneous processes to use while executing the playbook. limit: description: - A host pattern to further constrain the list of hosts managed or affected by the playbook. verbosity: description: - Control the output level Ansible produces as the playbook runs. 0 - Normal, 1 - Verbose, 2 - More Verbose, 3 - Debug, 4 - Connection Debug. choices: [0, 1, 2, 3, 4] default: 0 extra_vars_path: description: - Path to the C(extra_vars) YAML file. job_tags: description: - Comma separated list of the tags to use for the job template. force_handlers_enabled: description: - Enable forcing playbook handlers to run even if a task fails. version_added: 2.7 type: bool default: 'no' skip_tags: description: - Comma separated list of the tags to skip for the job template. start_at_task: description: - Start the playbook at the task matching this name. version_added: 2.7 fact_caching_enabled: description: - Enable use of fact caching for the job template. version_added: 2.7 type: bool default: 'no' host_config_key: description: - Allow provisioning callbacks using this host config key. ask_diff_mode: description: - Prompt user to enable diff mode (show changes) to files when supported by modules. version_added: 2.7 type: bool default: 'no' ask_extra_vars: description: - Prompt user for (extra_vars) on launch. type: bool default: 'no' ask_limit: description: - Prompt user for a limit on launch. version_added: 2.7 type: bool default: 'no' ask_tags: description: - Prompt user for job tags on launch. type: bool default: 'no' ask_skip_tags: description: - Prompt user for job tags to skip on launch. version_added: 2.7 type: bool default: 'no' ask_job_type: description: - Prompt user for job type on launch. type: bool default: 'no' ask_verbosity: description: - Prompt user to choose a verbosity level on launch. version_added: 2.7 type: bool default: 'no' ask_inventory: description:<|fim▁hole|> - Prompt user for inventory on launch.
type: bool default: 'no' ask_credential: description: - Prompt user for credential on launch. type: bool default: 'no' survey_enabled: description: - Enable a survey on the job template. version_added: 2.7 type: bool default: 'no' survey_spec: description: - JSON/YAML dict formatted survey definition. version_added: 2.8 type: dict required: False become_enabled: description: - Activate privilege escalation. type: bool default: 'no' concurrent_jobs_enabled: description: - Allow simultaneous runs of the job template. version_added: 2.7 type: bool default: 'no' state: description: - Desired state of the resource. default: "present" choices: ["present", "absent"] extends_documentation_fragment: tower notes: - JSON for survey_spec can be found in Tower API Documentation. See U(https://docs.ansible.com/ansible-tower/latest/html/towerapi/api_ref.html#/Job_Templates/Job_Templates_job_templates_survey_spec_create) for POST operation payload example. ''' EXAMPLES = ''' - name: Create tower Ping job template tower_job_template: name: "Ping" job_type: "run" inventory: "Local" project: "Demo" playbook: "ping.yml" credential: "Local" state: "present" tower_config_file: "~/tower_cli.cfg" survey_enabled: yes survey_spec: "{{ lookup('file', 'my_survey.json') }}" ''' from ansible.module_utils.ansible_tower import TowerModule, tower_auth_config, tower_check_mode try: import tower_cli import tower_cli.exceptions as exc from tower_cli.conf import settings except ImportError: pass def update_fields(p): '''This updates the module field names to match the field names tower-cli expects to make calling of the modify/delete methods easier. ''' params = p.copy() field_map = { 'fact_caching_enabled': 'use_fact_cache', 'ask_diff_mode': 'ask_diff_mode_on_launch', 'ask_extra_vars': 'ask_variables_on_launch', 'ask_limit': 'ask_limit_on_launch', 'ask_tags': 'ask_tags_on_launch', 'ask_skip_tags': 'ask_skip_tags_on_launch', 'ask_verbosity': 'ask_verbosity_on_launch', 'ask_inventory': 'ask_inventory_on_launch', 'ask_credential': 'ask_credential_on_launch', 'ask_job_type': 'ask_job_type_on_launch', 'diff_mode_enabled': 'diff_mode', 'concurrent_jobs_enabled': 'allow_simultaneous', 'force_handlers_enabled': 'force_handlers', } params_update = {} for old_k, new_k in field_map.items(): v = params.pop(old_k) params_update[new_k] = v extra_vars = params.get('extra_vars_path') if extra_vars is not None: params_update['extra_vars'] = ['@' + extra_vars] params.update(params_update) return params def update_resources(module, p): params = p.copy() identity_map = { 'project': 'name', 'inventory': 'name', 'credential': 'name', 'vault_credential': 'name', } for k, v in identity_map.items(): try: if params[k]: key = 'credential' if '_credential' in k else k result = tower_cli.get_resource(key).get(**{v: params[k]}) params[k] = result['id'] elif k in params: # unset empty parameters to avoid ValueError: invalid literal for int() with base 10: '' del(params[k]) except (exc.NotFound) as excinfo: module.fail_json(msg='Failed to update job template: {0}'.format(excinfo), changed=False) return params def main(): argument_spec = dict( name=dict(required=True), description=dict(default=''), job_type=dict(choices=['run', 'check', 'scan'], required=True), inventory=dict(default=''), project=dict(required=True), playbook=dict(required=True), credential=dict(default=''), vault_credential=dict(default=''), forks=dict(type='int'), limit=dict(default=''), verbosity=dict(type='int', choices=[0, 1, 2, 3, 4], default=0), extra_vars_path=dict(type='path', 
required=False), job_tags=dict(default=''), force_handlers_enabled=dict(type='bool', default=False), skip_tags=dict(default=''), start_at_task=dict(default=''), timeout=dict(type='int', default=0), fact_caching_enabled=dict(type='bool', default=False), host_config_key=dict(default=''), ask_diff_mode=dict(type='bool', default=False), ask_extra_vars=dict(type='bool', default=False), ask_limit=dict(type='bool', default=False), ask_tags=dict(type='bool', default=False), ask_skip_tags=dict(type='bool', default=False), ask_job_type=dict(type='bool', default=False), ask_verbosity=dict(type='bool', default=False), ask_inventory=dict(type='bool', default=False), ask_credential=dict(type='bool', default=False), survey_enabled=dict(type='bool', default=False), survey_spec=dict(type='dict', required=False), become_enabled=dict(type='bool', default=False), diff_mode_enabled=dict(type='bool', default=False), concurrent_jobs_enabled=dict(type='bool', default=False), state=dict(choices=['present', 'absent'], default='present'), ) module = TowerModule(argument_spec=argument_spec, supports_check_mode=True) name = module.params.get('name') state = module.params.pop('state') json_output = {'job_template': name, 'state': state} tower_auth = tower_auth_config(module) with settings.runtime_values(**tower_auth): tower_check_mode(module) jt = tower_cli.get_resource('job_template') params = update_resources(module, module.params) params = update_fields(params) params['create_on_missing'] = True try: if state == 'present': result = jt.modify(**params) json_output['id'] = result['id'] elif state == 'absent': result = jt.delete(**params) except (exc.ConnectionError, exc.BadRequest, exc.NotFound) as excinfo: module.fail_json(msg='Failed to update job template: {0}'.format(excinfo), changed=False) json_output['changed'] = result['changed'] module.exit_json(**json_output) if __name__ == '__main__': main()<|fim▁end|>
<|file_name|>metadata.go<|end_file_name|><|fim▁begin|>// Copyright 2016 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Package metadata provides a struct for storing browser metadata. package metadata import ( "encoding/json"<|fim▁hole|> "github.com/bazelbuild/rules_webtesting/go/httphelper" "github.com/bazelbuild/rules_webtesting/go/metadata/capabilities" ) // Values for Metadata.RecordVideo. const ( RecordNever = "never" RecordFailed = "failed" RecordAlways = "always" ) // Metadata provides necessary metadata for launching a browser. type Metadata struct { // The Capabilities that should be used for this browser. Capabilities map[string]interface{} `json:"capabilities,omitempty"` // The Environment that web test launcher should use to to launch the browser. Environment string `json:"environment,omitempty"` // Label for the web_test rule. Label string `json:"label,omitempty"` // Browser label set in the web_test rule. BrowserLabel string `json:"browserLabel,omitempty"` // Test label set in the web_test rule. TestLabel string `json:"testLabel,omitempty"` // Config label set in the web_test rule. ConfigLabel string `json:"configLabel,omitempty"` // Port to connect debugger to. If 0, debugger will not be started. DebuggerPort int `json:"debuggerPort,omitempty"` // A list of WebTestFiles with named files in them. WebTestFiles []*WebTestFiles `json:"webTestFiles,omitempty"` // An object for any additional metadata fields on this object. Extension `json:"extension,omitempty"` } // Extension is an interface for adding additional fields that will be parsed as part of the metadata. type Extension interface { // Merge merges this extension data with another set of Extension data. It should not mutate either // Extension object, but it is allowed to return one of the Extension objects unchanged if needed. // In general values in other should take precedence over values in this object. Merge(other Extension) (Extension, error) // Normalize normalizes and validate the extension data. Normalize() error } // Merge takes two Metadata objects and merges them into a new Metadata object. func Merge(m1, m2 *Metadata) (*Metadata, error) { capabilities := capabilities.Merge(m1.Capabilities, m2.Capabilities) environment := m1.Environment if m2.Environment != "" { environment = m2.Environment } label := m1.Label if m2.Label != "" { label = m2.Label } browserLabel := m1.BrowserLabel if m2.BrowserLabel != "" { browserLabel = m2.BrowserLabel } testLabel := m1.TestLabel if m2.TestLabel != "" { testLabel = m2.TestLabel } configLabel := m1.ConfigLabel if m2.ConfigLabel != "" { configLabel = m2.ConfigLabel } debuggerPort := m1.DebuggerPort if m2.DebuggerPort != 0 { debuggerPort = m2.DebuggerPort } var webTestFiles []*WebTestFiles webTestFiles = append(webTestFiles, m1.WebTestFiles...) webTestFiles = append(webTestFiles, m2.WebTestFiles...) 
webTestFiles, err := normalizeWebTestFiles(webTestFiles) if err != nil { return nil, err } extension := m1.Extension if extension == nil { extension = m2.Extension } else if m2.Extension != nil { e, err := extension.Merge(m2.Extension) if err != nil { return nil, err } extension = e } return &Metadata{ Capabilities: capabilities, Environment: environment, Label: label, BrowserLabel: browserLabel, TestLabel: testLabel, ConfigLabel: configLabel, DebuggerPort: debuggerPort, WebTestFiles: webTestFiles, Extension: extension, }, nil } // FromFile reads a Metadata object from a json file. func FromFile(filename string, ext Extension) (*Metadata, error) { bytes, err := ioutil.ReadFile(filename) if err != nil { return nil, err } return FromBytes(bytes, ext) } // FromBytes reads a Metadata object from a byte array. func FromBytes(bytes []byte, ext Extension) (*Metadata, error) { if ext == nil { ext = &extension{} } metadata := &Metadata{Extension: ext} if err := json.Unmarshal(bytes, metadata); err != nil { return nil, err } webTestFiles, err := normalizeWebTestFiles(metadata.WebTestFiles) if err != nil { return nil, err } metadata.WebTestFiles = webTestFiles if metadata.Extension != nil { if err := metadata.Extension.Normalize(); err != nil { return nil, err } } return metadata, nil } // ToFile writes m to filename as json. func (m *Metadata) ToFile(filename string) error { bytes, err := m.ToBytes() if err != nil { return err } return ioutil.WriteFile(filename, bytes, 0644) } // ToBytes serializes metadata. func (m *Metadata) ToBytes() ([]byte, error) { return json.MarshalIndent(m, "", " ") } // GetFilePath returns the path to a file specified by platform_archive, // web_test_named_executable, or web_test_named_file. func (m *Metadata) GetFilePath(name string) (string, error) { for _, a := range m.WebTestFiles { filename, err := a.getFilePath(name, m) if err != nil { return "", err } if filename != "" { return filename, nil } } return "", fmt.Errorf("no named file %q", name) } // Resolver returns a Resolver that processes ENV, FILE, and METADATA prefixed // capabilities variables. func (m *Metadata) Resolver() capabilities.Resolver { metadataResolver := capabilities.MapResolver("METADATA", map[string]string{ "LABEL": m.Label, "TEST_LABEL": m.TestLabel, "BROWSER_LABEL": m.BrowserLabel, "CONFIG_LABEL": m.ConfigLabel, "ENVIRONMENT": m.Environment, }) return func(prefix, name string) (string, error) { switch prefix { case "ENV": v, ok := os.LookupEnv(name) if !ok { return "", fmt.Errorf("environment variable %q is not defined", name) } return v, nil case "FILE": return m.GetFilePath(name) case "WTL": switch name { case "FQDN": return httphelper.FQDN() default: return "", fmt.Errorf("WTL:%q is not defined", name) } default: return metadataResolver(prefix, name) } } } type extension map[string]interface{} func (e extension) Merge(other Extension) (Extension, error) { if other == nil { return e, nil } if e == nil || len(e) == 0 { return other, nil } o, ok := other.(extension) if !ok || len(o) == 0 { return e, nil } ext := extension{} for k, v := range e { ext[k] = v } for k, v := range o { ext[k] = v } return ext, nil } func (e extension) Normalize() error { return nil } // ExtensionMap returns the Extension field as a map if, when the metadata was read, // no extension type was specified. 
func (m *Metadata) ExtensionMap() (map[string]interface{}, bool) { if m.Extension == nil { return nil, false } ext, ok := m.Extension.(*extension) if !ok { return nil, false } return map[string]interface{}(*ext), true }<|fim▁end|>
"fmt" "io/ioutil" "os"
<|file_name|>triangle.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 Brendan Zabarauskas and the gl-rs developers // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. extern crate gl; extern crate glutin; use gl::types::*; use std::ffi::CString; use std::mem; use std::ptr; use std::str; // Vertex data static VERTEX_DATA: [GLfloat; 6] = [0.0, 0.5, 0.5, -0.5, -0.5, -0.5]; // Shader sources static VS_SRC: &'static str = " #version 150 in vec2 position; void main() { gl_Position = vec4(position, 0.0, 1.0); }"; static FS_SRC: &'static str = " #version 150 out vec4 out_color;<|fim▁hole|> void main() { out_color = vec4(1.0, 1.0, 1.0, 1.0); }"; fn compile_shader(src: &str, ty: GLenum) -> GLuint { let shader; unsafe { shader = gl::CreateShader(ty); // Attempt to compile the shader let c_str = CString::new(src.as_bytes()).unwrap(); gl::ShaderSource(shader, 1, &c_str.as_ptr(), ptr::null()); gl::CompileShader(shader); // Get the compile status let mut status = gl::FALSE as GLint; gl::GetShaderiv(shader, gl::COMPILE_STATUS, &mut status); // Fail on error if status != (gl::TRUE as GLint) { let mut len = 0; gl::GetShaderiv(shader, gl::INFO_LOG_LENGTH, &mut len); let mut buf = Vec::with_capacity(len as usize); buf.set_len((len as usize) - 1); // subtract 1 to skip the trailing null character gl::GetShaderInfoLog( shader, len, ptr::null_mut(), buf.as_mut_ptr() as *mut GLchar, ); panic!( "{}", str::from_utf8(&buf) .ok() .expect("ShaderInfoLog not valid utf8") ); } } shader } fn link_program(vs: GLuint, fs: GLuint) -> GLuint { unsafe { let program = gl::CreateProgram(); gl::AttachShader(program, vs); gl::AttachShader(program, fs); gl::LinkProgram(program); // Get the link status let mut status = gl::FALSE as GLint; gl::GetProgramiv(program, gl::LINK_STATUS, &mut status); // Fail on error if status != (gl::TRUE as GLint) { let mut len: GLint = 0; gl::GetProgramiv(program, gl::INFO_LOG_LENGTH, &mut len); let mut buf = Vec::with_capacity(len as usize); buf.set_len((len as usize) - 1); // subtract 1 to skip the trailing null character gl::GetProgramInfoLog( program, len, ptr::null_mut(), buf.as_mut_ptr() as *mut GLchar, ); panic!( "{}", str::from_utf8(&buf) .ok() .expect("ProgramInfoLog not valid utf8") ); } program } } fn main() { let event_loop = glutin::event_loop::EventLoop::new(); let window = glutin::window::WindowBuilder::new(); let gl_window = glutin::ContextBuilder::new() .build_windowed(window, &event_loop) .unwrap(); // It is essential to make the context current before calling `gl::load_with`. 
let gl_window = unsafe { gl_window.make_current() }.unwrap(); // Load the OpenGL function pointers gl::load_with(|symbol| gl_window.get_proc_address(symbol)); // Create GLSL shaders let vs = compile_shader(VS_SRC, gl::VERTEX_SHADER); let fs = compile_shader(FS_SRC, gl::FRAGMENT_SHADER); let program = link_program(vs, fs); let mut vao = 0; let mut vbo = 0; unsafe { // Create Vertex Array Object gl::GenVertexArrays(1, &mut vao); gl::BindVertexArray(vao); // Create a Vertex Buffer Object and copy the vertex data to it gl::GenBuffers(1, &mut vbo); gl::BindBuffer(gl::ARRAY_BUFFER, vbo); gl::BufferData( gl::ARRAY_BUFFER, (VERTEX_DATA.len() * mem::size_of::<GLfloat>()) as GLsizeiptr, mem::transmute(&VERTEX_DATA[0]), gl::STATIC_DRAW, ); // Use shader program gl::UseProgram(program); gl::BindFragDataLocation(program, 0, CString::new("out_color").unwrap().as_ptr()); // Specify the layout of the vertex data let pos_attr = gl::GetAttribLocation(program, CString::new("position").unwrap().as_ptr()); gl::EnableVertexAttribArray(pos_attr as GLuint); gl::VertexAttribPointer( pos_attr as GLuint, 2, gl::FLOAT, gl::FALSE as GLboolean, 0, ptr::null(), ); } event_loop.run(move |event, _, control_flow| { use glutin::event::{Event, WindowEvent}; use glutin::event_loop::ControlFlow; *control_flow = ControlFlow::Wait; match event { Event::LoopDestroyed => return, Event::WindowEvent { event, .. } => match event { WindowEvent::CloseRequested => { // Cleanup unsafe { gl::DeleteProgram(program); gl::DeleteShader(fs); gl::DeleteShader(vs); gl::DeleteBuffers(1, &vbo); gl::DeleteVertexArrays(1, &vao); } *control_flow = ControlFlow::Exit }, _ => (), }, Event::RedrawRequested(_) => { unsafe { // Clear the screen to black gl::ClearColor(0.3, 0.3, 0.3, 1.0); gl::Clear(gl::COLOR_BUFFER_BIT); // Draw a triangle from the 3 vertices gl::DrawArrays(gl::TRIANGLES, 0, 3); } gl_window.swap_buffers().unwrap(); }, _ => (), } }); }<|fim▁end|>
<|file_name|>cacheWrapper.test.ts<|end_file_name|><|fim▁begin|>/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|> * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import { cacheWrapper } from 'src/utils/cacheWrapper'; describe('cacheWrapper', () => { const fnResult = 'fnResult'; const fn = jest.fn<string, [number, number]>().mockReturnValue(fnResult); let wrappedFn: (a: number, b: number) => string; beforeEach(() => { const cache = new Map<string, any>(); wrappedFn = cacheWrapper(fn, cache); }); afterEach(() => { jest.clearAllMocks(); }); it('calls fn with its arguments once when the key is not found', () => { const returnedValue = wrappedFn(1, 2); expect(returnedValue).toEqual(fnResult); expect(fn).toBeCalledTimes(1); expect(fn).toBeCalledWith(1, 2); }); describe('subsequent calls', () => { it('returns the correct value without fn being called multiple times', () => { const returnedValue1 = wrappedFn(1, 2); const returnedValue2 = wrappedFn(1, 2); expect(returnedValue1).toEqual(fnResult); expect(returnedValue2).toEqual(fnResult); expect(fn).toBeCalledTimes(1); }); it('fn is called multiple times for different arguments', () => { wrappedFn(1, 2); wrappedFn(1, 3); expect(fn).toBeCalledTimes(2); }); }); describe('with custom keyFn', () => { let cache: Map<string, any>; beforeEach(() => { cache = new Map<string, any>(); wrappedFn = cacheWrapper(fn, cache, (...args) => `key-${args[0]}`); }); it('saves fn result in cache under generated key', () => { wrappedFn(1, 2); expect(cache.get('key-1')).toEqual(fnResult); }); it('subsequent calls with same generated key calls fn once, even if other arguments have changed', () => { wrappedFn(1, 1); wrappedFn(1, 2); wrappedFn(1, 3); expect(fn).toBeCalledTimes(1); }); }); });<|fim▁end|>
* * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
<|file_name|>test_statepoint_batch.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import os import sys sys.path.insert(0, os.pardir)<|fim▁hole|>class StatepointTestHarness(TestHarness): def __init__(self): self._sp_name = None self._tallies = False self._opts = None self._args = None def _test_output_created(self): """Make sure statepoint files have been created.""" sps = ('statepoint.03.*', 'statepoint.06.*', 'statepoint.09.*') for sp in sps: self._sp_name = sp TestHarness._test_output_created(self) if __name__ == '__main__': harness = StatepointTestHarness() harness.main()<|fim▁end|>
from testing_harness import TestHarness
<|file_name|>wrapping_sub_mul.rs<|end_file_name|><|fim▁begin|>use num::arithmetic::traits::{ WrappingMul, WrappingSub, WrappingSubAssign, WrappingSubMul, WrappingSubMulAssign, }; fn wrapping_sub_mul<T: WrappingMul<T, Output = T> + WrappingSub<T, Output = T>>( x: T, y: T, z: T, ) -> T { x.wrapping_sub(y.wrapping_mul(z)) } fn wrapping_sub_mul_assign<T: WrappingMul<T, Output = T> + WrappingSubAssign<T>>( x: &mut T, y: T, z: T, ) { x.wrapping_sub_assign(y.wrapping_mul(z)); } macro_rules! impl_wrapping_sub_mul { ($t:ident) => { impl WrappingSubMul<$t> for $t { type Output = $t; /// Computes $x - yz$, wrapping around at the boundary of the type. /// /// $f(x, y, z) = w$, where $w \equiv x - yz \mod 2^W$ and $W$ is `$t::WIDTH`. /// /// # Worst-case complexity /// Constant time and additional memory. /// /// # Examples /// See the documentation of the `num::arithmetic::wrapping_sub_mul` module. #[inline] fn wrapping_sub_mul(self, y: $t, z: $t) -> $t { wrapping_sub_mul(self, y, z) } } impl WrappingSubMulAssign<$t> for $t { /// Replaces $x$ with $x - yz$, wrapping around at the boundary of the type. /// /// $x \gets w$, where $w \equiv x - yz \mod 2^W$ and $W$ is `$t::WIDTH`. /// /// # Worst-case complexity /// Constant time and additional memory. /// /// # Examples /// See the documentation of the `num::arithmetic::wrapping_sub_mul` module. #[inline] fn wrapping_sub_mul_assign(&mut self, y: $t, z: $t) { wrapping_sub_mul_assign(self, y, z) } }<|fim▁hole|><|fim▁end|>
}; } apply_to_primitive_ints!(impl_wrapping_sub_mul);
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals from .abc import ABCIE from .abc7news import Abc7NewsIE from .academicearth import AcademicEarthCourseIE from .addanime import AddAnimeIE from .adobetv import AdobeTVIE from .adultswim import AdultSwimIE from .aftonbladet import AftonbladetIE from .aljazeera import AlJazeeraIE from .alphaporno import AlphaPornoIE from .anitube import AnitubeIE from .anysex import AnySexIE from .aol import AolIE from .allocine import AllocineIE from .aparat import AparatIE from .appletrailers import AppleTrailersIE from .archiveorg import ArchiveOrgIE from .ard import ARDIE, ARDMediathekIE from .arte import ( ArteTvIE, ArteTVPlus7IE, ArteTVCreativeIE, ArteTVConcertIE, ArteTVFutureIE, ArteTVDDCIE, ArteTVEmbedIE, ) from .atresplayer import AtresPlayerIE from .atttechchannel import ATTTechChannelIE from .audiomack import AudiomackIE, AudiomackAlbumIE from .azubu import AzubuIE from .bambuser import BambuserIE, BambuserChannelIE from .bandcamp import BandcampIE, BandcampAlbumIE from .bbccouk import BBCCoUkIE from .beeg import BeegIE from .behindkink import BehindKinkIE from .bet import BetIE from .bild import BildIE from .bilibili import BiliBiliIE from .blinkx import BlinkxIE from .bliptv import BlipTVIE, BlipTVUserIE from .bloomberg import BloombergIE from .bpb import BpbIE from .br import BRIE from .breakcom import BreakIE from .brightcove import BrightcoveIE from .buzzfeed import BuzzFeedIE from .byutv import BYUtvIE from .c56 import C56IE from .canal13cl import Canal13clIE from .canalplus import CanalplusIE from .canalc2 import Canalc2IE from .cbs import CBSIE from .cbsnews import CBSNewsIE from .ceskatelevize import CeskaTelevizeIE from .channel9 import Channel9IE from .chilloutzone import ChilloutzoneIE from .cinchcast import CinchcastIE from .clipfish import ClipfishIE from .cliphunter import CliphunterIE from .clipsyndicate import ClipsyndicateIE from .cloudy import CloudyIE from .clubic import ClubicIE from .cmt import CMTIE from .cnet import CNETIE from .cnn import ( CNNIE, CNNBlogsIE, CNNArticleIE, ) from .collegehumor import CollegeHumorIE from .collegerama import CollegeRamaIE from .comedycentral import ComedyCentralIE, ComedyCentralShowsIE from .comcarcoff import ComCarCoffIE from .commonmistakes import CommonMistakesIE from .condenast import CondeNastIE from .cracked import CrackedIE from .criterion import CriterionIE from .crunchyroll import ( CrunchyrollIE, CrunchyrollShowPlaylistIE ) from .cspan import CSpanIE from .dailymotion import ( DailymotionIE, DailymotionPlaylistIE, DailymotionUserIE, ) from .daum import DaumIE from .dbtv import DBTVIE from .deezer import DeezerPlaylistIE from .dfb import DFBIE from .dotsub import DotsubIE from .dreisat import DreiSatIE from .drbonanza import DRBonanzaIE from .drtuber import DrTuberIE from .drtv import DRTVIE from .dvtv import DVTVIE from .dump import DumpIE from .defense import DefenseGouvFrIE from .discovery import DiscoveryIE from .divxstage import DivxStageIE from .dropbox import DropboxIE from .ebaumsworld import EbaumsWorldIE from .echomsk import EchoMskIE from .ehow import EHowIE from .eighttracks import EightTracksIE from .einthusan import EinthusanIE from .eitb import EitbIE from .ellentv import ( EllenTVIE, EllenTVClipsIE, ) from .elpais import ElPaisIE from .empflix import EMPFlixIE from .engadget import EngadgetIE from .eporner import EpornerIE from .eroprofile import EroProfileIE from .escapist import EscapistIE from .everyonesmixtape import 
EveryonesMixtapeIE from .exfm import ExfmIE from .expotv import ExpoTVIE from .extremetube import ExtremeTubeIE from .facebook import FacebookIE from .faz import FazIE from .fc2 import FC2IE from .firedrive import FiredriveIE from .firstpost import FirstpostIE from .firsttv import FirstTVIE from .fivemin import FiveMinIE from .fktv import ( FKTVIE, FKTVPosteckeIE, ) from .flickr import FlickrIE from .folketinget import FolketingetIE from .fourtube import FourTubeIE from .foxgay import FoxgayIE from .foxnews import FoxNewsIE from .franceculture import FranceCultureIE from .franceinter import FranceInterIE from .francetv import ( PluzzIE, FranceTvInfoIE, FranceTVIE, GenerationQuoiIE, CultureboxIE, ) from .freesound import FreesoundIE from .freespeech import FreespeechIE from .freevideo import FreeVideoIE from .funnyordie import FunnyOrDieIE from .gamekings import GamekingsIE from .gameone import ( GameOneIE, GameOnePlaylistIE, ) from .gamespot import GameSpotIE from .gamestar import GameStarIE from .gametrailers import GametrailersIE from .gdcvault import GDCVaultIE from .generic import GenericIE from .giantbomb import GiantBombIE from .giga import GigaIE from .glide import GlideIE from .globo import GloboIE from .godtube import GodTubeIE from .goldenmoustache import GoldenMoustacheIE from .golem import GolemIE from .googleplus import GooglePlusIE from .googlesearch import GoogleSearchIE from .gorillavid import GorillaVidIE from .goshgay import GoshgayIE from .grooveshark import GroovesharkIE from .groupon import GrouponIE from .hark import HarkIE from .hearthisat import HearThisAtIE from .heise import HeiseIE from .hellporno import HellPornoIE from .helsinki import HelsinkiIE from .hentaistigma import HentaiStigmaIE from .hitbox import HitboxIE, HitboxLiveIE from .hornbunny import HornBunnyIE from .hostingbulk import HostingBulkIE from .hotnewhiphop import HotNewHipHopIE from .howcast import HowcastIE from .howstuffworks import HowStuffWorksIE from .huffpost import HuffPostIE from .hypem import HypemIE from .iconosquare import IconosquareIE from .ign import IGNIE, OneUPIE from .imdb import ( ImdbIE, ImdbListIE ) from .ina import InaIE from .infoq import InfoQIE from .instagram import InstagramIE, InstagramUserIE from .internetvideoarchive import InternetVideoArchiveIE from .iprima import IPrimaIE<|fim▁hole|> IviCompilationIE ) from .izlesene import IzleseneIE from .jadorecettepub import JadoreCettePubIE from .jeuxvideo import JeuxVideoIE from .jove import JoveIE from .jukebox import JukeboxIE from .jpopsukitv import JpopsukiIE from .kankan import KankanIE from .karaoketv import KaraoketvIE from .keezmovies import KeezMoviesIE from .khanacademy import KhanAcademyIE from .kickstarter import KickStarterIE from .keek import KeekIE from .kontrtube import KontrTubeIE from .krasview import KrasViewIE from .ku6 import Ku6IE from .la7 import LA7IE from .laola1tv import Laola1TvIE from .lifenews import LifeNewsIE from .liveleak import LiveLeakIE from .livestream import ( LivestreamIE, LivestreamOriginalIE, LivestreamShortenerIE, ) from .lnkgo import LnkGoIE from .lrt import LRTIE from .lynda import ( LyndaIE, LyndaCourseIE ) from .m6 import M6IE from .macgamestore import MacGameStoreIE from .mailru import MailRuIE from .malemotion import MalemotionIE from .mdr import MDRIE from .metacafe import MetacafeIE from .metacritic import MetacriticIE from .mgoon import MgoonIE from .minhateca import MinhatecaIE from .ministrygrid import MinistryGridIE from .mit import TechTVMITIE, MITIE, OCWMITIE from .mitele 
import MiTeleIE from .mixcloud import MixcloudIE from .mlb import MLBIE from .mpora import MporaIE from .moevideo import MoeVideoIE from .mofosex import MofosexIE from .mojvideo import MojvideoIE from .moniker import MonikerIE from .mooshare import MooshareIE from .morningstar import MorningstarIE from .motherless import MotherlessIE from .motorsport import MotorsportIE from .movieclips import MovieClipsIE from .moviezine import MoviezineIE from .movshare import MovShareIE from .mtv import ( MTVIE, MTVServicesEmbeddedIE, MTVIggyIE, ) from .muenchentv import MuenchenTVIE from .musicplayon import MusicPlayOnIE from .musicvault import MusicVaultIE from .muzu import MuzuTVIE from .myspace import MySpaceIE, MySpaceAlbumIE from .myspass import MySpassIE from .myvideo import MyVideoIE from .myvidster import MyVidsterIE from .naver import NaverIE from .nba import NBAIE from .nbc import ( NBCIE, NBCNewsIE, ) from .ndr import NDRIE from .ndtv import NDTVIE from .netzkino import NetzkinoIE from .nerdcubed import NerdCubedFeedIE from .newgrounds import NewgroundsIE from .newstube import NewstubeIE from .nfb import NFBIE from .nfl import NFLIE from .nhl import NHLIE, NHLVideocenterIE from .niconico import NiconicoIE, NiconicoPlaylistIE from .ninegag import NineGagIE from .noco import NocoIE from .normalboots import NormalbootsIE from .nosvideo import NosVideoIE from .novamov import NovaMovIE from .nowness import NownessIE from .nowvideo import NowVideoIE from .npo import ( NPOIE, NPOLiveIE, TegenlichtVproIE, ) from .nrk import ( NRKIE, NRKTVIE, ) from .ntv import NTVIE from .nytimes import NYTimesIE from .nuvid import NuvidIE from .oktoberfesttv import OktoberfestTVIE from .ooyala import OoyalaIE from .openfilm import OpenFilmIE from .orf import ( ORFTVthekIE, ORFOE1IE, ORFFM4IE, ) from .parliamentliveuk import ParliamentLiveUKIE from .patreon import PatreonIE from .pbs import PBSIE from .phoenix import PhoenixIE from .photobucket import PhotobucketIE from .planetaplay import PlanetaPlayIE from .played import PlayedIE from .playfm import PlayFMIE from .playvid import PlayvidIE from .podomatic import PodomaticIE from .pornhd import PornHdIE from .pornhub import PornHubIE from .pornotube import PornotubeIE from .pornoxo import PornoXOIE from .promptfile import PromptFileIE from .prosiebensat1 import ProSiebenSat1IE from .pyvideo import PyvideoIE from .quickvid import QuickVidIE from .radiode import RadioDeIE from .radiobremen import RadioBremenIE from .radiofrance import RadioFranceIE from .rai import RaiIE from .rbmaradio import RBMARadioIE from .redtube import RedTubeIE from .restudy import RestudyIE from .reverbnation import ReverbNationIE from .ringtv import RingTVIE from .ro220 import Ro220IE from .rottentomatoes import RottenTomatoesIE from .roxwel import RoxwelIE from .rtbf import RTBFIE from .rte import RteIE from .rtlnl import RtlXlIE from .rtlnow import RTLnowIE from .rtl2 import RTL2IE from .rtp import RTPIE from .rts import RTSIE from .rtve import RTVEALaCartaIE, RTVELiveIE from .ruhd import RUHDIE from .rutube import ( RutubeIE, RutubeChannelIE, RutubeEmbedIE, RutubeMovieIE, RutubePersonIE, ) from .rutv import RUTVIE from .sapo import SapoIE from .savefrom import SaveFromIE from .sbs import SBSIE from .scivee import SciVeeIE from .screencast import ScreencastIE from .screencastomatic import ScreencastOMaticIE from .screenwavemedia import CinemassacreIE, ScreenwaveMediaIE, TeamFourIE from .servingsys import ServingSysIE from .sexu import SexuIE from .sexykarma import SexyKarmaIE from .shared 
import SharedIE from .sharesix import ShareSixIE from .sina import SinaIE from .slideshare import SlideshareIE from .slutload import SlutloadIE from .smotri import ( SmotriIE, SmotriCommunityIE, SmotriUserIE, SmotriBroadcastIE, ) from .snotr import SnotrIE from .sockshare import SockshareIE from .sohu import SohuIE from .soundcloud import ( SoundcloudIE, SoundcloudSetIE, SoundcloudUserIE, SoundcloudPlaylistIE ) from .soundgasm import SoundgasmIE from .southpark import ( SouthParkIE, SouthparkDeIE, ) from .space import SpaceIE from .spankwire import SpankwireIE from .spiegel import SpiegelIE, SpiegelArticleIE from .spiegeltv import SpiegeltvIE from .spike import SpikeIE from .sport5 import Sport5IE from .sportbox import SportBoxIE from .sportdeutschland import SportDeutschlandIE from .srmediathek import SRMediathekIE from .stanfordoc import StanfordOpenClassroomIE from .steam import SteamIE from .streamcloud import StreamcloudIE from .streamcz import StreamCZIE from .streetvoice import StreetVoiceIE from .sunporno import SunPornoIE from .swrmediathek import SWRMediathekIE from .syfy import SyfyIE from .sztvhu import SztvHuIE from .tagesschau import TagesschauIE from .tapely import TapelyIE from .tass import TassIE from .teachertube import ( TeacherTubeIE, TeacherTubeUserIE, ) from .teachingchannel import TeachingChannelIE from .teamcoco import TeamcocoIE from .techtalks import TechTalksIE from .ted import TEDIE from .telebruxelles import TeleBruxellesIE from .telecinco import TelecincoIE from .telemb import TeleMBIE from .teletask import TeleTaskIE from .tenplay import TenPlayIE from .testurl import TestURLIE from .testtube import TestTubeIE from .tf1 import TF1IE from .theonion import TheOnionIE from .theplatform import ThePlatformIE from .thesixtyone import TheSixtyOneIE from .thisav import ThisAVIE from .tinypic import TinyPicIE from .tlc import TlcIE, TlcDeIE from .tmz import TMZIE from .tnaflix import TNAFlixIE from .thvideo import ( THVideoIE, THVideoPlaylistIE ) from .toutv import TouTvIE from .toypics import ToypicsUserIE, ToypicsIE from .traileraddict import TrailerAddictIE from .trilulilu import TriluliluIE from .trutube import TruTubeIE from .tube8 import Tube8IE from .tudou import TudouIE from .tumblr import TumblrIE from .tunein import TuneInIE from .turbo import TurboIE from .tutv import TutvIE from .tvigle import TvigleIE from .tvp import TvpIE, TvpSeriesIE from .tvplay import TVPlayIE from .twentyfourvideo import TwentyFourVideoIE from .twitch import ( TwitchVideoIE, TwitchChapterIE, TwitchVodIE, TwitchProfileIE, TwitchPastBroadcastsIE, TwitchBookmarksIE, TwitchStreamIE, ) from .ubu import UbuIE from .udemy import ( UdemyIE, UdemyCourseIE ) from .unistra import UnistraIE from .urort import UrortIE from .ustream import UstreamIE, UstreamChannelIE from .vbox7 import Vbox7IE from .veehd import VeeHDIE from .veoh import VeohIE from .vesti import VestiIE from .vevo import VevoIE from .vgtv import VGTVIE from .vh1 import VH1IE from .vice import ViceIE from .viddler import ViddlerIE from .videobam import VideoBamIE from .videodetective import VideoDetectiveIE from .videolecturesnet import VideoLecturesNetIE from .videofyme import VideofyMeIE from .videomega import VideoMegaIE from .videopremium import VideoPremiumIE from .videott import VideoTtIE from .videoweed import VideoWeedIE from .vidme import VidmeIE from .vidzi import VidziIE from .vier import VierIE, VierVideosIE from .vimeo import ( VimeoIE, VimeoAlbumIE, VimeoChannelIE, VimeoGroupsIE, VimeoLikesIE, VimeoReviewIE, 
VimeoUserIE, VimeoWatchLaterIE, ) from .vimple import VimpleIE from .vine import ( VineIE, VineUserIE, ) from .viki import VikiIE from .vk import ( VKIE, VKUserVideosIE, ) from .vodlocker import VodlockerIE from .vporn import VpornIE from .vrt import VRTIE from .vube import VubeIE from .vuclip import VuClipIE from .vulture import VultureIE from .walla import WallaIE from .washingtonpost import WashingtonPostIE from .wat import WatIE from .wayofthemaster import WayOfTheMasterIE from .wdr import ( WDRIE, WDRMobileIE, WDRMausIE, ) from .webofstories import WebOfStoriesIE from .weibo import WeiboIE from .wimp import WimpIE from .wistia import WistiaIE from .worldstarhiphop import WorldStarHipHopIE from .wrzuta import WrzutaIE from .xbef import XBefIE from .xboxclips import XboxClipsIE from .xhamster import XHamsterIE from .xminus import XMinusIE from .xnxx import XNXXIE from .xvideos import XVideosIE from .xtube import XTubeUserIE, XTubeIE from .xxxymovies import XXXYMoviesIE from .yahoo import ( YahooIE, YahooSearchIE, ) from .yesjapan import YesJapanIE from .ynet import YnetIE from .youjizz import YouJizzIE from .youku import YoukuIE from .youporn import YouPornIE from .yourupload import YourUploadIE from .youtube import ( YoutubeIE, YoutubeChannelIE, YoutubeFavouritesIE, YoutubeHistoryIE, YoutubePlaylistIE, YoutubeRecommendedIE, YoutubeSearchDateIE, YoutubeSearchIE, YoutubeSearchURLIE, YoutubeShowIE, YoutubeSubscriptionsIE, YoutubeTruncatedIDIE, YoutubeTruncatedURLIE, YoutubeUserIE, YoutubeWatchLaterIE, ) from .zdf import ZDFIE, ZDFChannelIE from .zingmp3 import ( ZingMp3SongIE, ZingMp3AlbumIE, ) _ALL_CLASSES = [ klass for name, klass in globals().items() if name.endswith('IE') and name != 'GenericIE' ] _ALL_CLASSES.append(GenericIE) def gen_extractors(): """ Return a list of an instance of every supported extractor. The order does matter; the first extractor matched is the one handling the URL. """ return [klass() for klass in _ALL_CLASSES] def list_extractors(age_limit): """ Return a list of extractors that are suitable for the given age, sorted by extractor ID. """ return sorted( filter(lambda ie: ie.is_suitable(age_limit), gen_extractors()), key=lambda ie: ie.IE_NAME.lower()) def get_info_extractor(ie_name): """Returns the info extractor class with the given ie_name""" return globals()[ie_name + 'IE']<|fim▁end|>
from .ivi import ( IviIE,
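A note on the registry tail of the extractor file above: `_ALL_CLASSES` and `gen_extractors` work by scanning the module namespace for classes whose names end in `IE`, with `GenericIE` appended last as the catch-all. The following standalone Python sketch, using three hypothetical stand-in classes, illustrates that same pattern:

# Sketch of the registry pattern used above; the stand-in classes are
# hypothetical, the collection logic mirrors _ALL_CLASSES verbatim.
class YoutubeIE: pass
class VimeoIE: pass
class GenericIE: pass

_ALL_CLASSES = [
    klass for name, klass in globals().items()
    if name.endswith('IE') and name != 'GenericIE'
]
_ALL_CLASSES.append(GenericIE)  # the catch-all extractor is tried last

def gen_extractors():
    # Order matters: the first extractor that matches handles the URL.
    return [klass() for klass in _ALL_CLASSES]

print([klass.__name__ for klass in _ALL_CLASSES])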
<|file_name|>githubcron.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3.4 # -*- coding: utf-8 -*- # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os, sys, re, urllib, json, subprocess import time import urllib.request import smtplib import base64 from email.mime.text import MIMEText # Function for fetching JSON via HTTPS def getJSON(url, creds = None, cookie = None): headers = {} if creds and len(creds) > 0: xcreds = creds.encode(encoding='ascii', errors='replace') auth = base64.encodebytes(xcreds).decode('ascii', errors='replace').replace("\n", '') headers = {"Content-type": "application/json", "Accept": "*/*", "Authorization": "Basic %s" % auth } request = urllib.request.Request(url, headers = headers) result = urllib.request.urlopen(request) return json.loads(result.read().decode('utf-8', errors = 'replace')) # Get the current queue js = getJSON("https://reporeq.apache.org/queue.json") created = 0 # If queue is valid: if js: print("analysing %u items" % len(js)) # For each item: # - Check that it hasn't been mirrored yet # - Check that a repo with this name doesn't exist already # - Check that name is valid # - Mirror repo if all is okay for item in js: # Make sure this is a GH integration request AND it's been mirrored more than a day ago, so GH caught up. if not 'githubbed' in item and item['github'] == True and 'mirrordate' in item and item['mirrordate'] < (time.time()-86400): reponame = item['name'] # Check valid name if len(reponame) < 5 or reponame.find("..") != -1 or reponame.find("/") != -1: print("Invalid repo name!") continue # Set some vars notify = item['notify'] description = item['description'] if 'description' in item else "Unknown" # Make sure the repo exists! if os.path.exists("/x1/git/mirrors/%s" % reponame): print("%s is there, adding web hooks" % reponame) try: xreponame = reponame.replace(".git", "") # Cut off the .git part, so GH will not bork inp = subprocess.check_output("/usr/local/etc/git_self_serve/add-webhook.sh %s" % xreponame, shell = True).decode('ascii', 'replace') except subprocess.CalledProcessError as err: print("Borked: %s" % err.output) continue else: print("Repo doesn't exist, ignoring this request...sort of") # Notify reporeq that we've GH'ed this repository! print("Notifying https://reporeq.apache.org/ss.lua?githubbed=%s" % reponame) request = urllib.request.Request("https://reporeq.apache.org/ss.lua?githubbed=%s" % reponame) result = urllib.request.urlopen(request)<|fim▁hole|> msg['From'] = "[email protected]" msg['Reply-To'] = "[email protected]" msg['To'] = "[email protected], private@%s.apache.org" % item['pmc'] s = smtplib.SMTP(host='mail.apache.org', port=2025) s.send_message(msg) s.quit() # We made a thing! created += 1 print("All done for today! Made %u new repos" % created)<|fim▁end|>
# Inform infra@ and private@$pmc that the mirror has been set up msg = MIMEText("New repository %s has now had GitHub integration enabled!\n\nWith regards,\nApache Infrastructure." % (reponame)) msg['Subject'] = 'Github integration set up: %s' % reponame
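Each row in this dump is a fill-in-the-middle pair: the prompt holds a file prefix, a `<|fim▁hole|>` marker, and the suffix up to `<|fim▁end|>`, while the following line holds the completion that belongs in the hole (here, the `MIMEText` construction that the `msg['From']`/`msg['To']` lines depend on). A minimal reassembly sketch, using a hypothetical two-line demo row; the marker character is written with a `\u2581` escape so the sketch does not itself contain a literal marker:

def reassemble(prompt: str, completion: str) -> str:
    # Split the prompt at the hole marker; everything after it, up to the
    # end marker, is the suffix that follows the completion.
    prefix, rest = prompt.split("<|fim\u2581hole|>", 1)
    suffix = rest.split("<|fim\u2581end|>", 1)[0]
    return prefix + completion + suffix

demo_prompt = "def add(a, b):\n<|fim\u2581hole|>\n# end of file<|fim\u2581end|>"
demo_completion = "    return a + b"
print(reassemble(demo_prompt, demo_completion))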
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>mod utils; use wasm_bindgen::prelude::*; // When the `wee_alloc` feature is enabled, use `wee_alloc` as the global // allocator. #[cfg(feature = "wee_alloc")] #[global_allocator] static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; use vt::VT;<|fim▁hole|>pub struct VtWrapper { vt: VT, } #[wasm_bindgen] pub fn create(w: usize, h: usize) -> VtWrapper { utils::set_panic_hook(); VtWrapper { vt: VT::new(w, h) } } #[wasm_bindgen] impl VtWrapper { pub fn feed(&mut self, s: &str) -> Vec<usize> { self.vt.feed_str(s) } pub fn inspect(&self) -> String { format!("{:?}", self.vt) } pub fn get_line(&self, l: usize) -> JsValue { let line = self.vt.get_line(l); serde_wasm_bindgen::to_value(&line).unwrap() } pub fn get_cursor(&self) -> JsValue { let cursor = self.vt.get_cursor(); serde_wasm_bindgen::to_value(&cursor).unwrap() } }<|fim▁end|>
#[wasm_bindgen]
<|file_name|>test_jail_code.py<|end_file_name|><|fim▁begin|>"""Test jail_code.py""" import os import shutil import sys import textwrap import tempfile import unittest from nose.plugins.skip import SkipTest from codejail.jail_code import jail_code, is_configured, Jail, configure, auto_configure auto_configure() def jailpy(code=None, *args, **kwargs): """Run `jail_code` on Python.""" if code: code = textwrap.dedent(code) result = jail_code("python", code, *args, **kwargs) if isinstance(result.stdout, bytes): result.stdout = result.stdout.decode() if isinstance(result.stderr, bytes): result.stderr = result.stderr.decode() return result def file_here(fname): """Return the full path to a file alongside this code.""" return os.path.join(os.path.dirname(__file__), fname) class JailCodeHelpers(unittest.TestCase): """Assert helpers for jail_code tests.""" def setUp(self): super(JailCodeHelpers, self).setUp() if not is_configured("python"): raise SkipTest def assertResultOk(self, res): """Assert that `res` exited well (0), and had no stderr output.""" self.assertEqual(res.stderr, "") self.assertEqual(res.status, 0) class TestFeatures(JailCodeHelpers): """Test features of how `jail_code` runs Python.""" def test_hello_world(self): res = jailpy(code="print('Hello, world!')") self.assertResultOk(res) self.assertEqual(res.stdout, 'Hello, world!\n') def test_argv(self):<|fim▁hole|> ) self.assertResultOk(res) self.assertEqual(res.stdout, "Hello:world:-x\n") def test_ends_with_exception(self): res = jailpy(code="""raise Exception('FAIL')""") self.assertNotEqual(res.status, 0) self.assertEqual(res.stdout, "") self.assertEqual(res.stderr, textwrap.dedent("""\ Traceback (most recent call last): File "jailed_code", line 1, in <module> raise Exception('FAIL') Exception: FAIL """)) def test_stdin_is_provided(self): res = jailpy( code="import json,sys; print(sum(json.load(sys.stdin)))", stdin="[1, 2.5, 33]" ) self.assertResultOk(res) self.assertEqual(res.stdout, "36.5\n") def test_files_are_copied(self): res = jailpy( code="print('Look:', open('hello.txt').read())", files=[file_here("hello.txt")] ) self.assertResultOk(res) self.assertEqual(res.stdout, 'Look: Hello there.\n\n') def test_directories_are_copied(self): res = jailpy( code="""\ import os for path, dirs, files in os.walk("."): print((path, sorted(dirs), sorted(files))) """, files=[file_here("hello.txt"), file_here("pylib")] ) self.assertResultOk(res) self.assertIn("hello.txt", res.stdout) self.assertIn("pylib", res.stdout) self.assertIn("module.py", res.stdout) def test_executing_a_copied_file(self): res = jailpy( files=[file_here("doit.py")], argv=["doit.py", "1", "2", "3"] ) self.assertResultOk(res) self.assertEqual( res.stdout, "This is doit.py!\nMy args are ['doit.py', '1', '2', '3']\n" ) def test_context_managers(self): first = textwrap.dedent(""" with open("hello.txt", "w") as f: f.write("Hello, second") """) second = textwrap.dedent(""" with open("hello.txt") as f: print(f.read()) """) limits = {"TIME": 1, "MEMORY": 128*1024*1024, "CAN_FORK": True, "FILE_SIZE": 256} configure("unconfined_python", sys.prefix + "/bin/python", limits_conf=limits) with Jail() as j: res = j.run_code("unconfined_python", first) self.assertEqual(res.status, 0) res = j.run_code("python", second) self.assertEqual(res.status, 0) self.assertEqual(res.stdout.decode().strip(), "Hello, second") class TestLimits(JailCodeHelpers): """Tests of the resource limits, and changing them.""" def test_cant_use_too_much_memory(self): # This will fail after setting the limit to 30Mb. 
res = jailpy(code="print(len(bytearray(50000000)))", limits={'MEMORY': 30000000}) self.assertEqual(res.stdout, "") self.assertNotEqual(res.status, 0) def test_changing_vmem_limit(self): # Up the limit, it will succeed. res = jailpy(code="print(len(bytearray(50000000)))", limits={'MEMORY': 80000000}) self.assertEqual(res.stdout, "50000000\n") self.assertEqual(res.status, 0) def test_disabling_vmem_limit(self): # Disable the limit, it will succeed. res = jailpy(code="print(len(bytearray(50000000)))", limits={'MEMORY': None}) self.assertEqual(res.stdout, "50000000\n") self.assertEqual(res.status, 0) def test_cant_use_too_much_cpu(self): res = jailpy(code="print(sum(range(10**9)))") self.assertEqual(res.stdout, "") self.assertNotEqual(res.status, 0) self.assertTrue(res.time_limit_exceeded) def test_cant_use_too_much_time(self): # time limit is 5 * cpu_time res = jailpy(code="import time; time.sleep(7); print('Done!')", limits={'TIME': 1}) self.assertNotEqual(res.status, 0) self.assertEqual(res.stdout, "") self.assertTrue(res.time_limit_exceeded) def test_cant_write_files(self): res = jailpy(code="""\ print("Trying") with open("mydata.txt", "w") as f: f.write("hello") with open("mydata.txt") as f2: print("Got this:", f2.read()) """) self.assertNotEqual(res.status, 0) self.assertEqual(res.stdout, "Trying\n") self.assertIn("ermission denied", res.stderr) def test_cant_use_network(self): res = jailpy(code="""\ import urllib.request print("Reading google") u = urllib.request.urlopen("http://google.com") google = u.read() print(len(google)) """) self.assertNotEqual(res.status, 0) self.assertEqual(res.stdout, "Reading google\n") self.assertIn("URLError", res.stderr) def test_cant_fork(self): res = jailpy(code="""\ import os print("Forking") child_ppid = os.fork() """) self.assertNotEqual(res.status, 0) self.assertEqual(res.stdout, "Forking\n") self.assertIn("IOError", res.stderr) def test_cant_see_environment_variables(self): os.environ['HONEY_BOO_BOO'] = 'Look!' res = jailpy(code="""\ import os for name, value in os.environ.items(): print("%s: %r" % (name, value)) """) self.assertResultOk(res) self.assertNotIn("HONEY", res.stdout) def test_reading_dev_random(self): # We can read 10 bytes just fine. res = jailpy(code="x = open('/dev/random', 'rb').read(10); print(len(x))") self.assertResultOk(res) self.assertEqual(res.stdout, "10\n") # If we try to read all of it, we'll be killed by the real-time limit. res = jailpy(code="x = open('/dev/random').read(); print('Done!')") self.assertNotEqual(res.status, 0) class TestSymlinks(JailCodeHelpers): """Testing symlink behavior.""" def setUp(self): # Make a temp dir, and arrange to have it removed when done. tmp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) # Make a directory that won't be copied into the sandbox. self.not_copied = os.path.join(tmp_dir, "not_copied") os.mkdir(self.not_copied) self.linked_txt = os.path.join(self.not_copied, "linked.txt") with open(self.linked_txt, "w") as linked: linked.write("Hi!") # Make a directory that will be copied into the sandbox, with a # symlink to a file we aren't copying in. 
self.copied = os.path.join(tmp_dir, "copied") os.mkdir(self.copied) self.here_txt = os.path.join(self.copied, "here.txt") with open(self.here_txt, "w") as here: here.write("012345") self.link_txt = os.path.join(self.copied, "link.txt") os.symlink(self.linked_txt, self.link_txt) self.herelink_txt = os.path.join(self.copied, "herelink.txt") os.symlink("here.txt", self.herelink_txt) def test_symlinks_in_directories_wont_copy_data(self): # Run some code in the sandbox, with a copied directory containing # the symlink. res = jailpy( code="""\ print(open('copied/here.txt').read()) # can read print(open('copied/herelink.txt').read()) # can read print(open('copied/link.txt').read()) # can't read """, files=[self.copied], ) self.assertEqual(res.stdout, "012345\n012345\n") self.assertIn("ermission denied", res.stderr) def test_symlinks_wont_copy_data(self): # Run some code in the sandbox, with a copied file which is a symlink. res = jailpy( code="""\ print(open('here.txt').read()) # can read print(open('herelink.txt').read()) # can read print(open('link.txt').read()) # can't read """, files=[self.here_txt, self.herelink_txt, self.link_txt], ) self.assertEqual(res.stdout, "012345\n012345\n") self.assertIn("ermission denied", res.stderr) class TestMalware(JailCodeHelpers): """Tests that attempt actual malware against the interpreter or system.""" def test_crash_cpython(self): # http://nedbatchelder.com/blog/201206/eval_really_is_dangerous.html res = jailpy(code="""\ import types, sys bad_code = types.CodeType(0,0,0,0,0,b"KABOOM",(),(),(),"","",0,b"") crash_me = types.FunctionType(bad_code, {}) print("Here we go...") sys.stdout.flush() crash_me() print("The afterlife!") """) self.assertNotEqual(res.status, 0) self.assertEqual(res.stderr, "") self.assertEqual(res.stdout, "Here we go...\n") def test_read_etc_passwd(self): res = jailpy(code="""\ bytes = len(open('/etc/passwd').read()) print('Gotcha', bytes) """) self.assertNotEqual(res.status, 0) self.assertEqual(res.stdout, "") self.assertIn("ermission denied", res.stderr) def test_find_other_sandboxes(self): res = jailpy(code=""" import os places = [ "..", "/tmp", "/", "/home", "/etc", "/var" ] for place in places: try: files = os.listdir(place) except Exception: # darn pass else: print("Files in %r: %r" % (place, files)) print("Done.") """) self.assertResultOk(res) self.assertEqual(res.stdout, "Done.\n")<|fim▁end|>
res = jailpy( code="import sys; print(':'.join(sys.argv[1:]))", argv=["Hello", "world", "-x"]
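Slotting this completion back into the `<|fim▁hole|>` of `test_argv` gives the full test as it appears in the source, reassembled verbatim below (it lives inside the `TestFeatures` class of the prompt):

def test_argv(self):
    res = jailpy(
        code="import sys; print(':'.join(sys.argv[1:]))",
        argv=["Hello", "world", "-x"]
    )
    self.assertResultOk(res)
    self.assertEqual(res.stdout, "Hello:world:-x\n")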
<|file_name|>test_star.py<|end_file_name|><|fim▁begin|>"""star subcommand tests""" # (c) 2015-2021 Wibowo Arindrarto <[email protected]> import json import pytest from click.testing import CliRunner from crimson.cli import main from .utils import get_test_path @pytest.fixture(scope="module") def star_fail(): runner = CliRunner() in_file = get_test_path("star_nope.txt") result = runner.invoke(main, ["star", in_file]) return result @pytest.fixture(scope="module") def star_v230_01(): runner = CliRunner() in_file = get_test_path("star_v230_01.txt") result = runner.invoke(main, ["star", in_file]) result.json = json.loads(result.output) return result @pytest.fixture(scope="module") def star_v230_02(): runner = CliRunner() in_file = get_test_path("star_v230_02.txt") result = runner.invoke(main, ["star", in_file]) result.json = json.loads(result.output) return result def test_star_fail_exit_code(star_fail): assert star_fail.exit_code != 0 def test_star_fail_output(star_fail): err_msg = "Unexpected file structure. No contents parsed." assert err_msg in star_fail.output @pytest.mark.parametrize( "attr, exp", [ ("avgDeletionLength", 1.36), ("avgInputLength", 98), ("avgInsertionLength", 1.21), ("avgMappedLength", 98.27), ("mappingSpeed", 403.16), ("nInput", 14782416), ("nMappedMultipleLoci", 1936775), ("nMappedTooManyLoci", 27644), ("nSplicesATAC", 2471), ("nSplicesAnnotated", 3780876), ("nSplicesGCAG", 22344), ("nSplicesGTAG", 3780050), ("nSplicesNonCanonical", 5148), ("nSplicesTotal", 3810013), ("nUniquelyMapped", 12347431), ("pctMappedMultipleLoci", 13.1), ("pctMappedTooManyLoci", 0.19), ("pctUniquelyMapped", 83.53), ("pctUnmappedForOther", 0.03),<|fim▁hole|> ("pctUnmappedForTooShort", 3.16), ("rateDeletionPerBase", 0.0), ("rateInsertionPerBase", 0.0), ("rateMismatchPerBase", 0.24), ("timeEnd", "Dec 11 19:01:56"), ("timeJobStart", "Dec 11 18:55:02"), ("timeMappingStart", "Dec 11 18:59:44"), ], ) def test_star_v230_01(star_v230_01, attr, exp): assert star_v230_01.json.get(attr) == exp, attr<|fim▁end|>
("pctUnmappedForTooManyMismatches", 0.0),
<|file_name|>lookup-international-basic.5.x.py<|end_file_name|><|fim▁begin|><|fim▁hole|> # Python 3 from urllib.parse import quote except ImportError: # Python 2 from urllib import quote # Your Account Sid and Auth Token from twilio.com/console account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" auth_token = "your_auth_token" client = TwilioLookupsClient(account_sid, auth_token) encoded_number = quote('+4402077651182') number = client.phone_numbers.get(encoded_number) print(number.national_format)<|fim▁end|>
# Download the Python helper library from twilio.com/docs/python/install from twilio.rest.lookups import TwilioLookupsClient try:
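In this sample the hole sits at the very top of the file, so the completion carries the lead comment, the Twilio import, and the opening `try:`. Reassembled verbatim from completion plus suffix, the whole script reads:

# Download the Python helper library from twilio.com/docs/python/install
from twilio.rest.lookups import TwilioLookupsClient

try:
    # Python 3
    from urllib.parse import quote
except ImportError:
    # Python 2
    from urllib import quote

# Your Account Sid and Auth Token from twilio.com/console
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"

client = TwilioLookupsClient(account_sid, auth_token)

encoded_number = quote('+4402077651182')
number = client.phone_numbers.get(encoded_number)
print(number.national_format)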
<|file_name|>samba_ip_test.go<|end_file_name|><|fim▁begin|>package cmd import ( "testing" gomock "github.com/golang/mock/gomock" "github.com/jamesnetherton/homehub-cli/service" ) func TestSambaIPCommand(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() hub := NewMockHub(ctrl) service.SetHub(hub)<|fim▁hole|> AssertCommandOutput(t, NewSambaIPCommand(NewLoginCommand())) }<|fim▁end|>
service.AuthenticationComplete() hub.EXPECT().SambaIP().Return("192.168.1.254", nil)
<|file_name|>option_manager.go<|end_file_name|><|fim▁begin|>/* Copyright (c) 2016 VMware, Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package object import ( "github.com/vmware/govmomi/vim25" "github.com/vmware/govmomi/vim25/methods" "github.com/vmware/govmomi/vim25/types" "golang.org/x/net/context" ) type OptionManager struct { Common } func NewOptionManager(c *vim25.Client, ref types.ManagedObjectReference) *OptionManager { return &OptionManager{ Common: NewCommon(c, ref), } } func (m OptionManager) Query(ctx context.Context, name string) ([]types.BaseOptionValue, error) { req := types.QueryOptions{ This: m.Reference(), Name: name, }<|fim▁hole|> if err != nil { return nil, err } return res.Returnval, nil } func (m OptionManager) Update(ctx context.Context, value []types.BaseOptionValue) error { req := types.UpdateOptions{ This: m.Reference(), ChangedValue: value, } _, err := methods.UpdateOptions(ctx, m.Client(), &req) return err }<|fim▁end|>
res, err := methods.QueryOptions(ctx, m.Client(), &req)
<|file_name|>fixtures.js<|end_file_name|><|fim▁begin|>module.exports = function() { return [{ "request": { "method": "GET", "url": "http://api.wordnik.com/v4/word.json/test/definitions", "params": { "limit":"1", "includeRelated":"true", "sourceDictionaries":"webster", "useCanonical":"true", "api_key":"a2a73e7b926c924fad7001ca3111acd55af2ffabf50eb4ae5" } }, "response": { "code": 200, "data": [{ "textProns":[], "sourceDictionary":"gcide", "exampleUses":[], "relatedWords":[], "labels":[{"type":"fld","text":"(Metal.)"}], "citations": [ { "source":"Chaucer.", "cite":"Our ingots, tests, and many mo." } ], "word":"test", "text":[ "A cupel or cupelling hearth in which precious metals ", "are melted for trial and refinement." ].join(""), "sequence":"0", "score":0.0,<|fim▁hole|> "Dictionary of English"].join(""), "seqString":"1." }] } }, { "request": { "method": "GET", "url": "http://api.wordnik.com/v4/word.json/asd/definitions", "params": { "limit":"1", "includeRelated":"true", "sourceDictionaries":"webster", "useCanonical":"true", "api_key":"a2a73e7b926c924fad7001ca3111acd55af2ffabf50eb4ae5" } }, "response": { "code": 200, "data": [{ "textProns":[], "sourceDictionary":"gcide", "exampleUses":[], "relatedWords":[ { "words":["added"], "gram":"imp. & p. p.", "relationshipType":"form"}, { "words":["adding"], "gram":"p. pr. & vb. n.", "relationshipType":"form"}, { "words":["added"], "gram":"imp. & p. p.", "relationshipType":"form"}, { "words":["adding"], "gram":"p. pr. & vb. n.", "relationshipType":"form"}], "labels":[], "citations": [ { "source":"Gen. xxx. 24.", "cite":"The Lord shall add to me another son." } ], "word":"add", "text":[ "To give by way of increased possession (to any one); to ", "bestow (on)." ].join(""), "sequence":"0", "score":0.0, "partOfSpeech":"verb-transitive", "attributionText": [ "from the GNU version of the Collaborative International ", "Dictionary of English"].join(""), "seqString":"1." }] } }, { "request": { "method": "GET", "url": "http://api.wordnik.com/v4/word.json/add/definitions", "params": { "limit":"1", "includeRelated":"true", "sourceDictionaries":"webster", "useCanonical":"true", "api_key":"a2a73e7b926c924fad7001ca3111acd55af2ffabf50eb4ae5" } }, "response": { "code": 200, "data": [{ "word":"add", "text":[ "To give by way of increased possession (to any one); to ", "bestow (on)." ].join("") }] } }, { "request": { "method": "GET", "url": "http://api.wordnik.com/v4/word.json/notaword/definitions", "params": { "limit":"1", "includeRelated":"true", "sourceDictionaries":"webster", "useCanonical":"true", "api_key":"a2a73e7b926c924fad7001ca3111acd55af2ffabf50eb4ae5" } }, "response": { "code": 200, "data": [] } }, { "request": { "method": "GET", "url": "http://api.wordnik.com/v4/word.json/longword/definitions", "params": { "limit":"1", "includeRelated":"true", "sourceDictionaries":"webster", "useCanonical":"true", "api_key":"a2a73e7b926c924fad7001ca3111acd55af2ffabf50eb4ae5" } }, "response": { "code": 200, "data": [{ "word":"longword", "text":[ "This is a definition that is very very very very very", " very very very very very very very very very very very", " very very very very very very very very very very very", " very very very very very very very very very very very", " very very very very very very very very very very long" ].join("") }] } }]; };<|fim▁end|>
"partOfSpeech":"noun", "attributionText": [ "from the GNU version of the Collaborative International ",
<|file_name|>parm.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Parameterized string expansion use std::char; use std::mem::replace; #[deriving(PartialEq)] enum States { Nothing, Percent, SetVar, GetVar, PushParam, CharConstant, CharClose, IntConstant(int), FormatPattern(Flags, FormatState), SeekIfElse(int), SeekIfElsePercent(int), SeekIfEnd(int), SeekIfEndPercent(int) } #[deriving(PartialEq)] enum FormatState { FormatStateFlags, FormatStateWidth, FormatStatePrecision } /// Types of parameters a capability can use #[allow(missing_doc)] #[deriving(Clone)] pub enum Param { String(String), Number(int) } /// Container for static and dynamic variable arrays pub struct Variables { /// Static variables A-Z sta: [Param, ..26], /// Dynamic variables a-z dyn: [Param, ..26] } impl Variables { /// Return a new zero-initialized Variables pub fn new() -> Variables { Variables { sta: [ Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), ], dyn: [ Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), ], } } } /** Expand a parameterized capability # Arguments * `cap` - string to expand * `params` - vector of params for %p1 etc * `vars` - Variables struct for %Pa etc To be compatible with ncurses, `vars` should be the same between calls to `expand` for multiple capabilities for the same terminal. 
*/ pub fn expand(cap: &[u8], params: &[Param], vars: &mut Variables) -> Result<Vec<u8> , String> { let mut state = Nothing; // expanded cap will only rarely be larger than the cap itself let mut output = Vec::with_capacity(cap.len()); let mut stack: Vec<Param> = Vec::new(); // Copy parameters into a local vector for mutability let mut mparams = [ Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), ]; for (dst, src) in mparams.mut_iter().zip(params.iter()) { *dst = (*src).clone(); } for &c in cap.iter() { let cur = c as char; let mut old_state = state; match state { Nothing => { if cur == '%' { state = Percent; } else { output.push(c); } }, Percent => { match cur { '%' => { output.push(c); state = Nothing }, 'c' => if stack.len() > 0 { match stack.pop().unwrap() { // if c is 0, use 0200 (128) for ncurses compatibility Number(c) => { output.push(if c == 0 { 128u8 } else { c as u8<|fim▁hole|> _ => return Err("a non-char was used with %c".to_string()) } } else { return Err("stack is empty".to_string()) }, 'p' => state = PushParam, 'P' => state = SetVar, 'g' => state = GetVar, '\'' => state = CharConstant, '{' => state = IntConstant(0), 'l' => if stack.len() > 0 { match stack.pop().unwrap() { String(s) => stack.push(Number(s.len() as int)), _ => return Err("a non-str was used with %l".to_string()) } } else { return Err("stack is empty".to_string()) }, '+' => if stack.len() > 1 { match (stack.pop().unwrap(), stack.pop().unwrap()) { (Number(y), Number(x)) => stack.push(Number(x + y)), _ => return Err("non-numbers on stack with +".to_string()) } } else { return Err("stack is empty".to_string()) }, '-' => if stack.len() > 1 { match (stack.pop().unwrap(), stack.pop().unwrap()) { (Number(y), Number(x)) => stack.push(Number(x - y)), _ => return Err("non-numbers on stack with -".to_string()) } } else { return Err("stack is empty".to_string()) }, '*' => if stack.len() > 1 { match (stack.pop().unwrap(), stack.pop().unwrap()) { (Number(y), Number(x)) => stack.push(Number(x * y)), _ => return Err("non-numbers on stack with *".to_string()) } } else { return Err("stack is empty".to_string()) }, '/' => if stack.len() > 1 { match (stack.pop().unwrap(), stack.pop().unwrap()) { (Number(y), Number(x)) => stack.push(Number(x / y)), _ => return Err("non-numbers on stack with /".to_string()) } } else { return Err("stack is empty".to_string()) }, 'm' => if stack.len() > 1 { match (stack.pop().unwrap(), stack.pop().unwrap()) { (Number(y), Number(x)) => stack.push(Number(x % y)), _ => return Err("non-numbers on stack with %".to_string()) } } else { return Err("stack is empty".to_string()) }, '&' => if stack.len() > 1 { match (stack.pop().unwrap(), stack.pop().unwrap()) { (Number(y), Number(x)) => stack.push(Number(x & y)), _ => return Err("non-numbers on stack with &".to_string()) } } else { return Err("stack is empty".to_string()) }, '|' => if stack.len() > 1 { match (stack.pop().unwrap(), stack.pop().unwrap()) { (Number(y), Number(x)) => stack.push(Number(x | y)), _ => return Err("non-numbers on stack with |".to_string()) } } else { return Err("stack is empty".to_string()) }, '^' => if stack.len() > 1 { match (stack.pop().unwrap(), stack.pop().unwrap()) { (Number(y), Number(x)) => stack.push(Number(x ^ y)), _ => return Err("non-numbers on stack with ^".to_string()) } } else { return Err("stack is empty".to_string()) }, '=' => if stack.len() > 1 { match (stack.pop().unwrap(), stack.pop().unwrap()) { (Number(y), Number(x)) => stack.push(Number(if x == y { 1 } else { 0 })), 
_ => return Err("non-numbers on stack with =".to_string()) } } else { return Err("stack is empty".to_string()) }, '>' => if stack.len() > 1 { match (stack.pop().unwrap(), stack.pop().unwrap()) { (Number(y), Number(x)) => stack.push(Number(if x > y { 1 } else { 0 })), _ => return Err("non-numbers on stack with >".to_string()) } } else { return Err("stack is empty".to_string()) }, '<' => if stack.len() > 1 { match (stack.pop().unwrap(), stack.pop().unwrap()) { (Number(y), Number(x)) => stack.push(Number(if x < y { 1 } else { 0 })), _ => return Err("non-numbers on stack with <".to_string()) } } else { return Err("stack is empty".to_string()) }, 'A' => if stack.len() > 1 { match (stack.pop().unwrap(), stack.pop().unwrap()) { (Number(0), Number(_)) => stack.push(Number(0)), (Number(_), Number(0)) => stack.push(Number(0)), (Number(_), Number(_)) => stack.push(Number(1)), _ => return Err("non-numbers on stack with logical and".to_string()) } } else { return Err("stack is empty".to_string()) }, 'O' => if stack.len() > 1 { match (stack.pop().unwrap(), stack.pop().unwrap()) { (Number(0), Number(0)) => stack.push(Number(0)), (Number(_), Number(_)) => stack.push(Number(1)), _ => return Err("non-numbers on stack with logical or".to_string()) } } else { return Err("stack is empty".to_string()) }, '!' => if stack.len() > 0 { match stack.pop().unwrap() { Number(0) => stack.push(Number(1)), Number(_) => stack.push(Number(0)), _ => return Err("non-number on stack with logical not".to_string()) } } else { return Err("stack is empty".to_string()) }, '~' => if stack.len() > 0 { match stack.pop().unwrap() { Number(x) => stack.push(Number(!x)), _ => return Err("non-number on stack with %~".to_string()) } } else { return Err("stack is empty".to_string()) }, 'i' => match (mparams[0].clone(), mparams[1].clone()) { (Number(x), Number(y)) => { mparams[0] = Number(x+1); mparams[1] = Number(y+1); }, (_, _) => return Err("first two params not numbers with %i".to_string()) }, // printf-style support for %doxXs 'd'|'o'|'x'|'X'|'s' => if stack.len() > 0 { let flags = Flags::new(); let res = format(stack.pop().unwrap(), FormatOp::from_char(cur), flags); if res.is_err() { return res } output.push_all(res.unwrap().as_slice()) } else { return Err("stack is empty".to_string()) }, ':'|'#'|' '|'.'|'0'..'9' => { let mut flags = Flags::new(); let mut fstate = FormatStateFlags; match cur { ':' => (), '#' => flags.alternate = true, ' ' => flags.space = true, '.' => fstate = FormatStatePrecision, '0'..'9' => { flags.width = cur as uint - '0' as uint; fstate = FormatStateWidth; } _ => unreachable!() } state = FormatPattern(flags, fstate); } // conditionals '?' 
=> (), 't' => if stack.len() > 0 { match stack.pop().unwrap() { Number(0) => state = SeekIfElse(0), Number(_) => (), _ => return Err("non-number on stack \ with conditional".to_string()) } } else { return Err("stack is empty".to_string()) }, 'e' => state = SeekIfEnd(0), ';' => (), _ => { return Err(format!("unrecognized format option {}", cur)) } } }, PushParam => { // params are 1-indexed stack.push(mparams[match char::to_digit(cur, 10) { Some(d) => d - 1, None => return Err("bad param number".to_string()) }].clone()); }, SetVar => { if cur >= 'A' && cur <= 'Z' { if stack.len() > 0 { let idx = (cur as u8) - ('A' as u8); vars.sta[idx as uint] = stack.pop().unwrap(); } else { return Err("stack is empty".to_string()) } } else if cur >= 'a' && cur <= 'z' { if stack.len() > 0 { let idx = (cur as u8) - ('a' as u8); vars.dyn[idx as uint] = stack.pop().unwrap(); } else { return Err("stack is empty".to_string()) } } else { return Err("bad variable name in %P".to_string()); } }, GetVar => { if cur >= 'A' && cur <= 'Z' { let idx = (cur as u8) - ('A' as u8); stack.push(vars.sta[idx as uint].clone()); } else if cur >= 'a' && cur <= 'z' { let idx = (cur as u8) - ('a' as u8); stack.push(vars.dyn[idx as uint].clone()); } else { return Err("bad variable name in %g".to_string()); } }, CharConstant => { stack.push(Number(c as int)); state = CharClose; }, CharClose => { if cur != '\'' { return Err("malformed character constant".to_string()); } }, IntConstant(i) => { match cur { '}' => { stack.push(Number(i)); state = Nothing; } '0'..'9' => { state = IntConstant(i*10 + (cur as int - '0' as int)); old_state = Nothing; } _ => return Err("bad int constant".to_string()) } } FormatPattern(ref mut flags, ref mut fstate) => { old_state = Nothing; match (*fstate, cur) { (_,'d')|(_,'o')|(_,'x')|(_,'X')|(_,'s') => if stack.len() > 0 { let res = format(stack.pop().unwrap(), FormatOp::from_char(cur), *flags); if res.is_err() { return res } output.push_all(res.unwrap().as_slice()); // will cause state to go to Nothing old_state = FormatPattern(*flags, *fstate); } else { return Err("stack is empty".to_string()) }, (FormatStateFlags,'#') => { flags.alternate = true; } (FormatStateFlags,'-') => { flags.left = true; } (FormatStateFlags,'+') => { flags.sign = true; } (FormatStateFlags,' ') => { flags.space = true; } (FormatStateFlags,'0'..'9') => { flags.width = cur as uint - '0' as uint; *fstate = FormatStateWidth; } (FormatStateFlags,'.') => { *fstate = FormatStatePrecision; } (FormatStateWidth,'0'..'9') => { let old = flags.width; flags.width = flags.width * 10 + (cur as uint - '0' as uint); if flags.width < old { return Err("format width overflow".to_string()) } } (FormatStateWidth,'.') => { *fstate = FormatStatePrecision; } (FormatStatePrecision,'0'..'9') => { let old = flags.precision; flags.precision = flags.precision * 10 + (cur as uint - '0' as uint); if flags.precision < old { return Err("format precision overflow".to_string()) } } _ => return Err("invalid format specifier".to_string()) } } SeekIfElse(level) => { if cur == '%' { state = SeekIfElsePercent(level); } old_state = Nothing; } SeekIfElsePercent(level) => { if cur == ';' { if level == 0 { state = Nothing; } else { state = SeekIfElse(level-1); } } else if cur == 'e' && level == 0 { state = Nothing; } else if cur == '?' 
{ state = SeekIfElse(level+1); } else { state = SeekIfElse(level); } } SeekIfEnd(level) => { if cur == '%' { state = SeekIfEndPercent(level); } old_state = Nothing; } SeekIfEndPercent(level) => { if cur == ';' { if level == 0 { state = Nothing; } else { state = SeekIfEnd(level-1); } } else if cur == '?' { state = SeekIfEnd(level+1); } else { state = SeekIfEnd(level); } } } if state == old_state { state = Nothing; } } Ok(output) } #[deriving(PartialEq)] struct Flags { width: uint, precision: uint, alternate: bool, left: bool, sign: bool, space: bool } impl Flags { fn new() -> Flags { Flags{ width: 0, precision: 0, alternate: false, left: false, sign: false, space: false } } } enum FormatOp { FormatDigit, FormatOctal, FormatHex, FormatHEX, FormatString } impl FormatOp { fn from_char(c: char) -> FormatOp { match c { 'd' => FormatDigit, 'o' => FormatOctal, 'x' => FormatHex, 'X' => FormatHEX, 's' => FormatString, _ => fail!("bad FormatOp char") } } fn to_char(self) -> char { match self { FormatDigit => 'd', FormatOctal => 'o', FormatHex => 'x', FormatHEX => 'X', FormatString => 's' } } } fn format(val: Param, op: FormatOp, flags: Flags) -> Result<Vec<u8> ,String> { let mut s = match val { Number(d) => { let s = match (op, flags.sign) { (FormatDigit, true) => format!("{:+d}", d).into_bytes(), (FormatDigit, false) => format!("{:d}", d).into_bytes(), (FormatOctal, _) => format!("{:o}", d).into_bytes(), (FormatHex, _) => format!("{:x}", d).into_bytes(), (FormatHEX, _) => format!("{:X}", d).into_bytes(), (FormatString, _) => { return Err("non-number on stack with %s".to_string()) } }; let mut s: Vec<u8> = s.move_iter().collect(); if flags.precision > s.len() { let mut s_ = Vec::with_capacity(flags.precision); let n = flags.precision - s.len(); s_.grow(n, &('0' as u8)); s_.push_all_move(s); s = s_; } assert!(!s.is_empty(), "string conversion produced empty result"); match op { FormatDigit => { if flags.space && !(s[0] == '-' as u8 || s[0] == '+' as u8) { s.insert(0, ' ' as u8); } } FormatOctal => { if flags.alternate && s[0] != '0' as u8 { s.insert(0, '0' as u8); } } FormatHex => { if flags.alternate { let s_ = replace(&mut s, vec!('0' as u8, 'x' as u8)); s.push_all_move(s_); } } FormatHEX => { s = s.as_slice() .to_ascii() .to_upper() .into_bytes() .move_iter() .collect(); if flags.alternate { let s_ = replace(&mut s, vec!('0' as u8, 'X' as u8)); s.push_all_move(s_); } } FormatString => unreachable!() } s } String(s) => { match op { FormatString => { let mut s = Vec::from_slice(s.as_bytes()); if flags.precision > 0 && flags.precision < s.len() { s.truncate(flags.precision); } s } _ => { return Err(format!("non-string on stack with %{}", op.to_char())) } } } }; if flags.width > s.len() { let n = flags.width - s.len(); if flags.left { s.grow(n, &(' ' as u8)); } else { let mut s_ = Vec::with_capacity(flags.width); s_.grow(n, &(' ' as u8)); s_.push_all_move(s); s = s_; } } Ok(s) } #[cfg(test)] mod test { use super::{expand,String,Variables,Number}; use std::result::Ok; #[test] fn test_basic_setabf() { let s = b"\\E[48;5;%p1%dm"; assert_eq!(expand(s, [Number(1)], &mut Variables::new()).unwrap(), "\\E[48;5;1m".bytes().collect()); } #[test] fn test_multiple_int_constants() { assert_eq!(expand(b"%{1}%{2}%d%d", [], &mut Variables::new()).unwrap(), "21".bytes().collect()); } #[test] fn test_op_i() { let mut vars = Variables::new(); assert_eq!(expand(b"%p1%d%p2%d%p3%d%i%p1%d%p2%d%p3%d", [Number(1),Number(2),Number(3)], &mut vars), Ok("123233".bytes().collect())); assert_eq!(expand(b"%p1%d%p2%d%i%p1%d%p2%d", [], 
&mut vars), Ok("0011".bytes().collect())); } #[test] fn test_param_stack_failure_conditions() { let mut varstruct = Variables::new(); let vars = &mut varstruct; let caps = ["%d", "%c", "%s", "%Pa", "%l", "%!", "%~"]; for cap in caps.iter() { let res = expand(cap.as_bytes(), [], vars); assert!(res.is_err(), "Op {} succeeded incorrectly with 0 stack entries", *cap); let p = if *cap == "%s" || *cap == "%l" { String("foo".to_string()) } else { Number(97) }; let res = expand("%p1".bytes().collect::<Vec<_>>() .append(cap.as_bytes()).as_slice(), [p], vars); assert!(res.is_ok(), "Op {} failed with 1 stack entry: {}", *cap, res.unwrap_err()); } let caps = ["%+", "%-", "%*", "%/", "%m", "%&", "%|", "%A", "%O"]; for cap in caps.iter() { let res = expand(cap.as_bytes(), [], vars); assert!(res.is_err(), "Binop {} succeeded incorrectly with 0 stack entries", *cap); let res = expand("%{1}".bytes().collect::<Vec<_>>() .append(cap.as_bytes()).as_slice(), [], vars); assert!(res.is_err(), "Binop {} succeeded incorrectly with 1 stack entry", *cap); let res = expand("%{1}%{2}".bytes().collect::<Vec<_>>() .append(cap.as_bytes()).as_slice(), [], vars); assert!(res.is_ok(), "Binop {} failed with 2 stack entries: {}", *cap, res.unwrap_err()); } } #[test] fn test_push_bad_param() { assert!(expand(b"%pa", [], &mut Variables::new()).is_err()); } #[test] fn test_comparison_ops() { let v = [('<', [1u8, 0u8, 0u8]), ('=', [0u8, 1u8, 0u8]), ('>', [0u8, 0u8, 1u8])]; for &(op, bs) in v.iter() { let s = format!("%{{1}}%{{2}}%{}%d", op); let res = expand(s.as_bytes(), [], &mut Variables::new()); assert!(res.is_ok(), res.unwrap_err()); assert_eq!(res.unwrap(), vec!('0' as u8 + bs[0])); let s = format!("%{{1}}%{{1}}%{}%d", op); let res = expand(s.as_bytes(), [], &mut Variables::new()); assert!(res.is_ok(), res.unwrap_err()); assert_eq!(res.unwrap(), vec!('0' as u8 + bs[1])); let s = format!("%{{2}}%{{1}}%{}%d", op); let res = expand(s.as_bytes(), [], &mut Variables::new()); assert!(res.is_ok(), res.unwrap_err()); assert_eq!(res.unwrap(), vec!('0' as u8 + bs[2])); } } #[test] fn test_conditionals() { let mut vars = Variables::new(); let s = b"\\E[%?%p1%{8}%<%t3%p1%d%e%p1%{16}%<%t9%p1%{8}%-%d%e38;5;%p1%d%;m"; let res = expand(s, [Number(1)], &mut vars); assert!(res.is_ok(), res.unwrap_err()); assert_eq!(res.unwrap(), "\\E[31m".bytes().collect()); let res = expand(s, [Number(8)], &mut vars); assert!(res.is_ok(), res.unwrap_err()); assert_eq!(res.unwrap(), "\\E[90m".bytes().collect()); let res = expand(s, [Number(42)], &mut vars); assert!(res.is_ok(), res.unwrap_err()); assert_eq!(res.unwrap(), "\\E[38;5;42m".bytes().collect()); } #[test] fn test_format() { let mut varstruct = Variables::new(); let vars = &mut varstruct; assert_eq!(expand(b"%p1%s%p2%2s%p3%2s%p4%.2s", [String("foo".to_string()), String("foo".to_string()), String("f".to_string()), String("foo".to_string())], vars), Ok("foofoo ffo".bytes().collect())); assert_eq!(expand(b"%p1%:-4.2s", [String("foo".to_string())], vars), Ok("fo ".bytes().collect())); assert_eq!(expand(b"%p1%d%p1%.3d%p1%5d%p1%:+d", [Number(1)], vars), Ok("1001 1+1".bytes().collect())); assert_eq!(expand(b"%p1%o%p1%#o%p2%6.4x%p2%#6.4X", [Number(15), Number(27)], vars), Ok("17017 001b0X001B".bytes().collect())); } }<|fim▁end|>
}) }
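The `expand` function above implements the terminfo parameterized-string stack machine: `%pN` pushes parameter N, `%{N}` pushes an integer constant, and `%d` pops and prints decimal. The Python sketch below is illustrative only; it covers just those three ops plus `%%`, not the full operator set or the flag/width handling of the Rust code, and it mirrors the behaviour checked by `test_basic_setabf` and `test_multiple_int_constants`:

def expand_simple(cap: str, params):
    out, stack, i = [], [], 0
    while i < len(cap):
        ch = cap[i]
        if ch != '%':
            out.append(ch); i += 1; continue
        i += 1
        op = cap[i]
        if op == 'p':                      # %pN: push param N (1-indexed)
            i += 1
            stack.append(params[int(cap[i]) - 1])
        elif op == '{':                    # %{N}: push an int constant
            j = cap.index('}', i)
            stack.append(int(cap[i + 1:j])); i = j
        elif op == 'd':                    # %d: pop and format as decimal
            out.append(str(stack.pop()))
        elif op == '%':                    # %%: literal percent
            out.append('%')
        i += 1
    return ''.join(out)

# Mirrors test_basic_setabf above: expands to "\E[48;5;1m"
print(expand_simple(r"\E[48;5;%p1%dm", [1]))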
<|file_name|>app.py<|end_file_name|><|fim▁begin|># Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: # Maintained By: from . import settings # Initialize Flask app from flask import Flask app = Flask('ggrc', instance_relative_config=True) app.config.from_object(settings)<|fim▁hole|>db.init_app(app) # Configure Flask-Login import ggrc.login ggrc.login.init_app(app) # Configure webassets for app from . import assets app.jinja_env.add_extension('webassets.ext.jinja2.assets') app.jinja_env.assets_environment = assets.environment # Configure Jinja2 extensions for app app.jinja_env.add_extension('jinja2.ext.autoescape') app.jinja_env.add_extension('jinja2.ext.with_') app.jinja_env.add_extension('hamlpy.ext.HamlPyExtension') # Initialize services import ggrc.services ggrc.services.init_all_services(app) # Initialize views import ggrc.views ggrc.views.init_all_object_views(app) # Initialize configured and default extensions from ggrc.fulltext import get_indexer ggrc.indexer = get_indexer() if settings.ENABLE_JASMINE: # Configure Flask-Jasmine, for dev mode unit testing from flask.ext.jasmine import Jasmine, Asset jasmine = Jasmine(app) jasmine.sources( Asset("dashboard-js"), Asset("dashboard-js-spec-helpers")) jasmine.specs( Asset("dashboard-js-specs"))<|fim▁end|>
# Configure Flask-SQLAlchemy for app from . import db db.app = app
<|file_name|>hotel_date_selection_type.pb.go<|end_file_name|><|fim▁begin|>// Code generated by protoc-gen-go. DO NOT EDIT. // source: google/ads/googleads/v1/enums/hotel_date_selection_type.proto package enums import ( fmt "fmt" math "math" proto "github.com/golang/protobuf/proto" _ "google.golang.org/genproto/googleapis/api/annotations" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package // Enum describing possible hotel date selection types. type HotelDateSelectionTypeEnum_HotelDateSelectionType int32 const ( // Not specified. HotelDateSelectionTypeEnum_UNSPECIFIED HotelDateSelectionTypeEnum_HotelDateSelectionType = 0 // Used for return value only. Represents value unknown in this version. HotelDateSelectionTypeEnum_UNKNOWN HotelDateSelectionTypeEnum_HotelDateSelectionType = 1 // Dates selected by default.<|fim▁hole|> HotelDateSelectionTypeEnum_DEFAULT_SELECTION HotelDateSelectionTypeEnum_HotelDateSelectionType = 50 // Dates selected by the user. HotelDateSelectionTypeEnum_USER_SELECTED HotelDateSelectionTypeEnum_HotelDateSelectionType = 51 ) var HotelDateSelectionTypeEnum_HotelDateSelectionType_name = map[int32]string{ 0: "UNSPECIFIED", 1: "UNKNOWN", 50: "DEFAULT_SELECTION", 51: "USER_SELECTED", } var HotelDateSelectionTypeEnum_HotelDateSelectionType_value = map[string]int32{ "UNSPECIFIED": 0, "UNKNOWN": 1, "DEFAULT_SELECTION": 50, "USER_SELECTED": 51, } func (x HotelDateSelectionTypeEnum_HotelDateSelectionType) String() string { return proto.EnumName(HotelDateSelectionTypeEnum_HotelDateSelectionType_name, int32(x)) } func (HotelDateSelectionTypeEnum_HotelDateSelectionType) EnumDescriptor() ([]byte, []int) { return fileDescriptor_df390d4eb95bf882, []int{0, 0} } // Container for enum describing possible hotel date selection types type HotelDateSelectionTypeEnum struct { XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *HotelDateSelectionTypeEnum) Reset() { *m = HotelDateSelectionTypeEnum{} } func (m *HotelDateSelectionTypeEnum) String() string { return proto.CompactTextString(m) } func (*HotelDateSelectionTypeEnum) ProtoMessage() {} func (*HotelDateSelectionTypeEnum) Descriptor() ([]byte, []int) { return fileDescriptor_df390d4eb95bf882, []int{0} } func (m *HotelDateSelectionTypeEnum) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_HotelDateSelectionTypeEnum.Unmarshal(m, b) } func (m *HotelDateSelectionTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_HotelDateSelectionTypeEnum.Marshal(b, m, deterministic) } func (m *HotelDateSelectionTypeEnum) XXX_Merge(src proto.Message) { xxx_messageInfo_HotelDateSelectionTypeEnum.Merge(m, src) } func (m *HotelDateSelectionTypeEnum) XXX_Size() int { return xxx_messageInfo_HotelDateSelectionTypeEnum.Size(m) } func (m *HotelDateSelectionTypeEnum) XXX_DiscardUnknown() { xxx_messageInfo_HotelDateSelectionTypeEnum.DiscardUnknown(m) } var xxx_messageInfo_HotelDateSelectionTypeEnum proto.InternalMessageInfo func init() { proto.RegisterEnum("google.ads.googleads.v1.enums.HotelDateSelectionTypeEnum_HotelDateSelectionType", 
HotelDateSelectionTypeEnum_HotelDateSelectionType_name, HotelDateSelectionTypeEnum_HotelDateSelectionType_value) proto.RegisterType((*HotelDateSelectionTypeEnum)(nil), "google.ads.googleads.v1.enums.HotelDateSelectionTypeEnum") } func init() { proto.RegisterFile("google/ads/googleads/v1/enums/hotel_date_selection_type.proto", fileDescriptor_df390d4eb95bf882) } var fileDescriptor_df390d4eb95bf882 = []byte{ // 329 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xc1, 0x4e, 0xc2, 0x40, 0x14, 0x14, 0x4c, 0x34, 0x59, 0x62, 0x2c, 0x4d, 0xf4, 0x80, 0x72, 0x80, 0x0f, 0xd8, 0xa6, 0x72, 0x5b, 0xe3, 0xa1, 0xd0, 0x05, 0x89, 0xa4, 0x10, 0x4b, 0x31, 0x31, 0x4d, 0xea, 0x4a, 0x37, 0x95, 0xa4, 0xec, 0x36, 0xec, 0x42, 0xc2, 0xc5, 0x8f, 0xf1, 0xe8, 0xa7, 0xf8, 0x29, 0x9e, 0xfc, 0x04, 0xb3, 0xbb, 0x94, 0x13, 0x7a, 0x79, 0x99, 0xbc, 0x79, 0x33, 0x79, 0x33, 0xe0, 0x2e, 0xe3, 0x3c, 0xcb, 0xa9, 0x43, 0x52, 0xe1, 0x18, 0xa8, 0xd0, 0xc6, 0x75, 0x28, 0x5b, 0x2f, 0x85, 0xf3, 0xc6, 0x25, 0xcd, 0x93, 0x94, 0x48, 0x9a, 0x08, 0x9a, 0xd3, 0xb9, 0x5c, 0x70, 0x96, 0xc8, 0x6d, 0x41, 0x61, 0xb1, 0xe2, 0x92, 0xdb, 0x4d, 0xa3, 0x81, 0x24, 0x15, 0x70, 0x2f, 0x87, 0x1b, 0x17, 0x6a, 0x79, 0xe3, 0xba, 0x74, 0x2f, 0x16, 0x0e, 0x61, 0x8c, 0x4b, 0xa2, 0x0c, 0x84, 0x11, 0xb7, 0xdf, 0x41, 0xe3, 0x5e, 0xf9, 0xfb, 0x44, 0xd2, 0xb0, 0x74, 0x9f, 0x6e, 0x0b, 0x8a, 0xd9, 0x7a, 0xd9, 0x7e, 0x01, 0x97, 0x87, 0x59, 0xfb, 0x1c, 0xd4, 0xa2, 0x20, 0x9c, 0xe0, 0xde, 0xb0, 0x3f, 0xc4, 0xbe, 0x75, 0x64, 0xd7, 0xc0, 0x69, 0x14, 0x3c, 0x04, 0xe3, 0xa7, 0xc0, 0xaa, 0xd8, 0x17, 0xa0, 0xee, 0xe3, 0xbe, 0x17, 0x8d, 0xa6, 0x49, 0x88, 0x47, 0xb8, 0x37, 0x1d, 0x8e, 0x03, 0xeb, 0xc6, 0xae, 0x83, 0xb3, 0x28, 0xc4, 0x8f, 0xbb, 0x1d, 0xf6, 0xad, 0x4e, 0xf7, 0xa7, 0x02, 0x5a, 0x73, 0xbe, 0x84, 0xff, 0x66, 0xe8, 0x5e, 0x1d, 0xfe, 0x62, 0xa2, 0x22, 0x4c, 0x2a, 0xcf, 0xdd, 0x9d, 0x3a, 0xe3, 0x39, 0x61, 0x19, 0xe4, 0xab, 0xcc, 0xc9, 0x28, 0xd3, 0x01, 0xcb, 0x42, 0x8b, 0x85, 0xf8, 0xa3, 0xdf, 0x5b, 0x3d, 0x3f, 0xaa, 0xc7, 0x03, 0xcf, 0xfb, 0xac, 0x36, 0x07, 0xc6, 0xca, 0x4b, 0x05, 0x34, 0x50, 0xa1, 0x99, 0x0b, 0x55, 0x1f, 0xe2, 0xab, 0xe4, 0x63, 0x2f, 0x15, 0xf1, 0x9e, 0x8f, 0x67, 0x6e, 0xac, 0xf9, 0xef, 0x6a, 0xcb, 0x2c, 0x11, 0xf2, 0x52, 0x81, 0xd0, 0xfe, 0x02, 0xa1, 0x99, 0x8b, 0x90, 0xbe, 0x79, 0x3d, 0xd1, 0x8f, 0x75, 0x7e, 0x03, 0x00, 0x00, 0xff, 0xff, 0x46, 0xaf, 0xfd, 0xab, 0xf7, 0x01, 0x00, 0x00, }<|fim▁end|>
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from django import forms from Crate.models import Discussion class ReportForm(forms.Form): report = forms.CharField(label="Enter your report", max_length=500, widget=forms.Textarea) class DiscussionForm(forms.ModelForm): class Meta:<|fim▁hole|><|fim▁end|>
model = Discussion fields = ['comment']
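With the completion restored into the hole, the `ModelForm` reads as below. This is a reconstruction rather than a standalone script, since it needs Django and the `Crate` app's `Discussion` model from the imports in the prompt:

class DiscussionForm(forms.ModelForm):
    class Meta:
        model = Discussion
        fields = ['comment']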
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>pub fn to_bytes(values: &[u32]) -> Vec<u8> { let mut idx = 0; let mut rb: u32; let mut per_stream_vec = Vec::new(); let mut per_byte_vec: Vec<u8> = Vec::new(); for value in values { rb = *value; loop { let mut b = rb & 0x7F; if idx == 0 { b &= 0x7F; } else { b |= 0x80; } per_byte_vec.push(b as u8); rb >>= 7; idx += 1; if rb == 0 { idx = 0; break; } } per_stream_vec.push(per_byte_vec); per_byte_vec = Vec::new(); } per_stream_vec.into_iter().flat_map(|val| val.into_iter().rev()).collect()<|fim▁hole|>pub fn from_bytes(bytes: &[u8]) -> Result<Vec<u32>, &'static str> { let mut v: Vec<u32> = Vec::new(); let mut no_bytes = 0; let mut u32_val = 0u32; let mut incomplete_byte_seq = false; for byte in bytes { if byte & 0x80 != 0 { incomplete_byte_seq = true; no_bytes += 1; u32_val |= (byte & 0x7F) as u32; u32_val <<= 7; } else { incomplete_byte_seq = false; no_bytes += 1; // check for overflow if no_bytes > 4 { return Err("value overflowed!"); } u32_val |= (byte & 0x7F) as u32; v.push(u32_val); u32_val = 0; no_bytes = 0; } } if incomplete_byte_seq { return Err("incomplete sequence!"); } Ok(v) }<|fim▁end|>
} /// Given a stream of bytes, extract all numbers which are encoded in there.
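The `to_bytes`/`from_bytes` pair above implements a most-significant-group-first base-128 encoding: each u32 is cut into 7-bit groups, the per-value byte list is reversed, and the high bit marks every byte except a value's last. The Python sketch below mirrors that wire format for a round-trip check. Note that the Rust `from_bytes` additionally rejects sequences longer than four bytes as overflow, so values of 2^28 and above do not round-trip through it even though `to_bytes` will emit them:

def to_bytes(values):
    out = bytearray()
    for v in values:
        groups = []
        while True:              # least-significant 7-bit group first...
            groups.append(v & 0x7F)
            v >>= 7
            if v == 0:
                break
        groups.reverse()         # ...then reversed, as in the Rust version
        for g in groups[:-1]:
            out.append(g | 0x80) # continuation bit on all but the last byte
        out.append(groups[-1])   # final byte of a value: high bit clear
    return bytes(out)

def from_bytes(data):
    values, acc = [], 0
    for b in data:
        acc = (acc << 7) | (b & 0x7F)
        if not (b & 0x80):       # high bit clear: last byte of this value
            values.append(acc)
            acc = 0
    return values

nums = [0, 1, 127, 128, 300, 2**21]   # all encode in <= 4 bytes
assert from_bytes(to_bytes(nums)) == nums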
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>pub mod text; pub mod physical_body;<|fim▁end|>
pub mod actor; pub mod movement; pub mod logic_handler;
<|file_name|>comment.js<|end_file_name|><|fim▁begin|>define(['App', 'underscore', 'backbone', 'jquery'], function(App, _, Backbone, $) {<|fim▁hole|> data.childrenCount = data.children.length if (data.childrenCount == 1) { data.replyVerb = 'reply' } else { data.replyVerb = 'replies' } return data }, parse: function(data) { if (!data) { return } if (typeof data.data !== 'undefined') { //the data from getmorechildren comes in this format data.data.kind = data.kind data = data.data } if (data.kind === 'more') { return this.parseAsCommentMoreLink(data) } var timeAgo = moment.unix(data.created_utc).fromNow(true) //"true" removes the "ago" timeAgo = timeAgo.replace("in ", ''); //why would it add the word "in" data.timeAgo = timeAgo data.timeUgly = moment.unix(data.created_utc).format() data.timePretty = moment.unix(data.created_utc).format("ddd MMM DD HH:mm:ss YYYY") + " UTC" //format Sun Aug 18 12:51:06 2013 UTC //if the comment is edited format its last edited time if (typeof data.edited !== 'undefined' && data.edited !== false) { timeAgo = moment.unix(data.edited).fromNow(true) //"true" removes the "ago" timeAgo = timeAgo.replace("in ", ''); //why would it add the word "in" data.editedTimeAgo = timeAgo } data.score = +data.ups - parseInt(data.downs, 10) data.scoreUp = +data.score + 1 data.scoreDown = +data.score - 1 if (data.likes === null) { data.voted = 'unvoted' data.downmod = 'down' data.upmod = 'up' } else if (data.likes === true) { data.voted = "likes" data.downmod = 'down' data.upmod = 'upmod' } else { data.voted = "dislikes" data.downmod = 'downmod' data.upmod = 'up' } //for the user view we can have comments if (typeof data.thumbnail !== 'undefined' && data.thumbnail == 'self') { data.thumbnail = 'img/self.png' } else if (data.thumbnail == 'nsfw') { data.thumbnail = 'img/nsfw.png' } else if (data.thumbnail === '' || data.thumbnail == 'default') { data.thumbnail = 'img/notsure.png' } data.body_html = (typeof data.body_html === 'undefined') ? '' : $('<div/>').html(data.body_html).text(); var linkName = data.link_id.replace('t3_', '') data.permalink = '/r/' + data.subreddit + '/comments/' + linkName + "/L/" + data.id if (typeof data.replies !== "undefined" && data.replies !== null && typeof data.replies.data !== "undefined") { // data.replies = parseComments(data.replies.data, data.link_id) data.childrenCount = data.replies.data.length if (data.replies.length == 1) { data.childOrChildren = 'child' } else { data.childOrChildren = 'children' } } else { data.childOrChildren = 'children' data.childrenCount = 0 } return data } }); });<|fim▁end|>
return Backbone.Model.extend({ parseAsCommentMoreLink: function(data) { if (data.children.length === 0) return null
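Editor's note on the row above: parse() mostly derives display fields from a reddit-style payload; the least obvious part is the three-way vote state, where likes arrives as true, false, or null and drives three class names at once. A compact sketch of just that mapping, assuming nothing beyond what the row shows (the table name VOTE_CLASSES is my own):

# likes comes back from the API as True, False, or None
VOTE_CLASSES = {
    None:  ("unvoted",  "up",    "down"),     # no vote cast
    True:  ("likes",    "upmod", "down"),     # upvoted: highlight the up arrow
    False: ("dislikes", "up",    "downmod"),  # downvoted: highlight the down arrow
}

def vote_state(likes):
    voted, upmod, downmod = VOTE_CLASSES[likes]
    return {"voted": voted, "upmod": upmod, "downmod": downmod}

assert vote_state(True)["upmod"] == "upmod"
assert vote_state(None)["voted"] == "unvoted"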
<|file_name|>RoundRobinMerge.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
from MergeIndependent import *
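Editor's note on the row above: the completion only re-exports MergeIndependent, so the project's actual merge policy is not visible here. Purely as an illustration of the round-robin idea the filename points at (and not that project's implementation), the standard itertools recipe interleaves sources one item at a time, dropping each source as it runs dry:

from itertools import cycle, islice

def roundrobin(*iterables):
    """roundrobin('AB', 'C', 'DE') yields A, C, D, B, E."""
    pending = len(iterables)
    nexts = cycle(iter(it).__next__ for it in iterables)
    while pending:
        try:
            for nxt in nexts:
                yield nxt()
        except StopIteration:
            pending -= 1
            nexts = cycle(islice(nexts, pending))

assert "".join(roundrobin("AB", "C", "DE")) == "ACDBE"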
<|file_name|>observe.js<|end_file_name|><|fim▁begin|>/** * JavaScript libraries * * @package Roraima * @author $Author$ <[email protected]> * @version SVN: $Id$ * * @copyright Copyright 2007, Cide S.A. * @license http://opensource.org/licenses/gpl-2.0.php GPLv2 */ // JAVASCRIPT FUNCTIONS var form = null; var arreglo = null; var inicio = true; var noActualizarInputs = false; function ActualizarInputs() { if(!noActualizarInputs){ form = $('sf_admin_edit_form'); arreglo = Array(); if(form) arreglo = $$('input[type=text]', 'select','textarea'); // -> only text inputs var i = 0; arreglo.each(function(e,index){ if(!e.disabled && !e.readOnly) { e.tabindex = index; i++; } }); if(arreglo && inicio) { try{arreglo.first().focus();}catch(e){} inicio=false; } } } /////////////////////////////////////////////////// // Watch for the page to finish loading fully // /////////////////////////////////////////////////// Event.observe(window, 'load', function() { ActualizarInputs(); } ); /////////////////////////////////////////////////// // Watching for Enter presses to change<|fim▁hole|> if(!noActualizarInputs){ var obj = Event.element(event); var indice = parseInt(obj.tabindex); /*arreglo.each(function(e,index){ if(e.name == obj.name) indice = index; }); */ var salir=false; var i=1; while(!salir) { try{ if(!arreglo[indice+i].disabled && !arreglo[indice+i].readOnly) { arreglo[indice+i].focus(); try{arreglo[indice+i].select();}catch(e){} salir=true; }else { i++; } }catch(e){ if(arreglo[indice]) if(!arreglo[indice].disabled && !arreglo[indice].readOnly) { arreglo[indice].blur(); arreglo[indice].focus(); //try{arreglo[indice].select();}catch(e){} } salir=true; } } obj.returnEvent = false; return false; /* var indexSig = parseInt(obj.tabindex); indexSig++; objSig = $$('input[tabindex=' + indexSig + ']'); if(objSig) objSig.focus(); */ } } return true })<|fim▁end|>
Event.observe(document, 'keypress', function(event) { if(event.keyCode == Event.KEY_RETURN && form) {
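Editor's note on the row above: the completed handler walks tabindex order to focus the next usable field when Enter is pressed. Its core is a forward scan that skips disabled and read-only entries and stays put as a fallback; a language-neutral sketch in Python, assuming only widget objects with disabled/readonly attributes:

from collections import namedtuple

Widget = namedtuple("Widget", "disabled readonly")

def next_focus_index(widgets, current):
    """Scan forward from `current` for an enabled, writable widget;
    stay put when the scan runs off the end (mirrors the JS fallback)."""
    i = current + 1
    while i < len(widgets):
        if not widgets[i].disabled and not widgets[i].readonly:
            return i
        i += 1
    return current

fields = [Widget(False, False), Widget(True, False), Widget(False, False)]
assert next_focus_index(fields, 0) == 2   # the disabled field is skipped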
<|file_name|>archive_tarcopyoptions_unix.go<|end_file_name|><|fim▁begin|>// +build !windows package daemon import ( "github.com/docker/docker/container" "github.com/docker/docker/pkg/archive" "github.com/docker/docker/pkg/idtools" ) func (daemon *Daemon) tarCopyOptions(container *container.Container, noOverwriteDirNonDir bool) (*archive.TarOptions, error) { if container.Config.User == "" { return daemon.defaultTarCopyOptions(noOverwriteDirNonDir), nil<|fim▁hole|> } user, err := idtools.LookupUser(container.Config.User) if err != nil { return nil, err } return &archive.TarOptions{ NoOverwriteDirNonDir: noOverwriteDirNonDir, ChownOpts: &idtools.IDPair{UID: user.Uid, GID: user.Gid}, }, nil }<|fim▁end|>
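Editor's note on the row above: the Go snippet picks tar extraction options for copying files into a container. With no USER configured, the daemon-wide defaults apply unchanged; otherwise the user is resolved once and every extracted file is chowned to that uid/gid. A rough Python paraphrase of that decision, not Docker's actual API (lookup_user stands in for the passwd lookup):

def tar_copy_options(config_user, defaults, lookup_user):
    # No USER in the container config: keep the daemon-wide defaults.
    if not config_user:
        return defaults
    user = lookup_user(config_user)  # may raise if the user is unknown
    return {**defaults, "chown": (user["uid"], user["gid"])}

defaults = {"no_overwrite_dir_non_dir": True}
opts = tar_copy_options("app", defaults, lambda name: {"uid": 1000, "gid": 1000})
assert opts["chown"] == (1000, 1000)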
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals<|fim▁hole|> import os.path from setuptools import setup, find_packages def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup(name='django-aloha-edit', version='0.4.0', description='Django Aloha Edit', author='Nathaniel Tucker', author_email='[email protected]', url='https://github.com/ntucker/django-aloha-edit', packages=find_packages(), include_package_data=True, install_requires=['django>=1.6', 'bleach>=1.4', 'lxml>=2.3', 'Pillow>=2.9.0', 'tinycss>=0.3'], long_description=read('README.rst'), license="BSD", classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', ], )<|fim▁end|>
<|file_name|>download-test-data.js<|end_file_name|><|fim▁begin|>const fs = require(`fs`) const fetchData = require(`../fetch`) // Fetch data from our sample site and save it to disk. <|fim▁hole|>const typePrefix = `wordpress__` const refactoredEntityTypes = { post: `${typePrefix}POST`, page: `${typePrefix}PAGE`, tag: `${typePrefix}TAG`, category: `${typePrefix}CATEGORY`, } fetchData({ _verbose: false, _siteURL: `http://dev-gatbsyjswp.pantheonsite.io`, baseUrl: `dev-gatbsyjswp.pantheonsite.io`, _useACF: true, _hostingWPCOM: false, _perPage: 100, typePrefix, refactoredEntityTypes, }).then(data => { fs.writeFileSync( `${__dirname}/../__tests__/data.json`, JSON.stringify(data, null, 4) ) })<|fim▁end|>
mountpoint = tempfile.mkdtemp("test.selinux") an_fs.mount(mountpoint=mountpoint)
<|file_name|>tiled_layer.cc<|end_file_name|><|fim▁begin|>// Copyright 2011 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "cc/layers/tiled_layer.h" #include <algorithm> #include <vector> #include "base/auto_reset.h" #include "base/basictypes.h" #include "build/build_config.h" #include "cc/layers/layer_impl.h" #include "cc/layers/tiled_layer_impl.h" #include "cc/resources/layer_updater.h" #include "cc/resources/prioritized_resource.h" #include "cc/resources/priority_calculator.h" #include "cc/trees/layer_tree_host.h" #include "cc/trees/occlusion_tracker.h" #include "third_party/khronos/GLES2/gl2.h" #include "ui/gfx/rect_conversions.h" namespace cc { // Maximum predictive expansion of the visible area. static const int kMaxPredictiveTilesCount = 2; // Number of rows/columns of tiles to pre-paint. // We should increase these further as all textures are // prioritized and we insure performance doesn't suffer. static const int kPrepaintRows = 4; static const int kPrepaintColumns = 2; class UpdatableTile : public LayerTilingData::Tile { public: static scoped_ptr<UpdatableTile> Create( scoped_ptr<LayerUpdater::Resource> updater_resource) { return make_scoped_ptr(new UpdatableTile(updater_resource.Pass())); } LayerUpdater::Resource* updater_resource() { return updater_resource_.get(); } PrioritizedResource* managed_resource() { return updater_resource_->texture(); } bool is_dirty() const { return !dirty_rect.IsEmpty(); } // Reset update state for the current frame. This should occur before painting // for all layers. Since painting one layer can invalidate another layer after // it has already painted, mark all non-dirty tiles as valid before painting // such that invalidations during painting won't prevent them from being // pushed. void ResetUpdateState() { update_rect = gfx::Rect(); occluded = false; partial_update = false; valid_for_frame = !is_dirty(); } // This promises to update the tile and therefore also guarantees the tile // will be valid for this frame. dirty_rect is copied into update_rect so we // can continue to track re-entrant invalidations that occur during painting. 
void MarkForUpdate() { valid_for_frame = true; update_rect = dirty_rect; dirty_rect = gfx::Rect(); } gfx::Rect dirty_rect; gfx::Rect update_rect; bool partial_update; bool valid_for_frame; bool occluded; private: explicit UpdatableTile(scoped_ptr<LayerUpdater::Resource> updater_resource) : partial_update(false), valid_for_frame(false), occluded(false), updater_resource_(updater_resource.Pass()) {} scoped_ptr<LayerUpdater::Resource> updater_resource_; DISALLOW_COPY_AND_ASSIGN(UpdatableTile); }; TiledLayer::TiledLayer() : ContentsScalingLayer(), texture_format_(RGBA_8888), skips_draw_(false), failed_update_(false), tiling_option_(AUTO_TILE) { tiler_ = LayerTilingData::Create(gfx::Size(), LayerTilingData::HAS_BORDER_TEXELS); } TiledLayer::~TiledLayer() {} scoped_ptr<LayerImpl> TiledLayer::CreateLayerImpl(LayerTreeImpl* tree_impl) { return TiledLayerImpl::Create(tree_impl, id()).PassAs<LayerImpl>(); } void TiledLayer::UpdateTileSizeAndTilingOption() { DCHECK(layer_tree_host()); gfx::Size default_tile_size = layer_tree_host()->settings().default_tile_size; gfx::Size max_untiled_layer_size = layer_tree_host()->settings().max_untiled_layer_size; int layer_width = content_bounds().width(); int layer_height = content_bounds().height(); gfx::Size tile_size(std::min(default_tile_size.width(), layer_width), std::min(default_tile_size.height(), layer_height)); // Tile if both dimensions large, or any one dimension large and the other // extends into a second tile but the total layer area isn't larger than that // of the largest possible untiled layer. This heuristic allows for long // skinny layers (e.g. scrollbars) that are Nx1 tiles to minimize wasted // texture space but still avoids creating very large tiles. bool any_dimension_large = layer_width > max_untiled_layer_size.width() || layer_height > max_untiled_layer_size.height(); bool any_dimension_one_tile = (layer_width <= default_tile_size.width() || layer_height <= default_tile_size.height()) && (layer_width * layer_height) <= (max_untiled_layer_size.width() * max_untiled_layer_size.height()); bool auto_tiled = any_dimension_large && !any_dimension_one_tile; bool is_tiled; if (tiling_option_ == ALWAYS_TILE) is_tiled = true; else if (tiling_option_ == NEVER_TILE) is_tiled = false; else is_tiled = auto_tiled; gfx::Size requested_size = is_tiled ? tile_size : content_bounds(); const int max_size = layer_tree_host()->GetRendererCapabilities().max_texture_size; requested_size.SetToMin(gfx::Size(max_size, max_size)); SetTileSize(requested_size); } void TiledLayer::UpdateBounds() { gfx::Size old_tiling_size = tiler_->tiling_size(); gfx::Size new_tiling_size = content_bounds(); if (old_tiling_size == new_tiling_size) return; tiler_->SetTilingSize(new_tiling_size); // Invalidate any areas that the new bounds exposes. 
Region new_region = SubtractRegions(gfx::Rect(new_tiling_size), gfx::Rect(old_tiling_size)); for (Region::Iterator new_rects(new_region); new_rects.has_rect(); new_rects.next()) InvalidateContentRect(new_rects.rect()); UpdateDrawsContent(HasDrawableContent()); } void TiledLayer::SetTileSize(const gfx::Size& size) { tiler_->SetTileSize(size); UpdateDrawsContent(HasDrawableContent()); } void TiledLayer::SetBorderTexelOption( LayerTilingData::BorderTexelOption border_texel_option) { tiler_->SetBorderTexelOption(border_texel_option); UpdateDrawsContent(HasDrawableContent()); } bool TiledLayer::HasDrawableContent() const { bool has_more_than_one_tile = (tiler_->num_tiles_x() > 1) || (tiler_->num_tiles_y() > 1); return !(tiling_option_ == NEVER_TILE && has_more_than_one_tile) && ContentsScalingLayer::HasDrawableContent(); } void TiledLayer::ReduceMemoryUsage() { if (Updater()) Updater()->ReduceMemoryUsage(); } void TiledLayer::SetIsMask(bool is_mask) { set_tiling_option(is_mask ? NEVER_TILE : AUTO_TILE); } void TiledLayer::PushPropertiesTo(LayerImpl* layer) { ContentsScalingLayer::PushPropertiesTo(layer); TiledLayerImpl* tiled_layer = static_cast<TiledLayerImpl*>(layer); tiled_layer->set_skips_draw(skips_draw_); tiled_layer->SetTilingData(*tiler_); std::vector<UpdatableTile*> invalid_tiles; for (LayerTilingData::TileMap::const_iterator iter = tiler_->tiles().begin(); iter != tiler_->tiles().end(); ++iter) { int i = iter->first.first; int j = iter->first.second; UpdatableTile* tile = static_cast<UpdatableTile*>(iter->second); // TODO(enne): This should not ever be null. if (!tile) continue; if (!tile->managed_resource()->have_backing_texture()) { // Evicted tiles get deleted from both layers invalid_tiles.push_back(tile); continue; } if (!tile->valid_for_frame) { // Invalidated tiles are set so they can get different debug colors. tiled_layer->PushInvalidTile(i, j); continue; } tiled_layer->PushTileProperties( i, j, tile->managed_resource()->resource_id(), tile->opaque_rect(), tile->managed_resource()->contents_swizzled()); } for (std::vector<UpdatableTile*>::const_iterator iter = invalid_tiles.begin(); iter != invalid_tiles.end(); ++iter) tiler_->TakeTile((*iter)->i(), (*iter)->j()); // TiledLayer must push properties every frame, since viewport state and // occlusion from anywhere in the tree can change what the layer decides to // push to the impl tree. needs_push_properties_ = true; } PrioritizedResourceManager* TiledLayer::ResourceManager() { if (!layer_tree_host()) return NULL; return layer_tree_host()->contents_texture_manager(); } const PrioritizedResource* TiledLayer::ResourceAtForTesting(int i, int j) const { UpdatableTile* tile = TileAt(i, j); if (!tile) return NULL; return tile->managed_resource(); } void TiledLayer::SetLayerTreeHost(LayerTreeHost* host) { if (host && host != layer_tree_host()) { for (LayerTilingData::TileMap::const_iterator iter = tiler_->tiles().begin(); iter != tiler_->tiles().end(); ++iter) { UpdatableTile* tile = static_cast<UpdatableTile*>(iter->second); // TODO(enne): This should not ever be null. 
if (!tile) continue; tile->managed_resource()->SetTextureManager( host->contents_texture_manager()); } } ContentsScalingLayer::SetLayerTreeHost(host); } UpdatableTile* TiledLayer::TileAt(int i, int j) const { return static_cast<UpdatableTile*>(tiler_->TileAt(i, j)); } UpdatableTile* TiledLayer::CreateTile(int i, int j) { CreateUpdaterIfNeeded(); scoped_ptr<UpdatableTile> tile( UpdatableTile::Create(Updater()->CreateResource(ResourceManager()))); tile->managed_resource()->SetDimensions(tiler_->tile_size(), texture_format_); UpdatableTile* added_tile = tile.get(); tiler_->AddTile(tile.PassAs<LayerTilingData::Tile>(), i, j); added_tile->dirty_rect = tiler_->TileRect(added_tile); // Temporary diagnostic crash. CHECK(added_tile); CHECK(TileAt(i, j)); return added_tile; } void TiledLayer::SetNeedsDisplayRect(const gfx::RectF& dirty_rect) { InvalidateContentRect(LayerRectToContentRect(dirty_rect)); ContentsScalingLayer::SetNeedsDisplayRect(dirty_rect); } void TiledLayer::InvalidateContentRect(const gfx::Rect& content_rect) { UpdateBounds(); if (tiler_->is_empty() || content_rect.IsEmpty() || skips_draw_) return; for (LayerTilingData::TileMap::const_iterator iter = tiler_->tiles().begin(); iter != tiler_->tiles().end(); ++iter) { UpdatableTile* tile = static_cast<UpdatableTile*>(iter->second); DCHECK(tile); // TODO(enne): This should not ever be null. if (!tile) continue; gfx::Rect bound = tiler_->TileRect(tile); bound.Intersect(content_rect); tile->dirty_rect.Union(bound); } } // Returns true if tile is dirty and only part of it needs to be updated. bool TiledLayer::TileOnlyNeedsPartialUpdate(UpdatableTile* tile) { return !tile->dirty_rect.Contains(tiler_->TileRect(tile)) && tile->managed_resource()->have_backing_texture(); } bool TiledLayer::UpdateTiles(int left, int top, int right, int bottom, ResourceUpdateQueue* queue, const OcclusionTracker<Layer>* occlusion, bool* updated) { CreateUpdaterIfNeeded(); bool ignore_occlusions = !occlusion; if (!HaveTexturesForTiles(left, top, right, bottom, ignore_occlusions)) { failed_update_ = true; return false; } gfx::Rect update_rect; gfx::Rect paint_rect; MarkTilesForUpdate( &update_rect, &paint_rect, left, top, right, bottom, ignore_occlusions); if (paint_rect.IsEmpty()) return true; *updated = true; UpdateTileTextures( update_rect, paint_rect, left, top, right, bottom, queue, occlusion); return true; } void TiledLayer::MarkOcclusionsAndRequestTextures( int left, int top, int right, int bottom, const OcclusionTracker<Layer>* occlusion) { int occluded_tile_count = 0; bool succeeded = true; for (int j = top; j <= bottom; ++j) { for (int i = left; i <= right; ++i) { UpdatableTile* tile = TileAt(i, j); DCHECK(tile); // Did SetTexturePriorities get skipped? // TODO(enne): This should not ever be null. if (!tile) continue; // Did ResetUpdateState get skipped? Are we doing more than one occlusion // pass? DCHECK(!tile->occluded); gfx::Rect visible_tile_rect = gfx::IntersectRects( tiler_->tile_bounds(i, j), visible_content_rect()); if (!draw_transform_is_animating() && occlusion && occlusion->Occluded( render_target(), visible_tile_rect, draw_transform())) { tile->occluded = true; occluded_tile_count++; } else { succeeded &= tile->managed_resource()->RequestLate(); } } } } bool TiledLayer::HaveTexturesForTiles(int left, int top, int right, int bottom, bool ignore_occlusions) { for (int j = top; j <= bottom; ++j) { for (int i = left; i <= right; ++i) { UpdatableTile* tile = TileAt(i, j); DCHECK(tile); // Did SetTexturePriorites get skipped? 
// TODO(enne): This should not ever be null. if (!tile) continue; // Ensure the entire tile is dirty if we don't have the texture. if (!tile->managed_resource()->have_backing_texture()) tile->dirty_rect = tiler_->TileRect(tile); // If using occlusion and the visible region of the tile is occluded, // don't reserve a texture or update the tile. if (tile->occluded && !ignore_occlusions) continue; if (!tile->managed_resource()->can_acquire_backing_texture()) return false; } } return true; } void TiledLayer::MarkTilesForUpdate(gfx::Rect* update_rect, gfx::Rect* paint_rect, int left, int top, int right, int bottom, bool ignore_occlusions) { for (int j = top; j <= bottom; ++j) { for (int i = left; i <= right; ++i) { UpdatableTile* tile = TileAt(i, j); DCHECK(tile); // Did SetTexturePriorites get skipped? // TODO(enne): This should not ever be null. if (!tile) continue; if (tile->occluded && !ignore_occlusions) continue; // Prepare update rect from original dirty rects. update_rect->Union(tile->dirty_rect); // TODO(reveman): Decide if partial update should be allowed based on cost // of update. https://bugs.webkit.org/show_bug.cgi?id=77376 if (tile->is_dirty() && !layer_tree_host()->AlwaysUsePartialTextureUpdates()) { // If we get a partial update, we use the same texture, otherwise return // the current texture backing, so we don't update visible textures // non-atomically. If the current backing is in-use, it won't be // deleted until after the commit as the texture manager will not allow // deletion or recycling of in-use textures. if (TileOnlyNeedsPartialUpdate(tile) && layer_tree_host()->RequestPartialTextureUpdate()) { tile->partial_update = true; } else { tile->dirty_rect = tiler_->TileRect(tile); tile->managed_resource()->ReturnBackingTexture(); } } paint_rect->Union(tile->dirty_rect); tile->MarkForUpdate(); } } } void TiledLayer::UpdateTileTextures(const gfx::Rect& update_rect, const gfx::Rect& paint_rect, int left, int top, int right, int bottom, ResourceUpdateQueue* queue, const OcclusionTracker<Layer>* occlusion) { // The update_rect should be in layer space. So we have to convert the // paint_rect from content space to layer space. float width_scale = paint_properties().bounds.width() / static_cast<float>(content_bounds().width()); float height_scale = paint_properties().bounds.height() / static_cast<float>(content_bounds().height()); update_rect_ = gfx::ScaleRect(update_rect, width_scale, height_scale); // Calling PrepareToUpdate() calls into WebKit to paint, which may have the // side effect of disabling compositing, which causes our reference to the // texture updater to be deleted. However, we can't free the memory backing // the SkCanvas until the paint finishes, so we grab a local reference here to // hold the updater alive until the paint completes. scoped_refptr<LayerUpdater> protector(Updater()); gfx::Rect painted_opaque_rect; Updater()->PrepareToUpdate(paint_rect, tiler_->tile_size(), 1.f / width_scale, 1.f / height_scale, &painted_opaque_rect); for (int j = top; j <= bottom; ++j) { for (int i = left; i <= right; ++i) { UpdatableTile* tile = TileAt(i, j); DCHECK(tile); // Did SetTexturePriorites get skipped? // TODO(enne): This should not ever be null. if (!tile) continue; gfx::Rect tile_rect = tiler_->tile_bounds(i, j); // Use update_rect as the above loop copied the dirty rect for this frame // to update_rect. gfx::Rect dirty_rect = tile->update_rect; if (dirty_rect.IsEmpty()) continue; // Save what was painted opaque in the tile. 
Keep the old area if the // paint didn't touch it, and didn't paint some other part of the tile // opaque. gfx::Rect tile_painted_rect = gfx::IntersectRects(tile_rect, paint_rect); gfx::Rect tile_painted_opaque_rect = gfx::IntersectRects(tile_rect, painted_opaque_rect); if (!tile_painted_rect.IsEmpty()) { gfx::Rect paint_inside_tile_opaque_rect = gfx::IntersectRects(tile->opaque_rect(), tile_painted_rect); bool paint_inside_tile_opaque_rect_is_non_opaque = !paint_inside_tile_opaque_rect.IsEmpty() && !tile_painted_opaque_rect.Contains(paint_inside_tile_opaque_rect); bool opaque_paint_not_inside_tile_opaque_rect = !tile_painted_opaque_rect.IsEmpty() && !tile->opaque_rect().Contains(tile_painted_opaque_rect); if (paint_inside_tile_opaque_rect_is_non_opaque || opaque_paint_not_inside_tile_opaque_rect) tile->set_opaque_rect(tile_painted_opaque_rect); } // source_rect starts as a full-sized tile with border texels included. gfx::Rect source_rect = tiler_->TileRect(tile); source_rect.Intersect(dirty_rect); // Paint rect not guaranteed to line up on tile boundaries, so // make sure that source_rect doesn't extend outside of it. source_rect.Intersect(paint_rect); tile->update_rect = source_rect; if (source_rect.IsEmpty()) continue; const gfx::Point anchor = tiler_->TileRect(tile).origin(); // Calculate tile-space rectangle to upload into. gfx::Vector2d dest_offset = source_rect.origin() - anchor; CHECK_GE(dest_offset.x(), 0); CHECK_GE(dest_offset.y(), 0); // Offset from paint rectangle to this tile's dirty rectangle. gfx::Vector2d paint_offset = source_rect.origin() - paint_rect.origin(); CHECK_GE(paint_offset.x(), 0); CHECK_GE(paint_offset.y(), 0); CHECK_LE(paint_offset.x() + source_rect.width(), paint_rect.width()); CHECK_LE(paint_offset.y() + source_rect.height(), paint_rect.height()); tile->updater_resource()->Update( queue, source_rect, dest_offset, tile->partial_update); } } } // This picks a small animated layer to be anything less than one viewport. This // is specifically for page transitions which are viewport-sized layers. The // extra tile of padding is due to these layers being slightly larger than the // viewport in some cases. bool TiledLayer::IsSmallAnimatedLayer() const { if (!draw_transform_is_animating() && !screen_space_transform_is_animating()) return false; gfx::Size viewport_size = layer_tree_host() ? layer_tree_host()->device_viewport_size() : gfx::Size(); gfx::Rect content_rect(content_bounds()); return content_rect.width() <= viewport_size.width() + tiler_->tile_size().width() && content_rect.height() <= viewport_size.height() + tiler_->tile_size().height(); } namespace { // TODO(epenner): Remove this and make this based on distance once distance can // be calculated for offscreen layers. For now, prioritize all small animated // layers after 512 pixels of pre-painting. 
void SetPriorityForTexture(const gfx::Rect& visible_rect, const gfx::Rect& tile_rect, bool draws_to_root, bool is_small_animated_layer, PrioritizedResource* texture) { int priority = PriorityCalculator::LowestPriority(); if (!visible_rect.IsEmpty()) { priority = PriorityCalculator::PriorityFromDistance( visible_rect, tile_rect, draws_to_root); } if (is_small_animated_layer) { priority = PriorityCalculator::max_priority( priority, PriorityCalculator::SmallAnimatedLayerMinPriority()); } if (priority != PriorityCalculator::LowestPriority()) texture->set_request_priority(priority); } } // namespace void TiledLayer::SetTexturePriorities(const PriorityCalculator& priority_calc) { UpdateBounds(); ResetUpdateState(); UpdateScrollPrediction(); if (tiler_->has_empty_bounds()) return; bool draws_to_root = !render_target()->parent(); bool small_animated_layer = IsSmallAnimatedLayer(); // Minimally create the tiles in the desired pre-paint rect. gfx::Rect create_tiles_rect = IdlePaintRect(); if (small_animated_layer) create_tiles_rect = gfx::Rect(content_bounds()); if (!create_tiles_rect.IsEmpty()) { int left, top, right, bottom; tiler_->ContentRectToTileIndices( create_tiles_rect, &left, &top, &right, &bottom); for (int j = top; j <= bottom; ++j) { for (int i = left; i <= right; ++i) { if (!TileAt(i, j)) CreateTile(i, j); } } } // Now update priorities on all tiles we have in the layer, no matter where // they are. for (LayerTilingData::TileMap::const_iterator iter = tiler_->tiles().begin(); iter != tiler_->tiles().end(); ++iter) { UpdatableTile* tile = static_cast<UpdatableTile*>(iter->second); // TODO(enne): This should not ever be null. if (!tile) continue; gfx::Rect tile_rect = tiler_->TileRect(tile); SetPriorityForTexture(predicted_visible_rect_, tile_rect, draws_to_root, small_animated_layer, tile->managed_resource()); } } Region TiledLayer::VisibleContentOpaqueRegion() const { if (skips_draw_) return Region(); if (contents_opaque()) return visible_content_rect(); return tiler_->OpaqueRegionInContentRect(visible_content_rect()); } void TiledLayer::ResetUpdateState() { skips_draw_ = false; failed_update_ = false; LayerTilingData::TileMap::const_iterator end = tiler_->tiles().end(); for (LayerTilingData::TileMap::const_iterator iter = tiler_->tiles().begin(); iter != end; ++iter) { UpdatableTile* tile = static_cast<UpdatableTile*>(iter->second); // TODO(enne): This should not ever be null. if (!tile) continue; tile->ResetUpdateState(); } } namespace { gfx::Rect ExpandRectByDelta(const gfx::Rect& rect, const gfx::Vector2d& delta) { int width = rect.width() + std::abs(delta.x()); int height = rect.height() + std::abs(delta.y()); int x = rect.x() + ((delta.x() < 0) ? delta.x() : 0); int y = rect.y() + ((delta.y() < 0) ? delta.y() : 0); return gfx::Rect(x, y, width, height); } } void TiledLayer::UpdateScrollPrediction() { // This scroll prediction is very primitive and should be replaced by a // a recursive calculation on all layers which uses actual scroll/animation // velocities. To insure this doesn't miss-predict, we only use it to predict // the visible_rect if: // - content_bounds() hasn't changed. // - visible_rect.size() hasn't changed. // These two conditions prevent rotations, scales, pinch-zooms etc. where // the prediction would be incorrect. 
gfx::Vector2d delta = visible_content_rect().CenterPoint() - previous_visible_rect_.CenterPoint(); predicted_scroll_ = -delta; predicted_visible_rect_ = visible_content_rect(); if (previous_content_bounds_ == content_bounds() && previous_visible_rect_.size() == visible_content_rect().size()) { // Only expand the visible rect in the major scroll direction, to prevent // massive paints due to diagonal scrolls. gfx::Vector2d major_scroll_delta = (std::abs(delta.x()) > std::abs(delta.y())) ? gfx::Vector2d(delta.x(), 0) : gfx::Vector2d(0, delta.y()); predicted_visible_rect_ = ExpandRectByDelta(visible_content_rect(), major_scroll_delta); // Bound the prediction to prevent unbounded paints, and clamp to content // bounds. gfx::Rect bound = visible_content_rect(); bound.Inset(-tiler_->tile_size().width() * kMaxPredictiveTilesCount, -tiler_->tile_size().height() * kMaxPredictiveTilesCount); bound.Intersect(gfx::Rect(content_bounds())); predicted_visible_rect_.Intersect(bound); } previous_content_bounds_ = content_bounds(); previous_visible_rect_ = visible_content_rect(); } bool TiledLayer::Update(ResourceUpdateQueue* queue, const OcclusionTracker<Layer>* occlusion) { DCHECK(!skips_draw_ && !failed_update_); // Did ResetUpdateState get skipped? // Tiled layer always causes commits to wait for activation, as it does // not support pending trees. SetNextCommitWaitsForActivation(); bool updated = false; { base::AutoReset<bool> ignore_set_needs_commit(&ignore_set_needs_commit_, true); updated |= ContentsScalingLayer::Update(queue, occlusion); UpdateBounds(); } if (tiler_->has_empty_bounds() || !DrawsContent()) return false; // Animation pre-paint. If the layer is small, try to paint it all // immediately whether or not it is occluded, to avoid paint/upload // hiccups while it is animating. if (IsSmallAnimatedLayer()) { int left, top, right, bottom; tiler_->ContentRectToTileIndices(gfx::Rect(content_bounds()), &left, &top, &right, &bottom); UpdateTiles(left, top, right, bottom, queue, NULL, &updated); if (updated) return updated; // This was an attempt to paint the entire layer so if we fail it's okay, // just fallback on painting visible etc. below. failed_update_ = false; } if (predicted_visible_rect_.IsEmpty()) return updated; // Visible painting. First occlude visible tiles and paint the non-occluded // tiles. int left, top, right, bottom; tiler_->ContentRectToTileIndices( predicted_visible_rect_, &left, &top, &right, &bottom); MarkOcclusionsAndRequestTextures(left, top, right, bottom, occlusion); skips_draw_ = !UpdateTiles( left, top, right, bottom, queue, occlusion, &updated); if (skips_draw_) tiler_->reset(); if (skips_draw_ || updated) return true; // If we have already painting everything visible. Do some pre-painting while // idle. gfx::Rect idle_paint_content_rect = IdlePaintRect(); if (idle_paint_content_rect.IsEmpty()) return updated; // Prepaint anything that was occluded but inside the layer's visible region. if (!UpdateTiles(left, top, right, bottom, queue, NULL, &updated) || updated) return updated; int prepaint_left, prepaint_top, prepaint_right, prepaint_bottom; tiler_->ContentRectToTileIndices(idle_paint_content_rect, &prepaint_left, &prepaint_top, &prepaint_right, &prepaint_bottom); // Then expand outwards one row/column at a time until we find a dirty // row/column to update. Increment along the major and minor scroll directions // first. gfx::Vector2d delta = -predicted_scroll_; delta = gfx::Vector2d(delta.x() == 0 ? 1 : delta.x(), delta.y() == 0 ? 
1 : delta.y()); gfx::Vector2d major_delta = (std::abs(delta.x()) > std::abs(delta.y())) ? gfx::Vector2d(delta.x(), 0) : gfx::Vector2d(0, delta.y()); gfx::Vector2d minor_delta = (std::abs(delta.x()) <= std::abs(delta.y())) ? gfx::Vector2d(delta.x(), 0) : gfx::Vector2d(0, delta.y()); gfx::Vector2d deltas[4] = { major_delta, minor_delta, -major_delta, -minor_delta }; for (int i = 0; i < 4; i++) { if (deltas[i].y() > 0) { while (bottom < prepaint_bottom) { ++bottom; if (!UpdateTiles( left, bottom, right, bottom, queue, NULL, &updated) || updated) return updated; } } if (deltas[i].y() < 0) { while (top > prepaint_top) { --top; if (!UpdateTiles( left, top, right, top, queue, NULL, &updated) || updated) return updated; } } if (deltas[i].x() < 0) { while (left > prepaint_left) { --left; if (!UpdateTiles( left, top, left, bottom, queue, NULL, &updated) || updated) return updated; } } if (deltas[i].x() > 0) { while (right < prepaint_right) { ++right; if (!UpdateTiles( right, top, right, bottom, queue, NULL, &updated) || updated) return updated; } } } return updated; } void TiledLayer::OnOutputSurfaceCreated() { // Ensure that all textures are of the right format. for (LayerTilingData::TileMap::const_iterator iter = tiler_->tiles().begin(); iter != tiler_->tiles().end(); ++iter) { UpdatableTile* tile = static_cast<UpdatableTile*>(iter->second); if (!tile) continue; PrioritizedResource* resource = tile->managed_resource(); resource->SetDimensions(resource->size(), texture_format_); } } bool TiledLayer::NeedsIdlePaint() { // Don't trigger more paints if we failed (as we'll just fail again). if (failed_update_ || visible_content_rect().IsEmpty() || tiler_->has_empty_bounds() || !DrawsContent()) return false; <|fim▁hole|> if (idle_paint_content_rect.IsEmpty()) return false; int left, top, right, bottom; tiler_->ContentRectToTileIndices( idle_paint_content_rect, &left, &top, &right, &bottom); for (int j = top; j <= bottom; ++j) { for (int i = left; i <= right; ++i) { UpdatableTile* tile = TileAt(i, j); DCHECK(tile); // Did SetTexturePriorities get skipped? if (!tile) continue; bool updated = !tile->update_rect.IsEmpty(); bool can_acquire = tile->managed_resource()->can_acquire_backing_texture(); bool dirty = tile->is_dirty() || !tile->managed_resource()->have_backing_texture(); if (!updated && can_acquire && dirty) return true; } } return false; } gfx::Rect TiledLayer::IdlePaintRect() { // Don't inflate an empty rect. if (visible_content_rect().IsEmpty()) return gfx::Rect(); gfx::Rect prepaint_rect = visible_content_rect(); prepaint_rect.Inset(-tiler_->tile_size().width() * kPrepaintColumns, -tiler_->tile_size().height() * kPrepaintRows); gfx::Rect content_rect(content_bounds()); prepaint_rect.Intersect(content_rect); return prepaint_rect; } } // namespace cc<|fim▁end|>
gfx::Rect idle_paint_content_rect = IdlePaintRect();
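Editor's note on the row above: of the logic in this tiled-layer source, the scroll-prediction helper is the easiest to get wrong. ExpandRectByDelta grows a rect so it covers both its current position and the position one scroll-delta away, shifting the origin only for negative deltas. A small Python check of that geometry, with rects as (x, y, w, h) tuples:

def expand_rect_by_delta(rect, delta):
    """Grow rect to also cover its position shifted by delta,
    mirroring ExpandRectByDelta in the row above."""
    x, y, w, h = rect
    dx, dy = delta
    return (x + min(dx, 0), y + min(dy, 0), w + abs(dx), h + abs(dy))

# Scrolling right/down keeps the origin and extends the far edge...
assert expand_rect_by_delta((100, 100, 200, 150), (30, 0)) == (100, 100, 230, 150)
# ...while scrolling left/up moves the origin instead.
assert expand_rect_by_delta((100, 100, 200, 150), (0, -40)) == (100, 60, 200, 190)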
<|file_name|>bitcoin_cs.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="cs" version="2.0"> <defaultcodec>UTF-8</defaultcodec> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About Legscoin</source> <translation>O Legscoinu</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;Legscoin&lt;/b&gt; version</source> <translation>&lt;b&gt;Legscoin&lt;/b&gt; verze</translation> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation> Tohle je experimentální program. Šířen pod licencí MIT/X11, viz přiložený soubor COPYING nebo http://www.opensource.org/licenses/mit-license.php. Tento produkt zahrnuje programy vyvinuté OpenSSL Projektem pro použití v OpenSSL Toolkitu (http://www.openssl.org/) a kryptografický program od Erika Younga ([email protected]) a program UPnP od Thomase Bernarda.</translation> </message> <message> <location filename="../aboutdialog.cpp" line="+14"/> <source>Copyright</source> <translation>Copyright</translation> </message> <message> <location line="+0"/> <source>The Legscoin developers</source> <translation>Vývojáři Legscoinu</translation> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Adresář</translation> </message> <message> <location line="+19"/> <source>Double-click to edit address or label</source> <translation>Dvojklikem myši začneš upravovat označení adresy</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Vytvoř novou adresu</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Zkopíruj aktuálně vybranou adresu do systémové schránky</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation>Nová &amp;adresa</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+63"/> <source>These are your Legscoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>Tohle jsou tvé Legscoinové adresy pro příjem plateb. 
Můžeš dát pokaždé každému plátci novou adresu, abys věděl, kdo ti kdy kolik platil.</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>&amp;Copy Address</source> <translation>&amp;Kopíruj adresu</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>Zobraz &amp;QR kód</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a Legscoin address</source> <translation>Podepiš zprávu, čímž prokážeš, že jsi vlastníkem Legscoinové adresy</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>Po&amp;depiš zprávu</translation> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation>Smaž zvolenou adresu ze seznamu</translation> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a file</source> <translation>Exportuj data z tohoto panelu do souboru</translation> </message> <message> <location line="+3"/> <source>&amp;Export</source> <translation>&amp;Export</translation> </message> <message> <location line="-44"/> <source>Verify a message to ensure it was signed with a specified Legscoin address</source> <translation>Ověř zprávu, aby ses ujistil, že byla podepsána danou Legscoinovou adresou</translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation>&amp;Ověř zprávu</translation> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>S&amp;maž</translation> </message> <message> <location filename="../addressbookpage.cpp" line="-5"/> <source>These are your Legscoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source> <translation>Tohle jsou tvé Legscoinové adresy pro posílání plateb. 
Před odesláním mincí si vždy zkontroluj částku a cílovou adresu.</translation> </message> <message> <location line="+13"/> <source>Copy &amp;Label</source> <translation>Kopíruj &amp;označení</translation> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation>&amp;Uprav</translation> </message> <message> <location line="+1"/> <source>Send &amp;Coins</source> <translation>Pošli min&amp;ce</translation> </message> <message> <location line="+260"/> <source>Export Address Book Data</source> <translation>Exportuj data adresáře</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>CSV formát (*.csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Chyba při exportu</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Nemohu zapisovat do souboru %1.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Označení</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Adresa</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(bez označení)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Změna hesla</translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Zadej platné heslo</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Zadej nové heslo</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Totéž heslo ještě jednou</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Zadej nové heslo k peněžence.&lt;br/&gt;Použij &lt;b&gt;alespoň 10 náhodných znaků&lt;/b&gt; nebo &lt;b&gt;alespoň osm slov&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Zašifruj peněženku</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>K provedení této operace musíš zadat heslo k peněžence, aby se mohla odemknout.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Odemkni peněženku</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>K provedení této operace musíš zadat heslo k peněžence, aby se mohla dešifrovat.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Dešifruj peněženku</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Změň heslo</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Zadej staré a nové heslo k peněžence.</translation> </message> <message> <location 
line="+46"/> <source>Confirm wallet encryption</source> <translation>Potvrď zašifrování peněženky</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR LEGSCOINS&lt;/b&gt;!</source> <translation>Varování: Pokud si zašifruješ peněženku a ztratíš či zapomeneš heslo, &lt;b&gt;PŘIJDEŠ O VŠECHNY LEGSCOINY&lt;/b&gt;!</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Jsi si jistý, že chceš peněženku zašifrovat?</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>DŮLEŽITÉ: Všechny předchozí zálohy peněženky by měly být nahrazeny nově vygenerovanou, zašifrovanou peněženkou. Z bezpečnostních důvodů budou předchozí zálohy nešifrované peněženky nepoužitelné, jakmile začneš používat novou zašifrovanou peněženku.</translation> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>Upozornění: Caps Lock je zapnutý!</translation> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation>Peněženka je zašifrována</translation> </message> <message> <location line="-56"/> <source>Legscoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your legscoins from being stolen by malware infecting your computer.</source> <translation>Legscoin se teď ukončí, aby dokončil zašifrování. Pamatuj však, že pouhé zašifrování peněženky úplně nezabraňuje krádeži tvých legscoinů malwarem, kterým se může počítač nakazit.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>Zašifrování peněženky selhalo</translation> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Zašifrování peněženky selhalo kvůli vnitřní chybě. 
Tvá peněženka tedy nebyla zašifrována.</translation> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation>Zadaná hesla nejsou shodná.</translation> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation>Odemčení peněženky selhalo</translation> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Nezadal jsi správné heslo pro dešifrování peněženky.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>Dešifrování peněženky selhalo</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>Heslo k peněžence bylo v pořádku změněno.</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+233"/> <source>Sign &amp;message...</source> <translation>Po&amp;depiš zprávu...</translation> </message> <message> <location line="+280"/> <source>Synchronizing with network...</source> <translation>Synchronizuji se se sítí...</translation> </message> <message> <location line="-349"/> <source>&amp;Overview</source> <translation>&amp;Přehled</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Zobraz celkový přehled peněženky</translation> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation>&amp;Transakce</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Procházej historii transakcí</translation> </message> <message> <location line="+7"/> <source>Edit the list of stored addresses and labels</source> <translation>Uprav seznam uložených adres a jejich označení</translation> </message> <message> <location line="-14"/> <source>Show the list of addresses for receiving payments</source> <translation>Zobraz seznam adres pro příjem plateb</translation> </message> <message> <location line="+31"/> <source>E&amp;xit</source> <translation>&amp;Konec</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Ukonči aplikaci</translation> </message> <message> <location line="+4"/> <source>Show information about Legscoin</source> <translation>Zobraz informace o Legscoinu</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>O &amp;Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Zobraz informace o Qt</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Možnosti...</translation> </message> <message> <location line="+6"/> <source>&amp;Encrypt Wallet...</source> <translation>Zaši&amp;fruj peněženku...</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Zazálohuj peněženku...</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>Změň &amp;heslo...</translation> </message> <message> <location line="+285"/> <source>Importing blocks from disk...</source> <translation>Importuji bloky z disku...</translation> </message> <message> <location line="+3"/> <source>Reindexing blocks on 
disk...</source> <translation>Vytvářím nový index bloků na disku...</translation> </message> <message> <location line="-347"/> <source>Send coins to a Legscoin address</source> <translation>Pošli mince na Legscoinovou adresu</translation> </message> <message> <location line="+49"/> <source>Modify configuration options for Legscoin</source> <translation>Uprav nastavení Legscoinu</translation> </message> <message> <location line="+9"/> <source>Backup wallet to another location</source> <translation>Zazálohuj peněženku na jiné místo</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Změň heslo k šifrování peněženky</translation> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation>&amp;Ladicí okno</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Otevři ladicí a diagnostickou konzoli</translation> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation>&amp;Ověř zprávu...</translation> </message> <message> <location line="-165"/> <location line="+530"/> <source>Legscoin</source> <translation>Legscoin</translation> </message> <message> <location line="-530"/> <source>Wallet</source> <translation>Peněženka</translation> </message> <message> <location line="+101"/> <source>&amp;Send</source> <translation>&amp;Pošli</translation> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation>Při&amp;jmi</translation> </message> <message> <location line="+14"/> <source>&amp;Addresses</source> <translation>&amp;Adresy</translation> </message> <message> <location line="+22"/> <source>&amp;About Legscoin</source> <translation>O &amp;Legscoinu</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation>&amp;Zobraz/Skryj</translation> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation>Zobraz nebo skryj hlavní okno</translation> </message> <message> <location line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation>Zašifruj soukromé klíče ve své peněžence</translation> </message> <message> <location line="+7"/> <source>Sign messages with your Legscoin addresses to prove you own them</source> <translation>Podepiš zprávy svými Legscoinovými adresami, čímž prokážeš, že jsi jejich vlastníkem</translation> </message> <message> <location line="+2"/> <source>Verify messages to ensure they were signed with specified Legscoin addresses</source> <translation>Ověř zprávy, aby ses ujistil, že byly podepsány danými Legscoinovými adresami</translation> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation>&amp;Soubor</translation> </message> <message> <location line="+7"/> <source>&amp;Settings</source> <translation>&amp;Nastavení</translation> </message> <message> <location line="+6"/> <source>&amp;Help</source> <translation>Ná&amp;pověda</translation> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation>Panel s listy</translation> </message> <message> <location line="+17"/> <location line="+10"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> <message> <location line="+47"/> <source>Legscoin client</source> <translation>Legscoin klient</translation> </message> <message numerus="yes"> <location line="+141"/> <source>%n active connection(s) 
to Legscoin network</source> <translation><numerusform>%n aktivní spojení do Legscoinové sítě</numerusform><numerusform>%n aktivní spojení do Legscoinové sítě</numerusform><numerusform>%n aktivních spojení do Legscoinové sítě</numerusform></translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation>Není dostupný žádný zdroj bloků...</translation> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation>Zpracováno %1 z přibližně %2 bloků transakční historie.</translation> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation>Zpracováno %1 bloků transakční historie.</translation> </message> <message numerus="yes"> <location line="+20"/> <source>%n hour(s)</source> <translation><numerusform>hodinu</numerusform><numerusform>%n hodiny</numerusform><numerusform>%n hodin</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation><numerusform>den</numerusform><numerusform>%n dny</numerusform><numerusform>%n dnů</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n week(s)</source> <translation><numerusform>týden</numerusform><numerusform>%n týdny</numerusform><numerusform>%n týdnů</numerusform></translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation>Stahuji ještě bloky transakcí za poslední %1</translation> </message> <message> <location line="+14"/> <source>Last received block was generated %1 ago.</source> <translation>Poslední stažený blok byl vygenerován %1 zpátky.</translation> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation>Následné transakce ještě nebudou vidět.</translation> </message> <message> <location line="+22"/> <source>Error</source> <translation>Chyba</translation> </message> <message> <location line="+3"/> <source>Warning</source> <translation>Upozornění</translation> </message> <message> <location line="+3"/> <source>Information</source> <translation>Informace</translation> </message> <message> <location line="+70"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation>Tahle transakce přesahuje velikostní limit. I tak ji ale můžeš poslat, pokud za ni zaplatíš poplatek %1, který půjde uzlům, které tvou transakci zpracují, a navíc tak podpoříš síť. 
Chceš zaplatit poplatek?</translation> </message> <message> <location line="-140"/> <source>Up to date</source> <translation>Aktuální</translation> </message> <message> <location line="+31"/> <source>Catching up...</source> <translation>Stahuji...</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation>Potvrď transakční poplatek</translation> </message> <message> <location line="+8"/> <source>Sent transaction</source> <translation>Odeslané transakce</translation> </message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation>Příchozí transakce</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Datum: %1 Částka: %2 Typ: %3 Adresa: %4 </translation> </message> <message> <location line="+33"/> <location line="+23"/> <source>URI handling</source> <translation>Zpracování URI</translation> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not be parsed! This can be caused by an invalid Legscoin address or malformed URI parameters.</source> <translation>Nepodařilo se analyzovat URI! Důvodem může být neplatná Legscoinová adresa nebo poškozené parametry URI.</translation> </message> <message> <location line="+17"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Peněženka je &lt;b&gt;zašifrovaná&lt;/b&gt; a momentálně &lt;b&gt;odemčená&lt;/b&gt;</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Peněženka je &lt;b&gt;zašifrovaná&lt;/b&gt; a momentálně &lt;b&gt;zamčená&lt;/b&gt;</translation> </message> <message> <location filename="../bitcoin.cpp" line="+111"/> <source>A fatal error occurred. Legscoin can no longer continue safely and will quit.</source> <translation>Stala se fatální chyba. Legscoin nemůže bezpečně pokračovat v činnosti, a proto skončí.</translation> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+104"/> <source>Network Alert</source> <translation>Upozornění sítě</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Uprav adresu</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Označení</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>Označení spojené s tímto záznamem v adresáři</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Adresa</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>Adresa spojená s tímto záznamem v adresáři. 
Lze upravovat jen pro odesílací adresy.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation>Nová přijímací adresa</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Nová odesílací adresa</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Uprav přijímací adresu</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Uprav odesílací adresu</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>Zadaná adresa &quot;%1&quot; už v adresáři je.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid Legscoin address.</source> <translation>Zadaná adresa &quot;%1&quot; není platná Legscoinová adresa.</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>Nemohu odemknout peněženku.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Nepodařilo se mi vygenerovat nový klíč.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+424"/> <location line="+12"/> <source>Legscoin-Qt</source> <translation>Legscoin-Qt</translation> </message> <message> <location line="-12"/> <source>version</source> <translation>verze</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Užití:</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation>možnosti příkazové řádky</translation> </message> <message> <location line="+4"/> <source>UI options</source> <translation>Možnosti UI</translation> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>Nastavit jazyk, například &quot;de_DE&quot; (výchozí: systémové nastavení)</translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Nastartovat minimalizovaně</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation>Zobrazit startovací obrazovku (výchozí: 1)</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Možnosti</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Hlavní</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source> <translation>Dobrovolný transakční poplatek za každý započatý kB dopomáhá k rychlému zpracování tvých transakcí. 
Většina transakcí má do 1 kB.</translation> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>Platit &amp;transakční poplatek</translation> </message> <message> <location line="+31"/> <source>Automatically start Legscoin after logging in to the system.</source> <translation>Automaticky spustí Legscoin po přihlášení do systému.</translation> </message> <message> <location line="+3"/> <source>&amp;Start Legscoin on system login</source> <translation>S&amp;pustit Legscoin po přihlášení do systému</translation> </message> <message> <location line="+35"/> <source>Reset all client options to default.</source> <translation>Vrátí všechny volby na výchozí hodnoty.</translation> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation>&amp;Obnovit nastavení</translation> </message> <message> <location line="+13"/> <source>&amp;Network</source> <translation>&amp;Síť</translation> </message> <message> <location line="+6"/> <source>Automatically open the Legscoin client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Automaticky otevře potřebný port na routeru. Tohle funguje jen za předpokladu, že tvůj router podporuje UPnP a že je UPnP povolené.</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Namapovat port přes &amp;UPnP</translation> </message> <message> <location line="+7"/> <source>Connect to the Legscoin network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation>Připojí se do Legscoinové sítě přes SOCKS proxy (např. když se připojuje přes Tor).</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>&amp;Připojit přes SOCKS proxy:</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation>&amp;IP adresa proxy:</translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation>IP adresa proxy (např. 127.0.0.1)</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation>Por&amp;t:</translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Port proxy (např. 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>&amp;Verze SOCKS:</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 5)</source> <translation>Verze SOCKS proxy (např. 5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>O&amp;kno</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Po minimalizaci okna zobrazí pouze ikonu v panelu.</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Minimalizovávat do ikony v panelu</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Zavřením se aplikace minimalizuje. 
Pokud je tato volba zaškrtnuta, tak se aplikace ukončí pouze zvolením Konec v menu.</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>Za&amp;vřením minimalizovat</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>Zobr&amp;azení</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>&amp;Jazyk uživatelského rozhraní:</translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. This setting will take effect after restarting Legscoin.</source> <translation>Tady lze nastavit jazyk uživatelského rozhraní. Nastavení se projeví až po restartování Legscoinu.</translation> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>J&amp;ednotka pro částky: </translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Zvol výchozí podjednotku, která se bude zobrazovat v programu a při posílání mincí.</translation> </message> <message> <location line="+9"/> <source>Whether to show Legscoin addresses in the transaction list or not.</source> <translation>Zda ukazovat legscoinové adresy ve výpisu transakcí nebo ne.</translation> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>Ukazo&amp;vat adresy ve výpisu transakcí</translation> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>&amp;Budiž</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>&amp;Zrušit</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation>&amp;Uložit</translation> </message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation>výchozí</translation> </message> <message> <location line="+130"/> <source>Confirm options reset</source> <translation>Potvrzení obnovení nastavení</translation> </message> <message> <location line="+1"/> <source>Some settings may require a client restart to take effect.</source> <translation>Některá nastavení mohou vyžadovat restart klienta, aby se mohla projevit.</translation> </message> <message> <location line="+0"/> <source>Do you want to proceed?</source> <translation>Chceš pokračovat?</translation> </message> <message> <location line="+42"/> <location line="+9"/> <source>Warning</source> <translation>Upozornění</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting Legscoin.</source> <translation>Nastavení se projeví až po restartování Legscoinu.</translation> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation>Zadaná adresa proxy je neplatná.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Formulář</translation> </message> <message> <location line="+50"/> <location line="+166"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the Legscoin network after a connection is established, but this process has not completed yet.</source> <translation>Zobrazené informace nemusí být aktuální. 
Tvá peněženka se automaticky sesynchronizuje s Legscoinovou sítí, jakmile se s ní spojí. Zatím ale ještě není synchronizace dokončena.</translation> </message> <message> <location line="-124"/> <source>Balance:</source> <translation>Stav účtu:</translation> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>Nepotvrzeno:</translation> </message> <message> <location line="-78"/> <source>Wallet</source> <translation>Peněženka</translation> </message> <message> <location line="+107"/> <source>Immature:</source> <translation>Nedozráno:</translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation>Vytěžené mince, které ještě nejsou zralé</translation> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Poslední transakce&lt;/b&gt;</translation> </message> <message> <location line="-101"/> <source>Your current balance</source> <translation>Aktuální stav tvého účtu</translation> </message> <message> <location line="+29"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>Celkem z transakcí, které ještě nejsou potvrzené a které se ještě nezapočítávají do celkového stavu účtu</translation> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation>nesynchronizováno</translation> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start legscoin: click-to-pay handler</source> <translation>Nemůžu spustit legscoin: obsluha click-to-pay</translation> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation>QR kód</translation> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation>Požadovat platbu</translation> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>Částka:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>Označení:</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Zpráva:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>&amp;Ulož jako...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation>Chyba při kódování URI do QR kódu.</translation> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation>Zadaná částka je neplatná, překontroluj ji prosím.</translation> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>Výsledná URI je příliš dlouhá, zkus zkrátit text označení / zprávy.</translation> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation>Ulož QR kód</translation> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>PNG obrázky (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Název 
klienta</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+339"/> <source>N/A</source> <translation>N/A</translation> </message> <message> <location line="-217"/> <source>Client version</source> <translation>Verze klienta</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>&amp;Informace</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation>Používaná verze OpenSSL</translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>Čas spuštění</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>Síť</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>Počet spojení</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation>V testnetu</translation> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>Řetězec bloků</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Aktuální počet bloků</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation>Odhad celkového počtu bloků</translation> </message> <message> <location line="+23"/> <source>Last block time</source> <translation>Čas posledního bloku</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>&amp;Otevřít</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation>Argumenty z příkazové řádky</translation> </message> <message> <location line="+7"/> <source>Show the Legscoin-Qt help message to get a list with possible Legscoin command-line options.</source> <translation>Seznam parametrů Legscoinu pro příkazovou řádku získáš v nápovědě Legscoinu Qt.</translation> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation>&amp;Zobrazit</translation> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>&amp;Konzole</translation> </message> <message> <location line="-260"/> <source>Build date</source> <translation>Datum kompilace</translation> </message> <message> <location line="-104"/> <source>Legscoin - Debug window</source> <translation>Legscoin - ladicí okno</translation> </message> <message> <location line="+25"/> <source>Legscoin Core</source> <translation>Jádro Legscoinu</translation> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation>Soubor s ladicími záznamy</translation> </message> <message> <location line="+7"/> <source>Open the Legscoin debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation>Otevři soubor s ladicími záznamy Legscoinu z aktuálního datového adresáře. 
U velkých logů to může pár vteřin zabrat.</translation> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>Vyčistit konzoli</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the Legscoin RPC console.</source> <translation>Vítej v Legscoinové RPC konzoli.</translation> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>V historii se pohybuješ šipkami nahoru a dolů a pomocí &lt;b&gt;Ctrl-L&lt;/b&gt; čistíš obrazovku.</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Napsáním &lt;b&gt;help&lt;/b&gt; si vypíšeš přehled dostupných příkazů.</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+124"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Pošli mince</translation> </message> <message> <location line="+50"/> <source>Send to multiple recipients at once</source> <translation>Pošli více příjemcům naráz</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>Při&amp;dej příjemce</translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation>Smaž všechny transakční formuláře</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>Všechno s&amp;maž</translation> </message> <message> <location line="+22"/> <source>Balance:</source> <translation>Stav účtu:</translation> </message> <message> <location line="+10"/> <source>123.456 BTC</source> <translation>123.456 BTC</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Potvrď odeslání</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>P&amp;ošli</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-59"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; pro %2 (%3)</translation> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>Potvrď odeslání mincí</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>Jsi si jistý, že chceš poslat %1?</translation> </message> <message> <location line="+0"/> <source> and </source> <translation> a </translation> </message> <message> <location line="+23"/> <source>The recipient address is not valid, please recheck.</source> <translation>Adresa příjemce je neplatná, překontroluj ji prosím.</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>Odesílaná částka musí být větší než 0.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>Částka překračuje stav účtu.</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>Celková částka při připočítání poplatku %1 překročí stav 
účtu.</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Zaznamenána duplikovaná adresa; každá adresa může být v odesílané platbě pouze jednou.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation>Chyba: Vytvoření transakce selhalo!</translation> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Chyba: Transakce byla odmítnuta. Tohle může nastat, pokud nějaké mince z tvé peněženky už jednou byly utraceny, například pokud používáš kopii souboru wallet.dat a mince byly utraceny v druhé kopii, ale nebyly označeny jako utracené v této.</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Formulář</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>Čás&amp;tka:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>&amp;Komu:</translation> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Adresa příjemce (např. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation>Zadej označení této adresy; obojí se ti pak uloží do adresáře</translation> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation>O&amp;značení:</translation> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation>Vyber adresu z adresáře</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Vlož adresu ze schránky</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>Smaž tohoto příjemce</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a Legscoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Zadej Legscoinovou adresu (např. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation>Podpisy - podepsat/ověřit zprávu</translation> </message> <message> <location line="+13"/> <source>&amp;Sign Message</source> <translation>&amp;Podepiš zprávu</translation> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. 
Only sign fully-detailed statements you agree to.</source> <translation>Podepsáním zprávy svými adresami můžeš prokázat, že je skutečně vlastníš. Buď opatrný a nepodepisuj nic vágního; například při phishingových útocích můžeš být lákán, abys něco takového podepsal. Podepisuj pouze zcela úplná a detailní prohlášení, se kterými souhlasíš.</translation> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Adresa, kterou se zpráva podepíše (např. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+10"/> <location line="+213"/> <source>Choose an address from the address book</source> <translation>Vyber adresu z adresáře</translation> </message> <message> <location line="-203"/> <location line="+213"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-203"/> <source>Paste address from clipboard</source> <translation>Vlož adresu ze schránky</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Sem vepiš zprávu, kterou chceš podepsat</translation> </message> <message> <location line="+7"/> <source>Signature</source> <translation>Podpis</translation> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation>Zkopíruj aktuálně vybraný podpis do systémové schránky</translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this Legscoin address</source> <translation>Podepiš zprávu, čímž prokážeš, že jsi vlastníkem této Legscoinové adresy</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>Po&amp;depiš zprávu</translation> </message> <message> <location line="+14"/> <source>Reset all sign message fields</source> <translation>Vymaž všechna pole formuláře pro podepsání zprávy</translation> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>Všechno &amp;smaž</translation> </message> <message> <location line="-87"/> <source>&amp;Verify Message</source> <translation>&amp;Ověř zprávu</translation> </message> <message> <location line="+6"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>K ověření podpisu zprávy zadej podepisující adresu, zprávu (ověř si, že správně kopíruješ zalomení řádků, mezery, tabulátory apod.) a podpis. Dávej pozor na to, abys nezkopíroval do podpisu víc, než co je v samotné podepsané zprávě, abys nebyl napálen man-in-the-middle útokem.</translation> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Adresa, kterou je zpráva podepsána (např. 
Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified Legscoin address</source> <translation>Ověř zprávu, aby ses ujistil, že byla podepsána danou Legscoinovou adresou</translation> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation>O&amp;věř zprávu</translation> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation>Vymaž všechna pole formuláře pro ověření zprávy</translation> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a Legscoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Zadej Legscoinovou adresu (např. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>Kliknutím na &quot;Podepiš zprávu&quot; vygeneruješ podpis</translation> </message> <message> <location line="+3"/> <source>Enter Legscoin signature</source> <translation>Vlož Legscoinový podpis</translation> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>Zadaná adresa je neplatná.</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>Zkontroluj ji prosím a zkus to pak znovu.</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation>Zadaná adresa nepasuje ke klíči.</translation> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation>Odemčení peněženky bylo zrušeno.</translation> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation>Soukromý klíč pro zadanou adresu není dostupný.</translation> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>Podepisování zprávy selhalo.</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>Zpráva podepsána.</translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation>Podpis nejde dekódovat.</translation> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation>Zkontroluj ho prosím a zkus to pak znovu.</translation> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation>Podpis se neshoduje s hašem zprávy.</translation> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>Ověřování zprávy selhalo.</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>Zpráva ověřena.</translation> </message> </context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+22"/> <source>The Legscoin developers</source> <translation>Vývojáři Legscoinu</translation> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> </context> <context> <name>TransactionDesc</name> 
<message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation>Otevřeno dokud %1</translation> </message> <message> <location line="+6"/> <source>%1/offline</source> <translation>%1/offline</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/nepotvrzeno</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 potvrzení</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Stav</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation><numerusform>, rozesláno přes 1 uzel</numerusform><numerusform>, rozesláno přes %n uzly</numerusform><numerusform>, rozesláno přes %n uzlů</numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Datum</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation>Zdroj</translation> </message> <message> <location line="+0"/> <source>Generated</source> <translation>Vygenerováno</translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>Od</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>Pro</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation>vlastní adresa</translation> </message> <message> <location line="-2"/> <source>label</source> <translation>označení</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation>Příjem</translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation><numerusform>dozraje po jednom bloku</numerusform><numerusform>dozraje po %n blocích</numerusform><numerusform>dozraje po %n blocích</numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>neakceptováno</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>Výdaj</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>Transakční poplatek</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Čistá částka</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>Zpráva</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Komentář</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>ID transakce</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Vygenerované mince musí čekat 120 bloků, než mohou být utraceny. 
Když jsi vygeneroval tenhle blok, tak byl rozposlán do sítě, aby byl přidán do řetězce bloků. Pokud se mu nepodaří dostat se do řetězce, změní se na &quot;neakceptovaný&quot; a nepůjde utratit. To se občas může stát, pokud jiný uzel vygeneruje blok zhruba ve stejném okamžiku jako ty.</translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation>Ladicí informace</translation> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Transakce</translation> </message> <message> <location line="+3"/> <source>Inputs</source> <translation>Vstupy</translation> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Částka</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>true</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>false</translation> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation>, ještě nebylo rozesláno</translation> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation><numerusform>Otevřeno pro 1 další blok</numerusform><numerusform>Otevřeno pro %n další bloky</numerusform><numerusform>Otevřeno pro %n dalších bloků</numerusform></translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation>neznámo</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Detaily transakce</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>Toto okno zobrazuje detailní popis transakce</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+225"/> <source>Date</source> <translation>Datum</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Typ</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Adresa</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Částka</translation> </message> <message numerus="yes"> <location line="+57"/> <source>Open for %n more block(s)</source> <translation><numerusform>Otevřeno pro 1 další blok</numerusform><numerusform>Otevřeno pro %n další bloky</numerusform><numerusform>Otevřeno pro %n dalších bloků</numerusform></translation> </message> <message> <location line="+3"/> <source>Open until %1</source> <translation>Otevřeno dokud %1</translation> </message> <message> <location line="+3"/> <source>Offline (%1 confirmations)</source> <translation>Offline (%1 potvrzení)</translation> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation>Nepotvrzeno (%1 z %2 potvrzení)</translation> </message> <message> <location line="+3"/> <source>Confirmed (%1 confirmations)</source> <translation>Potvrzeno (%1 potvrzení)</translation> </message> <message numerus="yes"> <location line="+8"/> <source>Mined balance will be available when it matures in %n more block(s)</source> <translation><numerusform>Vytěžené mince budou použitelné po dozrání, tj. po jednom bloku</numerusform><numerusform>Vytěžené mince budou použitelné po dozrání, tj. 
po %n blocích</numerusform><numerusform>Vytěžené mince budou použitelné po dozrání, tj. po %n blocích</numerusform></translation> </message> <message> <location line="+5"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Tento blok nedostal žádný jiný uzel a pravděpodobně nebude akceptován!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Vygenerováno, ale neakceptováno</translation> </message> <message> <location line="+43"/> <source>Received with</source> <translation>Přijato do</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>Přijato od</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Posláno na</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Platba sama sobě</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Vytěženo</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(n/a)</translation> </message> <message> <location line="+199"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Stav transakce. Najetím myši na toto políčko si zobrazíš počet potvrzení.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Datum a čas přijetí transakce.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Druh transakce.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Cílová adresa transakce.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Částka odečtená z nebo přičtená k účtu.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+52"/> <location line="+16"/> <source>All</source> <translation>Vše</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Dnes</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>Tento týden</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>Tento měsíc</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>Minulý měsíc</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Letos</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Rozsah...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Přijato</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Posláno</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>Sám sobě</translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>Vytěženo</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Ostatní</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Zadej adresu nebo označení pro její vyhledání</translation> 
</message> <message> <location line="+7"/> <source>Min amount</source> <translation>Minimální částka</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Kopíruj adresu</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Kopíruj její označení</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Kopíruj částku</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation>Kopíruj ID transakce</translation> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Uprav označení</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>Zobraz detaily transakce</translation> </message> <message> <location line="+139"/> <source>Export Transaction Data</source> <translation>Exportuj transakční data</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>CSV formát (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>Potvrzeno</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>Datum</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Typ</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Označení</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Adresa</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>Částka</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>ID</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Chyba při exportu</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Nemohu zapisovat do souboru %1.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Rozsah:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>až</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+193"/> <source>Send Coins</source> <translation>Pošli mince</translation> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+42"/> <source>&amp;Export</source> <translation>&amp;Export</translation> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation>Exportuj data z tohoto panelu do souboru</translation> </message> <message> <location line="+193"/> <source>Backup Wallet</source> <translation>Záloha peněženky</translation> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation>Data peněženky (*.dat)</translation> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation>Zálohování selhalo</translation> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation>Při ukládání peněženky na nové místo se přihodila nějaká chyba.</translation> </message> <message> <location line="+4"/> <source>Backup Successful</source> <translation>Úspěšně zazálohováno</translation> 
</message> <message> <location line="+0"/> <source>The wallet data was successfully saved to the new location.</source> <translation>Data z peněženky byla v pořádku uložena na nové místo.</translation> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+94"/> <source>Legscoin version</source> <translation>Verze Legscoinu</translation> </message> <message> <location line="+102"/> <source>Usage:</source> <translation>Užití:</translation> </message> <message> <location line="-29"/> <source>Send command to -server or legscoind</source> <translation>Poslat příkaz pro -server nebo legscoind</translation> </message> <message> <location line="-23"/> <source>List commands</source> <translation>Výpis příkazů</translation> </message> <message> <location line="-12"/> <source>Get help for a command</source> <translation>Získat nápovědu pro příkaz</translation> </message> <message> <location line="+24"/> <source>Options:</source> <translation>Možnosti:</translation> </message> <message> <location line="+24"/> <source>Specify configuration file (default: legscoin.conf)</source> <translation>Konfigurační soubor (výchozí: legscoin.conf)</translation> </message> <message> <location line="+3"/> <source>Specify pid file (default: legscoind.pid)</source> <translation>PID soubor (výchozí: legscoind.pid)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Adresář pro data</translation> </message> <message> <location line="-9"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Nastavit velikost databázové vyrovnávací paměti v megabajtech (výchozí: 25)</translation> </message> <message> <location line="-28"/> <source>Listen for connections on &lt;port&gt; (default: 9333 or testnet: 19333)</source> <translation>Čekat na spojení na &lt;portu&gt; (výchozí: 9333 nebo testnet: 19333)</translation> </message> <message> <location line="+5"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Povolit nejvýše &lt;n&gt; připojení k uzlům (výchozí: 125)</translation> </message> <message> <location line="-48"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Připojit se k uzlu, získat adresy jeho protějšků a odpojit se</translation> </message> <message> <location line="+82"/> <source>Specify your own public address</source> <translation>Specifikuj svou veřejnou adresu</translation> </message> <message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Práh pro odpojování zlobivých uzlů (výchozí: 100)</translation> </message> <message> <location line="-134"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Doba ve vteřinách, po kterou se nebudou moci zlobivé uzly znovu připojit (výchozí: 86400)</translation> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation>Při nastavování naslouchacího RPC portu %u pro IPv4 nastala chyba: %s</translation> </message> <message> <location line="+27"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 9332 or testnet: 19332)</source> <translation>Čekat na JSON-RPC spojení na &lt;portu&gt; (výchozí: 9332 nebo testnet: 19332)</translation> </message> <message> <location line="+37"/> <source>Accept command line and JSON-RPC 
commands</source> <translation>Akceptovat příkazy z příkazové řádky a přes JSON-RPC</translation> </message> <message> <location line="+76"/> <source>Run in the background as a daemon and accept commands</source> <translation>Běžet na pozadí jako démon a akceptovat příkazy</translation> </message> <message> <location line="+37"/> <source>Use the test network</source> <translation>Použít testovací síť (testnet)</translation> </message> <message> <location line="-112"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Přijímat spojení zvenčí (výchozí: 1, pokud není zadáno -proxy nebo -connect)</translation> </message> <message> <location line="-80"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=legscoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;Legscoin Alert&quot; [email protected] </source> <translation>%s, musíš nastavit rpcpassword v konfiguračním souboru: %s Je vhodné použít následující náhodné heslo: rpcuser=legscoinrpc rpcpassword=%s (není potřeba si ho pamatovat) rpcuser a rpcpassword NESMÍ být stejné. Pokud konfigurační soubor ještě neexistuje, vytvoř ho tak, aby ho mohl číst pouze vlastník. Je také doporučeno si nastavit alertnotify, abys byl upozorněn na případné problémy; například: alertnotify=echo %%s | mail -s &quot;Legscoin Alert&quot; [email protected] </translation> </message> <message> <location line="+17"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation>Při nastavování naslouchacího RPC portu %u pro IPv6 nastala chyba, vracím se k IPv4: %s</translation> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation>Poslouchat na zadané adrese. Pro zápis IPv6 adresy použij notaci [adresa]:port</translation> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. Legscoin is probably already running.</source> <translation>Nedaří se mi získat zámek na datový adresář %s. Legscoin pravděpodobně už jednou běží.</translation> </message> <message> <location line="+3"/> <source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Chyba: Transakce byla odmítnuta! 
Tohle může nastat, pokud nějaké mince z tvé peněženky už jednou byly utraceny, například pokud používáš kopii souboru wallet.dat a mince byly utraceny v druhé kopii, ale nebyly označeny jako utracené v této.</translation> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation>Chyba: Tahle transakce vyžaduje transakční poplatek nejméně %s kvůli velikosti zasílané částky, komplexnosti nebo použití nedávno přijatých mincí!</translation> </message> <message> <location line="+3"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation>Spustit příkaz po přijetí relevantního hlášení (%s se v příkazu nahradí za zprávu)</translation> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>Spustit příkaz, když se objeví transakce týkající se peněženky (%s se v příkazu nahradí za TxID)</translation> </message> <message> <location line="+11"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation>Nastavit maximální velikost prioritních/nízkopoplatkových transakcí v bajtech (výchozí: 27000)</translation> </message> <message> <location line="+6"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation>Tohle je testovací verze – používej ji jen na vlastní riziko, ale rozhodně ji nepoužívej k těžbě nebo pro obchodní aplikace</translation> </message> <message> <location line="+5"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Upozornění: -paytxfee je nastaveno velmi vysoko! Toto je transakční poplatek, který zaplatíš za každou poslanou transakci.</translation> </message> <message> <location line="+3"/> <source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source> <translation>Upozornění: Zobrazené transakce nemusí být správné! Možná potřebuješ aktualizovat nebo ostatní uzly potřebují aktualizovat.</translation> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong Legscoin will not work properly.</source> <translation>Upozornění: Zkontroluj, že máš v počítači správně nastavené datum a čas! Pokud jsou nastaveny špatně, Legscoin nebude fungovat správně.</translation> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Upozornění: nastala chyba při čtení souboru wallet.dat! Všechny klíče se přečetly správně, ale data o transakcích nebo záznamy v adresáři mohou chybět či být nesprávné.</translation> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Upozornění: soubor wallet.dat je poškozený, data jsou však zachráněna! Původní soubor wallet.dat je uložený jako wallet.{timestamp}.bak v %s. 
Pokud je stav tvého účtu nebo transakce nesprávné, zřejmě bys měl obnovit zálohu.</translation> </message> <message> <location line="+14"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Pokusit se zachránit soukromé klíče z poškozeného souboru wallet.dat</translation> </message> <message> <location line="+2"/> <source>Block creation options:</source> <translation>Možnosti vytvoření bloku:</translation> </message> <message> <location line="+5"/> <source>Connect only to the specified node(s)</source> <translation>Připojit se pouze k zadanému uzlu (příp. zadaným uzlům)</translation> </message> <message> <location line="+3"/> <source>Corrupted block database detected</source> <translation>Bylo zjištěno poškození databáze bloků</translation> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>Zjistit vlastní IP adresu (výchozí: 1, pokud naslouchá a není zadáno -externalip)</translation> </message> <message> <location line="+1"/> <source>Do you want to rebuild the block database now?</source> <translation>Chceš přestavět databázi bloků hned teď?</translation> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation>Chyba při zakládání databáze bloků</translation> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation>Chyba při vytváření databázového prostředí %s pro peněženku!</translation> </message> <message> <location line="+1"/> <source>Error loading block database</source> <translation>Chyba při načítání databáze bloků</translation> </message> <message> <location line="+4"/> <source>Error opening block database</source> <translation>Chyba při otevírání databáze bloků</translation> </message> <message> <location line="+2"/> <source>Error: Disk space is low!</source> <translation>Problém: Na disku je málo místa!</translation> </message> <message> <location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation>Chyba: Peněženka je zamčená, nemohu vytvořit transakci!</translation> </message> <message> <location line="+1"/> <source>Error: system error: </source> <translation>Chyba: systémová chyba: </translation> </message> <message> <location line="+1"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>Nepodařilo se naslouchat na žádném portu. 
Použij -listen=0, pokud to byl tvůj záměr.</translation> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation>Nepodařilo se přečíst informace o bloku</translation> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation>Nepodařilo se přečíst blok</translation> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation>Nepodařilo se sesynchronizovat index bloků</translation> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation>Nepodařilo se zapsat index bloků</translation> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation>Nepodařilo se zapsat informace o bloku</translation> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation>Nepodařilo se zapsat blok</translation> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation>Nepodařilo se zapsat informace o souboru</translation> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation>Selhal zápis do databáze mincí</translation> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation>Nepodařilo se zapsat index transakcí</translation> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation>Nepodařilo se zapsat data o vracení změn</translation> </message> <message> <location line="+2"/> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation>Hledat uzly přes DNS (výchozí: 1, pokud není zadáno -connect)</translation> </message> <message> <location line="+1"/> <source>Generate coins (default: 0)</source> <translation>Generovat mince (výchozí: 0)</translation> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation>Kolik bloků při startu zkontrolovat (výchozí: 288, 0 = všechny)</translation> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-4, default: 3)</source> <translation>Jak moc důkladná má být verifikace bloků (0-4, výchozí: 3)</translation> </message> <message> <location line="+19"/> <source>Not enough file descriptors available.</source> <translation>Je nedostatek deskriptorů souborů.</translation> </message> <message> <location line="+8"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation>Znovu vytvořit index řetězce bloků z aktuálních blk000??.dat souborů</translation> </message> <message> <location line="+16"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation>Nastavení počtu vláken pro servisní RPC volání (výchozí: 4)</translation> </message> <message> <location line="+26"/> <source>Verifying blocks...</source> <translation>Ověřuji bloky...</translation> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation>Kontroluji peněženku...</translation> </message> <message> <location line="-69"/> <source>Imports blocks from external blk000??.dat file</source> <translation>Importovat bloky z externího souboru blk000??.dat</translation> </message> <message> <location line="-76"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation>Nastavení počtu 
vláken pro verifikaci skriptů (max. 16, 0 = automaticky, &lt;0 = nechat daný počet jader volný, výchozí: 0)</translation> </message> <message> <location line="+77"/> <source>Information</source> <translation>Informace</translation> </message> <message> <location line="+3"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>Neplatná -tor adresa: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Neplatná částka pro -minrelaytxfee=&lt;částka&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Neplatná částka pro -mintxfee=&lt;částka&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+8"/> <source>Maintain a full transaction index (default: 0)</source> <translation>Spravovat úplný index transakcí (výchozí: 0)</translation> </message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>Maximální velikost přijímacího bufferu pro každé spojení, &lt;n&gt;*1000 bajtů (výchozí: 5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>Maximální velikost odesílacího bufferu pro každé spojení, &lt;n&gt;*1000 bajtů (výchozí: 1000)</translation> </message> <message> <location line="+2"/> <source>Only accept block chain matching built-in checkpoints (default: 1)</source> <translation>Uznávat pouze řetěz bloků, který odpovídá vnitřním kontrolním bodům (výchozí: 1)</translation> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation>Připojit se pouze k uzlům v &lt;net&gt; síti (IPv4, IPv6 nebo Tor)</translation> </message> <message> <location line="+2"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation>Tisknout speciální ladicí informace. 
Implikuje použití všech -debug* voleb</translation> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation>Tisknout speciální ladicí informace o síti</translation> </message> <message> <location line="+2"/> <source>Prepend debug output with timestamp</source> <translation>Připojit před ladicí výstup časové razítko</translation> </message> <message> <location line="+5"/> <source>SSL options: (see the Legscoin Wiki for SSL setup instructions)</source> <translation>Možnosti SSL: (viz instrukce nastavení SSL v Legscoin Wiki)</translation> </message> <message> <location line="+1"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation>Zvol verzi socks proxy (4-5, výchozí: 5)</translation> </message> <message> <location line="+3"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Posílat stopovací/ladicí informace do konzole místo do souboru debug.log</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>Posílat stopovací/ladicí informace do debuggeru</translation> </message> <message> <location line="+5"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation>Nastavit maximální velikost bloku v bajtech (výchozí: 250000)</translation> </message> <message> <location line="+1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>Nastavit minimální velikost bloku v bajtech (výchozí: 0)</translation> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Při spuštění klienta zmenšit soubor debug.log (výchozí: 1, pokud není zadáno -debug)</translation> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation>Podepisování transakce selhalo</translation> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Zadej časový limit spojení v milisekundách (výchozí: 5000)</translation> </message> <message> <location line="+4"/> <source>System error: </source> <translation>Systémová chyba: </translation> </message> <message> <location line="+4"/> <source>Transaction amount too small</source> <translation>Částka v transakci je příliš malá</translation> </message> <message> <location line="+1"/> <source>Transaction amounts must be positive</source> <translation>Částky v transakci musí být kladné</translation> </message> <message> <location line="+1"/> <source>Transaction too large</source> <translation>Transakce je příliš velká</translation> </message> <message> <location line="+7"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Použít UPnP k namapování naslouchacího portu (výchozí: 0)</translation> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Použít UPnP k namapování naslouchacího portu (výchozí: 1, pokud naslouchá)</translation> </message> <message> <location line="+1"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation>Použít proxy k připojení ke skrytým službám (výchozí: stejné jako -proxy)</translation> </message> <message> <location line="+2"/> <source>Username for JSON-RPC connections</source> <translation>Uživatelské jméno pro JSON-RPC spojení</translation> </message> <message> <location
line="+4"/> <source>Warning</source> <translation>Upozornění</translation> </message> <message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Upozornění: tahle verze je zastaralá, měl bys ji aktualizovat!</translation> </message> <message> <location line="+1"/> <source>You need to rebuild the databases using -reindex to change -txindex</source> <translation>Je třeba přestavět databázi použitím -reindex, aby bylo možné změnit -txindex</translation> </message> <message> <location line="+1"/> <source>wallet.dat corrupt, salvage failed</source> <translation>Soubor wallet.dat je poškozen, jeho záchrana se nezdařila</translation> </message> <message> <location line="-50"/> <source>Password for JSON-RPC connections</source> <translation>Heslo pro JSON-RPC spojení</translation> </message> <message> <location line="-67"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Povolit JSON-RPC spojení ze specifikované IP adresy</translation> </message> <message> <location line="+76"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Posílat příkazy uzlu běžícím na &lt;ip&gt; (výchozí: 127.0.0.1)</translation> </message> <message> <location line="-120"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Spustit příkaz, když se změní nejlepší blok (%s se v příkazu nahradí hashem bloku)</translation> </message> <message> <location line="+147"/> <source>Upgrade wallet to latest format</source> <translation>Převést peněženku na nejnovější formát</translation> </message> <message> <location line="-21"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Nastavit zásobník klíčů na velikost &lt;n&gt; (výchozí: 100)</translation> </message> <message> <location line="-12"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Přeskenovat řetězec bloků na chybějící transakce tvé pěněženky</translation> </message> <message> <location line="+35"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Použít OpenSSL (https) pro JSON-RPC spojení</translation> </message> <message> <location line="-26"/> <source>Server certificate file (default: server.cert)</source> <translation>Soubor se serverovým certifikátem (výchozí: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Soubor se serverovým soukromým klíčem (výchozí: server.pem)</translation> </message> <message> <location line="-151"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Akceptovatelné šifry (výchozí: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> </message> <message> <location line="+165"/> <source>This help message</source> <translation>Tato nápověda</translation> </message> <message> <location line="+6"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>Nedaří se mi připojit na %s na tomhle počítači (operace bind vrátila chybu %d, %s)</translation> </message> <message> <location line="-91"/> <source>Connect through socks proxy</source> <translation>Připojit se přes socks proxy</translation> </message> <message> <location line="-10"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Povolit DNS dotazy pro -addnode (přidání 
uzlu), -seednode a -connect (připojení)</translation> </message> <message> <location line="+55"/> <source>Loading addresses...</source> <translation>Načítám adresy...</translation> </message> <message> <location line="-35"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Chyba při načítání wallet.dat: peněženka je poškozená</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat: Wallet requires newer version of Legscoin</source> <translation>Chyba při načítání wallet.dat: peněženka vyžaduje novější verzi Legscoinu</translation> </message> <message> <location line="+93"/> <source>Wallet needed to be rewritten: restart Legscoin to complete</source> <translation>Soubor s peněženkou potřeboval přepsat: restartuj Legscoin, aby se operace dokončila</translation> </message> <message> <location line="-95"/> <source>Error loading wallet.dat</source> <translation>Chyba při načítání wallet.dat</translation> </message> <message> <location line="+28"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Neplatná -proxy adresa: &apos;%s&apos;</translation> </message> <message> <location line="+56"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>V -onlynet byla uvedena neznámá síť: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation>V -socks byla požadována neznámá verze proxy: %i</translation> </message> <message> <location line="-96"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation>Nemohu přeložit -bind adresu: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation>Nemohu přeložit -externalip adresu: &apos;%s&apos;</translation> </message> <message> <location line="+44"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Neplatná částka pro -paytxfee=&lt;částka&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount</source> <translation>Neplatná částka</translation> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation>Nedostatek prostředků</translation> </message> <message> <location line="+10"/><|fim▁hole|> <translation>Načítám index bloků...</translation> </message> <message> <location line="-57"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Přidat uzel, ke kterému se připojit a snažit se spojení udržet</translation> </message> <message> <location line="-25"/> <source>Unable to bind to %s on this computer. Legscoin is probably already running.</source> <translation>Nedaří se mi připojit na %s na tomhle počítači. 
Legscoin už pravděpodobně jednou běží.</translation> </message> <message> <location line="+64"/> <source>Fee per KB to add to transactions you send</source> <translation>Poplatek za kB, který se přidá ke každé odeslané transakci</translation> </message> <message> <location line="+19"/> <source>Loading wallet...</source> <translation>Načítám peněženku...</translation> </message> <message> <location line="-52"/> <source>Cannot downgrade wallet</source> <translation>Nemohu převést peněženku do staršího formátu</translation> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation>Nemohu napsat výchozí adresu</translation> </message> <message> <location line="+64"/> <source>Rescanning...</source> <translation>Přeskenovávám...</translation> </message> <message> <location line="-57"/> <source>Done loading</source> <translation>Načítání dokončeno</translation> </message> <message> <location line="+82"/> <source>To use the %s option</source> <translation>K použití volby %s</translation> </message> <message> <location line="-74"/> <source>Error</source> <translation>Chyba</translation> </message> <message> <location line="-31"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Musíš nastavit rpcpassword=&lt;heslo&gt; v konfiguračním souboru: %s Pokud konfigurační soubor ještě neexistuje, vytvoř ho tak, aby ho mohl číst pouze vlastník.</translation> </message> </context> </TS><|fim▁end|>
<source>Loading block index...</source>
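The record above is a Qt Linguist .ts catalog in which each <message> addresses its source line through a relative <location line="+N"/> offset against the previously resolved location, which is why negative offsets such as line="-50" appear. A minimal sketch of resolving those offsets, assuming a catalog generated with relative locations against a single source file; this is my illustration, not part of the record:

# Resolve relative <location line="+N"/> offsets in a Qt Linguist .ts catalog.
# Assumes relative locations within one source file, as in the record above.
import xml.etree.ElementTree as ET

def absolute_locations(ts_path):
    current_line = 0
    for message in ET.parse(ts_path).iter('message'):
        location = message.find('location')
        if location is None:
            continue
        current_line += int(location.get('line', '0'))  # "+4" -> 4, "-50" -> -50
        yield current_line, message.findtext('source', default='')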
<|file_name|>subscriptions.js<|end_file_name|><|fim▁begin|>var icms = icms || {}; icms.subscriptions = (function ($) { this.active_link = {}; this.onDocumentReady = function () { this.setSubscribe(); $('.subscriber').on('click', function () { icms.subscriptions.active_link = this; icms.subscriptions.showLoader(); $.get($(this).attr('href'), $(this).data('target'), function(data){ icms.subscriptions.setResult(data); }, 'json'); return false; }); $('.subscribe_wrap > .count-subscribers').on('click', function () { if(+$(this).text() === 0){ return false; } var list_title = $(this).attr('title'); $.get($(this).data('list_link'), {}, function(data){ icms.modal.openHtml(data.html, list_title); }, 'json'); return false; });<|fim▁hole|>
 }; this.showLoader = function (){ $(icms.subscriptions.active_link).closest('.subscribe_wrap').find('.spinner').show(); }; this.hideLoader = function (){ $(icms.subscriptions.active_link).closest('.subscribe_wrap').find('.spinner').fadeOut(); }; this.setResult = function (data){ icms.subscriptions.hideLoader(); if(data.error){ alert('error'); return; } if(data.confirm_url){ icms.modal.openAjax(data.confirm_url, undefined, undefined, data.confirm_title); return; } if(data.confirm){ icms.modal.openHtml(data.confirm, data.confirm_title); return; } if(data.modal_close){ icms.modal.close(); } if(data.success_text){ icms.modal.alert(data.success_text); } $(icms.subscriptions.active_link).data('issubscribe', data.is_subscribe); icms.subscriptions.setSubscribe(icms.subscriptions.active_link); $(icms.subscriptions.active_link).parent().find('.count-subscribers').html(data.count); }; this.setSubscribe = function (link){ var set = function (obj){ var is_subscribe = $(obj).data('issubscribe'); $('span', obj).html($(obj).data('text'+is_subscribe)); $(obj).attr('href', $(obj).data('link'+is_subscribe)); if(is_subscribe == 0){ $(obj).removeClass('unsubscribe').addClass('subscribe'); } else { $(obj).removeClass('subscribe').addClass('unsubscribe'); } }; if(link){ set(link); return; } $('.subscriber').each(function(indx){ set(this); }); }; return this; }).call(icms.subscriptions || {},jQuery); function successSubscribe(form_data, result){ icms.subscriptions.setResult(result); }<|fim▁end|>
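The subscriptions.js record drives everything from a small JSON contract: setResult() branches on error, confirm_url, confirm (with confirm_title), modal_close and success_text, and finally applies is_subscribe and count to the DOM. A hedged sketch of a response builder satisfying that contract; only the field names come from the script, the function name and defaults are assumptions:

# Illustrative server-side payload for setResult(); field names match the
# JavaScript above, everything else is an assumption.
def subscribe_response(is_subscribe, count, success_text=None,
                       confirm_html=None, confirm_title=None, modal_close=False):
    payload = {
        'error': False,                            # any truthy value makes the client alert and stop
        'is_subscribe': 1 if is_subscribe else 0,  # written back via data('issubscribe')
        'count': count,                            # rendered into .count-subscribers
        'modal_close': modal_close,                # truthy asks the client to close its modal
    }
    if confirm_html is not None:
        payload.update(confirm=confirm_html, confirm_title=confirm_title)  # opened via icms.modal.openHtml
    if success_text is not None:
        payload['success_text'] = success_text     # shown through icms.modal.alert
    return payload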
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Unix-specific extension to the primitives in the `std::ffi` module. //! //! # Examples //! //! ``` //! use std::ffi::OsString; //! use std::os::unix::ffi::OsStringExt; //! //! let bytes = b"foo".to_vec(); //! //! // OsStringExt::from_vec //! let os_string = OsString::from_vec(bytes); //! assert_eq!(os_string.to_str(), Some("foo")); //! //! // OsStringExt::into_vec //! let bytes = os_string.into_vec(); //! assert_eq!(bytes, b"foo"); //! ``` //! //! ``` //! use std::ffi::OsStr; //! use std::os::unix::ffi::OsStrExt;<|fim▁hole|>//! let bytes = b"foo"; //! //! // OsStrExt::from_bytes //! let os_str = OsStr::from_bytes(bytes); //! assert_eq!(os_str.to_str(), Some("foo")); //! //! // OsStrExt::as_bytes //! let bytes = os_str.as_bytes(); //! assert_eq!(bytes, b"foo"); //! ``` #![stable(feature = "rust1", since = "1.0.0")] mod os_str; #[stable(feature = "rust1", since = "1.0.0")] pub use self::os_str::{OsStrExt, OsStringExt};<|fim▁end|>
//!
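The mod.rs record already carries doc-tests for the Unix-only byte round trip on OsString/OsStr. For readers coming from Python, os.fsencode and os.fsdecode cover the same ground; this analogy is mine, not part of the Rust source:

# Python analogue of OsStringExt/OsStrExt: bytes <-> path strings on Unix.
import os

raw = b"foo"
decoded = os.fsdecode(raw)          # roughly OsStr::from_bytes(...).to_str()
assert decoded == "foo"
assert os.fsencode(decoded) == raw  # roughly os_str.as_bytes()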
<|file_name|>xml_tools.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import pathlib from typing import Union import lxml.etree def save_as_xml( element_tree: Union[lxml.etree._Element, lxml.etree._ElementTree], filepath: Union[str, pathlib.Path], pretty_print: bool = True) -> None:<|fim▁hole|> Args: element_tree (lxml.etree._ElementTree): the ElementTree to be save. filepath (str, pathlib.Path): The path of the File to be output as XML. pretty_print (bool) optional: The Argument of lxml.etree.tostring. Defaults to True. """ if not isinstance(filepath, pathlib.Path): filepath = pathlib.Path(filepath) with filepath.open(mode='w', encoding='utf-8', newline='') as file: file.write(lxml.etree.tostring( element_tree, encoding='utf-8', pretty_print=pretty_print, xml_declaration=True).decode('utf-8'))<|fim▁end|>
"""save ElementTree in the file as XML
<|file_name|>magento_model.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ############################################################################## # # Author: Guewen Baconnier # Copyright 2013 Camptocamp SA # Copyright 2013 Akretion # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import logging from datetime import datetime, timedelta from openerp.osv import fields, orm from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT from openerp.tools.translate import _ from openerp.addons.connector.session import ConnectorSession from openerp.addons.connector.connector import ConnectorUnit from openerp.addons.connector.unit.mapper import (mapping, only_create, ImportMapper ) from .unit.backend_adapter import GenericAdapter from .unit.import_synchronizer import (import_batch, DirectBatchImport, MagentoImportSynchronizer, AddCheckpoint, ) from .partner import partner_import_batch from .sale import sale_order_import_batch from .backend import magento from .connector import add_checkpoint _logger = logging.getLogger(__name__) IMPORT_DELTA_BUFFER = 30 # seconds class magento_backend(orm.Model): _name = 'magento.backend' _description = 'Magento Backend' _inherit = 'connector.backend' _backend_type = 'magento' def select_versions(self, cr, uid, context=None): """ Available versions in the backend. Can be inherited to add custom versions. Using this method to add a version from an ``_inherit`` does not constrain to redefine the ``version`` field in the ``_inherit`` model. """ return [('1.7', '1.7')] def _select_versions(self, cr, uid, context=None): """ Available versions in the backend. If you want to add a version, do not override this method, but ``select_version``. """ return self.select_versions(cr, uid, context=context) def _get_stock_field_id(self, cr, uid, context=None): field_ids = self.pool.get('ir.model.fields').search( cr, uid, [('model', '=', 'product.product'), ('name', '=', 'virtual_available')], context=context) return field_ids[0] _columns = { 'version': fields.selection( _select_versions, string='Version', required=True), 'location': fields.char( 'Location', required=True, help="Url to magento application"), 'admin_location': fields.char('Admin Location'), 'use_custom_api_path': fields.boolean( 'Custom Api Path', help="The default API path is '/index.php/api/xmlrpc'. " "Check this box if you use a custom API path, in that case, " "the location has to be completed with the custom API path "), 'username': fields.char( 'Username', help="Webservice user"), 'password': fields.char( 'Password', help="Webservice password"), 'use_auth_basic': fields.boolean( 'Use HTTP Auth Basic', help="Use a Basic Access Authentication for the API. 
" "The Magento server could be configured to restrict access " "using a HTTP authentication based on a username and " "a password."), 'auth_basic_username': fields.char( 'Basic Auth. Username', help="Basic access authentication web server side username"), 'auth_basic_password': fields.char( 'Basic Auth. Password', help="Basic access authentication web server side password"), 'sale_prefix': fields.char( 'Sale Prefix', help="A prefix put before the name of imported sales orders.\n" "For instance, if the prefix is 'mag-', the sales " "order 100000692 in Magento, will be named 'mag-100000692' " "in OpenERP."), 'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse', required=True, help='Warehouse used to compute the ' 'stock quantities.'), 'website_ids': fields.one2many( 'magento.website', 'backend_id', string='Website', readonly=True), 'default_lang_id': fields.many2one( 'res.lang', 'Default Language', help="If a default language is selected, the records " "will be imported in the translation of this language.\n" "Note that a similar configuration exists " "for each storeview."), 'default_category_id': fields.many2one( 'product.category', string='Default Product Category', help='If a default category is selected, products imported ' 'without a category will be linked to it.'), # add a field `auto_activate` -> activate a cron 'import_products_from_date': fields.datetime( 'Import products from date'), 'import_categories_from_date': fields.datetime( 'Import categories from date'), 'catalog_price_tax_included': fields.boolean('Prices include tax'), 'product_stock_field_id': fields.many2one( 'ir.model.fields', string='Stock Field', domain="[('model', 'in', ['product.product', 'product.template'])," " ('ttype', '=', 'float')]", help="Choose the field of the product which will be used for " "stock inventory updates.\nIf empty, Quantity Available " "is used."), 'product_binding_ids': fields.one2many('magento.product.product', 'backend_id', string='Magento Products', readonly=True), } _defaults = { 'product_stock_field_id': _get_stock_field_id, 'use_custom_api_path': False, 'use_auth_basic': False, } _sql_constraints = [ ('sale_prefix_uniq', 'unique(sale_prefix)', "A backend with the same sale prefix already exists") ] def check_magento_structure(self, cr, uid, ids, context=None): """ Used in each data import. Verify if a website exists for each backend before starting the import. 
""" for backend_id in ids: website_ids = self.pool['magento.website'].search( cr, uid, [('backend_id', '=', backend_id)], context=context) if not website_ids: self.synchronize_metadata(cr, uid, backend_id, context=context) return True def synchronize_metadata(self, cr, uid, ids, context=None): if not hasattr(ids, '__iter__'): ids = [ids] session = ConnectorSession(cr, uid, context=context) for backend_id in ids: for model in ('magento.website', 'magento.store', 'magento.storeview'): # import directly, do not delay because this # is a fast operation, a direct return is fine # and it is simpler to import them sequentially import_batch(session, model, backend_id) return True def import_partners(self, cr, uid, ids, context=None): """ Import partners from all websites """ if not hasattr(ids, '__iter__'): ids = [ids] self.check_magento_structure(cr, uid, ids, context=context) for backend in self.browse(cr, uid, ids, context=context): for website in backend.website_ids: website.import_partners() return True def import_sale_orders(self, cr, uid, ids, context=None): """ Import sale orders from all store views """ if not hasattr(ids, '__iter__'): ids = [ids] storeview_obj = self.pool.get('magento.storeview') storeview_ids = storeview_obj.search(cr, uid, [('backend_id', 'in', ids)], context=context) storeviews = storeview_obj.browse(cr, uid, storeview_ids, context=context) for storeview in storeviews: storeview.import_sale_orders() return True def import_customer_groups(self, cr, uid, ids, context=None): if not hasattr(ids, '__iter__'): ids = [ids] self.check_magento_structure(cr, uid, ids, context=context) session = ConnectorSession(cr, uid, context=context) for backend_id in ids: import_batch.delay(session, 'magento.res.partner.category', backend_id) return True def _import_from_date(self, cr, uid, ids, model, from_date_field, context=None): if not hasattr(ids, '__iter__'): ids = [ids] self.check_magento_structure(cr, uid, ids, context=context) session = ConnectorSession(cr, uid, context=context) import_start_time = datetime.now() for backend in self.browse(cr, uid, ids, context=context): from_date = getattr(backend, from_date_field) if from_date: from_date = datetime.strptime(from_date, DEFAULT_SERVER_DATETIME_FORMAT) else: from_date = None import_batch.delay(session, model, backend.id, filters={'from_date': from_date}) # Records from Magento are imported based on their `created_at` # date. This date is set on Magento at the beginning of a # transaction, so if the import is run between the beginning and # the end of a transaction, the import of a record may be # missed. That's why we add a small buffer back in time where # the eventually missed records will be retrieved. This also # means that we'll have jobs that import twice the same records, # but this is not a big deal because they will be skipped when # the last `sync_date` is the same. 
next_time = import_start_time - timedelta(seconds=IMPORT_DELTA_BUFFER) next_time = next_time.strftime(DEFAULT_SERVER_DATETIME_FORMAT) self.write(cr, uid, ids, {from_date_field: next_time}, context=context) def import_product_categories(self, cr, uid, ids, context=None): self._import_from_date(cr, uid, ids, 'magento.product.category', 'import_categories_from_date', context=context) return True def import_product_product(self, cr, uid, ids, context=None): self._import_from_date(cr, uid, ids, 'magento.product.product', 'import_products_from_date', context=context) return True def update_product_stock_qty(self, cr, uid, ids, context=None): if not hasattr(ids, '__iter__'): ids = [ids] mag_product_obj = self.pool.get('magento.product.product') product_ids = mag_product_obj.search(cr, uid, [('backend_id', 'in', ids), ('no_stock_sync', '=', False)], context=context) mag_product_obj.recompute_magento_qty(cr, uid, product_ids, context=context) return True def _magento_backend(self, cr, uid, callback, domain=None, context=None): if domain is None: domain = [] ids = self.search(cr, uid, domain, context=context) if ids: callback(cr, uid, ids, context=context) def _scheduler_import_sale_orders(self, cr, uid, domain=None, context=None): self._magento_backend(cr, uid, self.import_sale_orders, domain=domain, context=context) def _scheduler_import_customer_groups(self, cr, uid, domain=None, context=None): self._magento_backend(cr, uid, self.import_customer_groups, domain=domain, context=context) def _scheduler_import_partners(self, cr, uid, domain=None, context=None): self._magento_backend(cr, uid, self.import_partners, domain=domain, context=context) def _scheduler_import_product_categories(self, cr, uid, domain=None, context=None): self._magento_backend(cr, uid, self.import_product_categories, domain=domain, context=context) def _scheduler_import_product_product(self, cr, uid, domain=None, context=None): self._magento_backend(cr, uid, self.import_product_product, domain=domain, context=context) def _scheduler_update_product_stock_qty(self, cr, uid, domain=None, context=None): self._magento_backend(cr, uid, self.update_product_stock_qty, domain=domain, context=context) def output_recorder(self, cr, uid, ids, context=None): """ Utility method to output a file containing all the recorded requests / responses with Magento. Used to generate test data. Should be called with ``erppeek`` for instance. 
""" from .unit.backend_adapter import output_recorder import os import tempfile fmt = '%Y-%m-%d-%H-%M-%S' timestamp = datetime.now().strftime(fmt) filename = 'output_%s_%s' % (cr.dbname, timestamp) path = os.path.join(tempfile.gettempdir(), filename) output_recorder(path) return path # TODO migrate from external.shop.group class magento_website(orm.Model): _name = 'magento.website' _inherit = 'magento.binding' _description = 'Magento Website' _order = 'sort_order ASC, id ASC' _columns = { 'name': fields.char('Name', required=True, readonly=True), 'code': fields.char('Code', readonly=True), 'sort_order': fields.integer('Sort Order', readonly=True), 'store_ids': fields.one2many( 'magento.store', 'website_id', string="Stores", readonly=True), 'import_partners_from_date': fields.datetime( 'Import partners from date'), 'product_binding_ids': fields.many2many('magento.product.product', string='Magento Products', readonly=True), } _sql_constraints = [ ('magento_uniq', 'unique(backend_id, magento_id)', 'A website with the same ID on Magento already exists.'), ] def import_partners(self, cr, uid, ids, context=None): if not hasattr(ids, '__iter__'): ids = [ids] session = ConnectorSession(cr, uid, context=context) import_start_time = datetime.now() for website in self.browse(cr, uid, ids, context=context): backend_id = website.backend_id.id if website.import_partners_from_date:<|fim▁hole|> website.import_partners_from_date, DEFAULT_SERVER_DATETIME_FORMAT) else: from_date = None partner_import_batch.delay( session, 'magento.res.partner', backend_id, {'magento_website_id': website.magento_id, 'from_date': from_date}) # Records from Magento are imported based on their `created_at` # date. This date is set on Magento at the beginning of a # transaction, so if the import is run between the beginning and # the end of a transaction, the import of a record may be # missed. That's why we add a small buffer back in time where # the eventually missed records will be retrieved. This also # means that we'll have jobs that import twice the same records, # but this is not a big deal because they will be skipped when # the last `sync_date` is the same. 
next_time = import_start_time - timedelta(seconds=IMPORT_DELTA_BUFFER) next_time = next_time.strftime(DEFAULT_SERVER_DATETIME_FORMAT) self.write(cr, uid, ids, {'import_partners_from_date': next_time}, context=context) return True # TODO migrate from sale.shop (create a magento.store + associated # sale.shop) class magento_store(orm.Model): _name = 'magento.store' _inherit = 'magento.binding' _description = 'Magento Store' _inherits = {'sale.shop': 'openerp_id'} def _get_store_from_website(self, cr, uid, ids, context=None): store_obj = self.pool.get('magento.store') return store_obj.search(cr, uid, [('website_id', 'in', ids)], context=context) _columns = { 'website_id': fields.many2one( 'magento.website', 'Magento Website', required=True, readonly=True, ondelete='cascade'), 'openerp_id': fields.many2one( 'sale.shop', string='Sale Shop', required=True, readonly=True, ondelete='cascade'), 'backend_id': fields.related( 'website_id', 'backend_id', type='many2one', relation='magento.backend', string='Magento Backend', store={ 'magento.store': (lambda self, cr, uid, ids, c=None: ids, ['website_id'], 10), 'magento.website': (_get_store_from_website, ['backend_id'], 20), }, readonly=True), 'storeview_ids': fields.one2many( 'magento.storeview', 'store_id', string="Storeviews", readonly=True), 'send_picking_done_mail': fields.boolean( 'Send email notification on picking done', help="Should the picking export/creation send " "an email notification on Magento side?"), 'send_invoice_paid_mail': fields.boolean( 'Send email notification on invoice validated/paid', help="Should the invoice export/creation send " "an email notification on Magento side?"), 'create_invoice_on': fields.selection( [('open', 'Validate'), ('paid', 'Paid')], 'Create invoice on action', required=True, help="Should the invoice be created in Magento " "when it is validated or when it is paid in OpenERP?\n" "This only takes effect if the sales order's related " "payment method is not giving an option for this by " "itself. (See Payment Methods)"), } _defaults = { 'create_invoice_on': 'paid', } _sql_constraints = [ ('magento_uniq', 'unique(backend_id, magento_id)', 'A store with the same ID on Magento already exists.'), ] class sale_shop(orm.Model): _inherit = 'sale.shop' _columns = { 'magento_bind_ids': fields.one2many( 'magento.store', 'openerp_id', string='Magento Bindings', readonly=True), } def copy_data(self, cr, uid, id, default=None, context=None): if default is None: default = {} default['magento_bind_ids'] = False return super(sale_shop, self).copy_data(cr, uid, id, default=default, context=context) # TODO: migrate from magerp.storeviews class magento_storeview(orm.Model): _name = 'magento.storeview' _inherit = 'magento.binding' _description = "Magento Storeview" _order = 'sort_order ASC, id ASC' _columns = { 'name': fields.char('Name', required=True, readonly=True), 'code': fields.char('Code', readonly=True), 'enabled': fields.boolean('Enabled', readonly=True), 'sort_order': fields.integer('Sort Order', readonly=True), 'store_id': fields.many2one('magento.store', 'Store', ondelete='cascade', readonly=True), 'lang_id': fields.many2one('res.lang', 'Language'), 'backend_id': fields.related( 'store_id', 'website_id', 'backend_id', type='many2one', relation='magento.backend', string='Magento Backend', store=True, readonly=True), 'import_orders_from_date': fields.datetime( 'Import sale orders from date', help='do not consider non-imported sale orders before this date. 
' 'Leave empty to import all sale orders'), 'no_sales_order_sync': fields.boolean( 'No Sales Order Synchronization', help='Check if the storeview is active in Magento ' 'but its sales orders should not be imported.'), } _defaults = { 'no_sales_order_sync': False, } _sql_constraints = [ ('magento_uniq', 'unique(backend_id, magento_id)', 'A storeview with the same ID on Magento already exists.'), ] def import_sale_orders(self, cr, uid, ids, context=None): session = ConnectorSession(cr, uid, context=context) import_start_time = datetime.now() for storeview in self.browse(cr, uid, ids, context=context): if storeview.no_sales_order_sync: _logger.debug("The storeview '%s' is active in Magento " "but its sales orders should not be imported." % storeview.name) continue backend_id = storeview.backend_id.id if storeview.import_orders_from_date: from_date = datetime.strptime( storeview.import_orders_from_date, DEFAULT_SERVER_DATETIME_FORMAT) else: from_date = None sale_order_import_batch.delay( session, 'magento.sale.order', backend_id, {'magento_storeview_id': storeview.magento_id, 'from_date': from_date}, priority=1) # executed as soon as possible # Records from Magento are imported based on their `created_at` # date. This date is set on Magento at the beginning of a # transaction, so if the import is run between the beginning and # the end of a transaction, the import of a record may be # missed. That's why we add a small buffer back in time where # the eventually missed records will be retrieved. This also # means that we'll have jobs that import twice the same records, # but this is not a big deal because the sales orders will be # imported the first time and the jobs will be skipped on the # subsequent imports next_time = import_start_time - timedelta(seconds=IMPORT_DELTA_BUFFER) next_time = next_time.strftime(DEFAULT_SERVER_DATETIME_FORMAT) self.write(cr, uid, ids, {'import_orders_from_date': next_time}, context=context) return True @magento class WebsiteAdapter(GenericAdapter): _model_name = 'magento.website' _magento_model = 'ol_websites' _admin_path = 'system_store/editWebsite/website_id/{id}' @magento class StoreAdapter(GenericAdapter): _model_name = 'magento.store' _magento_model = 'ol_groups' _admin_path = 'system_store/editGroup/group_id/{id}' @magento class StoreviewAdapter(GenericAdapter): _model_name = 'magento.storeview' _magento_model = 'ol_storeviews' _admin_path = 'system_store/editStore/store_id/{id}' @magento class MetadataBatchImport(DirectBatchImport): """ Import the records directly, without delaying the jobs. Import the Magento Websites, Stores, Storeviews They are imported directly because this is a rare and fast operation, and we don't really bother if it blocks the UI during this time. (that's also a means to rapidly check the connectivity with Magento). 
""" _model_name = [ 'magento.website', 'magento.store', 'magento.storeview', ] @magento class WebsiteImportMapper(ImportMapper): _model_name = 'magento.website' direct = [('code', 'code'), ('sort_order', 'sort_order')] @mapping def name(self, record): name = record['name'] if name is None: name = _('Undefined') return {'name': name} @mapping def backend_id(self, record): return {'backend_id': self.backend_record.id} @magento class StoreImportMapper(ImportMapper): _model_name = 'magento.store' direct = [('name', 'name')] @mapping def website_id(self, record): binder = self.get_binder_for_model('magento.website') binding_id = binder.to_openerp(record['website_id']) return {'website_id': binding_id} @mapping @only_create def warehouse_id(self, record): return {'warehouse_id': self.backend_record.warehouse_id.id} @magento class StoreviewImportMapper(ImportMapper): _model_name = 'magento.storeview' direct = [ ('name', 'name'), ('code', 'code'), ('is_active', 'enabled'), ('sort_order', 'sort_order'), ] @mapping def store_id(self, record): binder = self.get_binder_for_model('magento.store') binding_id = binder.to_openerp(record['group_id']) return {'store_id': binding_id} @magento class StoreImport(MagentoImportSynchronizer): """ Import one Magento Store (create a sale.shop via _inherits) """ _model_name = ['magento.store', ] def _create(self, data): openerp_binding_id = super(StoreImport, self)._create(data) checkpoint = self.get_connector_unit_for_model(AddCheckpoint) checkpoint.run(openerp_binding_id) return openerp_binding_id @magento class StoreviewImport(MagentoImportSynchronizer): """ Import one Magento Storeview """ _model_name = ['magento.storeview', ] def _create(self, data): openerp_binding_id = super(StoreviewImport, self)._create(data) checkpoint = self.get_connector_unit_for_model(StoreViewAddCheckpoint) checkpoint.run(openerp_binding_id) return openerp_binding_id @magento class StoreViewAddCheckpoint(ConnectorUnit): """ Add a connector.checkpoint on the magento.storeview record """ _model_name = ['magento.storeview', ] def run(self, openerp_binding_id): add_checkpoint(self.session, self.model._name, openerp_binding_id, self.backend_record.id)<|fim▁end|>
from_date = datetime.strptime(
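The magento_model.py record repeats one design note in three places: Magento stamps created_at at the start of a transaction, so a record can be missed when an import runs mid-transaction, which is why the stored "from date" is rewound by IMPORT_DELTA_BUFFER seconds and duplicate jobs are tolerated (records with an unchanged sync_date are skipped). The rewind in isolation, with both constants taken from the record:

# The overlap every import_* method applies before persisting the next
# "from date"; re-reading the last 30 seconds is cheap, missing rows is not.
from datetime import datetime, timedelta

IMPORT_DELTA_BUFFER = 30                              # seconds, as in the record
DEFAULT_SERVER_DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S'  # OpenERP server format

import_start_time = datetime.now()
next_time = import_start_time - timedelta(seconds=IMPORT_DELTA_BUFFER)
next_from_date = next_time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
# next_from_date is written back to import_*_from_date, so the following run
# overlaps this one and relies on the sync_date check to skip duplicates.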
<|file_name|>example.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python # -*- coding: utf-8 -*- from __future__ import print_function import os import urllib from OpenGL.GL import * from OpenGL.GLU import * from OpenGL.GLUT import * from jp.ac.kyoto_su.aokilab.dragon.mvc.model import OpenGLModel from jp.ac.kyoto_su.aokilab.dragon.mvc.view import * from jp.ac.kyoto_su.aokilab.dragon.opengl.triangle import OpenGLTriangle from jp.ac.kyoto_su.aokilab.dragon.opengl.polygon import OpenGLPolygon TRACE = True DEBUG = False class DragonModel(OpenGLModel): """Dragon model.""" def __init__(self): """Constructor for the dragon model.""" if TRACE: print(__name__), self.__init__.__doc__ super(DragonModel, self).__init__() self._eye_point = [-5.5852450791872 , 3.07847342734 , 15.794105252496] self._sight_point = [0.27455347776413 , 0.20096999406815 , -0.11261999607086] self._up_vector = [0.1018574904194 , 0.98480906061847 , -0.14062775604137] self._fovy = self._default_fovy = 12.642721790235 filename = os.path.join(os.getcwd(), 'dragon.txt') if os.path.exists(filename) and os.path.isfile(filename): pass else: url = 'http://www.cc.kyoto-su.ac.jp/~atsushi/Programs/Dragon/dragon.txt' urllib.urlretrieve(url, filename) with open(filename, "rU") as a_file: while True: a_string = a_file.readline() if len(a_string) == 0: break a_list = a_string.split() if len(a_list) == 0: continue first_string = a_list[0] if first_string == "number_of_vertexes": number_of_vertexes = int(a_list[1]) if first_string == "number_of_triangles": number_of_triangles = int(a_list[1]) if first_string == "end_header": get_tokens = (lambda file: file.readline().split()) collection_of_vertexes = [] for n_th in range(number_of_vertexes): a_list = get_tokens(a_file) a_vertex = map(float, a_list[0:3]) collection_of_vertexes.append(a_vertex) index_to_vertex = (lambda index: collection_of_vertexes[index-1]) for n_th in range(number_of_triangles): a_list = get_tokens(a_file) indexes = map(int, a_list[0:3]) vertexes = map(index_to_vertex, indexes) a_triangle = OpenGLTriangle(*vertexes) self._display_object.append(a_triangle) return def default_window_title(self): """Answer the window title (label) for the dragon.""" if TRACE: print(__name__), self.default_window_title.__doc__ return "Dragon" class WaspModel(OpenGLModel): """Wasp model.""" def __init__(self): """Constructor for the wasp model.""" if TRACE: print(__name__), self.__init__.__doc__ super(WaspModel, self).__init__() self._eye_point = [-5.5852450791872 , 3.07847342734 , 15.794105252496] self._sight_point = [0.19825005531311 , 1.8530999422073 , -0.63795006275177] self._up_vector = [0.070077999093727 , 0.99630606032682 , -0.049631725731267] self._fovy = self._default_fovy = 41.480099231656 filename = os.path.join(os.getcwd(), 'wasp.txt') if os.path.exists(filename) and os.path.isfile(filename): pass else: url = 'http://www.cc.kyoto-su.ac.jp/~atsushi/Programs/Wasp/wasp.txt' urllib.urlretrieve(url, filename) 
<|fim▁hole|> a_string = a_file.readline() if len(a_string) == 0: break a_list = a_string.split() if len(a_list) == 0: continue first_string = a_list[0] if first_string == "number_of_vertexes": number_of_vertexes = int(a_list[1]) if first_string == "number_of_polygons": number_of_polygons = int(a_list[1]) if first_string == "end_header": get_tokens = (lambda file: file.readline().split()) collection_of_vertexes = [] for n_th in range(number_of_vertexes): a_list = get_tokens(a_file) a_vertex = map(float, a_list[0:3]) collection_of_vertexes.append(a_vertex) index_to_vertex = (lambda index: collection_of_vertexes[index-1]) for n_th in
range(number_of_polygons): a_list = get_tokens(a_file) number_of_indexes = int(a_list[0]) index = number_of_indexes + 1 indexes = map(int, a_list[1:index]) vertexes = map(index_to_vertex, indexes) rgb_color = map(float, a_list[index:index+3]) a_polygon = OpenGLPolygon(vertexes, rgb_color) self._display_object.append(a_polygon) return def default_view_class(self): """Answer the default view class for displaying the wasp model.""" if TRACE: print(__name__), self.default_view_class.__doc__ return WaspView def default_window_title(self): """Answer the window title (label) for the wasp.""" if TRACE: print(__name__), self.default_window_title.__doc__ return "Wasp" class BunnyModel(OpenGLModel): """Bunny model.""" def __init__(self): """Constructor for the bunny model.""" if TRACE: print(__name__), self.__init__.__doc__ super(BunnyModel, self).__init__() filename = os.path.join(os.getcwd(), 'bunny.ply') if os.path.exists(filename) and os.path.isfile(filename): pass else: url = 'http://www.cc.kyoto-su.ac.jp/~atsushi/Programs/Bunny/bunny.ply' urllib.urlretrieve(url, filename) with open(filename, "rU") as a_file: while True: a_string = a_file.readline() if len(a_string) == 0: break a_list = a_string.split() if len(a_list) == 0: continue first_string = a_list[0] if first_string == "element": second_string = a_list[1] if second_string == "vertex": number_of_vertexes = int(a_list[2]) if second_string == "face": number_of_faces = int(a_list[2]) if first_string == "end_header": get_tokens = (lambda file: file.readline().split()) collection_of_vertexes = [] for n_th in range(number_of_vertexes): a_list = get_tokens(a_file) a_vertex = map(float, a_list[0:3]) collection_of_vertexes.append(a_vertex) index_to_vertex = (lambda index: collection_of_vertexes[index]) for n_th in range(number_of_faces): a_list = get_tokens(a_file) indexes = map(int, a_list[1:4]) vertexes = map(index_to_vertex, indexes) a_triangle = OpenGLTriangle(*vertexes) self._display_object.append(a_triangle) if first_string == "comment": second_string = a_list[1] if second_string == "eye_point_xyz": self._eye_point = map(float, a_list[2:5]) if second_string == "sight_point_xyz": self._sight_point = map(float, a_list[2:5]) if second_string == "up_vector_xyz": self._up_vector = map(float, a_list[2:5]) if second_string == "zoom_height" and a_list[3] == "fovy": self._fovy = self._default_fovy = float(a_list[4]) return def default_view_class(self): """Answer the default view class for displaying the bunny model.""" if TRACE: print(__name__), self.default_view_class.__doc__ return BunnyView def default_window_title(self): """Answer the window title (label) for the bunny.""" if TRACE: print(__name__), self.default_window_title.__doc__ return "Stanford Bunny" # end of file<|fim▁end|>
with open(filename, "rU") as a_file: while True:
<|file_name|>OversampleWithDepthTest.java<|end_file_name|><|fim▁begin|>package org.apache.lucene.facet.sampling; import java.io.IOException; import java.util.Collections; import org.apache.lucene.document.Document; import org.apache.lucene.facet.FacetTestCase; import org.apache.lucene.facet.index.FacetFields; import org.apache.lucene.facet.params.FacetIndexingParams; import org.apache.lucene.facet.params.FacetSearchParams; import org.apache.lucene.facet.sampling.RandomSampler; import org.apache.lucene.facet.sampling.Sampler; import org.apache.lucene.facet.sampling.SamplingAccumulator; import org.apache.lucene.facet.sampling.SamplingParams; import org.apache.lucene.facet.search.CountFacetRequest; import org.apache.lucene.facet.search.FacetRequest; import org.apache.lucene.facet.search.FacetResult; import org.apache.lucene.facet.search.FacetResultNode; import org.apache.lucene.facet.search.FacetsCollector; import org.apache.lucene.facet.search.StandardFacetsAccumulator; import org.apache.lucene.facet.search.FacetRequest.ResultMode; import org.apache.lucene.facet.taxonomy.CategoryPath; import org.apache.lucene.facet.taxonomy.TaxonomyReader; import org.apache.lucene.facet.taxonomy.TaxonomyWriter; import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader; import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.util.IOUtils; import org.junit.Test; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ public class OversampleWithDepthTest extends FacetTestCase { @Test public void testCountWithdepthUsingSampling() throws Exception, IOException { Directory indexDir = newDirectory(); Directory taxoDir = newDirectory(); FacetIndexingParams fip = new FacetIndexingParams(randomCategoryListParams()); // index 100 docs, each with one category: ["root", docnum/10, docnum] // e.g. 
root/8/87 index100Docs(indexDir, taxoDir, fip); DirectoryReader r = DirectoryReader.open(indexDir); TaxonomyReader tr = new DirectoryTaxonomyReader(taxoDir); CountFacetRequest facetRequest = new CountFacetRequest(new CategoryPath("root"), 10); // Setting the depth to '2', should potentially get all categories facetRequest.setDepth(2); facetRequest.setResultMode(ResultMode.PER_NODE_IN_TREE); FacetSearchParams fsp = new FacetSearchParams(fip, facetRequest); // Craft sampling params to enforce sampling final SamplingParams params = new SamplingParams(); params.setMinSampleSize(2); params.setMaxSampleSize(50); params.setOversampleFactor(5); params.setSamplingThreshold(60); params.setSampleRatio(0.1); FacetResult res = searchWithFacets(r, tr, fsp, params); FacetRequest req = res.getFacetRequest(); assertEquals(facetRequest, req); FacetResultNode rootNode = res.getFacetResultNode(); // Each node below root should also have sub-results as the requested depth was '2' for (FacetResultNode node : rootNode.subResults) { assertTrue("node " + node.label + " should have had children as the requested depth was '2'", node.subResults.size() > 0); } IOUtils.close(r, tr, indexDir, taxoDir); } <|fim▁hole|> FacetFields facetFields = new FacetFields(tw, fip); for (int i = 0; i < 100; i++) { Document doc = new Document(); CategoryPath cp = new CategoryPath("root",Integer.toString(i / 10), Integer.toString(i)); facetFields.addFields(doc, Collections.singletonList(cp)); w.addDocument(doc); } IOUtils.close(tw, w); } /** search reader <code>r</code>*/ private FacetResult searchWithFacets(IndexReader r, TaxonomyReader tr, FacetSearchParams fsp, final SamplingParams params) throws IOException { // a FacetsCollector with a sampling accumulator Sampler sampler = new RandomSampler(params, random()); StandardFacetsAccumulator sfa = new SamplingAccumulator(sampler, fsp, r, tr); FacetsCollector fcWithSampling = FacetsCollector.create(sfa); IndexSearcher s = new IndexSearcher(r); s.search(new MatchAllDocsQuery(), fcWithSampling); // there's only one expected result, return just it. return fcWithSampling.getFacetResults().get(0); } }<|fim▁end|>
private void index100Docs(Directory indexDir, Directory taxoDir, FacetIndexingParams fip) throws IOException { IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, null); IndexWriter w = new IndexWriter(indexDir, iwc); TaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir);
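In the Java record the SamplingParams are deliberately tuned so sampling must engage on the 100-document index: the threshold sits below the document count, the sample ratio is clamped between the min and max sample sizes, and the oversample factor widens the request before results are trimmed back. A back-of-the-envelope check of those numbers; this is my arithmetic reading of the parameters, not a Lucene API call:

# Rough arithmetic behind "craft sampling params to enforce sampling".
num_docs = 100
sampling_threshold = 60        # sampling engages above this result-set size
sample_ratio = 0.1
min_sample, max_sample = 2, 50
oversample_factor = 5
requested_top_n = 10           # CountFacetRequest(new CategoryPath("root"), 10)

assert num_docs > sampling_threshold              # so the accumulator samples
sample_size = min(max(num_docs * sample_ratio, min_sample), max_sample)
assert sample_size == 10                          # 10% of 100, inside [2, 50]
assert requested_top_n * oversample_factor == 50  # fetched before trimming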
<|file_name|>inputDateTest.js<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2013 salesforce.com, inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ ({ /** * Verify that the provided component is inputDateHtml on mobile/tablet, and inputDate on desktop */ testCorrectComponentProvided: { test: function (cmp) { var isDesktop = $A.get('$Browser.formFactor').toLowerCase() === "desktop"; var providedCmpName = cmp.getDef().getDescriptor().getQualifiedName(); if (isDesktop) { $A.test.assertEquals("markup://ui:inputDate", providedCmpName, "should use inputDate on desktop"); } else { $A.test.assertEquals("markup://ui:inputDateHtml", providedCmpName, "should use inputDateHtml on mobile/tablet"); } } }, // TODO: W-1937288 Fix flapping _testInitialValue: { attributes: {displayDatePicker: 'true', value: '2012-09-10', format: 'MM/dd/yyyy'}, test: function (cmp) { var inputDateStr = cmp.find("inputText").getElement().value; $A.test.assertEquals("09/10/2012", inputDateStr, "Dates are not the same and they should be"); } }, /** * Verify behavior when 'format' attribute is not assigned a value. */ testDefaultFormat: { browsers: ['DESKTOP'], attributes: {displayDatePicker: 'true', value: '2012-09-10'}, test: function (cmp) { var inputDateStr = cmp.find("inputText").getElement().value; $A.test.assertEquals("Sep 10, 2012", inputDateStr, "Dates are not the same and they should be"); } }, /** * Verify behavior when 'format' attribute is assigned an empty string. */ testEmptyFormat: { browsers: ['DESKTOP'], attributes: {displayDatePicker: 'true', value: '2012-09-10', format: ''}, test: function (cmp) { var inputDateStr = cmp.find("inputText").getElement().value; $A.test.assertEquals("Sep 10, 2012", inputDateStr, "Dates are not the same and they should be"); } }, /** * Verify behavior when 'format' attribute is assigned a garbage value. */ testInvalidFormat: { browsers: ['DESKTOP'], attributes: {displayDatePicker: 'true', format: 'KKKKKK'}, test: [function (cmp) { cmp.find("datePicker").get('c.selectToday').runDeprecated(); }, function (cmp) { var inputDateStr = cmp.find("inputText").getElement().value; var dt = moment().format('KKKKKK'); $A.test.assertEquals(dt, inputDateStr, "Dates are not the same and they should be"); }] }, /** * Verify behavior when 'langLocale' attribute is not assigned a value. */ testDefaultLangLocale: { browsers: ['DESKTOP'], attributes: {displayDatePicker: 'true', format: 'MMMM dd, yyyy', value: '2012-09-10'}, test: function (cmp) { var inputDateStr = cmp.find("inputText").getElement().value; $A.test.assertEquals("September 10, 2012", inputDateStr, "Dates are not the same and they should be"); } }, /** * Verify behavior when 'langLocale' attribute is assigned a different value. 
*/ testLangLocale: { browsers: ['DESKTOP'], attributes: {displayDatePicker: 'true', format: 'MMMM dd, yyyy', value: '2012-09-10', langLocale: 'es'}, test: function (cmp) { var inputDateStr = cmp.find("inputText").getElement().value; $A.test.assertEquals("Septiembre 10, 2012", inputDateStr, "Dates are not the same and they should be"); } }, /** * Verify behavior when 'langLocale' attribute is assigned an empty string. */ testEmptyLangLocale: { browsers: ['DESKTOP'], attributes: {displayDatePicker: 'true', format: 'MMMM dd, yyyy', value: '2012-09-10', langLocale: ''}, test: function (cmp) { var inputDateStr = cmp.find("inputText").getElement().value; $A.test.assertEquals("September 10, 2012", inputDateStr, "Dates are not the same and they should be"); } }, /** * Verify behavior when 'langLocale' attribute is assigned an invalid value. */ testInvalidLangLocale: { browsers: ['DESKTOP'], attributes: {displayDatePicker: 'true', format: 'MMMM dd, yyyy', value: '2012-09-10', langLocale: 'xx'}, test: function (cmp) { var inputDateStr = cmp.find("inputText").getElement().value; $A.test.assertEquals("September 10, 2012", inputDateStr, "Dates are not the same and they should be"); } }, /** * Verify behavior of Today() with default 'format' value. */ // TODO(W-2671175): Fails due to GMT/PST timezone difference for user.timezone and actual timezone _testToday: { attributes: {displayDatePicker: 'true', format: 'MMM dd, yyyy'}, test: [function (cmp) { cmp.find("datePicker").get('c.selectToday').runDeprecated(); }, function (cmp) { var inputDateStr = cmp.find("inputText").getElement().value; var dt = moment().format('MMM DD, YYYY'); $A.test.assertEquals(dt, inputDateStr, "Dates are not the same and they should be"); }] }, /**<|fim▁hole|> _testTodayDifferentFormat: { attributes: {displayDatePicker: 'true', format: 'DD/MM/YYYY'}, test: [function (cmp) { cmp.find("datePicker").get('c.selectToday').runDeprecated(); }, function (cmp) { var inputDateStr = cmp.find("inputText").getElement().value; var dt = moment().format('DD/MM/YYYY'); $A.test.assertEquals(dt, inputDateStr, "Dates are not the same and they should be"); }] }, /** * Test input date picker with label set. */ testDatePickerWithLabel: { browsers: ['DESKTOP'], attributes: {displayDatePicker: 'true', label: 'my date cmp'}, test: function (cmp) { var datePickerOpener = cmp.find("datePickerOpener"); $A.test.assertNotNull(datePickerOpener, "datePickerOpener anchor not present"); var datePicker = cmp.find("datePicker"); $A.test.assertNotNull(datePicker, "datePicker not present"); } } /*eslint-disable semi */ }) /*eslint-enable semi */<|fim▁end|>
* Verify behavior of Today() when 'format' is assigned a valid value. */ // TODO(W-2671175): Fails due to GMT/PST timezone difference for user.timezone and actual timezone
<|file_name|>task-comm-3.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // run-pass #![allow(unused_must_use)] // ignore-emscripten no threads support use std::thread; use std::sync::mpsc::{channel, Sender}; <|fim▁hole|>pub fn main() { println!("===== WITHOUT THREADS ====="); test00(); } fn test00_start(ch: &Sender<isize>, message: isize, count: isize) { println!("Starting test00_start"); let mut i: isize = 0; while i < count { println!("Sending Message"); ch.send(message + 0).unwrap(); i = i + 1; } println!("Ending test00_start"); } fn test00() { let number_of_tasks: isize = 16; let number_of_messages: isize = 4; println!("Creating tasks"); let (tx, rx) = channel(); let mut i: isize = 0; // Create and spawn threads... let mut results = Vec::new(); while i < number_of_tasks { let tx = tx.clone(); results.push(thread::spawn({ let i = i; move|| { test00_start(&tx, i, number_of_messages) } })); i = i + 1; } // Read from spawned threads... let mut sum = 0; for _r in &results { i = 0; while i < number_of_messages { let value = rx.recv().unwrap(); sum += value; i = i + 1; } } // Join spawned threads... for r in results { r.join(); } println!("Completed: Final number is: "); println!("{}", sum); // assert (sum == (((number_of_tasks * (number_of_tasks - 1)) / 2) * // number_of_messages)); assert_eq!(sum, 480); }<|fim▁end|>
<|file_name|>poddisruptionbudget.go<|end_file_name|><|fim▁begin|>/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|>Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package internalversion import ( v1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" rest "k8s.io/client-go/rest" api "k8s.io/kubernetes/pkg/api" policy "k8s.io/kubernetes/pkg/apis/policy" ) // PodDisruptionBudgetsGetter has a method to return a PodDisruptionBudgetInterface. // A group's client should implement this interface. type PodDisruptionBudgetsGetter interface { PodDisruptionBudgets(namespace string) PodDisruptionBudgetInterface } // PodDisruptionBudgetInterface has methods to work with PodDisruptionBudget resources. type PodDisruptionBudgetInterface interface { Create(*policy.PodDisruptionBudget) (*policy.PodDisruptionBudget, error) Update(*policy.PodDisruptionBudget) (*policy.PodDisruptionBudget, error) UpdateStatus(*policy.PodDisruptionBudget) (*policy.PodDisruptionBudget, error) Delete(name string, options *api.DeleteOptions) error DeleteCollection(options *api.DeleteOptions, listOptions v1.ListOptions) error Get(name string, options v1.GetOptions) (*policy.PodDisruptionBudget, error) List(opts v1.ListOptions) (*policy.PodDisruptionBudgetList, error) Watch(opts v1.ListOptions) (watch.Interface, error) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *policy.PodDisruptionBudget, err error) PodDisruptionBudgetExpansion } // podDisruptionBudgets implements PodDisruptionBudgetInterface type podDisruptionBudgets struct { client rest.Interface ns string } // newPodDisruptionBudgets returns a PodDisruptionBudgets func newPodDisruptionBudgets(c *PolicyClient, namespace string) *podDisruptionBudgets { return &podDisruptionBudgets{ client: c.RESTClient(), ns: namespace, } } // Create takes the representation of a podDisruptionBudget and creates it. Returns the server's representation of the podDisruptionBudget, and an error, if there is any. func (c *podDisruptionBudgets) Create(podDisruptionBudget *policy.PodDisruptionBudget) (result *policy.PodDisruptionBudget, err error) { result = &policy.PodDisruptionBudget{} err = c.client.Post(). Namespace(c.ns). Resource("poddisruptionbudgets"). Body(podDisruptionBudget). Do(). Into(result) return } // Update takes the representation of a podDisruptionBudget and updates it. Returns the server's representation of the podDisruptionBudget, and an error, if there is any. func (c *podDisruptionBudgets) Update(podDisruptionBudget *policy.PodDisruptionBudget) (result *policy.PodDisruptionBudget, err error) { result = &policy.PodDisruptionBudget{} err = c.client.Put(). Namespace(c.ns). Resource("poddisruptionbudgets"). Name(podDisruptionBudget.Name). Body(podDisruptionBudget). Do(). Into(result) return } // UpdateStatus was generated because the type contains a Status member. // Add a +genclientstatus=false comment above the type to avoid generating UpdateStatus(). 
func (c *podDisruptionBudgets) UpdateStatus(podDisruptionBudget *policy.PodDisruptionBudget) (result *policy.PodDisruptionBudget, err error) { result = &policy.PodDisruptionBudget{} err = c.client.Put(). Namespace(c.ns). Resource("poddisruptionbudgets"). Name(podDisruptionBudget.Name). SubResource("status"). Body(podDisruptionBudget). Do(). Into(result) return } // Delete takes name of the podDisruptionBudget and deletes it. Returns an error if one occurs. func (c *podDisruptionBudgets) Delete(name string, options *api.DeleteOptions) error { return c.client.Delete(). Namespace(c.ns). Resource("poddisruptionbudgets"). Name(name). Body(options). Do(). Error() } // DeleteCollection deletes a collection of objects. func (c *podDisruptionBudgets) DeleteCollection(options *api.DeleteOptions, listOptions v1.ListOptions) error { return c.client.Delete(). Namespace(c.ns). Resource("poddisruptionbudgets"). VersionedParams(&listOptions, api.ParameterCodec). Body(options). Do(). Error() } // Get takes name of the podDisruptionBudget, and returns the corresponding podDisruptionBudget object, and an error if there is any. func (c *podDisruptionBudgets) Get(name string, options v1.GetOptions) (result *policy.PodDisruptionBudget, err error) { result = &policy.PodDisruptionBudget{} err = c.client.Get(). Namespace(c.ns). Resource("poddisruptionbudgets"). Name(name). VersionedParams(&options, api.ParameterCodec). Do(). Into(result) return } // List takes label and field selectors, and returns the list of PodDisruptionBudgets that match those selectors. func (c *podDisruptionBudgets) List(opts v1.ListOptions) (result *policy.PodDisruptionBudgetList, err error) { result = &policy.PodDisruptionBudgetList{} err = c.client.Get(). Namespace(c.ns). Resource("poddisruptionbudgets"). VersionedParams(&opts, api.ParameterCodec). Do(). Into(result) return } // Watch returns a watch.Interface that watches the requested podDisruptionBudgets. func (c *podDisruptionBudgets) Watch(opts v1.ListOptions) (watch.Interface, error) { return c.client.Get(). Prefix("watch"). Namespace(c.ns). Resource("poddisruptionbudgets"). VersionedParams(&opts, api.ParameterCodec). Watch() } // Patch applies the patch and returns the patched podDisruptionBudget. func (c *podDisruptionBudgets) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *policy.PodDisruptionBudget, err error) { result = &policy.PodDisruptionBudget{} err = c.client.Patch(pt). Namespace(c.ns). Resource("poddisruptionbudgets"). SubResource(subresources...). Name(name). Body(data). Do(). Into(result) return }<|fim▁end|>
<|file_name|>HSScreen.java<|end_file_name|><|fim▁begin|>package com.humooooour.kit.screen; import android.app.Activity; import android.view.KeyEvent; import android.view.MotionEvent; import com.humooooour.kit.HSApp; import com.humooooour.kit.geom.HSRect; import processing.core.PApplet; import processing.core.PGraphics; public class HSScreen { private HSApp mApp; private HSRect mBounds; public HSScreen(HSApp app, HSRect bounds) { mApp = app; mBounds = bounds; } public void dispose() { } public void update(float dt) { } public void draw(PGraphics g) { } public boolean handleTouch(MotionEvent touch) { return false; } public boolean handleKey(KeyEvent key) { return false; } protected HSApp getApp() { return mApp; } protected PApplet getPApplet() { return mApp.getPApplet(); } protected Activity getActivity() { return mApp.getActivity(); } protected PGraphics getPGraphics() { return mApp.getPGraphics(); } protected HSRect getBounds() { return mBounds;<|fim▁hole|> cBounds.offsetTo(0.0f, 0.0f); return cBounds; } }<|fim▁end|>
} protected HSRect getContentBounds() { HSRect cBounds = new HSRect(mBounds);
<|file_name|>arena.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Dynamic arenas. // Arenas are used to quickly allocate objects that share a // lifetime. The arena uses ~[u8] vectors as a backing store to // allocate objects from. For each allocated object, the arena stores // a pointer to the type descriptor followed by the // object. (Potentially with alignment padding after each of them.) // When the arena is destroyed, it iterates through all of its chunks, // and uses the tydesc information to trace through the objects, // calling the destructors on them. // One subtle point that needs to be addressed is how to handle // failures while running the user provided initializer function. It // is important to not run the destructor on uninitialized objects, but // how to detect them is somewhat subtle. Since alloc() can be invoked // recursively, it is not sufficient to simply exclude the most recent // object. To solve this without requiring extra space, we use the low // order bit of the tydesc pointer to encode whether the object it // describes has been fully initialized. // As an optimization, objects with destructors are stored in // different chunks than objects without destructors. This reduces // overhead when initializing plain-old-data and means we don't need // to waste time running the destructors of POD. use list::{MutList, MutCons, MutNil}; use core::at_vec;<|fim▁hole|>use core::cast::{transmute, transmute_mut_region}; use core::cast; use core::libc::size_t; use core::ptr; use core::sys::TypeDesc; use core::sys; use core::uint; use core::vec; pub mod rusti { #[abi = "rust-intrinsic"] pub extern "rust-intrinsic" { fn move_val_init<T>(dst: &mut T, src: T); fn needs_drop<T>() -> bool; } } pub mod rustrt { use core::libc::size_t; use core::sys::TypeDesc; pub extern { #[rust_stack] unsafe fn rust_call_tydesc_glue(root: *u8, tydesc: *TypeDesc, field: size_t); } } // This probably belongs somewhere else. Needs to be kept in sync with // changes to glue... static tydesc_drop_glue_index: size_t = 3 as size_t; // The way arena uses arrays is really deeply awful. The arrays are // allocated, and have capacities reserved, but the fill for the array // will always stay at 0. struct Chunk { data: @[u8], fill: uint, is_pod: bool, } pub struct Arena { // The head is separated out from the list as an unbenchmarked // microoptimization, to avoid needing to case on the list to // access the head. 
priv head: Chunk, priv pod_head: Chunk, priv chunks: @mut MutList<Chunk>, } #[unsafe_destructor] impl Drop for Arena { fn finalize(&self) { unsafe { destroy_chunk(&self.head); for self.chunks.each |chunk| { if !chunk.is_pod { destroy_chunk(chunk); } } } } } fn chunk(size: uint, is_pod: bool) -> Chunk { let mut v: @[u8] = @[]; unsafe { at_vec::raw::reserve(&mut v, size); } Chunk { data: unsafe { cast::transmute(v) }, fill: 0u, is_pod: is_pod, } } pub fn arena_with_size(initial_size: uint) -> Arena { Arena { head: chunk(initial_size, false), pod_head: chunk(initial_size, true), chunks: @mut MutNil, } } pub fn Arena() -> Arena { arena_with_size(32u) } #[inline(always)] fn round_up_to(base: uint, align: uint) -> uint { (base + (align - 1)) & !(align - 1) } // Walk down a chunk, running the destructors for any objects stored // in it. unsafe fn destroy_chunk(chunk: &Chunk) { let mut idx = 0; let buf = vec::raw::to_ptr(chunk.data); let fill = chunk.fill; while idx < fill { let tydesc_data: *uint = transmute(ptr::offset(buf, idx)); let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data); let size = (*tydesc).size, align = (*tydesc).align; let after_tydesc = idx + sys::size_of::<*TypeDesc>(); let start = round_up_to(after_tydesc, align); //debug!("freeing object: idx = %u, size = %u, align = %u, done = %b", // start, size, align, is_done); if is_done { rustrt::rust_call_tydesc_glue( ptr::offset(buf, start), tydesc, tydesc_drop_glue_index); } // Find where the next tydesc lives idx = round_up_to(start + size, sys::pref_align_of::<*TypeDesc>()); } } // We encode whether the object a tydesc describes has been // initialized in the arena in the low bit of the tydesc pointer. This // is necessary in order to properly do cleanup if a failure occurs // during an initializer. #[inline(always)] unsafe fn bitpack_tydesc_ptr(p: *TypeDesc, is_done: bool) -> uint { let p_bits: uint = transmute(p); p_bits | (is_done as uint) } #[inline(always)] unsafe fn un_bitpack_tydesc_ptr(p: uint) -> (*TypeDesc, bool) { (transmute(p & !1), p & 1 == 1) } pub impl Arena { // Functions for the POD part of the arena priv fn alloc_pod_grow(&mut self, n_bytes: uint, align: uint) -> *u8 { // Allocate a new chunk. let chunk_size = at_vec::capacity(self.pod_head.data); let new_min_chunk_size = uint::max(n_bytes, chunk_size); self.chunks = @mut MutCons(copy self.pod_head, self.chunks); self.pod_head = chunk(uint::next_power_of_two(new_min_chunk_size + 1u), true); return self.alloc_pod_inner(n_bytes, align); } #[inline(always)] priv fn alloc_pod_inner(&mut self, n_bytes: uint, align: uint) -> *u8 { unsafe { // XXX: Borrow check let head = transmute_mut_region(&mut self.pod_head); let start = round_up_to(head.fill, align); let end = start + n_bytes; if end > at_vec::capacity(head.data) { return self.alloc_pod_grow(n_bytes, align); } head.fill = end; //debug!("idx = %u, size = %u, align = %u, fill = %u", // start, n_bytes, align, head.fill); ptr::offset(vec::raw::to_ptr(head.data), start) } } #[inline(always)] priv fn alloc_pod<'a, T>(&'a mut self, op: &fn() -> T) -> &'a T { unsafe { let tydesc = sys::get_type_desc::<T>(); let ptr = self.alloc_pod_inner((*tydesc).size, (*tydesc).align); let ptr: *mut T = transmute(ptr); rusti::move_val_init(&mut (*ptr), op()); return transmute(ptr); } } // Functions for the non-POD part of the arena priv fn alloc_nonpod_grow(&mut self, n_bytes: uint, align: uint) -> (*u8, *u8) { // Allocate a new chunk. 
let chunk_size = at_vec::capacity(self.head.data); let new_min_chunk_size = uint::max(n_bytes, chunk_size); self.chunks = @mut MutCons(copy self.head, self.chunks); self.head = chunk(uint::next_power_of_two(new_min_chunk_size + 1u), false); return self.alloc_nonpod_inner(n_bytes, align); } #[inline(always)] priv fn alloc_nonpod_inner(&mut self, n_bytes: uint, align: uint) -> (*u8, *u8) { unsafe { let head = transmute_mut_region(&mut self.head); let tydesc_start = head.fill; let after_tydesc = head.fill + sys::size_of::<*TypeDesc>(); let start = round_up_to(after_tydesc, align); let end = start + n_bytes; if end > at_vec::capacity(head.data) { return self.alloc_nonpod_grow(n_bytes, align); } head.fill = round_up_to(end, sys::pref_align_of::<*TypeDesc>()); //debug!("idx = %u, size = %u, align = %u, fill = %u", // start, n_bytes, align, head.fill); let buf = vec::raw::to_ptr(head.data); return (ptr::offset(buf, tydesc_start), ptr::offset(buf, start)); } } #[inline(always)] priv fn alloc_nonpod<'a, T>(&'a mut self, op: &fn() -> T) -> &'a T { unsafe { let tydesc = sys::get_type_desc::<T>(); let (ty_ptr, ptr) = self.alloc_nonpod_inner((*tydesc).size, (*tydesc).align); let ty_ptr: *mut uint = transmute(ty_ptr); let ptr: *mut T = transmute(ptr); // Write in our tydesc along with a bit indicating that it // has *not* been initialized yet. *ty_ptr = transmute(tydesc); // Actually initialize it rusti::move_val_init(&mut(*ptr), op()); // Now that we are done, update the tydesc to indicate that // the object is there. *ty_ptr = bitpack_tydesc_ptr(tydesc, true); return transmute(ptr); } } // The external interface #[inline(always)] fn alloc<'a, T>(&'a mut self, op: &fn() -> T) -> &'a T { unsafe { // XXX: Borrow check let this = transmute_mut_region(self); if !rusti::needs_drop::<T>() { return this.alloc_pod(op); } // XXX: Borrow check let this = transmute_mut_region(self); this.alloc_nonpod(op) } } } #[test] fn test_arena_destructors() { let mut arena = Arena(); for uint::range(0, 10) |i| { // Arena allocate something with drop glue to make sure it // doesn't leak. do arena.alloc { @i }; // Allocate something with funny size and alignment, to keep // things interesting. do arena.alloc { [0u8, 1u8, 2u8] }; } } #[test] #[should_fail] #[ignore(cfg(windows))] fn test_arena_destructors_fail() { let mut arena = Arena(); // Put some stuff in the arena. for uint::range(0, 10) |i| { // Arena allocate something with drop glue to make sure it // doesn't leak. do arena.alloc { @i }; // Allocate something with funny size and alignment, to keep // things interesting. do arena.alloc { [0u8, 1u8, 2u8] }; } // Now, fail while allocating do arena.alloc::<@int> { // Now fail. fail!(); }; }<|fim▁end|>
<|file_name|>toc.js<|end_file_name|><|fim▁begin|>(function($) { $.fn.toc = function(options) { var self = this; var opts = $.extend({}, jQuery.fn.toc.defaults, options); var container = $(opts.container); var headings = $(opts.selectors, container); var activeClassName = opts.prefix+'-active'; var scrollTo = function(e) { if (opts.smoothScrolling) { e.preventDefault(); var elScrollTo = $(e.target).attr('href'); var $el = $(elScrollTo.replace(":", "\\:")); var callbackCalled = false; $('body,html').animate({ scrollTop: $el.offset().top - opts.scrollOffset }, 400, 'swing', function(e) { location.hash = elScrollTo; if (!callbackCalled){ opts.onScrollFinish.call(self); callbackCalled = true; } }); } $('li', self).removeClass(activeClassName); $(e.target).parent().addClass(activeClassName); }; //highlight on scroll var timeout; var highlightOnScroll = function(e) { if (timeout) { clearTimeout(timeout); } timeout = setTimeout(function() { var top = $(window).scrollTop(), highlighted; headings.each(function(i, heading) { var $h = $(heading); var htop = $h.offset().top - opts.highlightOffset; if (htop >= top) { $('li', self).removeClass(activeClassName); highlighted = $('li:eq('+(i)+')', self).addClass(activeClassName); opts.onHighlight(highlighted); return false; } }); }, 50); }; if (opts.highlightOnScroll) { $(window).bind('scroll', highlightOnScroll); highlightOnScroll(); } //Perform search and hide unmatched elements var tocList; var treeObject = {}; //Create the tree var createTree = function(ul) { var prevLevel = {level: -1, index: -1, parent: -1, val: ''}; var levelParent = {0: -1}; tocList = ul.children("li"); tocList.each(function(i) { var me = $(this).removeClass("toc-active"); var currentLevel = parseInt(me.attr('class').trim().slice(-1)); if (currentLevel > prevLevel.level) { currentParent = prevLevel.index; } else if (currentLevel == prevLevel.level) { currentParent = prevLevel.parent; } else if (currentLevel < prevLevel.level) { currentParent = levelParent[currentLevel] || prevLevel.parent; } levelParent[currentLevel] = currentParent; var currentVal = $('a', this).text().trim().toLowerCase(); treeObject[i] = { val: currentVal, level: currentLevel, parent: currentParent } prevLevel = {index: i, val: currentVal, level: currentLevel, parent: currentParent}; }); } //Show the parents recursively var showParents = function(key) { var me = treeObject[key]; if (me.parent > -1) { $(tocList[me.parent]).show(); showParents(me.parent); } }; //Perform the search var search = function(searchVal) { searchVal = searchVal.trim().toLowerCase(); for (var key in treeObject) { var me = treeObject[key]; if (me.val.indexOf(searchVal) !== -1 || searchVal.length == 0) { $(tocList[key]).show(); if ($(tocList[me.parent]).is(":hidden")) { showParents(key); } } else { $(tocList[key]).hide(); } }<|fim▁hole|> var el = $(this); var searchVal = ''; var searchForm = $("<form/>", {class: "form-search quick-search"}) .append($("<input/>", {type: "text", class: "input-medium search-query", placeholder: "Quick Search"})) .append($("<i/>", {class: "icon icon-search search-icon"})); searchForm.css({'position': 'fixed', 'top': '45px', 'padding-right': '20px'}); $(".search-icon", searchForm).css({'marginLeft': '-20px', 'marginTop': '3px'}); var ul = $('<ul/>'); headings.each(function(i, heading) { var $h = $(heading); //add anchor var anchor = $('<span/>').attr('id', opts.anchorName(i, heading, opts.prefix)).insertBefore($h); //build TOC item var a = $('<a/>') .text(opts.headerText(i, heading, $h)) .attr('href', '#' + 
opts.anchorName(i, heading, opts.prefix)) .bind('click', function(e) { scrollTo(e); el.trigger('selected', $(this).attr('href')); }); var li = $('<li/>') .addClass(opts.itemClass(i, heading, $h, opts.prefix)) .append(a); ul.append(li); }); el.html(ul); el.parent().prepend(searchForm); el.css({'top': '80px'}); //create the tree createTree(ul) //set intent timer var intentTimer; var accumulatedTime = 0; //bind quick search el.siblings('.quick-search').children('.search-query').bind('keyup', function(e) { if (accumulatedTime < 1000) { window.clearTimeout(intentTimer); } var me = $(this); if (me.val().length > 0) { $(".search-icon").removeClass("icon-search").addClass("icon-remove-circle").css('cursor', 'pointer'); } else { $(".search-icon").removeClass("icon-remove-circle").addClass("icon-search").css('cursor', 'auto'); } var intentTime = 500 - (me.val().length * 10); accumulatedTime += intentTime; intentTimer = window.setTimeout(function() { if (searchVal == me.val()) { return false; } searchVal = me.val(); search(me.val()); accumulatedTime = 0; }, intentTime); }); // Make text clear icon work $(".search-icon").click(function(e) { if($(this).hasClass('icon-remove-circle')) { $('.search-query').val('').trigger('keyup'); } else { $('.search-query').focus(); } }); //set positions of search box and TOC var navHeight = $(".navbar").height(); var searchHeight = $(".quick-search").height(); $(".quick-search").css({'top': navHeight + 10 + 'px', 'position': 'fixed'}); el.css('top', navHeight + searchHeight + 15 + 'px'); }); }; jQuery.fn.toc.defaults = { container: 'body', selectors: 'h1,h2,h3', smoothScrolling: true, prefix: 'toc', scrollOffset: 0, onHighlight: function() {}, highlightOnScroll: true, highlightOffset: 10, anchorName: function(i, heading, prefix) { return prefix+i; }, headerText: function(i, heading, $heading) { return $heading.text(); }, itemClass: function(i, heading, $heading, prefix) { return prefix + '-' + $heading[0].tagName.toLowerCase(); } }; })(jQuery);<|fim▁end|>
} return this.each(function() { //build TOC
<|file_name|>test_midonet_plugin.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright (C) 2012 Midokura Japan K.K. # Copyright (C) 2013 Midokura PTE LTD # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock import os from oslo_utils import importutils from midonet.neutron.db import task # noqa from neutron.extensions import portbindings from neutron.tests.unit import _test_extension_portbindings as test_bindings import neutron.tests.unit.test_db_plugin as test_plugin import neutron.tests.unit.test_extension_ext_gw_mode as test_gw_mode import neutron.tests.unit.test_extension_security_group as sg import neutron.tests.unit.test_l3_plugin as test_l3_plugin from oslo_config import cfg MIDOKURA_PKG_PATH = "midonet.neutron.plugin" MIDOKURA_EXT_PATH = "midonet.neutron.extensions" MIDONET_PLUGIN_NAME = ('%s.MidonetPluginV2' % MIDOKURA_PKG_PATH) class MidonetPluginV2TestCase(test_plugin.NeutronDbPluginV2TestCase): def setUp(self, plugin=MIDONET_PLUGIN_NAME, ext_mgr=None, service_plugins=None): self.midoclient_mock = mock.MagicMock() self.midoclient_mock.midonetclient.neutron.client.return_value = True modules = { 'midonetclient': self.midoclient_mock, 'midonetclient.neutron': self.midoclient_mock.neutron, 'midonetclient.neutron.client': self.midoclient_mock.client, } self.module_patcher = mock.patch.dict('sys.modules', modules) self.module_patcher.start() # import midonetclient here because it needs proper mock objects to be # assigned to this module first. 'midoclient_mock' object is the # mock object used for this module. from midonetclient.neutron.client import MidonetClient client_class = MidonetClient self.mock_class = client_class() extensions_path = importutils.import_module( MIDOKURA_EXT_PATH).__file__ cfg.CONF.set_override('api_extensions_path', os.path.dirname(extensions_path)) super(MidonetPluginV2TestCase, self).setUp(plugin=plugin) def tearDown(self): super(MidonetPluginV2TestCase, self).tearDown() self.module_patcher.stop() class TestMidonetNetworksV2(MidonetPluginV2TestCase, test_plugin.TestNetworksV2): pass class TestMidonetL3NatTestCase(MidonetPluginV2TestCase, test_l3_plugin.L3NatDBIntTestCase): def test_floatingip_with_invalid_create_port(self): self._test_floatingip_with_invalid_create_port(MIDONET_PLUGIN_NAME) class TestMidonetSecurityGroup(MidonetPluginV2TestCase, sg.TestSecurityGroups): pass class TestMidonetSubnetsV2(MidonetPluginV2TestCase, test_plugin.TestSubnetsV2): pass class TestMidonetPortsV2(MidonetPluginV2TestCase, test_plugin.TestPortsV2): def test_vif_port_binding(self): with self.port(name='myname') as port: self.assertEqual('midonet', port['port']['binding:vif_type']) self.assertTrue(port['port']['admin_state_up']) class TestMidonetPluginPortBinding(MidonetPluginV2TestCase,<|fim▁hole|> VIF_TYPE = portbindings.VIF_TYPE_MIDONET HAS_PORT_FILTER = True class TestExtGwMode(MidonetPluginV2TestCase, test_gw_mode.ExtGwModeIntTestCase): pass<|fim▁end|>
test_bindings.PortBindingsTestCase):
<|file_name|>XDCC.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import os import re import select import socket import struct import time from module.plugins.internal.Hoster import Hoster from module.plugins.internal.misc import exists, fsjoin class XDCC(Hoster): __name__ = "XDCC" __type__ = "hoster" __version__ = "0.42" __status__ = "testing" __pattern__ = r'xdcc://(?P<SERVER>.*?)/#?(?P<CHAN>.*?)/(?P<BOT>.*?)/#?(?P<PACK>\d+)/?' __config__ = [("nick", "str", "Nickname", "pyload" ), ("ident", "str", "Ident", "pyloadident" ), ("realname", "str", "Realname", "pyloadreal" ), ("ctcp_version", "str","CTCP version string", "pyLoad! IRC Interface")] __description__ = """Download from IRC XDCC bot""" __license__ = "GPLv3" __authors__ = [("jeix", "[email protected]" ), ("GammaC0de", "nitzo2001[AT]yahoo[DOT]com")] def setup(self): self.timeout = 30 self.multiDL = False def process(self, pyfile): #: Change request type self.req = self.pyload.requestFactory.getRequest(self.classname, type="XDCC") for _i in xrange(0, 3): try: nmn = self.do_download(pyfile.url) self.log_info("Download of %s finished." % nmn) return except socket.error, e: if hasattr(e, "errno") and e.errno is not None: err_no = e.errno if err_no in (10054, 10061): self.log_warning("Server blocked our ip, retry in 5 min") self.wait(300) continue else: self.log_error(_("Failed due to socket errors. Code: %s") % err_no) self.fail(_("Failed due to socket errors. Code: %s") % err_no) else: err_msg = e.args[0] self.log_error(_("Failed due to socket errors: '%s'") % err_msg) self.fail(_("Failed due to socket errors: '%s'") % err_msg) self.log_error(_("Server blocked our ip, retry again later manually")) self.fail(_("Server blocked our ip, retry again later manually")) def do_download(self, url): self.pyfile.setStatus("waiting") server, chan, bot, pack = re.match(self.__pattern__, url).groups() nick = self.config.get('nick') ident = self.config.get('ident') realname = self.config.get('realname') ctcp_version = self.config.get('ctcp_version') temp = server.split(':') ln = len(temp) if ln == 2: host, port = temp elif ln == 1: host, port = temp[0], 6667 else: self.fail(_("Invalid hostname for IRC Server: %s") % server) ####################### #: CONNECT TO IRC AND IDLE FOR REAL LINK dl_time = time.time() sock = socket.socket() self.log_info(_("Connecting to: %s:%s") % (host, port)) sock.connect((host, int(port))) if nick == "pyload": nick = "pyload-%d" % (time.time() % 1000) #: last 3 digits sock.send("NICK %s\r\n" % nick) sock.send("USER %s %s bla :%s\r\n" % (ident, host, realname)) self.log_info(_("Connect success.")) self.wait(5) # Wait for logon to complete sock.send("JOIN #%s\r\n" % chan) sock.send("PRIVMSG %s :xdcc send #%s\r\n" % (bot, pack)) #: IRC recv loop readbuffer = "" retry = None m = None while m is None: if retry: if time.time() > retry: retry = None dl_time = time.time() sock.send("PRIVMSG %s :xdcc send #%s\r\n" % (bot, pack)) else: if (dl_time + self.timeout) < time.time(): #@TODO: add in config sock.send("QUIT :byebye\r\n") sock.close() self.log_error(_("XDCC Bot did not answer")) self.fail(_("XDCC Bot did not answer")) fdset = select.select([sock], [], [], 0) if sock not in fdset[0]: continue readbuffer += sock.recv(1024) lines = readbuffer.split("\n") readbuffer = lines.pop() <|fim▁hole|> first = line.split() if first[0] == "PING": sock.send("PONG %s\r\n" % first[1]) if first[0] == "ERROR": self.fail(_("IRC-Error: %s") % line) msg = line.split(None, 3) if len(msg) != 4: continue msg = {'origin': msg[0][1:], 'action': 
msg[1], 'target': msg[2], 'text' : msg[3][1:]} if msg['target'][0:len(nick)] == nick and msg['action'] == "PRIVMSG": if msg['text'] == "\x01VERSION\x01": self.log_debug(_("Sending CTCP VERSION")) sock.send("NOTICE %s :%s\r\n" % (msg['origin'], ctcp_version)) elif msg['text'] == "\x01TIME\x01": self.log_debug(_("Sending CTCP TIME")) sock.send("NOTICE %s :%d\r\n" % (msg['origin'], time.time())) elif msg['text'] == "\x01LAG\x01": pass #: don't know how to answer if msg['origin'][0:len(bot)] != bot\ or msg['target'][0:len(nick)] != nick\ or msg['action'] not in ("PRIVMSG", "NOTICE"): continue self.log_debug(_("PrivMsg: <%s> - %s" % (msg['origin'], msg['text']))) if "You already requested that pack" in msg['text']: retry = time.time() + 300 elif "you must be on a known channel to request a pack" in msg['text']: self.log_error(_("Invalid channel")) self.fail(_("Invalid channel")) m = re.match('\x01DCC SEND (?P<NAME>.*?) (?P<IP>\d+) (?P<PORT>\d+)(?: (?P<SIZE>\d+))?\x01', msg['text']) #: Get connection data ip = socket.inet_ntoa(struct.pack('!I', int(m.group('IP')))) port = int(m.group('PORT')) file_name = m.group('NAME') if m.group('SIZE'): self.req.filesize = long(m.group('SIZE')) self.pyfile.name = file_name dl_folder = fsjoin(self.pyload.config.get('general', 'download_folder'), self.pyfile.package().folder if self.pyload.config.get("general", "folder_per_package") else "") dl_file = fsjoin(dl_folder, file_name) if not exists(dl_folder): os.makedirs(dl_folder) self.set_permissions(dl_folder) self.log_info(_("Downloading %s from %s:%d") % (file_name, ip, port)) self.pyfile.setStatus("downloading") newname = self.req.download(ip, port, dl_file, sock, self.pyfile.setProgress) if newname and newname != dl_file: self.log_info(_("%(name)s saved as %(newname)s") % {'name': self.pyfile.name, 'newname': newname}) dl_file = newname #: kill IRC socket #: sock.send("QUIT :byebye\r\n") sock.close() self.last_download = dl_file return self.last_download<|fim▁end|>
for line in lines: # if self.pyload.debug: # self.log_debug("*> " + decode(line)) line = line.rstrip()
<|file_name|>multiwallet.go<|end_file_name|><|fim▁begin|>package multiwallet import ( "errors"<|fim▁hole|> "strings" "time" eth "github.com/OpenBazaar/go-ethwallet/wallet" "github.com/OpenBazaar/multiwallet/bitcoin" "github.com/OpenBazaar/multiwallet/bitcoincash" "github.com/OpenBazaar/multiwallet/client/blockbook" "github.com/OpenBazaar/multiwallet/config" "github.com/OpenBazaar/multiwallet/litecoin" "github.com/OpenBazaar/multiwallet/service" "github.com/OpenBazaar/multiwallet/zcash" "github.com/OpenBazaar/wallet-interface" "github.com/btcsuite/btcd/chaincfg" "github.com/op/go-logging" "github.com/tyler-smith/go-bip39" ) var log = logging.MustGetLogger("multiwallet") var UnsuppertedCoinError = errors.New("multiwallet does not contain an implementation for the given coin") type MultiWallet map[wallet.CoinType]wallet.Wallet func NewMultiWallet(cfg *config.Config) (MultiWallet, error) { log.SetBackend(logging.AddModuleLevel(cfg.Logger)) service.Log = log blockbook.Log = log if cfg.Mnemonic == "" { ent, err := bip39.NewEntropy(128) if err != nil { return nil, err } mnemonic, err := bip39.NewMnemonic(ent) if err != nil { return nil, err } cfg.Mnemonic = mnemonic cfg.CreationDate = time.Now() } multiwallet := make(MultiWallet) var err error for _, coin := range cfg.Coins { var w wallet.Wallet switch coin.CoinType { case wallet.Bitcoin: w, err = bitcoin.NewBitcoinWallet(coin, cfg.Mnemonic, cfg.Params, cfg.Proxy, cfg.Cache, cfg.DisableExchangeRates) if err != nil { return nil, err } if cfg.Params.Name == chaincfg.MainNetParams.Name { multiwallet[wallet.Bitcoin] = w } else { multiwallet[wallet.TestnetBitcoin] = w } case wallet.BitcoinCash: w, err = bitcoincash.NewBitcoinCashWallet(coin, cfg.Mnemonic, cfg.Params, cfg.Proxy, cfg.Cache, cfg.DisableExchangeRates) if err != nil { return nil, err } if cfg.Params.Name == chaincfg.MainNetParams.Name { multiwallet[wallet.BitcoinCash] = w } else { multiwallet[wallet.TestnetBitcoinCash] = w } case wallet.Zcash: w, err = zcash.NewZCashWallet(coin, cfg.Mnemonic, cfg.Params, cfg.Proxy, cfg.Cache, cfg.DisableExchangeRates) if err != nil { return nil, err } if cfg.Params.Name == chaincfg.MainNetParams.Name { multiwallet[wallet.Zcash] = w } else { multiwallet[wallet.TestnetZcash] = w } case wallet.Litecoin: w, err = litecoin.NewLitecoinWallet(coin, cfg.Mnemonic, cfg.Params, cfg.Proxy, cfg.Cache, cfg.DisableExchangeRates) if err != nil { return nil, err } if cfg.Params.Name == chaincfg.MainNetParams.Name { multiwallet[wallet.Litecoin] = w } else { multiwallet[wallet.TestnetLitecoin] = w } case wallet.Ethereum: w, err = eth.NewEthereumWallet(coin, cfg.Params, cfg.Mnemonic, cfg.Proxy) if err != nil { return nil, err } if cfg.Params.Name == chaincfg.MainNetParams.Name { multiwallet[wallet.Ethereum] = w } else { multiwallet[wallet.TestnetEthereum] = w } } } return multiwallet, nil } func (w *MultiWallet) Start() { for _, wallet := range *w { wallet.Start() } } func (w *MultiWallet) Close() { for _, wallet := range *w { wallet.Close() } } func (w *MultiWallet) WalletForCurrencyCode(currencyCode string) (wallet.Wallet, error) { for _, wl := range *w { if strings.EqualFold(wl.CurrencyCode(), currencyCode) || strings.EqualFold(wl.CurrencyCode(), "T"+currencyCode) { return wl, nil } } return nil, UnsuppertedCoinError }<|fim▁end|>
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Provides common neural network layers. //! //! For now the layers in common should be described as layers that are typical //! layers for building neural networks but are not activation or loss layers. #[macro_export] macro_rules! impl_ilayer_common { () => ( fn exact_num_output_blobs(&self) -> Option<usize> { Some(1) } fn exact_num_input_blobs(&self) -> Option<usize> { Some(1) } ) } pub use self::convolution::{Convolution, ConvolutionConfig}; pub use self::linear::{Linear, LinearConfig}; pub use self::log_softmax::LogSoftmax; pub use self::pooling::{Pooling, PoolingConfig, PoolingMode}; pub use self::softmax::Softmax; pub use self::dropout::{Dropout,DropoutConfig}; pub mod convolution; pub mod linear; pub mod log_softmax; pub mod pooling; pub mod softmax; pub mod dropout; /// Provides common utilities for Layers that utilize a filter with stride and padding. /// /// This is used by the Convolution and Pooling layers. pub trait FilterLayer { /// Computes the shape of the spatial dimensions. fn calculate_spatial_output_dims(input_dims: &[usize], filter_dims: &[usize], padding: &[usize], stride: &[usize]) -> Vec<usize> { let mut output_dims = Vec::with_capacity(input_dims.len()); for (i, _) in input_dims.iter().enumerate() { output_dims.push(((input_dims[i] + (2 * padding[i]) - filter_dims[i]) / stride[i]) + 1); }<|fim▁hole|> /// Calculate output shape based on the shape of filter, padding, stride and input. fn calculate_output_shape(&self, input_shape: &[usize]) -> Vec<usize>; /// Calculates the number of spatial dimensions for the pooling operation. fn num_spatial_dims(&self, input_shape: &[usize]) -> usize; /// Retrieves the spatial dimensions for the filter based on `self.filter_shape()` /// and the number of spatial dimensions. /// /// The spatial dimensions only make up part of the whole filter shape. The other parts are the /// number of input and output feature maps. fn spatial_filter_dims(&self, num_spatial_dims: usize) -> Vec<usize> { let mut spatial_dims = Vec::with_capacity(num_spatial_dims); let filter_shape = self.filter_shape(); if filter_shape.len() == 1 { for i in 0..num_spatial_dims { spatial_dims.push(filter_shape[0]); } } else if filter_shape.len() == num_spatial_dims { panic!("unimplemented: You can not yet specify one filter dimension per spatial dimension"); } else { panic!("Must either specify one filter_shape or one filter_shape per spatial dimension. Supplied {:?}", filter_shape.len()); } spatial_dims } /// Retrieves the stride for the convolution based on `self.stride` /// and the number of spatial dimensions. fn stride_dims(&self, num_spatial_dims: usize) -> Vec<usize> { let mut stride_dims = Vec::with_capacity(num_spatial_dims); let stride = self.stride(); if stride.len() == 1 { for i in 0..num_spatial_dims { stride_dims.push(stride[0]); } } else if stride.len() == num_spatial_dims { panic!("unimplemented: You can not yet specify one stride per spatial dimension"); } else { panic!("Must either specify one stride or one stride per spatial dimension. Supplied {:?}", stride.len()); } stride_dims } /// Retrieves the padding for the convolution based on `self.padding` /// and the number of spatial dimensions. 
fn padding_dims(&self, num_spatial_dims: usize) -> Vec<usize> { let mut padding_dims = Vec::with_capacity(num_spatial_dims); let padding = self.padding(); if padding.len() == 1 { for i in 0..num_spatial_dims { padding_dims.push(padding[0]); } } else if padding.len() == num_spatial_dims { panic!("unimplemented: You can not yet specify one padding per spatial dimension"); } else { panic!("Must either specify one padding or one padding per spatial dimension. Supplied {:?}", padding.len()); } padding_dims } /// The filter_shape that will be used by `spatial_filter_dims`. fn filter_shape(&self) -> &[usize]; /// The stride that will be used by `stride_dims`. fn stride(&self) -> &[usize]; /// The padding that will be used by `padding_dims`. fn padding(&self) -> &[usize]; }<|fim▁end|>
output_dims }
<|file_name|>start.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>use types::{Cmd, Response}; use process::process; use state::State; use input::parse_input; use output::format_output; pub fn start<R, W>(input: R, output: W) where R: Read + Send + 'static, W: Write + Send + 'static { let mut state = State::new(); let (cmd_tx, cmd_rx) = sync_channel::<Cmd>(0); let temp = cmd_tx.clone(); let _input_guard = thread::spawn(move || parse_input(BufReader::new(input), temp)); let (resp_tx, resp_rx) = sync_channel::<Response>(0); let _output_guard = thread::spawn(move || format_output(output, resp_rx)); for cmd in cmd_rx.iter() { debug!("cmd = {:?}", cmd); if cmd == Cmd::Quit { return; } else { process(&mut state, cmd, &resp_tx, &cmd_tx); } debug!("state.mode = {:?}", state.mode); } }<|fim▁end|>
use std::io::{Read, BufReader, Write}; use std::thread; use std::sync::mpsc::sync_channel;
<|file_name|>managers.py<|end_file_name|><|fim▁begin|>""" Data models managers for the user strike app. """ from django.db import models from django.db.models import Q from django.utils import timezone class UserStrikeManager(models.Manager): """ Manager class for the ``UserStrike`` data model. """ use_for_related_fields = True def search_for_strike(self, user, ip_address): """ Search the latest (non expired) strike for the given user or IP address. :param user: The user instance to search strike for. :param ip_address: The IP address to search strike for. """<|fim▁hole|> return None elif user and not ip_address: strike_lookup = Q(target_user=user) elif not user and ip_address: strike_lookup = Q(target_ip_address=ip_address) else: strike_lookup = Q(target_user=user) | Q(target_ip_address=ip_address) # Do the search return self.filter(Q(expiration_date__isnull=True) | Q(expiration_date__isnull=False, expiration_date__gte=timezone.now()), strike_lookup).order_by('-block_access', '-creation_date').first()<|fim▁end|>
# Compute the lookup expression if not user and not ip_address:
<|file_name|>layout.module.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>module app.layout { angular.module('app.layout', []); }<|fim▁end|>
/// <reference path="../_all.ts" /> 'use strict';
<|file_name|>dont-suggest-deref-inside-macro-issue-58298.rs<|end_file_name|><|fim▁begin|>fn warn(_: &str) {} <|fim▁hole|> //~^ ERROR mismatched types }; } fn main() { intrinsic_match! { "abc" }; }<|fim▁end|>
macro_rules! intrinsic_match { ($intrinsic:expr) => { warn(format!("unsupported intrinsic {}", $intrinsic));