| prompt (large_string, lengths 70-991k) | completion (large_string, lengths 0-1.02k) |
| --- | --- |
<|file_name|>accelerator.py<|end_file_name|><|fim▁begin|>import numpy as _np
import lnls as _lnls
import pyaccel as _pyaccel
from . import lattice as _lattice
default_cavity_on = False
default_radiation_on = False
default_vchamber_on = False
def create_accelerator(optics_mode=_lattice.default_optics_mode, energy=_lattice.energy):
lattice = _lattice.create_lattice(optics_mode=optics_mode, energy=energy)
accelerator = _pyaccel.accelerator.Accelerator(
lattice=lattice,
energy=energy,
harmonic_number=_lattice.harmonic_number,<|fim▁hole|> radiation_on=default_radiation_on,
vchamber_on=default_vchamber_on
)
return accelerator
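# Hedged usage sketch (not part of the original module): build the booster model and
# switch on the RF cavity and radiation flags before tracking. The flag names mirror
# the module defaults above; the exact pyaccel tracking API is assumed, not shown.
#
#     bo = create_accelerator()
#     bo.cavity_on = True
#     bo.radiation_on = True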
accelerator_data = dict()
accelerator_data['lattice_version'] = 'BO_V06_01'
accelerator_data['global_coupling'] = 0.0002 # expected corrected value
accelerator_data['pressure_profile'] = _np.array([[0, 496.8], [1.5e-8]*2]) # [s [m], p [mbar]]
496.78745<|fim▁end|>
|
cavity_on=default_cavity_on,
|
<|file_name|>secrets.go<|end_file_name|><|fim▁begin|>/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package common<|fim▁hole|> "fmt"
"os"
"path"
"time"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/kubernetes/pkg/api/v1"
"k8s.io/kubernetes/pkg/util/uuid"
"k8s.io/kubernetes/test/e2e/framework"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = framework.KubeDescribe("Secrets", func() {
f := framework.NewDefaultFramework("secrets")
It("should be consumable from pods in volume [Conformance] [Volume]", func() {
doSecretE2EWithoutMapping(f, nil /* default mode */, "secret-test-"+string(uuid.NewUUID()), nil, nil)
})
It("should be consumable from pods in volume with defaultMode set [Conformance] [Volume]", func() {
defaultMode := int32(0400)
doSecretE2EWithoutMapping(f, &defaultMode, "secret-test-"+string(uuid.NewUUID()), nil, nil)
})
It("should be consumable from pods in volume as non-root with defaultMode and fsGroup set [Conformance] [Volume]", func() {
defaultMode := int32(0440) /* setting fsGroup sets mode to at least 440 */
fsGroup := int64(1001)
uid := int64(1000)
doSecretE2EWithoutMapping(f, &defaultMode, "secret-test-"+string(uuid.NewUUID()), &fsGroup, &uid)
})
It("should be consumable from pods in volume with mappings [Conformance] [Volume]", func() {
doSecretE2EWithMapping(f, nil)
})
It("should be consumable from pods in volume with mappings and Item Mode set [Conformance] [Volume]", func() {
mode := int32(0400)
doSecretE2EWithMapping(f, &mode)
})
It("should be able to mount in a volume regardless of a different secret existing with same name in different namespace [Volume]", func() {
var (
namespace2 *v1.Namespace
err error
secret2Name = "secret-test-" + string(uuid.NewUUID())
)
if namespace2, err = f.CreateNamespace("secret-namespace", nil); err != nil {
framework.Failf("unable to create new namespace %s: %v", namespace2.Name, err)
}
secret2 := secretForTest(namespace2.Name, secret2Name)
secret2.Data = map[string][]byte{
"this_should_not_match_content_of_other_secret": []byte("similarly_this_should_not_match_content_of_other_secret\n"),
}
if secret2, err = f.ClientSet.Core().Secrets(namespace2.Name).Create(secret2); err != nil {
framework.Failf("unable to create test secret %s: %v", secret2.Name, err)
}
doSecretE2EWithoutMapping(f, nil /* default mode */, secret2.Name, nil, nil)
})
It("should be consumable in multiple volumes in a pod [Conformance] [Volume]", func() {
// This test ensures that the same secret can be mounted in multiple
// volumes in the same pod. This test case exists to prevent
// regressions that break this use-case.
var (
name = "secret-test-" + string(uuid.NewUUID())
volumeName = "secret-volume"
volumeMountPath = "/etc/secret-volume"
volumeName2 = "secret-volume-2"
volumeMountPath2 = "/etc/secret-volume-2"
secret = secretForTest(f.Namespace.Name, name)
)
By(fmt.Sprintf("Creating secret with name %s", secret.Name))
var err error
if secret, err = f.ClientSet.Core().Secrets(f.Namespace.Name).Create(secret); err != nil {
framework.Failf("unable to create test secret %s: %v", secret.Name, err)
}
pod := &v1.Pod{
ObjectMeta: metav1.ObjectMeta{
Name: "pod-secrets-" + string(uuid.NewUUID()),
},
Spec: v1.PodSpec{
Volumes: []v1.Volume{
{
Name: volumeName,
VolumeSource: v1.VolumeSource{
Secret: &v1.SecretVolumeSource{
SecretName: name,
},
},
},
{
Name: volumeName2,
VolumeSource: v1.VolumeSource{
Secret: &v1.SecretVolumeSource{
SecretName: name,
},
},
},
},
Containers: []v1.Container{
{
Name: "secret-volume-test",
Image: "gcr.io/google_containers/mounttest:0.7",
Args: []string{
"--file_content=/etc/secret-volume/data-1",
"--file_mode=/etc/secret-volume/data-1"},
VolumeMounts: []v1.VolumeMount{
{
Name: volumeName,
MountPath: volumeMountPath,
ReadOnly: true,
},
{
Name: volumeName2,
MountPath: volumeMountPath2,
ReadOnly: true,
},
},
},
},
RestartPolicy: v1.RestartPolicyNever,
},
}
f.TestContainerOutput("consume secrets", pod, 0, []string{
"content of file \"/etc/secret-volume/data-1\": value-1",
"mode of file \"/etc/secret-volume/data-1\": -rw-r--r--",
})
})
It("optional updates should be reflected in volume [Conformance] [Volume]", func() {
// We may have to wait for a full sync period to elapse before the
// Kubelet projects the update into the volume and the container picks
// it up. This timeout is based on the default Kubelet sync period (1
// minute) plus additional time for fudge factor.
const podLogTimeout = 300 * time.Second
trueVal := true
volumeMountPath := "/etc/secret-volumes"
deleteName := "s-test-opt-del-" + string(uuid.NewUUID())
deleteContainerName := "dels-volume-test"
deleteVolumeName := "deletes-volume"
deleteSecret := &v1.Secret{
ObjectMeta: metav1.ObjectMeta{
Namespace: f.Namespace.Name,
Name: deleteName,
},
Data: map[string][]byte{
"data-1": []byte("value-1"),
},
}
updateName := "s-test-opt-upd-" + string(uuid.NewUUID())
updateContainerName := "upds-volume-test"
updateVolumeName := "updates-volume"
updateSecret := &v1.Secret{
ObjectMeta: metav1.ObjectMeta{
Namespace: f.Namespace.Name,
Name: updateName,
},
Data: map[string][]byte{
"data-1": []byte("value-1"),
},
}
createName := "s-test-opt-create-" + string(uuid.NewUUID())
createContainerName := "creates-volume-test"
createVolumeName := "creates-volume"
createSecret := &v1.Secret{
ObjectMeta: metav1.ObjectMeta{
Namespace: f.Namespace.Name,
Name: createName,
},
Data: map[string][]byte{
"data-1": []byte("value-1"),
},
}
By(fmt.Sprintf("Creating secret with name %s", deleteSecret.Name))
var err error
if deleteSecret, err = f.ClientSet.Core().Secrets(f.Namespace.Name).Create(deleteSecret); err != nil {
framework.Failf("unable to create test secret %s: %v", deleteSecret.Name, err)
}
By(fmt.Sprintf("Creating secret with name %s", updateSecret.Name))
if updateSecret, err = f.ClientSet.Core().Secrets(f.Namespace.Name).Create(updateSecret); err != nil {
framework.Failf("unable to create test secret %s: %v", updateSecret.Name, err)
}
pod := &v1.Pod{
ObjectMeta: metav1.ObjectMeta{
Name: "pod-secrets-" + string(uuid.NewUUID()),
},
Spec: v1.PodSpec{
Volumes: []v1.Volume{
{
Name: deleteVolumeName,
VolumeSource: v1.VolumeSource{
Secret: &v1.SecretVolumeSource{
SecretName: deleteName,
Optional: &trueVal,
},
},
},
{
Name: updateVolumeName,
VolumeSource: v1.VolumeSource{
Secret: &v1.SecretVolumeSource{
SecretName: updateName,
Optional: &trueVal,
},
},
},
{
Name: createVolumeName,
VolumeSource: v1.VolumeSource{
Secret: &v1.SecretVolumeSource{
SecretName: createName,
Optional: &trueVal,
},
},
},
},
Containers: []v1.Container{
{
Name: deleteContainerName,
Image: "gcr.io/google_containers/mounttest:0.7",
Command: []string{"/mt", "--break_on_expected_content=false", "--retry_time=120", "--file_content_in_loop=/etc/secret-volumes/delete/data-1"},
VolumeMounts: []v1.VolumeMount{
{
Name: deleteVolumeName,
MountPath: path.Join(volumeMountPath, "delete"),
ReadOnly: true,
},
},
},
{
Name: updateContainerName,
Image: "gcr.io/google_containers/mounttest:0.7",
Command: []string{"/mt", "--break_on_expected_content=false", "--retry_time=120", "--file_content_in_loop=/etc/secret-volumes/update/data-3"},
VolumeMounts: []v1.VolumeMount{
{
Name: updateVolumeName,
MountPath: path.Join(volumeMountPath, "update"),
ReadOnly: true,
},
},
},
{
Name: createContainerName,
Image: "gcr.io/google_containers/mounttest:0.7",
Command: []string{"/mt", "--break_on_expected_content=false", "--retry_time=120", "--file_content_in_loop=/etc/secret-volumes/create/data-1"},
VolumeMounts: []v1.VolumeMount{
{
Name: createVolumeName,
MountPath: path.Join(volumeMountPath, "create"),
ReadOnly: true,
},
},
},
},
RestartPolicy: v1.RestartPolicyNever,
},
}
By("Creating the pod")
f.PodClient().CreateSync(pod)
pollCreateLogs := func() (string, error) {
return framework.GetPodLogs(f.ClientSet, f.Namespace.Name, pod.Name, createContainerName)
}
Eventually(pollCreateLogs, podLogTimeout, framework.Poll).Should(ContainSubstring("Error reading file /etc/secret-volumes/create/data-1"))
pollUpdateLogs := func() (string, error) {
return framework.GetPodLogs(f.ClientSet, f.Namespace.Name, pod.Name, updateContainerName)
}
Eventually(pollUpdateLogs, podLogTimeout, framework.Poll).Should(ContainSubstring("Error reading file /etc/secret-volumes/update/data-3"))
pollDeleteLogs := func() (string, error) {
return framework.GetPodLogs(f.ClientSet, f.Namespace.Name, pod.Name, deleteContainerName)
}
Eventually(pollDeleteLogs, podLogTimeout, framework.Poll).Should(ContainSubstring("value-1"))
By(fmt.Sprintf("Deleting secret %v", deleteSecret.Name))
err = f.ClientSet.Core().Secrets(f.Namespace.Name).Delete(deleteSecret.Name, &v1.DeleteOptions{})
Expect(err).NotTo(HaveOccurred(), "Failed to delete secret %q in namespace %q", deleteSecret.Name, f.Namespace.Name)
By(fmt.Sprintf("Updating secret %v", updateSecret.Name))
updateSecret.ResourceVersion = "" // to force update
delete(updateSecret.Data, "data-1")
updateSecret.Data["data-3"] = []byte("value-3")
_, err = f.ClientSet.Core().Secrets(f.Namespace.Name).Update(updateSecret)
Expect(err).NotTo(HaveOccurred(), "Failed to update secret %q in namespace %q", updateSecret.Name, f.Namespace.Name)
By(fmt.Sprintf("Creating secret with name %s", createSecret.Name))
if createSecret, err = f.ClientSet.Core().Secrets(f.Namespace.Name).Create(createSecret); err != nil {
framework.Failf("unable to create test secret %s: %v", createSecret.Name, err)
}
By("waiting to observe update in volume")
Eventually(pollCreateLogs, podLogTimeout, framework.Poll).Should(ContainSubstring("value-1"))
Eventually(pollUpdateLogs, podLogTimeout, framework.Poll).Should(ContainSubstring("value-3"))
Eventually(pollDeleteLogs, podLogTimeout, framework.Poll).Should(ContainSubstring("Error reading file /etc/secret-volumes/delete/data-1"))
})
It("should be consumable from pods in env vars [Conformance]", func() {
name := "secret-test-" + string(uuid.NewUUID())
secret := secretForTest(f.Namespace.Name, name)
By(fmt.Sprintf("Creating secret with name %s", secret.Name))
var err error
if secret, err = f.ClientSet.Core().Secrets(f.Namespace.Name).Create(secret); err != nil {
framework.Failf("unable to create test secret %s: %v", secret.Name, err)
}
pod := &v1.Pod{
ObjectMeta: metav1.ObjectMeta{
Name: "pod-secrets-" + string(uuid.NewUUID()),
},
Spec: v1.PodSpec{
Containers: []v1.Container{
{
Name: "secret-env-test",
Image: "gcr.io/google_containers/busybox:1.24",
Command: []string{"sh", "-c", "env"},
Env: []v1.EnvVar{
{
Name: "SECRET_DATA",
ValueFrom: &v1.EnvVarSource{
SecretKeyRef: &v1.SecretKeySelector{
LocalObjectReference: v1.LocalObjectReference{
Name: name,
},
Key: "data-1",
},
},
},
},
},
},
RestartPolicy: v1.RestartPolicyNever,
},
}
f.TestContainerOutput("consume secrets", pod, 0, []string{
"SECRET_DATA=value-1",
})
})
It("should be consumable via the environment [Conformance]", func() {
name := "secret-test-" + string(uuid.NewUUID())
secret := newEnvFromSecret(f.Namespace.Name, name)
By(fmt.Sprintf("creating secret %v/%v", f.Namespace.Name, secret.Name))
var err error
if secret, err = f.ClientSet.Core().Secrets(f.Namespace.Name).Create(secret); err != nil {
framework.Failf("unable to create test secret %s: %v", secret.Name, err)
}
pod := &v1.Pod{
ObjectMeta: metav1.ObjectMeta{
Name: "pod-configmaps-" + string(uuid.NewUUID()),
},
Spec: v1.PodSpec{
Containers: []v1.Container{
{
Name: "env-test",
Image: "gcr.io/google_containers/busybox:1.24",
Command: []string{"sh", "-c", "env"},
EnvFrom: []v1.EnvFromSource{
{
SecretRef: &v1.SecretEnvSource{LocalObjectReference: v1.LocalObjectReference{Name: name}},
},
{
Prefix: "p_",
SecretRef: &v1.SecretEnvSource{LocalObjectReference: v1.LocalObjectReference{Name: name}},
},
},
},
},
RestartPolicy: v1.RestartPolicyNever,
},
}
f.TestContainerOutput("consume secrets", pod, 0, []string{
"data_1=value-1", "data_2=value-2", "data_3=value-3",
"p_data_1=value-1", "p_data_2=value-2", "p_data_3=value-3",
})
})
})
func newEnvFromSecret(namespace, name string) *v1.Secret {
return &v1.Secret{
ObjectMeta: metav1.ObjectMeta{
Namespace: namespace,
Name: name,
},
Data: map[string][]byte{
"data_1": []byte("value-1\n"),
"data_2": []byte("value-2\n"),
"data_3": []byte("value-3\n"),
},
}
}
func secretForTest(namespace, name string) *v1.Secret {
return &v1.Secret{
ObjectMeta: metav1.ObjectMeta{
Namespace: namespace,
Name: name,
},
Data: map[string][]byte{
"data-1": []byte("value-1\n"),
"data-2": []byte("value-2\n"),
"data-3": []byte("value-3\n"),
},
}
}
func doSecretE2EWithoutMapping(f *framework.Framework, defaultMode *int32, secretName string, fsGroup *int64, uid *int64) {
var (
volumeName = "secret-volume"
volumeMountPath = "/etc/secret-volume"
secret = secretForTest(f.Namespace.Name, secretName)
)
By(fmt.Sprintf("Creating secret with name %s", secret.Name))
var err error
if secret, err = f.ClientSet.Core().Secrets(f.Namespace.Name).Create(secret); err != nil {
framework.Failf("unable to create test secret %s: %v", secret.Name, err)
}
pod := &v1.Pod{
ObjectMeta: metav1.ObjectMeta{
Name: "pod-secrets-" + string(uuid.NewUUID()),
Namespace: f.Namespace.Name,
},
Spec: v1.PodSpec{
Volumes: []v1.Volume{
{
Name: volumeName,
VolumeSource: v1.VolumeSource{
Secret: &v1.SecretVolumeSource{
SecretName: secretName,
},
},
},
},
Containers: []v1.Container{
{
Name: "secret-volume-test",
Image: "gcr.io/google_containers/mounttest:0.7",
Args: []string{
"--file_content=/etc/secret-volume/data-1",
"--file_mode=/etc/secret-volume/data-1"},
VolumeMounts: []v1.VolumeMount{
{
Name: volumeName,
MountPath: volumeMountPath,
},
},
},
},
RestartPolicy: v1.RestartPolicyNever,
},
}
if defaultMode != nil {
pod.Spec.Volumes[0].VolumeSource.Secret.DefaultMode = defaultMode
} else {
mode := int32(0644)
defaultMode = &mode
}
if fsGroup != nil || uid != nil {
pod.Spec.SecurityContext = &v1.PodSecurityContext{
FSGroup: fsGroup,
RunAsUser: uid,
}
}
modeString := fmt.Sprintf("%v", os.FileMode(*defaultMode))
expectedOutput := []string{
"content of file \"/etc/secret-volume/data-1\": value-1",
"mode of file \"/etc/secret-volume/data-1\": " + modeString,
}
f.TestContainerOutput("consume secrets", pod, 0, expectedOutput)
}
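// Hedged aside (not part of the upstream test): the mode strings asserted above are
// simply os.FileMode's textual form of the secret file permissions, e.g.
//
//   fmt.Sprintf("%v", os.FileMode(0644)) // "-rw-r--r--" (the default)
//   fmt.Sprintf("%v", os.FileMode(0400)) // "-r--------"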
func doSecretE2EWithMapping(f *framework.Framework, mode *int32) {
var (
name = "secret-test-map-" + string(uuid.NewUUID())
volumeName = "secret-volume"
volumeMountPath = "/etc/secret-volume"
secret = secretForTest(f.Namespace.Name, name)
)
By(fmt.Sprintf("Creating secret with name %s", secret.Name))
var err error
if secret, err = f.ClientSet.Core().Secrets(f.Namespace.Name).Create(secret); err != nil {
framework.Failf("unable to create test secret %s: %v", secret.Name, err)
}
pod := &v1.Pod{
ObjectMeta: metav1.ObjectMeta{
Name: "pod-secrets-" + string(uuid.NewUUID()),
},
Spec: v1.PodSpec{
Volumes: []v1.Volume{
{
Name: volumeName,
VolumeSource: v1.VolumeSource{
Secret: &v1.SecretVolumeSource{
SecretName: name,
Items: []v1.KeyToPath{
{
Key: "data-1",
Path: "new-path-data-1",
},
},
},
},
},
},
Containers: []v1.Container{
{
Name: "secret-volume-test",
Image: "gcr.io/google_containers/mounttest:0.7",
Args: []string{
"--file_content=/etc/secret-volume/new-path-data-1",
"--file_mode=/etc/secret-volume/new-path-data-1"},
VolumeMounts: []v1.VolumeMount{
{
Name: volumeName,
MountPath: volumeMountPath,
},
},
},
},
RestartPolicy: v1.RestartPolicyNever,
},
}
if mode != nil {
pod.Spec.Volumes[0].VolumeSource.Secret.Items[0].Mode = mode
} else {
defaultItemMode := int32(0644)
mode = &defaultItemMode
}
modeString := fmt.Sprintf("%v", os.FileMode(*mode))
expectedOutput := []string{
"content of file \"/etc/secret-volume/new-path-data-1\": value-1",
"mode of file \"/etc/secret-volume/new-path-data-1\": " + modeString,
}
f.TestContainerOutput("consume secrets", pod, 0, expectedOutput)
}<|fim▁end|>
|
import (
|
<|file_name|>oadm_router.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
import atexit
import json
import os
import shutil
import subprocess
import re
import yaml
# This is here because of a bug that causes yaml
# to incorrectly handle timezone info on timestamps
def timestamp_constructor(_, node):
'''return timestamps as strings'''
return str(node.value)
yaml.add_constructor(u'tag:yaml.org,2002:timestamp', timestamp_constructor)
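# Hedged illustration (not in the original module): with the constructor registered
# above, timestamp scalars are kept as plain strings instead of datetime objects, e.g.
#   yaml.load('when: 2002-12-14 21:59:43')  ->  {'when': '2002-12-14 21:59:43'}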
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
''' Class to wrap the command line tools '''
def __init__(self,
namespace,
kubeconfig='/etc/origin/master/admin.kubeconfig',
verbose=False):
''' Constructor for OpenshiftCLI '''
self.namespace = namespace
self.verbose = verbose
self.kubeconfig = kubeconfig
# Pylint allows only 5 arguments to be passed.
# pylint: disable=too-many-arguments
def _replace_content(self, resource, rname, content, force=False):
''' replace the current object with the content '''
res = self._get(resource, rname)
if not res['results']:
return res
fname = '/tmp/%s' % rname
yed = Yedit(fname, res['results'][0])
changes = []
for key, value in content.items():
changes.append(yed.put(key, value))
if any([not change[0] for change in changes]):
return {'returncode': 0, 'updated': False}
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._replace(fname, force)
def _replace(self, fname, force=False):
'''replace the resource defined in fname '''
cmd = ['-n', self.namespace, 'replace', '-f', fname]
if force:
cmd.append('--force')
return self.openshift_cmd(cmd)
def _create(self, fname):
'''create a resource from a file '''
return self.openshift_cmd(['create', '-f', fname, '-n', self.namespace])
def _delete(self, resource, rname):
'''delete a named resource '''
return self.openshift_cmd(['delete', resource, rname, '-n', self.namespace])
def _get(self, resource, rname=None):
'''return a resource, optionally by name '''
cmd = ['get', resource, '-o', 'json', '-n', self.namespace]
if rname:
cmd.append(rname)
rval = self.openshift_cmd(cmd, output=True)
# Ensure results are returned in an array
if rval.has_key('items'):
rval['results'] = rval['items']
elif not isinstance(rval['results'], list):
rval['results'] = [rval['results']]
return rval
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json'):
'''Base command for oc '''
#cmds = ['/usr/bin/oc', '--config', self.kubeconfig]
cmds = []
if oadm:
cmds = ['/usr/bin/oadm']
else:
cmds = ['/usr/bin/oc']
cmds.extend(cmd)
rval = {}
results = ''
err = None
if self.verbose:
print ' '.join(cmds)
proc = subprocess.Popen(cmds,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env={'KUBECONFIG': self.kubeconfig})
proc.wait()
stdout = proc.stdout.read()
stderr = proc.stderr.read()
rval = {"returncode": proc.returncode,
"results": results,
"cmd": ' '.join(cmds),
}
if proc.returncode == 0:
if output:
if output_type == 'json':
try:
rval['results'] = json.loads(stdout)
except ValueError as err:
if "No JSON object could be decoded" in err.message:
err = err.message
elif output_type == 'raw':
rval['results'] = stdout
if self.verbose:
print stdout
print stderr
print
if err:
rval.update({"err": err,
"stderr": stderr,
"stdout": stdout,
"cmd": cmds
})
else:
rval.update({"stderr": stderr,
"stdout": stdout,
"results": {},
})
return rval
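# Hedged sketch (not in the original module): a successful call with output=True and
# output_type='json' comes back shaped roughly like
#   {'returncode': 0, 'results': <parsed JSON>, 'cmd': '/usr/bin/oc ...'}
# while failures carry 'stderr'/'stdout' and an empty 'results' dict instead.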
class Utils(object):
''' utilities for openshiftcli modules '''
@staticmethod
def create_file(rname, data, ftype=None):
''' create a file in tmp with name and contents'''
path = os.path.join('/tmp', rname)
with open(path, 'w') as fds:
if ftype == 'yaml':
fds.write(yaml.safe_dump(data, default_flow_style=False))
elif ftype == 'json':
fds.write(json.dumps(data))
else:
fds.write(data)
# Register cleanup when module is done
atexit.register(Utils.cleanup, [path])
return path
@staticmethod
def create_files_from_contents(data):
'''Turn an array of dict: filename, content into a files array'''
files = []
for sfile in data:
path = Utils.create_file(sfile['path'], sfile['content'])
files.append(path)
return files
@staticmethod
def cleanup(files):
'''Clean up on exit '''
for sfile in files:
if os.path.exists(sfile):
if os.path.isdir(sfile):
shutil.rmtree(sfile)
elif os.path.isfile(sfile):
os.remove(sfile)
@staticmethod
def exists(results, _name):
''' Check to see if the results include the name '''
if not results:
return False
if Utils.find_result(results, _name):
return True
return False
@staticmethod
def find_result(results, _name):
''' Find the specified result by name'''
rval = None
for result in results:
if result.has_key('metadata') and result['metadata']['name'] == _name:
rval = result
break
return rval
@staticmethod
def get_resource_file(sfile, sfile_type='yaml'):
''' return the service file '''
contents = None
with open(sfile) as sfd:
contents = sfd.read()
if sfile_type == 'yaml':
contents = yaml.safe_load(contents)
elif sfile_type == 'json':
contents = json.loads(contents)
return contents
# Disabling too-many-branches. This is a yaml dictionary comparison function
# pylint: disable=too-many-branches,too-many-return-statements
@staticmethod
def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
''' Given a user defined definition, compare it with the results given back by our query. '''
# Currently these values are autogenerated and we do not need to check them
skip = ['metadata', 'status']
if skip_keys:
skip.extend(skip_keys)
for key, value in result_def.items():
if key in skip:
continue
# Both are lists
if isinstance(value, list):
if not isinstance(user_def[key], list):
if debug:
print 'user_def[key] is not a list'
return False
for values in zip(user_def[key], value):
if isinstance(values[0], dict) and isinstance(values[1], dict):
if debug:
print 'sending list - list'
print type(values[0])
print type(values[1])
result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
if not result:
print 'list compare returned false'
return False
elif value != user_def[key]:
if debug:
print 'value should be identical'
print value
print user_def[key]
return False
# recurse on a dictionary
elif isinstance(value, dict):
if not isinstance(user_def[key], dict):
if debug:
print "dict returned false not instance of dict"
return False
# before passing ensure keys match
api_values = set(value.keys()) - set(skip)
user_values = set(user_def[key].keys()) - set(skip)
if api_values != user_values:
if debug:
print api_values
print user_values
print "keys are not equal in dict"
return False
result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
if not result:
if debug:
print "dict returned false"
print result
return False
# Verify each key, value pair is the same
else:
if not user_def.has_key(key) or value != user_def[key]:
if debug:
print "value not equal; user_def does not have key"
print value
print user_def[key]
return False
return True
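# Hedged example (not in the original module): 'metadata' and 'status' (plus any
# skip_keys) are ignored, so a freshly generated definition and the live object can
# still compare equal, e.g.
#   Utils.check_def_equal({'spec': {'replicas': 1}},
#                         {'spec': {'replicas': 1}, 'metadata': {'name': 'router'}})  # -> True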
class YeditException(Exception):
''' Exception class for Yedit '''
pass
class Yedit(object):
''' Class to modify yaml files '''
re_valid_key = r"(((\[-?\d+\])|([a-zA-Z-./]+)).?)+$"
re_key = r"(?:\[(-?\d+)\])|([a-zA-Z-./]+)"
def __init__(self, filename=None, content=None, content_type='yaml'):
self.content = content
self.filename = filename
self.__yaml_dict = content
self.content_type = content_type
if self.filename and not self.content:
self.load(content_type=self.content_type)
@property
def yaml_dict(self):
''' getter method for yaml_dict '''
return self.__yaml_dict
@yaml_dict.setter
def yaml_dict(self, value):
''' setter method for yaml_dict '''
self.__yaml_dict = value
@staticmethod
def remove_entry(data, key):
''' remove data at location key '''
if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
return None
key_indexes = re.findall(Yedit.re_key, key)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key and isinstance(data, dict):
data = data.get(dict_key, None)
elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
data = data[int(arr_ind)]
else:
return None
# process last index for remove
# expected list entry
if key_indexes[-1][0]:
if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
del data[int(key_indexes[-1][0])]
return True
# expected dict entry
elif key_indexes[-1][1]:
if isinstance(data, dict):
del data[key_indexes[-1][1]]
return True
@staticmethod
def add_entry(data, key, item=None):
''' Get an item from a dictionary with key notation a.b.c
d = {'a': {'b': 'c'}}
key = a.b
return c
'''
if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
return None
curr_data = data
key_indexes = re.findall(Yedit.re_key, key)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key:
if isinstance(data, dict) and data.has_key(dict_key):
data = data[dict_key]
continue
data[dict_key] = {}
data = data[dict_key]
elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
data = data[int(arr_ind)]
else:
return None
# process last index for add
# expected list entry
if key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
data[int(key_indexes[-1][0])] = item
# expected dict entry
elif key_indexes[-1][1] and isinstance(data, dict):
data[key_indexes[-1][1]] = item
return curr_data
@staticmethod
def get_entry(data, key):
''' Get an item from a dictionary with key notation a.b.c
d = {'a': {'b': 'c'}}
key = a.b
return c
'''
if not (key and re.match(Yedit.re_valid_key, key) and isinstance(data, (list, dict))):
return None
key_indexes = re.findall(Yedit.re_key, key)
for arr_ind, dict_key in key_indexes:
if dict_key and isinstance(data, dict):
data = data.get(dict_key, None)
elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
data = data[int(arr_ind)]
else:
return None
return data
def write(self):
''' write to file '''
if not self.filename:
raise YeditException('Please specify a filename.')
with open(self.filename, 'w') as yfd:
yfd.write(yaml.safe_dump(self.yaml_dict, default_flow_style=False))
def read(self):
''' read the file contents '''
# check if it exists
if not self.exists():
return None
contents = None
with open(self.filename) as yfd:
contents = yfd.read()
return contents
def exists(self):
''' return whether file exists '''
if os.path.exists(self.filename):
return True
return False
def load(self, content_type='yaml'):
''' return yaml file '''
contents = self.read()
if not contents:
return None
# check if it is yaml
try:<|fim▁hole|> self.yaml_dict = json.loads(contents)
except yaml.YAMLError as _:
# Error loading yaml or json
return None
return self.yaml_dict
def get(self, key):
''' get a specified key'''
try:
entry = Yedit.get_entry(self.yaml_dict, key)
except KeyError as _:
entry = None
return entry
def delete(self, key):
''' remove key from a dict'''
try:
entry = Yedit.get_entry(self.yaml_dict, key)
except KeyError as _:
entry = None
if not entry:
return (False, self.yaml_dict)
result = Yedit.remove_entry(self.yaml_dict, key)
if not result:
return (False, self.yaml_dict)
return (True, self.yaml_dict)
def put(self, key, value):
''' put key, value into a dict '''
try:
entry = Yedit.get_entry(self.yaml_dict, key)
except KeyError as _:
entry = None
if entry == value:
return (False, self.yaml_dict)
result = Yedit.add_entry(self.yaml_dict, key, value)
if not result:
return (False, self.yaml_dict)
return (True, self.yaml_dict)
def create(self, key, value):
''' create a yaml file '''
if not self.exists():
self.yaml_dict = {key: value}
return (True, self.yaml_dict)
return (False, self.yaml_dict)
import time
class RouterConfig(object):
''' RouterConfig is a DTO for the router. '''
def __init__(self, rname, kubeconfig, router_options):
self.name = rname
self.kubeconfig = kubeconfig
self._router_options = router_options
@property
def router_options(self):
''' return router options '''
return self._router_options
def to_option_list(self):
''' return all options as a string'''
return RouterConfig.stringify(self.router_options)
@staticmethod
def stringify(options):
''' return hash as list of key value pairs '''
rval = []
for key, data in options.items():
if data['include'] and data['value']:
rval.append('--%s=%s' % (key.replace('_', '-'), data['value']))
return rval
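# Hedged example (not in the original module): only options marked include=True with a
# truthy value are emitted, and underscores become dashes, e.g.
#   RouterConfig.stringify({'service_account': {'value': 'router', 'include': True},
#                           'cert_file': {'value': None, 'include': False}})
#   ->  ['--service-account=router']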
class Router(OpenShiftCLI):
''' Class to wrap the oc command line tools '''
def __init__(self,
router_config,
verbose=False):
''' Constructor for OpenshiftOC
a router consists of 3 or more parts
- dc/router
- svc/router
- endpoint/router
'''
super(Router, self).__init__('default', router_config.kubeconfig, verbose)
self.rconfig = router_config
self.verbose = verbose
self.router_parts = [{'kind': 'dc', 'name': self.rconfig.name},
{'kind': 'svc', 'name': self.rconfig.name},
#{'kind': 'endpoints', 'name': self.rconfig.name},
]
def get(self, filter_kind=None):
''' return the self.router_parts '''
rparts = self.router_parts
parts = []
if filter_kind:
rparts = [part for part in self.router_parts if filter_kind == part['kind']]
for part in rparts:
parts.append(self._get(part['kind'], rname=part['name']))
return parts
def exists(self):
'''return whether all router parts exist '''
parts = self.get()
for part in parts:
if part['returncode'] != 0:
return False
return True
def delete(self):
'''delete the router parts '''
parts = []
for part in self.router_parts:
parts.append(self._delete(part['kind'], part['name']))
return parts
def create(self, dryrun=False, output=False, output_type='json'):
'''Create a deploymentconfig '''
# We need to create the pem file
router_pem = '/tmp/router.pem'
with open(router_pem, 'w') as rfd:
rfd.write(open(self.rconfig.router_options['cert_file']['value']).read())
rfd.write(open(self.rconfig.router_options['key_file']['value']).read())
atexit.register(Utils.cleanup, [router_pem])
self.rconfig.router_options['default_cert']['value'] = router_pem
options = self.rconfig.to_option_list()
cmd = ['router']
cmd.extend(options)
if dryrun:
cmd.extend(['--dry-run=True', '-o', 'json'])
results = self.openshift_cmd(cmd, oadm=True, output=output, output_type=output_type)
return results
def update(self):
'''run update for the router. This performs a delete and then create '''
parts = self.delete()
if any([part['returncode'] != 0 for part in parts]):
return parts
# Ugly built in sleep here.
time.sleep(15)
return self.create()
def needs_update(self, verbose=False):
''' check to see if we need to update '''
dc_inmem = self.get(filter_kind='dc')[0]
if dc_inmem['returncode'] != 0:
return dc_inmem
user_dc = self.create(dryrun=True, output=True, output_type='raw')
if user_dc['returncode'] != 0:
return user_dc
# Since the output from oadm_router is returned as raw
# we need to parse it. The first line is the stats_password
user_dc_results = user_dc['results'].split('\n')
# stats_password = user_dc_results[0]
# Load the string back into json and get the newly created dc
user_dc = json.loads('\n'.join(user_dc_results[1:]))['items'][0]
# Router needs some exceptions.
# We do not want to check the autogenerated password for stats admin
if not self.rconfig.router_options['stats_password']['value']:
for idx, env_var in enumerate(user_dc['spec']['template']['spec']['containers'][0]['env']):
if env_var['name'] == 'STATS_PASSWORD':
env_var['value'] = \
dc_inmem['results'][0]['spec']['template']['spec']['containers'][0]['env'][idx]['value']
# dry-run doesn't add the protocol to the ports section. We will manually do that.
for idx, port in enumerate(user_dc['spec']['template']['spec']['containers'][0]['ports']):
if not port.has_key('protocol'):
port['protocol'] = 'TCP'
# These are different when generating
skip = ['dnsPolicy',
'terminationGracePeriodSeconds',
'restartPolicy', 'timeoutSeconds',
'livenessProbe', 'readinessProbe',
'terminationMessagePath',
'rollingParams',
]
return not Utils.check_def_equal(user_dc, dc_inmem['results'][0], skip_keys=skip, debug=verbose)
def main():
'''
ansible oadm module for the router
'''
module = AnsibleModule(
argument_spec=dict(
state=dict(default='present', type='str',
choices=['present', 'absent']),
debug=dict(default=False, type='bool'),
namespace=dict(default='default', type='str'),
name=dict(default='router', type='str'),
kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
credentials=dict(default='/etc/origin/master/openshift-router.kubeconfig', type='str'),
cert_file=dict(default=None, type='str'),
key_file=dict(default=None, type='str'),
image=dict(default=None, type='str'), #'openshift3/ose-${component}:${version}'
latest_image=dict(default=False, type='bool'),
labels=dict(default=None, type='list'),
ports=dict(default=['80:80', '443:443'], type='list'),
replicas=dict(default=1, type='int'),
selector=dict(default=None, type='str'),
service_account=dict(default='router', type='str'),
router_type=dict(default='haproxy-router', type='str'),
host_network=dict(default=True, type='bool'),
# external host options
external_host=dict(default=None, type='str'),
external_host_vserver=dict(default=None, type='str'),
external_host_insecure=dict(default=False, type='bool'),
external_host_partition_path=dict(default=None, type='str'),
external_host_username=dict(default=None, type='str'),
external_host_password=dict(default=None, type='str'),
external_host_private_key=dict(default=None, type='str'),
# Metrics
expose_metrics=dict(default=False, type='bool'),
metrics_image=dict(default=None, type='str'),
# Stats
stats_user=dict(default=None, type='str'),
stats_password=dict(default=None, type='str'),
stats_port=dict(default=1936, type='int'),
),
mutually_exclusive=[["router_type", "images"]],
supports_check_mode=True,
)
rconfig = RouterConfig(module.params['name'],
module.params['kubeconfig'],
{'credentials': {'value': module.params['credentials'], 'include': True},
'default_cert': {'value': None, 'include': True},
'cert_file': {'value': module.params['cert_file'], 'include': False},
'key_file': {'value': module.params['key_file'], 'include': False},
'image': {'value': module.params['image'], 'include': True},
'latest_image': {'value': module.params['latest_image'], 'include': True},
'labels': {'value': module.params['labels'], 'include': True},
'ports': {'value': ','.join(module.params['ports']), 'include': True},
'replicas': {'value': module.params['replicas'], 'include': True},
'selector': {'value': module.params['selector'], 'include': True},
'service_account': {'value': module.params['service_account'], 'include': True},
'router_type': {'value': module.params['router_type'], 'include': False},
'host_network': {'value': module.params['host_network'], 'include': True},
'external_host': {'value': module.params['external_host'], 'include': True},
'external_host_vserver': {'value': module.params['external_host_vserver'],
'include': True},
'external_host_insecure': {'value': module.params['external_host_insecure'],
'include': True},
'external_host_partition_path': {'value': module.params['external_host_partition_path'],
'include': True},
'external_host_username': {'value': module.params['external_host_username'],
'include': True},
'external_host_password': {'value': module.params['external_host_password'],
'include': True},
'external_host_private_key': {'value': module.params['external_host_private_key'],
'include': True},
'expose_metrics': {'value': module.params['expose_metrics'], 'include': True},
'metrics_image': {'value': module.params['metrics_image'], 'include': True},
'stats_user': {'value': module.params['stats_user'], 'include': True},
'stats_password': {'value': module.params['stats_password'], 'include': True},
'stats_port': {'value': module.params['stats_port'], 'include': True},
})
ocrouter = Router(rconfig)
state = module.params['state']
########
# Delete
########
if state == 'absent':
if not ocrouter.exists():
module.exit_json(changed=False, state="absent")
if module.check_mode:
module.exit_json(changed=False, msg='Would have performed a delete.')
api_rval = ocrouter.delete()
module.exit_json(changed=True, results=api_rval, state="absent")
if state == 'present':
########
# Create
########
if not ocrouter.exists():
if module.check_mode:
module.exit_json(changed=False, msg='Would have performed a create.')
api_rval = ocrouter.create()
module.exit_json(changed=True, results=api_rval, state="present")
########
# Update
########
if not ocrouter.needs_update():
module.exit_json(changed=False, state="present")
if module.check_mode:
module.exit_json(changed=False, msg='Would have performed an update.')
api_rval = ocrouter.update()
if api_rval['returncode'] != 0:
module.fail_json(msg=api_rval)
module.exit_json(changed=True, results=api_rval, state="present")
module.exit_json(failed=True,
changed=False,
results='Unknown state passed. %s' % state,
state="unknown")
# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
# import module snippets. These are required
from ansible.module_utils.basic import *
main()<|fim▁end|>
|
if content_type == 'yaml':
self.yaml_dict = yaml.load(contents)
elif content_type == 'json':
|
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>#![allow(unused_imports, dead_code, unused_variables)]
#[cfg(feature = "pkg-config")]
extern crate pkg_config;
#[cfg(feature = "bindgen")]
extern crate bindgen;
#[cfg(feature="bundled")]
extern crate cmake;
#[cfg(feature="bundled")]
extern crate tar;
#[cfg(feature="bundled")]
extern crate flate2;
#[cfg(feature="bundled")]
extern crate unidiff;
#[macro_use]
extern crate cfg_if;
use std::path::{Path, PathBuf};
use std::{io, fs, env};
// corresponds to the headers that we have in sdl2-sys/SDL2-{version}
const SDL2_HEADERS_BUNDLED_VERSION: &str = "2.0.10";
// means the latest stable version that can be downloaded from SDL2's source
const LASTEST_SDL2_VERSION: &str = "2.0.10";
#[cfg(feature = "bindgen")]
macro_rules! add_msvc_includes_to_bindings {
($bindings:expr) => {
$bindings = $bindings.clang_arg(format!("-IC:/Program Files (x86)/Windows Kits/8.1/Include/shared"));
$bindings = $bindings.clang_arg(format!("-IC:/Program Files/LLVM/lib/clang/5.0.0/include"));
$bindings = $bindings.clang_arg(format!("-IC:/Program Files (x86)/Windows Kits/10/Include/10.0.10240.0/ucrt"));
$bindings = $bindings.clang_arg(format!("-IC:/Program Files (x86)/Microsoft Visual Studio 14.0/VC/include"));
$bindings = $bindings.clang_arg(format!("-IC:/Program Files (x86)/Windows Kits/8.1/Include/um"));
};
}
fn get_bundled_header_path() -> PathBuf {
let mut include_path: PathBuf = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
include_path.push(format!("SDL2-{}", SDL2_HEADERS_BUNDLED_VERSION));
include_path.push("include");
include_path
}
#[cfg(feature = "bundled")]
fn run_command(cmd: &str, args: &[&str]) {
use std::process::Command;
match Command::new(cmd).args(args).output() {
Ok(output) => {
if !output.status.success() {
let error = std::str::from_utf8(&output.stderr).unwrap();
panic!("Command '{}' failed: {}", cmd, error);
}
}
Err(error) => {
panic!("Error running command '{}': {:#}", cmd, error);
}
}
}
#[cfg(feature = "bundled")]
fn download_to(url: &str, dest: &str) {
if cfg!(windows) {
run_command("powershell", &[
"-NoProfile", "-NonInteractive",
"-Command", &format!("& {{
$client = New-Object System.Net.WebClient
$client.DownloadFile(\"{0}\", \"{1}\")
if (!$?) {{ Exit 1 }}
}}", url, dest).as_str()
]);
} else {
run_command("curl", &[url, "-o", dest]);
}
}
#[cfg(feature = "use-pkgconfig")]
fn pkg_config_print(statik: bool, lib_name: &str) {
pkg_config::Config::new()
.statik(statik)
.probe(lib_name).unwrap();
}
#[cfg(feature = "use-pkgconfig")]
fn get_pkg_config() {
let statik: bool = if cfg!(feature = "static-link") { true } else { false };
pkg_config_print(statik, "sdl2");
if cfg!(feature = "image") {
pkg_config_print(statik, "SDL2_image");
}
if cfg!(feature = "ttf") {
pkg_config_print(statik, "SDL2_ttf");
}
if cfg!(feature = "mixer") {
pkg_config_print(statik, "SDL2_mixer");
}
if cfg!(feature = "gfx") {
pkg_config_print(statik, "SDL2_gfx");
}
}
// returns the location of the downloaded source
#[cfg(feature = "bundled")]
fn download_sdl2() -> PathBuf {
let out_dir = env::var("OUT_DIR").unwrap();
let sdl2_archive_name = format!("SDL2-{}.tar.gz", LASTEST_SDL2_VERSION);
let sdl2_archive_url = format!("https://libsdl.org/release/{}", sdl2_archive_name);
let sdl2_archive_path = Path::new(&out_dir).join(sdl2_archive_name);
let sdl2_build_path = Path::new(&out_dir).join(format!("SDL2-{}", LASTEST_SDL2_VERSION));
// avoid re-downloading the archive if it already exists
if !sdl2_archive_path.exists() {
download_to(&sdl2_archive_url, sdl2_archive_path.to_str().unwrap());
}
let reader = flate2::read::GzDecoder::new(
fs::File::open(&sdl2_archive_path).unwrap()
);
let mut ar = tar::Archive::new(reader);
ar.unpack(&out_dir).unwrap();
sdl2_build_path
}
// apply patches to sdl2 source
#[cfg(feature = "bundled")]
fn patch_sdl2(sdl2_source_path: &Path) {
// vector of <(patch_file_name, patch_file_contents)>
let patches: Vec<(&str, &'static str)> = vec![
// No patches at this time. If needed, add them like this:
// ("SDL-2.x.y-filename.patch", include_str!("patches/SDL-2.x.y-filename.patch")),
("SDL2-2.0.10-CMakeLists.txt.patch", include_str!("patches/SDL2-2.0.10-CMakeLists.txt.patch")),
];
let sdl_version = format!("SDL2-{}", LASTEST_SDL2_VERSION);
for patch in &patches {
// Only apply patches whose file name is prefixed with the currently
// targeted version of SDL2.
if !patch.0.starts_with(&sdl_version) {
continue;
}
let mut patch_set = unidiff::PatchSet::new();
patch_set.parse(patch.1).expect("Error parsing diff");
// For every modified file, copy the existing file to <file_name>_old,
// open a new copy of <file_name>. and fill the new file with a
// combination of the unmodified contents, and the patched sections.
// TODO: This code is untested (save for the immediate application), and
// probably belongs in the unidiff (or similar) package.
for modified_file in patch_set.modified_files() {
use std::io::{Write, BufRead};
let file_path = sdl2_source_path.join(modified_file.path());
let old_path = sdl2_source_path.join(format!("{}_old", modified_file.path()));
fs::rename(&file_path, &old_path)
.expect(&format!(
"Rename of {} to {} failed",
file_path.to_string_lossy(),
old_path.to_string_lossy()));
let dst_file = fs::File::create(file_path).unwrap();
let mut dst_buf = io::BufWriter::new(dst_file);
let old_file = fs::File::open(old_path).unwrap();
let mut old_buf = io::BufReader::new(old_file);
let mut cursor = 0;
for (i, hunk) in modified_file.into_iter().enumerate() {
// Write old lines from cursor to the start of this hunk.
let num_lines = hunk.source_start - cursor - 1;
for _ in 0..num_lines {
let mut line = String::new();
old_buf.read_line(&mut line).unwrap();
dst_buf.write_all(line.as_bytes()).unwrap();
}
cursor += num_lines;
// Skip lines in old_file, and verify that what we expect to
// replace is present in the old_file.
for expected_line in hunk.source_lines() {
let mut actual_line = String::new();
old_buf.read_line(&mut actual_line).unwrap();
actual_line.pop(); // Remove the trailing newline.
if expected_line.value.trim_end() != actual_line {
panic!("Can't apply patch; mismatch between expected and actual in hunk {}", i);
}
}
cursor += hunk.source_length;
// Write the new lines into the destination.
for line in hunk.target_lines() {
dst_buf.write_all(line.value.as_bytes()).unwrap();
dst_buf.write_all(b"\n").unwrap();
}
}
// Write all remaining lines from the old file into the new.
for line in old_buf.lines() {
dst_buf.write_all(&line.unwrap().into_bytes()).unwrap();
dst_buf.write_all(b"\n").unwrap();
}
}
// For every removed file, simply delete the original.
// TODO: This is entirely untested code. There are likely bugs here, and
// this really should be part of the unidiff library, not a function
// defined here. Hopefully this gets moved somewhere else before it
// bites someone.
for removed_file in patch_set.removed_files() {
fs::remove_file(sdl2_source_path.join(removed_file.path()))
.expect(
&format!("Failed to remove file {} from {}",
removed_file.path(),
sdl2_source_path.to_string_lossy()));
}
// For every new file, copy the entire contents of the patched file into
// a newly created <file_name>.
// TODO: This is entirely untested code. There are likely bugs here, and
// this really should be part of the unidiff library, not a function
// defined here. Hopefully this gets moved somewhere else before it
// bites someone.
for added_file in patch_set.added_files() {
use std::io::Write;
// This should be superfluous. I don't know how a new file would
// ever have more than one hunk.
assert!(added_file.len() == 1);
let file_path = sdl2_source_path.join(added_file.path());
let dst_file = fs::File::create(&file_path)
.expect(&format!(
"Failed to create file {}",
file_path.to_string_lossy()));
let mut dst_buf = io::BufWriter::new(&dst_file);
for line in added_file.into_iter().nth(0).unwrap().target_lines() {
dst_buf.write_all(line.value.as_bytes()).unwrap();
dst_buf.write_all(b"\n").unwrap();
}
}
}
}
// compile a shared or static lib depending on the feature
#[cfg(feature = "bundled")]
fn compile_sdl2(sdl2_build_path: &Path, target_os: &str) -> PathBuf {
let mut cfg = cmake::Config::new(sdl2_build_path);
cfg.profile("release");
if target_os == "windows-gnu" {
cfg.define("VIDEO_OPENGLES", "OFF");
}
if cfg!(feature = "static-link") {
cfg.define("SDL_SHARED", "OFF");
cfg.define("SDL_STATIC", "ON");
} else {
cfg.define("SDL_SHARED", "ON");
cfg.define("SDL_STATIC", "OFF");
}
cfg.build()
}
#[cfg(not(feature = "bundled"))]
fn compute_include_paths() -> Vec<String> {
let mut include_paths: Vec<String> = vec!();
if let Ok(include_path) = env::var("SDL2_INCLUDE_PATH") {
include_paths.push(format!("{}", include_path));
};
#[cfg(feature = "pkg-config")] {
// don't print the "cargo:xxx" directives, we're just trying to get the include paths here
let pkg_config_library = pkg_config::Config::new().print_system_libs(false).probe("sdl2").unwrap();
for path in pkg_config_library.include_paths {
include_paths.push(format!("{}", path.display()));
};
}
include_paths
}
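// Hedged usage note (an assumption, not documented here): when neither "bundled" nor
// pkg-config supplies headers, the environment variable read above can point the build
// at a system copy, e.g.
//   SDL2_INCLUDE_PATH=/usr/include/SDL2 cargo build --features bindgen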
fn link_sdl2(target_os: &str) {
#[cfg(all(feature = "use-pkgconfig", not(feature = "bundled")))] {
// prints the appropriate linking parameters when using pkg-config
// useless when using "bundled"
get_pkg_config();
}
#[cfg(not(feature = "static-link"))] {
if target_os == "ios" {
// iOS requires additional linking to function properly
println!("cargo:rustc-flags=-l framework=AVFoundation");
println!("cargo:rustc-flags=-l framework=AudioToolbox");
println!("cargo:rustc-flags=-l framework=CoreAudio");
println!("cargo:rustc-flags=-l framework=CoreGraphics");
println!("cargo:rustc-flags=-l framework=CoreMotion");
println!("cargo:rustc-flags=-l framework=Foundation");
println!("cargo:rustc-flags=-l framework=GameController");
println!("cargo:rustc-flags=-l framework=OpenGLES");
println!("cargo:rustc-flags=-l framework=QuartzCore");
println!("cargo:rustc-flags=-l framework=UIKit");
}
// pkg-config automatically prints this output when probing,
// however pkg_config isn't used with the feature "bundled"
if cfg!(feature = "bundled") || cfg!(not(feature = "use-pkgconfig")) {
if cfg!(feature = "use_mac_framework") && target_os == "darwin" {
println!("cargo:rustc-flags=-l framework=SDL2");
} else if target_os != "emscripten" {
println!("cargo:rustc-flags=-l SDL2");
}
}
}
#[cfg(feature = "static-link")] {
if cfg!(feature = "bundled") || cfg!(feature = "use-pkgconfig") == false {
println!("cargo:rustc-link-lib=static=SDL2main");
println!("cargo:rustc-link-lib=static=SDL2");
}
// Also linked to any required libraries for each supported platform
if target_os.contains("windows") {
println!("cargo:rustc-link-lib=shell32");
println!("cargo:rustc-link-lib=user32");
println!("cargo:rustc-link-lib=gdi32");
println!("cargo:rustc-link-lib=winmm");
println!("cargo:rustc-link-lib=imm32");
println!("cargo:rustc-link-lib=ole32");
println!("cargo:rustc-link-lib=oleaut32");
println!("cargo:rustc-link-lib=version");
println!("cargo:rustc-link-lib=uuid");
println!("cargo:rustc-link-lib=dinput8");
println!("cargo:rustc-link-lib=dxguid");
println!("cargo:rustc-link-lib=setupapi");
} else if target_os.contains("linux") {
println!("cargo:rustc-link-lib=sndio");
} else if target_os == "darwin" {
println!("cargo:rustc-link-lib=framework=Cocoa");
println!("cargo:rustc-link-lib=framework=IOKit");
println!("cargo:rustc-link-lib=framework=Carbon");
println!("cargo:rustc-link-lib=framework=ForceFeedback");
println!("cargo:rustc-link-lib=framework=CoreVideo");
println!("cargo:rustc-link-lib=framework=CoreAudio");
println!("cargo:rustc-link-lib=framework=AudioToolbox");
println!("cargo:rustc-link-lib=iconv");
} else {
// TODO: Add other platform linker options here.
}
}
// SDL libraries seem to not be packed with pkgconfig file on all distros,
// and in the same distros (fedora at least) a symlink is also missing.
//
// Linking directly with file is not possible with cargo since the
// ':filename' syntax is used for renaming of libraries, which basically
// leaves it up to the user to make a symlink to the shared object so
// -lSDL2_mixer can find it.
#[cfg(all(not(feature = "use-pkgconfig"), not(feature = "static-link")))] {
if cfg!(feature = "mixer") {
if target_os.contains("linux") || target_os.contains("freebsd") || target_os.contains("openbsd") {
println!("cargo:rustc-flags=-l SDL2_mixer");
} else if target_os.contains("windows") {
println!("cargo:rustc-flags=-l SDL2_mixer");
} else if target_os.contains("darwin") {
if cfg!(any(mac_framework, feature="use_mac_framework")) {
println!("cargo:rustc-flags=-l framework=SDL2_mixer");
} else {
println!("cargo:rustc-flags=-l SDL2_mixer");
}
}
}
if cfg!(feature = "image") {
if target_os.contains("linux") || target_os.contains("freebsd") || target_os.contains("openbsd") {
println!("cargo:rustc-flags=-l SDL2_image");
} else if target_os.contains("windows") {
println!("cargo:rustc-flags=-l SDL2_image");
} else if target_os.contains("darwin") {
if cfg!(any(mac_framework, feature="use_mac_framework")) {
println!("cargo:rustc-flags=-l framework=SDL2_image");
} else {
println!("cargo:rustc-flags=-l SDL2_image");
}
}
}
if cfg!(feature = "ttf") {
if target_os.contains("linux") || target_os.contains("freebsd") || target_os.contains("openbsd") {
println!("cargo:rustc-flags=-l SDL2_ttf");
} else if target_os.contains("windows") {
println!("cargo:rustc-flags=-l SDL2_ttf");
} else if target_os.contains("darwin") {
if cfg!(any(mac_framework, feature="use_mac_framework")) {
println!("cargo:rustc-flags=-l framework=SDL2_ttf");
} else {
println!("cargo:rustc-flags=-l SDL2_ttf");
}
}
}
if cfg!(feature = "gfx") {
if target_os.contains("linux") || target_os.contains("freebsd") || target_os.contains("openbsd") {
println!("cargo:rustc-flags=-l SDL2_gfx");
} else if target_os.contains("windows") {
println!("cargo:rustc-flags=-l SDL2_gfx");
} else if target_os.contains("darwin") {
if cfg!(any(mac_framework, feature="use_mac_framework")) {
println!("cargo:rustc-flags=-l framework=SDL2_gfx");
} else {
println!("cargo:rustc-flags=-l SDL2_gfx");
}
}
}
}
}
fn find_cargo_target_dir() -> PathBuf {
// Infer the top level cargo target dir from the OUT_DIR by searching
// upwards until we get to $CARGO_TARGET_DIR/build/ (which is always one
// level up from the deepest directory containing our package name)
let pkg_name = env::var("CARGO_PKG_NAME").unwrap();
let mut out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
loop {
{
let final_path_segment = out_dir.file_name().unwrap();
if final_path_segment.to_string_lossy().contains(&pkg_name) {
break;
}
}
if !out_dir.pop() {
panic!("Malformed build path: {}", out_dir.to_string_lossy());
}
}
out_dir.pop();
out_dir.pop();
out_dir
}
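// Hedged illustration (not part of the build script): for an OUT_DIR like
//   <target>/debug/build/sdl2-sys-<hash>/out
// the loop above pops "out", stops at "sdl2-sys-<hash>" (it contains CARGO_PKG_NAME),
// and the two trailing pops leave <target>/debug, which is where copy_dynamic_libraries
// drops SDL2.dll next to the test binaries.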
fn copy_dynamic_libraries(sdl2_compiled_path: &PathBuf, target_os: &str) {
// Windows binaries do not embed library search paths, so successfully
// linking the DLL isn't sufficient to find it at runtime -- it must be
// either on PATH or in the current working directory when we run binaries
// linked against it. In other words, to run the test suite we need to
// copy sdl2.dll out of its build tree and down to the top level cargo
// binary output directory.
if target_os.contains("windows") {
let sdl2_dll_name = "SDL2.dll";
let sdl2_bin_path = sdl2_compiled_path.join("bin");
let target_path = find_cargo_target_dir();
let src_dll_path = sdl2_bin_path.join(sdl2_dll_name);
let dst_dll_path = target_path.join(sdl2_dll_name);
fs::copy(&src_dll_path, &dst_dll_path)
.expect(&format!("Failed to copy SDL2 dynamic library from {} to {}",
src_dll_path.to_string_lossy(),
dst_dll_path.to_string_lossy()));
}
}
fn main() {
let target = env::var("TARGET").expect("Cargo build scripts always have TARGET");
let host = env::var("HOST").expect("Cargo build scripts always have HOST");
let target_os = get_os_from_triple(target.as_str()).unwrap();
let sdl2_compiled_path: PathBuf;
#[cfg(feature = "bundled")] {
let sdl2_source_path = download_sdl2();
patch_sdl2(sdl2_source_path.as_path());
sdl2_compiled_path = compile_sdl2(sdl2_source_path.as_path(), target_os);
let sdl2_downloaded_include_path = sdl2_source_path.join("include");
let sdl2_compiled_lib_path = sdl2_compiled_path.join("lib");
println!("cargo:rustc-link-search={}", sdl2_compiled_lib_path.display());
#[cfg(feature = "bindgen")] {
let include_paths = vec!(String::from(sdl2_downloaded_include_path.to_str().unwrap()));
println!("cargo:include={}", include_paths.join(":"));
generate_bindings(target.as_str(), host.as_str(), include_paths.as_slice())
}
#[cfg(not(feature = "bindgen"))] {
println!("cargo:include={}", sdl2_downloaded_include_path.display());
}
};
#[cfg(all(not(feature = "bundled"), feature = "bindgen"))] {
let include_paths: Vec<String> = compute_include_paths();
generate_bindings(target.as_str(), host.as_str(), include_paths.as_slice())
}
#[cfg(not(feature = "bindgen"))] {
copy_pregenerated_bindings();
println!("cargo:include={}", get_bundled_header_path().display());
}
link_sdl2(target_os);
#[cfg(all(feature = "bundled", not(feature = "static-link")))] {
copy_dynamic_libraries(&sdl2_compiled_path, target_os);
}
}
#[cfg(not(feature = "bindgen"))]
fn copy_pregenerated_bindings() {
let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
let crate_path = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
fs::copy(crate_path.join("sdl_bindings.rs"), out_path.join("sdl_bindings.rs"))
.expect("Couldn't find pregenerated bindings!");
if cfg!(feature = "image") {
fs::copy(crate_path.join("sdl_image_bindings.rs"), out_path.join("sdl_image_bindings.rs"))
.expect("Couldn't find pregenerated SDL_image bindings!");
}
if cfg!(feature = "ttf") {
fs::copy(crate_path.join("sdl_ttf_bindings.rs"), out_path.join("sdl_ttf_bindings.rs"))
.expect("Couldn't find pregenerated SDL_ttf bindings!");
}
if cfg!(feature = "mixer") {
fs::copy(crate_path.join("sdl_mixer_bindings.rs"), out_path.join("sdl_mixer_bindings.rs"))
.expect("Couldn't find pregenerated SDL_mixer bindings!");
}
if cfg!(feature = "gfx") {
fs::copy(crate_path.join("sdl_gfx_framerate_bindings.rs"), out_path.join("sdl_gfx_framerate_bindings.rs"))
.expect("Couldn't find pregenerated SDL_gfx framerate bindings!");
fs::copy(crate_path.join("sdl_gfx_primitives_bindings.rs"), out_path.join("sdl_gfx_primitives_bindings.rs"))
.expect("Couldn't find pregenerated SDL_gfx primitives bindings!");
fs::copy(crate_path.join("sdl_gfx_imagefilter_bindings.rs"), out_path.join("sdl_gfx_imagefilter_bindings.rs"))
.expect("Couldn't find pregenerated SDL_gfx imagefilter bindings!");
fs::copy(crate_path.join("sdl_gfx_rotozoom_bindings.rs"), out_path.join("sdl_gfx_rotozoom_bindings.rs"))
.expect("Couldn't find pregenerated SDL_gfx rotozoom bindings!");
}
}
#[cfg(feature = "bindgen")]
// headers_paths is a list of directories where the SDL2 headers are expected
// to be found by bindgen (each entry should point to an include/ directory)
fn generate_bindings(target: &str, host: &str, headers_paths: &[String]) {
let target_os = get_os_from_triple(target).unwrap();
let mut bindings = bindgen::Builder::default()
// enable no_std-friendly output by only using core definitions
.use_core()
.default_enum_style(bindgen::EnumVariation::Rust { non_exhaustive: false })
.ctypes_prefix("libc");
let mut image_bindings = bindgen::Builder::default()
.use_core()
.raw_line("use crate::*;")
.ctypes_prefix("libc");
let mut ttf_bindings = bindgen::Builder::default()
.use_core()
.raw_line("use crate::*;")
.ctypes_prefix("libc");
let mut mixer_bindings = bindgen::Builder::default()
.use_core()
.raw_line("use crate::*;")
.ctypes_prefix("libc");
let mut gfx_framerate_bindings = bindgen::Builder::default()
.use_core()
.ctypes_prefix("libc");
let mut gfx_primitives_bindings = bindgen::Builder::default()
.use_core()
.raw_line("use crate::*;")
.ctypes_prefix("libc");
let mut gfx_imagefilter_bindings = bindgen::Builder::default()
.use_core()
.ctypes_prefix("libc");
let mut gfx_rotozoom_bindings = bindgen::Builder::default()
.use_core()
.raw_line("use crate::*;")
.ctypes_prefix("libc");
// Set correct target triple for bindgen when cross-compiling
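    // For example (illustrative triple, not a requirement): building on an x86_64 Linux
    // host for "armv7-unknown-linux-gnueabihf" passes `-target armv7-unknown-linux-gnueabihf`
    // through to clang so bindgen sees the target's ABI rather than the host's.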
if target != host {
bindings = bindings.clang_arg("-target");
bindings = bindings.clang_arg(target.clone());
if cfg!(feature = "image") {
image_bindings = image_bindings.clang_arg("-target");
image_bindings = image_bindings.clang_arg(target.clone());
}
if cfg!(feature = "ttf") {
ttf_bindings = ttf_bindings.clang_arg("-target");
ttf_bindings = ttf_bindings.clang_arg(target.clone());
}
if cfg!(feature = "mixer") {
mixer_bindings = mixer_bindings.clang_arg("-target");
mixer_bindings = mixer_bindings.clang_arg(target.clone());
}
if cfg!(feature = "gfx") {
gfx_framerate_bindings = gfx_framerate_bindings.clang_arg("-target");
gfx_framerate_bindings = gfx_framerate_bindings.clang_arg(target.clone());
gfx_primitives_bindings = gfx_primitives_bindings.clang_arg("-target");
gfx_primitives_bindings = gfx_primitives_bindings.clang_arg(target.clone());
gfx_imagefilter_bindings = gfx_imagefilter_bindings.clang_arg("-target");
gfx_imagefilter_bindings = gfx_imagefilter_bindings.clang_arg(target.clone());
gfx_rotozoom_bindings = gfx_rotozoom_bindings.clang_arg("-target");
gfx_rotozoom_bindings = gfx_rotozoom_bindings.clang_arg(target.clone());
}
}
    if headers_paths.is_empty() {
// if no paths are being provided, fall back to the headers included in this repo
let include_path = get_bundled_header_path();
println!("cargo:include={}", include_path.display());
bindings = bindings.clang_arg(format!("-I{}", include_path.display()));
if cfg!(feature = "image") {
image_bindings = image_bindings.clang_arg(format!("-I{}", include_path.display()));
}
if cfg!(feature = "ttf") {
ttf_bindings = ttf_bindings.clang_arg(format!("-I{}", include_path.display()));
}
if cfg!(feature = "mixer") {
mixer_bindings = mixer_bindings.clang_arg(format!("-I{}", include_path.display()));
}
if cfg!(feature = "gfx") {
gfx_framerate_bindings = gfx_framerate_bindings.clang_arg(format!("-I{}", include_path.display()));
gfx_primitives_bindings = gfx_primitives_bindings.clang_arg(format!("-I{}", include_path.display()));
gfx_imagefilter_bindings = gfx_imagefilter_bindings.clang_arg(format!("-I{}", include_path.display()));
gfx_rotozoom_bindings = gfx_rotozoom_bindings.clang_arg(format!("-I{}", include_path.display()));
}
} else {
// if paths are included, use them for bindgen. Bindgen should use the first one.
println!("cargo:include={}", headers_paths.join(":"));
for headers_path in headers_paths {
bindings = bindings.clang_arg(format!("-I{}", headers_path));
if cfg!(feature = "image") {
image_bindings = image_bindings.clang_arg(format!("-I{}", headers_path));
}
if cfg!(feature = "ttf") {
ttf_bindings = ttf_bindings.clang_arg(format!("-I{}", headers_path));
}
if cfg!(feature = "mixer") {
mixer_bindings = mixer_bindings.clang_arg(format!("-I{}", headers_path));
}
if cfg!(feature = "gfx") {
gfx_framerate_bindings = gfx_framerate_bindings.clang_arg(format!("-I{}", headers_path));
gfx_primitives_bindings = gfx_primitives_bindings.clang_arg(format!("-I{}", headers_path));
gfx_imagefilter_bindings = gfx_imagefilter_bindings.clang_arg(format!("-I{}", headers_path));
gfx_rotozoom_bindings = gfx_rotozoom_bindings.clang_arg(format!("-I{}", headers_path));
}
}
}
if target_os == "windows-msvc" {
add_msvc_includes_to_bindings!(bindings);
if cfg!(feature = "image") {
add_msvc_includes_to_bindings!(image_bindings);
}
if cfg!(feature = "ttf") {
add_msvc_includes_to_bindings!(ttf_bindings);
}
if cfg!(feature = "mixer") {
add_msvc_includes_to_bindings!(mixer_bindings);
}
if cfg!(feature = "gfx") {
add_msvc_includes_to_bindings!(gfx_framerate_bindings);
add_msvc_includes_to_bindings!(gfx_primitives_bindings);
add_msvc_includes_to_bindings!(gfx_imagefilter_bindings);
add_msvc_includes_to_bindings!(gfx_rotozoom_bindings);
}
};
    // SDL2 doesn't have a default video driver configuration for Linux, so define the X11 and Wayland drivers explicitly
if target_os == "linux-gnu" {
bindings = bindings.clang_arg("-DSDL_VIDEO_DRIVER_X11");
bindings = bindings.clang_arg("-DSDL_VIDEO_DRIVER_WAYLAND");
if cfg!(feature = "image") {
image_bindings = image_bindings.clang_arg("-DSDL_VIDEO_DRIVER_X11");
image_bindings = image_bindings.clang_arg("-DSDL_VIDEO_DRIVER_WAYLAND");
}
if cfg!(feature = "ttf") {
ttf_bindings = ttf_bindings.clang_arg("-DSDL_VIDEO_DRIVER_X11");
ttf_bindings = ttf_bindings.clang_arg("-DSDL_VIDEO_DRIVER_WAYLAND");
}
if cfg!(feature = "mixer") {
mixer_bindings = mixer_bindings.clang_arg("-DSDL_VIDEO_DRIVER_X11");
mixer_bindings = mixer_bindings.clang_arg("-DSDL_VIDEO_DRIVER_WAYLAND");
}
if cfg!(feature = "gfx") {
gfx_framerate_bindings = gfx_framerate_bindings.clang_arg("-DSDL_VIDEO_DRIVER_X11");
gfx_framerate_bindings = gfx_framerate_bindings.clang_arg("-DSDL_VIDEO_DRIVER_WAYLAND");
gfx_primitives_bindings = gfx_primitives_bindings.clang_arg("-DSDL_VIDEO_DRIVER_X11");
gfx_primitives_bindings = gfx_primitives_bindings.clang_arg("-DSDL_VIDEO_DRIVER_WAYLAND");
gfx_imagefilter_bindings = gfx_imagefilter_bindings.clang_arg("-DSDL_VIDEO_DRIVER_X11");
gfx_imagefilter_bindings = gfx_imagefilter_bindings.clang_arg("-DSDL_VIDEO_DRIVER_WAYLAND");
gfx_rotozoom_bindings = gfx_rotozoom_bindings.clang_arg("-DSDL_VIDEO_DRIVER_X11");
gfx_rotozoom_bindings = gfx_rotozoom_bindings.clang_arg("-DSDL_VIDEO_DRIVER_WAYLAND");
}
}
let bindings = bindings
.header("wrapper.h")
.blacklist_type("FP_NAN")
.blacklist_type("FP_INFINITE")
.blacklist_type("FP_ZERO")
.blacklist_type("FP_SUBNORMAL")
.blacklist_type("FP_NORMAL")
.derive_debug(false)
.generate()
.expect("Unable to generate bindings!");
let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
bindings
.write_to_file(out_path.join("sdl_bindings.rs"))
.expect("Couldn't write bindings!");
if cfg!(feature = "image") {
let image_bindings = image_bindings
.header("wrapper_image.h")
.blacklist_type("FP_NAN")
.blacklist_type("FP_INFINITE")
.blacklist_type("FP_ZERO")
.blacklist_type("FP_SUBNORMAL")
.blacklist_type("FP_NORMAL")
.whitelist_type("IMG.*")
.whitelist_function("IMG.*")
.whitelist_var("IMG.*")
.blacklist_type("SDL_.*")
.blacklist_type("_IO.*|FILE")
.generate()
.expect("Unable to generate image_bindings!");
let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
image_bindings
.write_to_file(out_path.join("sdl_image_bindings.rs"))
.expect("Couldn't write image_bindings!");
}
if cfg!(feature = "ttf") {
let ttf_bindings = ttf_bindings
.header("wrapper_ttf.h")
.blacklist_type("FP_NAN")
.blacklist_type("FP_INFINITE")
.blacklist_type("FP_ZERO")
.blacklist_type("FP_SUBNORMAL")
.blacklist_type("FP_NORMAL")
.whitelist_type("TTF.*")
.whitelist_function("TTF.*")
.whitelist_var("TTF.*")
.blacklist_type("SDL_.*")
.blacklist_type("_IO.*|FILE")
.generate()
.expect("Unable to generate ttf_bindings!");
let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
ttf_bindings
.write_to_file(out_path.join("sdl_ttf_bindings.rs"))
.expect("Couldn't write ttf_bindings!");
}
if cfg!(feature = "mixer") {
let mixer_bindings = mixer_bindings
.header("wrapper_mixer.h")
.blacklist_type("FP_NAN")
.blacklist_type("FP_INFINITE")
.blacklist_type("FP_ZERO")
.blacklist_type("FP_SUBNORMAL")
.blacklist_type("FP_NORMAL")<|fim▁hole|> .whitelist_type("MUS.*")
.whitelist_function("Mix.*")
.whitelist_var("MIX.*")
.whitelist_var("MUS.*")
.blacklist_type("SDL_.*")
.blacklist_type("_IO.*|FILE")
.generate()
.expect("Unable to generate mixer_bindings!");
let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
mixer_bindings
.write_to_file(out_path.join("sdl_mixer_bindings.rs"))
.expect("Couldn't write mixer_bindings!");
}
if cfg!(feature = "gfx") {
let gfx_framerate_bindings = gfx_framerate_bindings
.header("wrapper_gfx_framerate.h")
.blacklist_type("FP_NAN")
.blacklist_type("FP_INFINITE")
.blacklist_type("FP_ZERO")
.blacklist_type("FP_SUBNORMAL")
.blacklist_type("FP_NORMAL")
.whitelist_type("FPS.*")
.whitelist_function("SDL_.*rame.*")
.whitelist_var("FPS.*")
.blacklist_type("_IO.*|FILE")
.generate()
.expect("Unable to generate gfx_framerate_bindings!");
let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
gfx_framerate_bindings
.write_to_file(out_path.join("sdl_gfx_framerate_bindings.rs"))
.expect("Couldn't write gfx_framerate_bindings!");
let gfx_primitives_bindings = gfx_primitives_bindings
.header("wrapper_gfx_primitives.h")
.blacklist_type("FP_NAN")
.blacklist_type("FP_INFINITE")
.blacklist_type("FP_ZERO")
.blacklist_type("FP_SUBNORMAL")
.blacklist_type("FP_NORMAL")
.blacklist_type("SDL_.*")
.whitelist_function("pixel.*")
.whitelist_function("rectangle.*")
.whitelist_function("rounded.*")
.whitelist_function("box.*")
.whitelist_function(".*line(Color|RGBA).*")
.whitelist_function("thick.*")
.whitelist_function(".*circle.*")
.whitelist_function("arc.*")
.whitelist_function("filled.*")
.whitelist_function(".*ellipse.*")
.whitelist_function("pie.*")
.whitelist_function(".*trigon.*")
.whitelist_function(".*polygon.*")
.whitelist_function("textured.*")
.whitelist_function("bezier.*")
.whitelist_function("character.*")
.whitelist_function("string.*")
.whitelist_function("gfx.*")
.blacklist_type("_IO.*|FILE")
.generate()
.expect("Unable to generate gfx_primitives_bindings!");
let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
gfx_primitives_bindings
.write_to_file(out_path.join("sdl_gfx_primitives_bindings.rs"))
.expect("Couldn't write gfx_primitives_bindings!");
let gfx_imagefilter_bindings = gfx_imagefilter_bindings
.header("wrapper_gfx_imagefilter.h")
.whitelist_function("SDL_image.*")
.blacklist_type("FP_NAN")
.blacklist_type("FP_INFINITE")
.blacklist_type("FP_ZERO")
.blacklist_type("FP_SUBNORMAL")
.blacklist_type("FP_NORMAL")
.blacklist_type("_IO.*|FILE")
.generate()
.expect("Unable to generate gfx_imagefilter_bindings!");
let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
gfx_imagefilter_bindings
.write_to_file(out_path.join("sdl_gfx_imagefilter_bindings.rs"))
.expect("Couldn't write gfx_imagefilter_bindings!");
let gfx_rotozoom_bindings = gfx_rotozoom_bindings
.header("wrapper_gfx_rotozoom.h")
.blacklist_type("SDL_.*")
.whitelist_function("rotozoom.*")
.whitelist_function("zoom.*")
.whitelist_function("shrink.*")
.whitelist_function("rotate.*")
.blacklist_type("FP_NAN")
.blacklist_type("FP_INFINITE")
.blacklist_type("FP_ZERO")
.blacklist_type("FP_SUBNORMAL")
.blacklist_type("FP_NORMAL")
.blacklist_type("_IO.*|FILE")
.generate()
.expect("Unable to generate gfx_rotozoom_bindings!");
let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
gfx_rotozoom_bindings
.write_to_file(out_path.join("sdl_gfx_rotozoom_bindings.rs"))
.expect("Couldn't write gfx_rotozoom_bindings!");
}
}
fn get_os_from_triple(triple: &str) -> Option<&str>
{
triple.splitn(3, "-").nth(2)
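    // For example, "x86_64-pc-windows-msvc" yields Some("windows-msvc") and
    // "x86_64-unknown-linux-gnu" yields Some("linux-gnu"), matching the
    // target_os checks above (illustrative triples, not an exhaustive list).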
}<|fim▁end|>
|
.whitelist_type("MIX.*")
.whitelist_type("Mix.*")
|
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>/**
* Created by shuyi.wu on 2015/4/1.
*/
'use strict';
var gulp = require('gulp'),
uglify = require('gulp-uglify'),
rename = require('gulp-rename'),
del = require('del'),
webpack = require('gulp-webpack');
gulp.task('compileES6', function () {
return gulp.src('./assets/js-es6/functions.js')
.pipe(webpack({<|fim▁hole|> {test: /\.js$/, exclude: /node_modules/, loader: 'babel-loader'}
]
}
}))
.pipe(rename('build.js'))
.pipe(gulp.dest('./assets/js/'));
});
gulp.task('watch-compileES6', function () {
return gulp.src('./assets/js-es6/functions.js')
.pipe(webpack({
watch: true,
module: {
loaders: [
{test: /\.js$/, exclude: /node_modules/, loader: 'babel-loader'}
]
}
}))
.pipe(rename('build.js'))
.pipe(gulp.dest('./assets/js/'));
});
gulp.task('min', ['compileES6'], function () {
return gulp.src('./assets/js/build.js')
.pipe(uglify())
.pipe(rename('build.min.js'))
.pipe(gulp.dest('./assets/js/'));
});
gulp.task('clean', function (cb) {
del('./assets/js/**', cb);
});
gulp.task('default', ['clean', 'min']);<|fim▁end|>
|
module: {
loaders: [
|
<|file_name|>lista02_exercicio01_questao06.py<|end_file_name|><|fim▁begin|>#-----------------------------------------------------------------------------------------------------------------------
#Introdução a Programação de Computadores - IPC
#Universidade do Estado do Amazonas - UEA
#Prof. Jucimar Jr.
#Alexandre Marques Uchôa 1715310028
#Jandinne Duarte de Oliveira 1015070265<|fim▁hole|>##
#Write a program that asks for the radius of a circle, then calculates and displays its area.
#-----------------------------------------------------------------------------------------------------------------------
r = float(input("Digite um raio"))
area = (3.14*r*r)
print ('Sua área é', area)<|fim▁end|>
|
#Uriel Brito Barros 1515120558
#Roberta de Oliveira da cruz 0825070169
#Evandro Padilha Barroso Filho 1715310009
#
|
<|file_name|>dht22.cpp<|end_file_name|><|fim▁begin|>#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <dirent.h>
#include <fcntl.h>
#include <assert.h>
#include <unistd.h>
#include <sys/mman.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <bcm2835.h>
#include <unistd.h>
#include <iostream>
//#define BCM2708_PERI_BASE 0x20000000
#define BCM2708_PERI_BASE 0x3F000000
#define GPIO_BASE (BCM2708_PERI_BASE + 0x200000) /* GPIO controller */
#define MAXTIMINGS 100
#define PIN_DHT 4 //GPIO Mapping DHT Sensor
#define PIN_LED RPI_GPIO_P1_12 //GPIO Mapping LED
//#define DEBUG
using namespace std;
int readDHT(int pin, float *humid0, float *temp0);
int cosmput(float humid, float temp, int *feedid, char *key, char *feed_name, char *field0_name, char *field1_name);
int readconfig(char *pFileName, int *feedid, char *key, char *feed_name, char *field0_name, char *field1_name);
int main(int argc, char **argv) {
long cnt = 1;
if(argc > 1){
cnt = atol(argv[1]);
}
int dhtpin = PIN_DHT;
int ledpin = PIN_LED;
float humid0, temp0, ahumid, atemp,otemp = 0;
int feedid = 0;
char key[100];
char feed_name[100];
char field0_name[100];
char field1_name[100];
// char pFileName[]="config.ini";
// readconfig(pFileName, &feedid, key, feed_name, field0_name, field1_name);
if (!bcm2835_init())
return 1;
bcm2835_gpio_fsel(ledpin, BCM2835_GPIO_FSEL_OUTP);
bcm2835_gpio_write(ledpin, HIGH); //LED an
fprintf(stderr,"Using pin #%d\n", dhtpin);
while(cnt > 0) {
ahumid = atemp = 0.0;
for (int i=0; i< 5; i++) { // Mittelwert bilden, um "zittern" der Kurve zu minimieren
readDHT(dhtpin, &humid0, &temp0);
ahumid = ahumid + humid0;
atemp = atemp + temp0;
sleep(1);
}
ahumid = ahumid / 5;
atemp = atemp / 5;
if(ahumid < 5 || atemp < 5 || ahumid >100 || atemp > 100)// || (otemp > 0 && (atemp < otemp - 5 || atemp > otemp +5))){
{
fprintf(stderr,"Invalid values. Still calibrating?\n");
continue;
}
time_t tr = time(NULL);
//char *t = asctime(localtime(&tr));
cnt--;
printf("TIME=%d\nTEMP=%0.1f\nHUMID=%0.1f\n", tr, atemp, ahumid);
otemp = atemp;
//cosmput(ahumid, atemp, &feedid, key, feed_name, field0_name, field1_name);
}
bcm2835_gpio_fsel(ledpin, BCM2835_GPIO_FSEL_OUTP);
bcm2835_gpio_write(ledpin, LOW); //LED aus
return 0;
} // main
int readDHT(int pin, float *humid0, float *temp0) {
int counter = 0;
int laststate = HIGH;
int j=0;
int bits[250], data[100];
int bitidx = 0;
// Set GPIO pin to output
bcm2835_gpio_fsel(pin, BCM2835_GPIO_FSEL_OUTP);
bcm2835_gpio_write(pin, HIGH);
usleep(500000); // 500 ms
bcm2835_gpio_write(pin, LOW);
usleep(20000);
bcm2835_gpio_fsel(pin, BCM2835_GPIO_FSEL_INPT);
data[0] = data[1] = data[2] = data[3] = data[4] = 0;
// wait for pin to drop?
while (bcm2835_gpio_lev(pin) == 1) {
usleep(1);
}
// read data!
for (int i=0; i< MAXTIMINGS; i++) {
counter = 0;
while ( bcm2835_gpio_lev(pin) == laststate) {
counter++;
//nanosleep(1); // overclocking might change this?
if (counter == 1000)
break;
}
laststate = bcm2835_gpio_lev(pin);
if (counter == 1000) break;
bits[bitidx++] = counter;
if ((i>3) && (i%2 == 0)) {
// shove each bit into the storage bytes
data[j/8] <<= 1;
if (counter > 200)
data[j/8] |= 1;
j++;
}
}
#ifdef DEBUG
for (int i=3; i<bitidx; i+=2) {
printf("bit %d: %d\n", i-3, bits[i]);
printf("bit %d: %d (%d)\n", i-2, bits[i+1], bits[i+1] > 200);
}
printf("Data (%d): 0x%x 0x%x 0x%x 0x%x 0x%x\n", j, data[0], data[1], data[2], data[3], data[4]);
#endif
if ((j >= 39) && (data[4] == ((data[0] + data[1] + data[2] + data[3]) & 0xFF)) ) { // yay!
float f, h;
h = data[0] * 256 + data[1];
h /= 10;
f = (data[2] & 0x7F)* 256 + data[3];
f /= 10.0;
if (data[2] & 0x80) {
f *= -1;
}
//printf("Temp = %.1f *C, Hum = %.1f \%\n", f, h);
*humid0 = h;
*temp0 = f;
}
return 0;
}
int cosmput(float humid, float temp, int *feedid, char *key, char *feed_name, char *field0_name, char *field1_name) {
// CURL *curl;
// CURLcode res;
char xapikey[60];
sprintf(xapikey, "X-ApiKey: %s",key);
char url[50];
sprintf(url, "http://api.cosm.com/v2/feeds/%d.json", *feedid);
char payload[200];
sprintf(payload, "{\"title\":\"%s\",\"version\":\"1.0.0\",\"datastreams\":[{\"id\":\"%s\",\"current_value\":%0.1f},{\"id\":\"%s\",\"current_value\":%0.1f}]}", feed_name, field0_name, humid, field1_name, temp);
// struct curl_slist *header=NULL;
// header = curl_slist_append(header, xapikey);
// curl_global_init(CURL_GLOBAL_ALL);
// curl = curl_easy_init();
//
// curl_easy_setopt(curl, CURLOPT_VERBOSE, 0);
// curl_easy_setopt(curl, CURLOPT_HTTPHEADER, header);
// curl_easy_setopt(curl, CURLOPT_CUSTOMREQUEST, "PUT");
// curl_easy_setopt(curl, CURLOPT_URL, url);
// curl_easy_setopt(curl, CURLOPT_POSTFIELDS, payload);
//
// res = curl_easy_perform(curl);
// if(res != CURLE_OK) {
// fprintf(stderr, "curl_easy_perform() failed: %s\n", curl_easy_strerror(res));
// }
//
// curl_easy_cleanup(curl);
// curl_slist_free_all(header);
// curl_global_cleanup();
return 0;
}
int readconfig(char *pFileName, int *feedid, char *key, char *feed_name, char *field0_name, char *field1_name) {
char buffer[1024];
char label[120];
char value[100];
int allread = 0;
FILE* fp;
fp = fopen(pFileName, "r");
if (!fp) {
printf("Error opening config_file %s!\n", pFileName);
return 1;
}
printf("Opening config file: %s\n", pFileName);
fflush(stdout);
while (feof(fp) == 0) {
fgets(buffer, 1024, fp);
if ((buffer[0] != '#')) // && (no>2))
{<|fim▁hole|> if (sscanf(buffer, "%[^'=']=%[^'\n']%s", &label, &value) >= 2){
if (strcmp(label, "FEEDID") == 0)
*feedid = atoi(value);
if (strcmp(label, "KEY") == 0)
sprintf(key, "%s", value);
if (strcmp(label, "FEED_NAME") == 0)
sprintf(feed_name, "%s", value);
if (strcmp(label, "FIELD0_NAME") == 0)
sprintf(field0_name, "%s", value);
if (strcmp(label, "FIELD1_NAME") == 0)
sprintf(field1_name, "%s", value);
}
}
}
fclose(fp);
return 0;
}<|fim▁end|>
| |
<|file_name|>FlowChart.js<|end_file_name|><|fim▁begin|>dojo.declare("gamelena.gfx.FlowChart", null, {
id: null,
surface: null,
domPrefix: null,
group: null,
circles: [],
circlesLabels: [],
moves: [],
groups: [],
lines: [],
linesGroups: [],
linesHelpers: [],
linesPointers: [],
linesLabels: [],
events: [],
radio: 50,
nodeComponent: null,
connectorComponent: null,
constructor: function(args) {
dojo.declare.safeMixin(this, args);
},
init : function() {
var self = this;
require(['gamelena/gfx'], function(gfx){
console.debug(self.domPrefix + "gfx_holder");
var container = dojo.byId(self.domPrefix + "gfx_holder");
self.surface = gfx.createSurface(container, 4800, 2400);
dojo.connect(self.surface, "ondragstart", dojo, function(e) {
console.debug(e)
});
dojo.connect(self.surface, "onselectstart", dojo, function(e) {
console.debug(e)
});
});
this.group = this.surface.createGroup();
},
addNode: function(data, x1, y1) {<|fim▁hole|> var self = this;
require(['gamelena/gfx/chart/Node'], function(Node){
var label = data[self.labelName];
var node = new Node({
component: self.nodeComponent,
});
node.add(data, x1, x2);
})
},
addConnector: function (originShape, destinyShape, data)
{
var self = this;
require(['gamelena/gfx/chart/Connector'], function(Connector){
var connector = new Connector(
);
connector.add()
});
},
highLightShape: function(myShape) {
var self = this;
require(['dojox/gfx/fx'], function(fx){
myShape.moveToFront();
var stroke = myShape.getStroke();
var color = stroke != null ? stroke.color : 'green';
var width = stroke != null ? stroke.width : 1;
self.animation = new fx.animateStroke({
duration : 2400,
shape : myShape,
color : {
start : "#FFA600",
end : "yellow"
},
width : {
end : 60,
start : 60
},
join : {
values : [ "outer", "bevel", "radial" ]
},
onAnimate : function() {
// onAnimate
myShape.moveToFront();
},
onEnd : function() {
myShape.moveToFront();
new fx.animateStroke({
duration : 1200,
shape : myShape,
color : {
end : color
},
width : {
end : width
}
}).play();
myShape.moveToFront();
}
});
self.animation.play();
myShape.moveToFront();
});
}
});<|fim▁end|>
| |
<|file_name|>perfect_number.py<|end_file_name|><|fim▁begin|>def perfect_number():
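    # A perfect number equals the sum of its proper divisors,
    # e.g. 6 = 1 + 2 + 3 and 28 = 1 + 2 + 4 + 7 + 14.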
sum, count, number = 0, 0, 1
top = input("enter a top value: ")
while number < top:
for i in range(1, number/2+1):
if number/i*i == number:
sum += i
if sum == number:
count += 1
print count, ".perfect number: ", number
number += 1
sum = 0
<|fim▁hole|> if count == 0:
print "any perfect number in this range"
if __name__ == '__main__':
perfect_number()<|fim▁end|>
| |
<|file_name|>UnresolvedReceiverGeneric.py<|end_file_name|><|fim▁begin|>from typing import TypeVar, Dict, Iterable, Any
T = TypeVar("T")
<|fim▁hole|>def foo(values: Dict[T, Iterable[Any]]):
for e in []:
values.setdefault(e, undefined)<|fim▁end|>
| |
<|file_name|>shrd_Ed_Gd_Ib_mem.java<|end_file_name|><|fim▁begin|>/*
JPC: An x86 PC Hardware Emulator for a pure Java Virtual Machine
Copyright (C) 2012-2013 Ian Preston
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License version 2 as published by
the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
Details (including contact information) can be found at:
jpc.sourceforge.net
or the developer website
sourceforge.net/projects/jpc/
End of licence header
*/
package com.github.smeny.jpc.emulator.execution.opcodes.pm;
import com.github.smeny.jpc.emulator.execution.*;
import com.github.smeny.jpc.emulator.execution.decoder.*;
import com.github.smeny.jpc.emulator.processor.*;
import com.github.smeny.jpc.emulator.processor.fpu64.*;
import static com.github.smeny.jpc.emulator.processor.Processor.*;
public class shrd_Ed_Gd_Ib_mem extends Executable
{
final Pointer op1;
final int op2Index;
final int immb;
public shrd_Ed_Gd_Ib_mem(int blockStart, int eip, int prefices, PeekableInputStream input)
{
super(blockStart, eip);
int modrm = input.readU8();
op1 = Modrm.getPointer(prefices, modrm, input);
op2Index = Modrm.Gd(modrm);
immb = Modrm.Ib(input);
}
public Branch execute(Processor cpu)
{
Reg op2 = cpu.regs[op2Index];
if(immb != 0)
{
int shift = immb & 0x1f;<|fim▁hole|> long rot = ((0xffffffffL &op2.get32()) << 32) | (0xffffffffL & op1.get32(cpu));
cpu.flagResult = ((int)(rot >> shift));
op1.set32(cpu, cpu.flagResult);
cpu.flagIns = UCodes.SHRD32;
cpu.flagStatus = OSZAPC;
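            // Worked example (illustrative values, not from a real trace): with
            // op1 = 0x000000FF, op2 = 0x12345678 and shift = 8, rot is
            // 0x12345678000000FF and rot >> 8 is 0x0012345678000000, so the
            // stored 32-bit result is 0x78000000 - op1 shifted right by 8 with
            // op2's low bits shifted in at the top, as SHRD specifies.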
}
return Branch.None;
}
public boolean isBranch()
{
return false;
}
public String toString()
{
return this.getClass().getName();
}
}<|fim▁end|>
|
cpu.flagOp1 = op1.get32(cpu);
cpu.flagOp2 = shift;
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""Dictionary-based password generator.
<|fim▁hole|> -h --help Show this help text
-d --dictionary=<path> Specify a non-default dictionary
-n --length=N Specify number of words to use [default: 4]
-v --verbose Print entropy estimate
--complex Bypass complexity requirements
--truncate=SIZE Truncate dictionary to specified size
--uncontrolled Generate a naively-random password from the list
The default mode ensures words are spread throughout the list, slightly
reducing absolute entropy but generally improving password memorability if the
dictionary is ordered by frequency.
"""
import math
import os
from docopt import docopt
from secrets import SystemRandom
def main():
# Normalize arguments
args = docopt(__doc__)
word_count = int(args['--length'])
# Read and transform dictionary file
if args['--dictionary']:
dict_path = args['--dictionary']
else:
dict_path = os.path.join(os.path.dirname(__file__), 'words.txt')
dictionary = [w for w in [l.strip() for l in open(dict_path)] if w]
if args['--truncate']:
dictionary = dictionary[:int(args['--truncate'])]
elif not args['--dictionary']:
# Default truncation for built-in dictionary
dictionary = dictionary[:8192]
# Basic entropy calculation
if args['--uncontrolled']:
entropy = math.log(math.pow(len(dictionary), word_count), 2)
else:
batch_size = len(dictionary) // word_count
entropy = math.log(math.pow(batch_size, word_count) *
math.factorial(word_count), 2)
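    # Worked example (illustrative, assuming the default 8192-word list and
    # --length=4): batch_size = 8192 // 4 = 2048, so this mode yields
    # log2(2048**4 * 4!) ~= 48.6 bits, versus log2(8192**4) = 52 bits for
    # --uncontrolled, the "slightly reduced" absolute entropy noted above.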
if args['--verbose']:
print("Pessimistic password entropy: %.1f bits" % entropy)
print("Approximate time to crack at 20k/s: %.1f days" %
(math.pow(2, entropy) / 20000 / 60 / 60 / 24))
# Generate password
rng = SystemRandom()
if args['--uncontrolled']:
# Select random words
words = [rng.choice(dictionary) for i in range(word_count)]
else:
# Generate batches in random order
batches = [dictionary[i*batch_size:(i+1)*batch_size]
for i in range(word_count)]
rng.shuffle(batches)
# Select word from each batch
words = [rng.choice(batches[i]) for i in range(word_count)]
# Reveal to user
print(" ".join(words))
if args['--complex']:
print("Complexified: %s1." % "".join(words).capitalize())
if __name__ == '__main__':
main()<|fim▁end|>
|
Usage: pass.py [options]
Options:
|
<|file_name|>test_sim.py<|end_file_name|><|fim▁begin|>from __future__ import print_function, division, absolute_import, unicode_literals
from builtins import bytes, dict, object, range, map, input, str
from future.utils import itervalues, viewitems, iteritems, listvalues, listitems
from io import open
import rfpipe, rfpipe.candidates
import pytest
from astropy import time
from numpy import degrees, nan, argmax, abs
tparams = [(0, 0, 0, 5e-3, 0.3, 0.0001, 0.0),]
# simulate no flag, transient/no flag, transient/flag
inprefs = [({'flaglist': [], 'chans': list(range(32)), 'sigma_image1': None,
'spw': [0], 'savecandcollection': True, 'savenoise': True,
'savecanddata': True, 'returncanddata': True, 'saveplots': True,
'fftmode': 'fftw', 'searchtype': 'imagek'}, 1),
({'simulated_transient': tparams, 'dmarr': [0, 1, 2], 'dtarr': [1, 2],
'savecanddata': True, 'savenoise': True, 'saveplots': True,
'returncanddata': True, 'savecandcollection': True,
'timesub': 'mean', 'fftmode': 'fftw', 'searchtype': 'imagek',
'sigma_image1': 10, 'sigma_kalman': 1,
'clustercands': True, 'flaglist': []}, 2),
({'simulated_transient': tparams, 'dmarr': [0, 1, 2], 'dtarr': [1, 2],
'savecanddata': True, 'savenoise': True, 'saveplots': True,
'returncanddata': True, 'savecandcollection': True,
'timesub': 'cs', 'fftmode': 'fftw', 'searchtype': 'imagek',
'sigma_image1': 10, 'sigma_kalman': 1,
'clustercands': True, 'flaglist': []}, 2),]
# ({'simulated_transient': tparams, 'dmarr': [0], 'dtarr': [1],
# 'savecands': True, 'savenoise': True,
# 'sigma_image1': 10, 'sigma_kalman': 1, 'sigma_arm': 2,
# 'sigma_arms': 4, 'timesub': None, 'fftmode': 'fftw',<|fim▁hole|># {'read_tdownsample': 2, 'read_fdownsample': 2, 'npix_max': 512},
@pytest.fixture(scope="module", params=inprefs)
def mockstate(request):
inprefs, scan = request.param
t0 = time.Time.now().mjd
meta = rfpipe.metadata.mock_metadata(t0, t0+0.1/(24*3600), 20, 4, 32*4, 2,
5e3, scan=scan, datasource='sim',
antconfig='D')
return rfpipe.state.State(inmeta=meta, inprefs=inprefs)
# simulate two DMs
@pytest.fixture(scope="module")
def mockdata(mockstate):
segment = 0
data = rfpipe.source.read_segment(mockstate, segment)
data[0, 0, 0, 0] = nan
return rfpipe.source.data_prep(mockstate, segment, data)
@pytest.fixture(scope="module")
def mockcc(mockstate):
cc = rfpipe.pipeline.pipeline_scan(mockstate)
return cc
def test_dataprep(mockstate, mockdata):
assert mockdata.shape == mockstate.datashape
def test_noise(mockstate, mockdata):
for noises in rfpipe.candidates.iter_noise(mockstate.noisefile):
assert len(noises)
def test_pipelinescan(mockcc):
if mockcc.prefs.simulated_transient is not None:
rfpipe.candidates.makesummaryplot(mockcc)
assert mockcc is not None
def test_voevent(mockcc):
if mockcc.prefs.simulated_transient is not None:
name = rfpipe.candidates.make_voevent(mockcc)
assert name is not None
def test_candids(mockcc):
if mockcc.prefs.simulated_transient is not None:
assert len(mockcc.candids)
def test_cc(mockcc):
if mockcc.prefs.returncanddata:
assert isinstance(mockcc.canddata, list)
assert len(mockcc.canddata) == len(mockcc)
if mockcc.prefs.savecandcollection:
ccs = rfpipe.candidates.iter_cands(mockcc.state.candsfile)
cc = sum(ccs)
assert len(cc) == len(mockcc)
if cc.prefs.returncanddata:
assert isinstance(cc.canddata, list)
assert len(cc.canddata) == len(cc)
assert len(cc.canddata) == len(mockcc.canddata)
def test_phasecenter_detection():
inprefs = {'simulated_transient': [(0, 1, 0, 5e-3, 0.3, -0.001, 0.),
(0, 9, 0, 5e-3, 0.3, 0., 0.),
(0, 19, 0, 5e-3, 0.3, 0.001, 0.)],
'dmarr': [0], 'dtarr': [1], 'timesub': None, 'fftmode': 'fftw', 'searchtype': 'image',
'sigma_image1': 10, 'flaglist': [], 'uvres': 60, 'npix_max': 128, 'max_candfrac': 0}
t0 = time.Time.now().mjd
meta = rfpipe.metadata.mock_metadata(t0, t0+0.1/(24*3600), 20, 4, 32*4, 2,
5e3, scan=1, datasource='sim',
antconfig='D')
st = rfpipe.state.State(inmeta=meta, inprefs=inprefs)
cc = rfpipe.pipeline.pipeline_scan(st)
assert cc.array['l1'][0] <= 0.
assert cc.array['l1'][1] == 0.
assert cc.array['l1'][2] >= 0.
assert all(abs(cc.array['m1']) <= 0.0003)
def test_phasecenter_detection_shift():
inprefs = {'simulated_transient': [(0, 1, 0, 5e-3, 0.3, -0.001, 0.),
(0, 9, 0, 5e-3, 0.3, 0., 0.),
(0, 19, 0, 5e-3, 0.3, 0.001, 0.)],
'dmarr': [0], 'dtarr': [1], 'timesub': None, 'fftmode': 'fftw', 'searchtype': 'image',
'sigma_image1': 10, 'flaglist': [], 'uvres': 60, 'npix_max': 128, 'max_candfrac': 0}
t0 = time.Time.now().mjd
meta = rfpipe.metadata.mock_metadata(t0, t0+0.1/(24*3600), 20, 4, 32*4, 2,
5e3, scan=1, datasource='sim',
antconfig='D')
meta['phasecenters'] = [(t0, t0+0.01/(24*3600), degrees(0.001), 0.),
(t0+0.01/(24*3600), t0+0.05/(24*3600), 0., 0.),
(t0+0.05/(24*3600), t0+0.1/(24*3600), degrees(-0.001), 0.)]
st = rfpipe.state.State(inmeta=meta, inprefs=inprefs)
cc = rfpipe.pipeline.pipeline_scan(st)
assert all(cc.array['l1'] == 0.)
assert all(cc.array['m1'] == 0.)
def test_wide_transient():
print("Try injecting a transient of width 40ms at integration 8")
inprefs = {'simulated_transient': [(0, 8, 0, 40e-3, 0.3, 0., 0.)],
'dmarr': [0], 'dtarr': [1,2,4,8], 'timesub': None, 'fftmode': 'fftw', 'searchtype': 'image',
'sigma_image1': 10, 'flaglist': [], 'uvres': 60, 'npix_max': 128, 'max_candfrac': 0}
t0 = time.Time.now().mjd
meta = rfpipe.metadata.mock_metadata(t0, t0+0.1/(24*3600), 20, 4, 32*4, 2,
5e3, scan=1, datasource='sim',
antconfig='D')
st = rfpipe.state.State(inmeta=meta, inprefs=inprefs)
cc = rfpipe.pipeline.pipeline_scan(st)
ind = argmax(cc.array['snr1'])
assert cc.array['dtind'][ind] == 3
assert cc.array['integration'][ind]*2**cc.array['dtind'][ind] == 8
print("Try injecting a transient of width 20ms at integration 8")
inprefs['simulated_transient'] = [(0, 8, 0, 20e-3, 0.3, 0., 0.)]
t0 = time.Time.now().mjd
meta = rfpipe.metadata.mock_metadata(t0, t0+0.1/(24*3600), 20, 4, 32*4, 2,
5e3, scan=1, datasource='sim',
antconfig='D')
st = rfpipe.state.State(inmeta=meta, inprefs=inprefs)
cc = rfpipe.pipeline.pipeline_scan(st)
ind = argmax(cc.array['snr1'])
assert cc.array['dtind'][ind] == 2
assert cc.array['integration'][ind]*2**cc.array['dtind'][ind] == 8<|fim▁end|>
|
# 'searchtype': 'armkimage', 'flaglist': []}, 2) # sigma_arms forced very low
#TODO: support arbitrary channel selection and
|
<|file_name|>theme_xkcd.py<|end_file_name|><|fim▁begin|>from __future__ import (absolute_import, division, print_function,
unicode_literals)
from copy import copy, deepcopy
import matplotlib as mpl
import matplotlib.pyplot as plt
from .theme import theme_base
class theme_xkcd(theme_base):
"""
xkcd theme
    The theme internally uses the settings from pyplot.xkcd().
"""
def __init__(self, scale=1, length=100, randomness=2):
super(theme_xkcd, self).__init__()
with plt.xkcd(scale=scale, length=length, randomness=randomness):
_xkcd = mpl.rcParams.copy()
        # no need to get a deprecation warning for nothing...
for key in mpl._deprecated_map:
if key in _xkcd:
del _xkcd[key]
if 'tk.pythoninspect' in _xkcd:
del _xkcd['tk.pythoninspect']
self._rcParams.update(_xkcd)
def __deepcopy__(self, memo):
class _empty(object):
pass
result = _empty()
result.__class__ = self.__class__
result.__dict__["_rcParams"] = {}<|fim▁hole|> try:
result.__dict__["_rcParams"][k] = deepcopy(v, memo)
except NotImplementedError:
                # deepcopy raises an error for objects that are derived from or
                # composed of matplotlib.transform.TransformNode.
                # Not desirable, but probably requires an upstream fix.
                # In particular, XKCD uses matplotlib.patheffects.withStroke
# -gdowding
result.__dict__["_rcParams"][k] = copy(v)
return result<|fim▁end|>
|
for k, v in self._rcParams.items():
|
<|file_name|>app.rs<|end_file_name|><|fim▁begin|>use piston::*;
pub struct App {
objects: Vec<()>,
}
<|fim▁hole|> pub fn new() -> App {
App {
objects: Vec::new(),
}
}
}
impl Game for App {
}<|fim▁end|>
|
impl App {
|
<|file_name|>GlobalPropertyTester.java<|end_file_name|><|fim▁begin|>/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and<|fim▁hole|>package org.jkiss.dbeaver.ui.actions;
import org.eclipse.core.expressions.PropertyTester;
import org.eclipse.core.resources.*;
import org.jkiss.dbeaver.runtime.DBWorkbench;
import org.jkiss.dbeaver.runtime.IPluginService;
import org.jkiss.dbeaver.ui.ActionUtils;
/**
* GlobalPropertyTester
*/
public class GlobalPropertyTester extends PropertyTester {
//static final Log log = LogFactory.get vLog(ObjectPropertyTester.class);
public static final String NAMESPACE = "org.jkiss.dbeaver.core.global";
public static final String PROP_STANDALONE = "standalone";
public static final String PROP_HAS_ACTIVE_PROJECT = "hasActiveProject";
public static final String PROP_HAS_MULTI_PROJECTS = "hasMultipleProjects";
@Override
public boolean test(Object receiver, String property, Object[] args, Object expectedValue) {
switch (property) {
case PROP_HAS_MULTI_PROJECTS:
return DBWorkbench.getPlatform().getWorkspace().getProjects().size() > 1;
case PROP_HAS_ACTIVE_PROJECT:
return DBWorkbench.getPlatform().getWorkspace().getActiveProject() != null;
case PROP_STANDALONE:
return DBWorkbench.getPlatform().getApplication().isStandalone();
}
return false;
}
public static void firePropertyChange(String propName)
{
ActionUtils.evaluatePropertyState(NAMESPACE + "." + propName);
}
public static class ResourceListener implements IPluginService, IResourceChangeListener {
@Override
public void activateService() {
ResourcesPlugin.getWorkspace().addResourceChangeListener(this);
}
@Override
public void deactivateService() {
ResourcesPlugin.getWorkspace().removeResourceChangeListener(this);
}
@Override
public void resourceChanged(IResourceChangeEvent event) {
if (event.getType() == IResourceChangeEvent.POST_CHANGE) {
for (IResourceDelta childDelta : event.getDelta().getAffectedChildren()) {
if (childDelta.getResource() instanceof IProject) {
if (childDelta.getKind() == IResourceDelta.ADDED || childDelta.getKind() == IResourceDelta.REMOVED) {
firePropertyChange(GlobalPropertyTester.PROP_HAS_MULTI_PROJECTS);
}
}
}
}
}
}
}<|fim▁end|>
|
* limitations under the License.
*/
|
<|file_name|>WindowAssignCitizen.java<|end_file_name|><|fim▁begin|>package com.minecolonies.coremod.client.gui;
import com.ldtteam.blockout.Pane;
import com.ldtteam.blockout.controls.Button;
import com.ldtteam.blockout.controls.ButtonHandler;
import com.ldtteam.blockout.controls.Text;
import com.ldtteam.blockout.views.ScrollingList;
import com.ldtteam.blockout.views.Window;<|fim▁hole|>import com.minecolonies.api.colony.buildings.views.IBuildingView;
import com.minecolonies.api.util.BlockPosUtil;
import com.minecolonies.api.util.constant.Constants;
import com.minecolonies.coremod.Network;
import com.minecolonies.coremod.colony.buildings.AbstractBuildingGuards;
import com.minecolonies.coremod.colony.buildings.views.LivingBuildingView;
import com.minecolonies.coremod.network.messages.server.colony.building.home.AssignUnassignMessage;
import net.minecraft.util.math.BlockPos;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import static com.minecolonies.api.util.constant.WindowConstants.*;
import static com.minecolonies.coremod.client.gui.huts.WindowHutBuilderModule.DARKGREEN;
import static com.minecolonies.coremod.client.gui.huts.WindowHutBuilderModule.RED;
/**
* Window for the hiring or firing of a worker.
*/
public class WindowAssignCitizen extends Window implements ButtonHandler
{
/**
* Threshold that defines when the living quarters are too far away.
*/
private static final double FAR_DISTANCE_THRESHOLD = 250;
/**
* The view of the current building.
*/
private final IBuildingView building;
/**
* List of citizens which can be assigned.
*/
private final ScrollingList citizenList;
/**
* The colony.
*/
private final IColonyView colony;
/**
* Contains all the citizens.
*/
private List<ICitizenDataView> citizens = new ArrayList<>();
/**
* Constructor for the window when the player wants to assign a worker for a certain home building.
*
* @param c the colony view.
* @param buildingId the building position.
*/
public WindowAssignCitizen(final IColonyView c, final BlockPos buildingId)
{
super(Constants.MOD_ID + ASSIGN_CITIZEN_RESOURCE_SUFFIX);
this.colony = c;
building = colony.getBuilding(buildingId);
citizenList = findPaneOfTypeByID(CITIZEN_LIST, ScrollingList.class);
updateCitizens();
}
/**
* Clears and resets/updates all citizens.
*/
private void updateCitizens()
{
citizens.clear();
citizens.addAll(colony.getCitizens().values());
//Removes all citizens which already have a job.
citizens = colony.getCitizens().values().stream()
.filter(cit -> cit.getHomeBuilding() == null
|| !(colony.getBuilding(cit.getHomeBuilding()) instanceof AbstractBuildingGuards.View)
&& !cit.getHomeBuilding().equals(building.getID()))
.sorted(Comparator.comparing(cit -> ((ICitizenDataView) cit).getHomeBuilding() == null ? 0 : 1)
.thenComparingLong(cit -> {
if (((ICitizenDataView) cit).getWorkBuilding() == null)
{
return 0;
}
return BlockPosUtil.getDistance2D(((ICitizenDataView) cit).getWorkBuilding(), building.getPosition());
})).collect(Collectors.toList());
}
/**
* Called when the GUI has been opened. Will fill the fields and lists.
*/
@Override
public void onOpened()
{
updateCitizens();
citizenList.enable();
citizenList.show();
//Creates a dataProvider for the homeless citizenList.
citizenList.setDataProvider(new ScrollingList.DataProvider()
{
/**
* The number of rows of the list.
* @return the number.
*/
@Override
public int getElementCount()
{
return citizens.size();
}
/**
* Inserts the elements into each row.
* @param index the index of the row/list element.
* @param rowPane the parent Pane for the row, containing the elements to update.
*/
@Override
public void updateElement(final int index, @NotNull final Pane rowPane)
{
@NotNull final ICitizenDataView citizen = citizens.get(index);
if (building instanceof LivingBuildingView)
{
rowPane.findPaneOfTypeByID(CITIZEN_LABEL, Text.class).setText(citizen.getName());
final BlockPos work = citizen.getWorkBuilding();
String workString = "";
double newDistance = 0;
if (work != null)
{
newDistance = BlockPosUtil.getDistance2D(work, building.getPosition());;
workString = " " + newDistance + " blocks";
}
final BlockPos home = citizen.getHomeBuilding();
String homeString = "";
boolean better = false;
boolean badCurrentLiving = false;
if (home != null)
{
if (work != null)
{
final double oldDistance = BlockPosUtil.getDistance2D(work, home);
homeString = LanguageHandler.format("com.minecolonies.coremod.gui.homeHut.currently", oldDistance);
better = newDistance < oldDistance;
if (oldDistance >= FAR_DISTANCE_THRESHOLD)
{
badCurrentLiving = true;
}
}
else
{
homeString = LanguageHandler.format("com.minecolonies.coremod.gui.homeHut.current", home.getX(), home.getY(), home.getZ());
}
}
final Text newLivingLabel = rowPane.findPaneOfTypeByID(CITIZEN_JOB, Text.class);
newLivingLabel.setText(LanguageHandler.format(citizen.getJob()) + workString);
if (better)
{
newLivingLabel.setColors(DARKGREEN);
}
final Text currentLivingLabel = rowPane.findPaneOfTypeByID(CITIZEN_LIVING, Text.class);
currentLivingLabel.setText(homeString);
if (badCurrentLiving)
{
currentLivingLabel.setColors(RED);
}
final Button done = rowPane.findPaneOfTypeByID(CITIZEN_DONE, Button.class);
if (colony.isManualHousing())
{
done.enable();
}
else
{
done.disable();
}
}
}
});
}
@Override
public void onUpdate()
{
updateCitizens();
window.findPaneOfTypeByID(CITIZEN_LIST, ScrollingList.class).refreshElementPanes();
}
/**
* Called when any button has been clicked.
*
* @param button the clicked button.
*/
@Override
public void onButtonClicked(@NotNull final Button button)
{
if (button.getID().equals(BUTTON_DONE))
{
final int row = citizenList.getListElementIndexByPane(button);
final ICitizenDataView data = citizens.get(row);
if (building instanceof LivingBuildingView)
{
((LivingBuildingView) building).addResident(data.getId());
}
Network.getNetwork().sendToServer(new AssignUnassignMessage(this.building, true, data.getId()));
}
else if (!button.getID().equals(BUTTON_CANCEL))
{
return;
}
if (colony.getTownHall() != null)
{
building.openGui(false);
}
}
}<|fim▁end|>
|
import com.ldtteam.structurize.util.LanguageHandler;
import com.minecolonies.api.colony.ICitizenDataView;
import com.minecolonies.api.colony.IColonyView;
|
<|file_name|>torrcPanel.py<|end_file_name|><|fim▁begin|>"""<|fim▁hole|>import curses
import threading
from util import conf, panel, torConfig, uiTools
DEFAULT_CONFIG = {"features.config.file.showScrollbars": True,
"features.config.file.maxLinesPerEntry": 8}
# TODO: The armrc use case is incomplete. There should be equivalent reloading
# and validation capabilities to the torrc.
TORRC, ARMRC = range(1, 3) # configuration file types that can be displayed
class TorrcPanel(panel.Panel):
"""
Renders the current torrc or armrc with syntax highlighting in a scrollable
area.
"""
def __init__(self, stdscr, configType, config=None):
panel.Panel.__init__(self, stdscr, "configFile", 0)
self._config = dict(DEFAULT_CONFIG)
if config:
config.update(self._config, {"features.config.file.maxLinesPerEntry": 1})
self.valsLock = threading.RLock()
self.configType = configType
self.scroll = 0
self.showLabel = True # shows top label (hides otherwise)
self.showLineNum = True # shows left aligned line numbers
self.stripComments = False # drops comments and extra whitespace
# height of the content when last rendered (the cached value is invalid if
# _lastContentHeightArgs is None or differs from the current dimensions)
self._lastContentHeight = 1
self._lastContentHeightArgs = None
def handleKey(self, key):
self.valsLock.acquire()
if uiTools.isScrollKey(key):
pageHeight = self.getPreferredSize()[0] - 1
newScroll = uiTools.getScrollPosition(key, self.scroll, pageHeight, self._lastContentHeight)
if self.scroll != newScroll:
self.scroll = newScroll
self.redraw(True)
elif key == ord('n') or key == ord('N'):
self.showLineNum = not self.showLineNum
self._lastContentHeightArgs = None
self.redraw(True)
elif key == ord('s') or key == ord('S'):
self.stripComments = not self.stripComments
self._lastContentHeightArgs = None
self.redraw(True)
self.valsLock.release()
def draw(self, subwindow, width, height):
self.valsLock.acquire()
# If true, we assume that the cached value in self._lastContentHeight is
# still accurate, and stop drawing when there's nothing more to display.
# Otherwise the self._lastContentHeight is suspect, and we'll process all
# the content to check if it's right (and redraw again with the corrected
# height if not).
trustLastContentHeight = self._lastContentHeightArgs == (width, height)
# restricts scroll location to valid bounds
self.scroll = max(0, min(self.scroll, self._lastContentHeight - height + 1))
renderedContents, corrections, confLocation = None, {}, None
if self.configType == TORRC:
loadedTorrc = torConfig.getTorrc()
loadedTorrc.getLock().acquire()
confLocation = loadedTorrc.getConfigLocation()
if not loadedTorrc.isLoaded():
renderedContents = ["### Unable to load the torrc ###"]
else:
renderedContents = loadedTorrc.getDisplayContents(self.stripComments)
# constructs a mapping of line numbers to the issue on it
corrections = dict((lineNum, (issue, msg)) for lineNum, issue, msg in loadedTorrc.getCorrections())
loadedTorrc.getLock().release()
else:
loadedArmrc = conf.getConfig("arm")
confLocation = loadedArmrc.path
renderedContents = list(loadedArmrc.rawContents)
# offset to make room for the line numbers
lineNumOffset = 0
if self.showLineNum:
if len(renderedContents) == 0: lineNumOffset = 2
else: lineNumOffset = int(math.log10(len(renderedContents))) + 2
# draws left-hand scroll bar if content's longer than the height
scrollOffset = 0
if self._config["features.config.file.showScrollbars"] and self._lastContentHeight > height - 1:
scrollOffset = 3
self.addScrollBar(self.scroll, self.scroll + height - 1, self._lastContentHeight, 1)
displayLine = -self.scroll + 1 # line we're drawing on
# draws the top label
if self.showLabel:
sourceLabel = "Tor" if self.configType == TORRC else "Arm"
locationLabel = " (%s)" % confLocation if confLocation else ""
self.addstr(0, 0, "%s Configuration File%s:" % (sourceLabel, locationLabel), curses.A_STANDOUT)
isMultiline = False # true if we're in the middle of a multiline torrc entry
for lineNumber in range(0, len(renderedContents)):
lineText = renderedContents[lineNumber]
lineText = lineText.rstrip() # remove ending whitespace
# blank lines are hidden when stripping comments
if self.stripComments and not lineText: continue
# splits the line into its component (msg, format) tuples
lineComp = {"option": ["", curses.A_BOLD | uiTools.getColor("green")],
"argument": ["", curses.A_BOLD | uiTools.getColor("cyan")],
"correction": ["", curses.A_BOLD | uiTools.getColor("cyan")],
"comment": ["", uiTools.getColor("white")]}
# parses the comment
commentIndex = lineText.find("#")
if commentIndex != -1:
lineComp["comment"][0] = lineText[commentIndex:]
lineText = lineText[:commentIndex]
# splits the option and argument, preserving any whitespace around them
strippedLine = lineText.strip()
optionIndex = strippedLine.find(" ")
if isMultiline:
# part of a multiline entry started on a previous line so everything
# is part of the argument
lineComp["argument"][0] = lineText
elif optionIndex == -1:
# no argument provided
lineComp["option"][0] = lineText
else:
optionText = strippedLine[:optionIndex]
optionEnd = lineText.find(optionText) + len(optionText)
lineComp["option"][0] = lineText[:optionEnd]
lineComp["argument"][0] = lineText[optionEnd:]
# flags following lines as belonging to this multiline entry if it ends
# with a slash
if strippedLine: isMultiline = strippedLine.endswith("\\")
# gets the correction
if lineNumber in corrections:
lineIssue, lineIssueMsg = corrections[lineNumber]
if lineIssue in (torConfig.VAL_DUPLICATE, torConfig.VAL_IS_DEFAULT):
lineComp["option"][1] = curses.A_BOLD | uiTools.getColor("blue")
lineComp["argument"][1] = curses.A_BOLD | uiTools.getColor("blue")
elif lineIssue == torConfig.VAL_MISMATCH:
lineComp["argument"][1] = curses.A_BOLD | uiTools.getColor("red")
lineComp["correction"][0] = " (%s)" % lineIssueMsg
else:
# For some types of configs the correction field is simply used to
# provide extra data (for instance, the type for tor state fields).
lineComp["correction"][0] = " (%s)" % lineIssueMsg
lineComp["correction"][1] = curses.A_BOLD | uiTools.getColor("magenta")
# draws the line number
if self.showLineNum and displayLine < height and displayLine >= 1:
lineNumStr = ("%%%ii" % (lineNumOffset - 1)) % (lineNumber + 1)
self.addstr(displayLine, scrollOffset, lineNumStr, curses.A_BOLD | uiTools.getColor("yellow"))
# draws the rest of the components with line wrap
cursorLoc, lineOffset = lineNumOffset + scrollOffset, 0
maxLinesPerEntry = self._config["features.config.file.maxLinesPerEntry"]
displayQueue = [lineComp[entry] for entry in ("option", "argument", "correction", "comment")]
while displayQueue:
msg, format = displayQueue.pop(0)
maxMsgSize, includeBreak = width - cursorLoc, False
if len(msg) >= maxMsgSize:
# message is too long - break it up
if lineOffset == maxLinesPerEntry - 1:
msg = uiTools.cropStr(msg, maxMsgSize)
else:
includeBreak = True
msg, remainder = uiTools.cropStr(msg, maxMsgSize, 4, 4, uiTools.END_WITH_HYPHEN, True)
displayQueue.insert(0, (remainder.strip(), format))
drawLine = displayLine + lineOffset
if msg and drawLine < height and drawLine >= 1:
self.addstr(drawLine, cursorLoc, msg, format)
# If we're done, and have added content to this line, then start
# further content on the next line.
cursorLoc += len(msg)
includeBreak |= not displayQueue and cursorLoc != lineNumOffset + scrollOffset
if includeBreak:
lineOffset += 1
cursorLoc = lineNumOffset + scrollOffset
displayLine += max(lineOffset, 1)
if trustLastContentHeight and displayLine >= height: break
if not trustLastContentHeight:
self._lastContentHeightArgs = (width, height)
newContentHeight = displayLine + self.scroll - 1
if self._lastContentHeight != newContentHeight:
self._lastContentHeight = newContentHeight
self.redraw(True)
self.valsLock.release()<|fim▁end|>
|
Panel displaying the torrc or armrc with the validation done against it.
"""
import math
|
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>"""<|fim▁hole|>
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "crowd_server.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()<|fim▁end|>
|
WSGI config for crowd_server project.
|
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>import Box from './Box';
import Button from './Button';
import Notification from './Notification';
import Footer from './Footer';
import Header from './Header';
import Heading from './Heading';
import Headline from './Headline';
import Hero from './Hero';
import Image from './Image';
import LoadingIndicator from './LoadingIndicator';
import Markdown from './Markdown';
import Paragraph from './Paragraph';
import Section from './Section';
import SvgIcon from './SvgIcon';
import Toast from './Toast';
import WithAnimation from './WithAnimation';
export {
Anchor,
Article,
Avatar,
Box,
Button,
Notification,
Footer,
Header,
Heading,
Headline,
Hero,
Image,
LoadingIndicator,
Markdown,
Paragraph,
Section,
SvgIcon,
Toast,
WithAnimation,
};<|fim▁end|>
|
import Anchor from './Anchor';
import Article from './Article';
import Avatar from './Avatar';
|
<|file_name|>standardize_data.py<|end_file_name|><|fim▁begin|># pylint: disable=g-bad-file-header
# Copyright 2020 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tools for pre-processing the data into individual, standardized formats."""
import collections
import datetime
import itertools
import os
import pathlib
import re
from typing import Callable, Dict, Set, Tuple
from absl import logging
from dm_c19_modelling.england_data import constants
import pandas as pd
import yaml
_PATH_FILENAME_REGEXES = "filename_regexes.yaml"
_COLUMNS = constants.Columns
_DATE_FORMAT = "%Y-%m-%d"
def _order_columns(df: pd.DataFrame) -> pd.DataFrame:
"""Orders the columns of the dataframe as: date, region, observations."""
df.insert(0, _COLUMNS.DATE.value, df.pop(_COLUMNS.DATE.value))
reg_columns = []
obs_columns = []
for col in df.columns[1:]:
if col.startswith(constants.REGION_PREFIX):
reg_columns.append(col)
elif col.startswith(constants.OBSERVATION_PREFIX):
obs_columns.append(col)
else:
raise ValueError(f"Unknown column: '{col}'")
columns = [_COLUMNS.DATE.value] + reg_columns + obs_columns
return df[columns]
def _raw_data_formatter_daily_deaths(filepath: str) -> pd.DataFrame:
"""Loads and formats daily deaths data."""
sheet_name = "Tab4 Deaths by trust"
header = 15
df = pd.read_excel(filepath, sheet_name=sheet_name, header=header)
# Drop rows and columns which are all nans.
df.dropna(axis=0, how="all", inplace=True)
df.dropna(axis=1, how="all", inplace=True)
# Drop unneeded columns and rows.
drop_columns = ["Total", "Awaiting verification"]
up_to_mar_1_index = "Up to 01-Mar-20"
if sum(i for i in df[up_to_mar_1_index] if isinstance(i, int)) == 0.0:
drop_columns.append(up_to_mar_1_index)
df.drop(columns=drop_columns, inplace=True)
df = df[df["Code"] != "-"]
# Melt the death counts by date into "Date" and "Death Count" columns.
df = df.melt(
id_vars=["NHS England Region", "Code", "Name"],
var_name="Date",
value_name="Death Count")
# Rename the columns to their standard names.
df.rename(
columns={
"Date": _COLUMNS.DATE.value,
"Death Count": _COLUMNS.OBS_DEATHS.value,
"Code": _COLUMNS.REG_TRUST_CODE.value,
"Name": _COLUMNS.REG_TRUST_NAME.value,
"NHS England Region": _COLUMNS.REG_NHSER_NAME.value,
},
inplace=True)
_order_columns(df)
df[_COLUMNS.DATE.value] = df[_COLUMNS.DATE.value].map(
lambda x: x.strftime(_DATE_FORMAT))
# Sort and clean up the indices before returning the final dataframe.
df.sort_values([
_COLUMNS.DATE.value,
_COLUMNS.REG_TRUST_NAME.value,
_COLUMNS.REG_TRUST_CODE.value,
],
inplace=True)
df.reset_index(drop=True, inplace=True)
if df.isna().any().any():
raise ValueError("Formatted data 'daily_deaths' contains nans")
return df
def _raw_data_formatter_daily_cases(filepath: str) -> pd.DataFrame:
"""Loads and formats daily cases data."""
df = pd.read_csv(filepath)
df.rename(columns={"Area type": "Area_type"}, inplace=True)
df.query("Area_type == 'ltla'", inplace=True)
# Drop unneeded columns and rows.
drop_columns = [
"Area_type", "Cumulative lab-confirmed cases",
"Cumulative lab-confirmed cases rate"
]
df.drop(columns=drop_columns, inplace=True)
# Rename the columns to their standard names.
df.rename(
columns={
"Area name": _COLUMNS.REG_LTLA_NAME.value,
"Area code": _COLUMNS.REG_LTLA_CODE.value,
"Specimen date": _COLUMNS.DATE.value,
"Daily lab-confirmed cases": _COLUMNS.OBS_CASES.value,
},
inplace=True)
  df = _order_columns(df)
# Sort and clean up the indices before returning the final dataframe.
df.sort_values([
_COLUMNS.DATE.value,
_COLUMNS.REG_LTLA_NAME.value,
_COLUMNS.REG_LTLA_CODE.value,
],
inplace=True)
df.reset_index(drop=True, inplace=True)
if df.isna().any().any():
raise ValueError("Formatted data 'daily_cases' contains nans")
return df
def _raw_data_formatter_google_mobility(filepath: str) -> pd.DataFrame:
"""Loads and formats Google mobility data."""
df = pd.read_csv(filepath)
# Filter to UK.
df.query("country_region_code == 'GB'", inplace=True)
# Drop unneeded columns and rows.
drop_columns = [
"country_region_code", "country_region", "metro_area", "census_fips_code"
]
df.drop(columns=drop_columns, inplace=True)
# Fill missing region info with "na".
df[["sub_region_1", "sub_region_2", "iso_3166_2_code"]].fillna(
"na", inplace=True)
# Rename the columns to their standard names.
df.rename(
columns={
"sub_region_1":
_COLUMNS.REG_SUB_REGION_1.value,
"sub_region_2":
_COLUMNS.REG_SUB_REGION_2.value,
"iso_3166_2_code":
_COLUMNS.REG_ISO_3166_2_CODE.value,
"date":
_COLUMNS.DATE.value,
"retail_and_recreation_percent_change_from_baseline":
_COLUMNS.OBS_MOBILITY_RETAIL_AND_RECREATION.value,
"grocery_and_pharmacy_percent_change_from_baseline":
_COLUMNS.OBS_MOBILITY_GROCERY_AND_PHARMACY.value,
"parks_percent_change_from_baseline":
_COLUMNS.OBS_MOBILITY_PARKS.value,
"transit_stations_percent_change_from_baseline":
_COLUMNS.OBS_MOBILITY_TRANSIT_STATIONS.value,
"workplaces_percent_change_from_baseline":
_COLUMNS.OBS_MOBILITY_WORKPLACES.value,
"residential_percent_change_from_baseline":
_COLUMNS.OBS_MOBILITY_RESIDENTIAL.value,
},
inplace=True)
  df = _order_columns(df)
# Sort and clean up the indices before returning the final dataframe.
df.sort_values([
_COLUMNS.DATE.value,
_COLUMNS.REG_SUB_REGION_1.value,
_COLUMNS.REG_SUB_REGION_2.value,
_COLUMNS.REG_ISO_3166_2_CODE.value,
],
inplace=True)
df.reset_index(drop=True, inplace=True)
return df
def _raw_data_formatter_online_111(filepath: str) -> pd.DataFrame:
"""Loads and formats online 111 data."""
df = pd.read_csv(filepath)
# Drop nans.
df.dropna(subset=["ccgcode"], inplace=True)
# Reformat dates.
remap_dict = {
"journeydate":
lambda x: datetime.datetime.strptime(x, "%d/%m/%Y").strftime( # pylint: disable=g-long-lambda
_DATE_FORMAT),
"ccgname":
lambda x: x.replace("&", "and"),
"sex": {
"Female": "f",
"Male": "m",
"Indeterminate": "u",
},
"ageband": {
"0-18 years": "0",
"19-69 years": "19",
"70+ years": "70"
}
}
for col, remap in remap_dict.items():
df[col] = df[col].map(remap)<|fim▁hole|>
journeydate_values = pd.date_range(
df.journeydate.min(), df.journeydate.max()).strftime(_DATE_FORMAT)
ccgcode_values = df.ccgcode.unique()
df.sex.fillna("u", inplace=True)
sex_values = ["f", "m", "u"]
assert set(sex_values) >= set(df.sex.unique()), "unsupported sex value"
df.ageband.fillna("u", inplace=True)
ageband_values = ["0", "19", "70", "u"]
assert set(ageband_values) >= set(
df.ageband.unique()), "unsupported ageband value"
ccg_code_name_map = df[["ccgcode", "ccgname"
]].set_index("ccgcode")["ccgname"].drop_duplicates()
# Some CCG codes have duplicate names, which differ by their commas. Keep the
# longer ones.
fn = lambda x: sorted(x["ccgname"].map(lambda y: (len(y), y)))[-1][1]
ccg_code_name_map = ccg_code_name_map.reset_index().groupby("ccgcode").apply(
fn)
df_full = pd.DataFrame(
list(
itertools.product(journeydate_values, ccgcode_values, sex_values,
ageband_values)),
columns=["journeydate", "ccgcode", "sex", "ageband"])
df = pd.merge(df_full, df, how="outer")
# 0 calls don't have rows, so are nans.
df["Total"].fillna(0, inplace=True)
df["ccgname"] = df["ccgcode"].map(ccg_code_name_map)
# Combine sex and ageband columns into a joint column.
df["sex_ageband"] = df["sex"] + "_" + df["ageband"]
df = df.pivot_table(
index=["journeydate", "ccgcode", "ccgname"],
columns="sex_ageband",
values="Total").reset_index()
df.columns.name = None
# Rename the columns to their standard names.
df.rename(
columns={
"ccgcode": _COLUMNS.REG_CCG_CODE.value,
"ccgname": _COLUMNS.REG_CCG_NAME.value,
"journeydate": _COLUMNS.DATE.value,
"f_0": _COLUMNS.OBS_ONLINE_111_F_0.value,
"f_19": _COLUMNS.OBS_ONLINE_111_F_19.value,
"f_70": _COLUMNS.OBS_ONLINE_111_F_70.value,
"f_u": _COLUMNS.OBS_ONLINE_111_F_U.value,
"m_0": _COLUMNS.OBS_ONLINE_111_M_0.value,
"m_19": _COLUMNS.OBS_ONLINE_111_M_19.value,
"m_70": _COLUMNS.OBS_ONLINE_111_M_70.value,
"m_u": _COLUMNS.OBS_ONLINE_111_M_U.value,
"u_0": _COLUMNS.OBS_ONLINE_111_U_0.value,
"u_19": _COLUMNS.OBS_ONLINE_111_U_19.value,
"u_70": _COLUMNS.OBS_ONLINE_111_U_70.value,
"u_u": _COLUMNS.OBS_ONLINE_111_U_U.value,
},
inplace=True)
  df = _order_columns(df)
# Sort and clean up the indices before returning the final dataframe.
df.sort_values([
_COLUMNS.DATE.value,
_COLUMNS.REG_CCG_NAME.value,
_COLUMNS.REG_CCG_CODE.value,
],
inplace=True)
df.reset_index(drop=True, inplace=True)
if df.isna().any().any():
raise ValueError("Formatted data 'online_111' contains nans")
return df
def _raw_data_formatter_calls_111_999(filepath: str) -> pd.DataFrame:
"""Loads and formats 111 & 999 calls data."""
df = pd.read_csv(filepath)
# Drop unneeded columns and rows.
drop_columns = []
df.drop(columns=drop_columns, inplace=True)
# Drop nans.
df.dropna(subset=["CCGCode", "CCGName"], inplace=True)
# Reformat values.
df["AgeBand"].fillna("u", inplace=True)
remap_dict = {
"Call Date":
lambda x: datetime.datetime.strptime(x, "%d/%m/%Y").strftime( # pylint: disable=g-long-lambda
"%Y-%m-%d"),
"CCGName":
lambda x: x.replace("&", "and"),
"SiteType":
lambda x: str(int(x)),
"Sex": {
"Female": "f",
"Male": "m",
"Unknown": "u",
},
"AgeBand": {
"0-18 years": "0",
"19-69 years": "19",
"70-120 years": "70",
"u": "u",
}
}
for col, remap in remap_dict.items():
df[col] = df[col].map(remap)
call_date_values = pd.date_range(df["Call Date"].min(),
df["Call Date"].max()).strftime(_DATE_FORMAT)
ccgcode_values = df["CCGCode"].unique()
sitetype_values = ["111", "999"]
assert set(sitetype_values) >= set(
df.SiteType.unique()), "unsupported sitetype value"
sex_values = ["f", "m", "u"]
assert set(sex_values) >= set(df.Sex.unique()), "unsupported sex value"
ageband_values = ["0", "19", "70", "u"]
assert set(ageband_values) >= set(
df.AgeBand.unique()), "unsupported ageband value"
ccg_code_name_map = df[["CCGCode", "CCGName"
]].set_index("CCGCode")["CCGName"].drop_duplicates()
df_full = pd.DataFrame(
list(itertools.product(call_date_values, ccgcode_values, sitetype_values,
sex_values, ageband_values)),
columns=["Call Date", "CCGCode", "SiteType", "Sex", "AgeBand"])
df = pd.merge(df_full, df, how="outer")
# 0 calls don't have rows, so are nans.
df["TriageCount"].fillna(0, inplace=True)
df["CCGName"] = df["CCGCode"].map(ccg_code_name_map)
# Combine SiteType, Sex, and AgeBand columns into a joint column.
df["SiteType_Sex_AgeBand"] = (
df["SiteType"] + "_" + df["Sex"] + "_" + df["AgeBand"])
df = df.pivot_table(
index=["Call Date", "CCGCode", "CCGName"],
columns="SiteType_Sex_AgeBand",
values="TriageCount").reset_index()
df.columns.name = None
# Rename the columns to their standard names.
df.rename(
columns={
"CCGCode": _COLUMNS.REG_CCG_CODE.value,
"CCGName": _COLUMNS.REG_CCG_NAME.value,
"Call Date": _COLUMNS.DATE.value,
"111_f_0": _COLUMNS.OBS_CALL_111_F_0.value,
"111_f_19": _COLUMNS.OBS_CALL_111_F_19.value,
"111_f_70": _COLUMNS.OBS_CALL_111_F_70.value,
"111_f_u": _COLUMNS.OBS_CALL_111_F_U.value,
"111_m_0": _COLUMNS.OBS_CALL_111_M_0.value,
"111_m_19": _COLUMNS.OBS_CALL_111_M_19.value,
"111_m_70": _COLUMNS.OBS_CALL_111_M_70.value,
"111_m_u": _COLUMNS.OBS_CALL_111_M_U.value,
"111_u_0": _COLUMNS.OBS_CALL_111_U_0.value,
"111_u_19": _COLUMNS.OBS_CALL_111_U_19.value,
"111_u_70": _COLUMNS.OBS_CALL_111_U_70.value,
"111_u_u": _COLUMNS.OBS_CALL_111_U_U.value,
"999_f_0": _COLUMNS.OBS_CALL_999_F_0.value,
"999_f_19": _COLUMNS.OBS_CALL_999_F_19.value,
"999_f_70": _COLUMNS.OBS_CALL_999_F_70.value,
"999_f_u": _COLUMNS.OBS_CALL_999_F_U.value,
"999_m_0": _COLUMNS.OBS_CALL_999_M_0.value,
"999_m_19": _COLUMNS.OBS_CALL_999_M_19.value,
"999_m_70": _COLUMNS.OBS_CALL_999_M_70.value,
"999_m_u": _COLUMNS.OBS_CALL_999_M_U.value,
"999_u_0": _COLUMNS.OBS_CALL_999_U_0.value,
"999_u_19": _COLUMNS.OBS_CALL_999_U_19.value,
"999_u_70": _COLUMNS.OBS_CALL_999_U_70.value,
"999_u_u": _COLUMNS.OBS_CALL_999_U_U.value,
},
inplace=True)
  df = _order_columns(df)
# Sort and clean up the indices before returning the final dataframe.
df.sort_values([
_COLUMNS.DATE.value,
_COLUMNS.REG_CCG_NAME.value,
_COLUMNS.REG_CCG_CODE.value,
],
inplace=True)
df.reset_index(drop=True, inplace=True)
if df.isna().any().any():
raise ValueError("Formatted data 'calls_111_999' contains nans")
return df
_FORMATTER_FUNCTIONS = {
"daily_deaths": _raw_data_formatter_daily_deaths,
"daily_cases": _raw_data_formatter_daily_cases,
"google_mobility": _raw_data_formatter_google_mobility,
"online_111": _raw_data_formatter_online_111,
"calls_111_999": _raw_data_formatter_calls_111_999,
}
def _get_raw_data_formatter_by_name(name: str) -> Callable[[str], pd.DataFrame]:
return _FORMATTER_FUNCTIONS[name]
def _merge_online_111_and_calls_111_999(
df_online_111: pd.DataFrame,
df_calls_111_999: pd.DataFrame) -> pd.DataFrame:
"""Merges the 111 online and 111/999 calls into a single dataframe."""
df = pd.merge(
df_online_111,
df_calls_111_999,
how="outer",
on=[
_COLUMNS.DATE.value,
_COLUMNS.REG_CCG_CODE.value,
_COLUMNS.REG_CCG_NAME.value,
])
return df
def format_raw_data_files(
paths_dict: Dict[str, str]) -> Dict[str, pd.DataFrame]:
"""Loads and formats the individual raw data files.
Args:
paths_dict: mapping from data names to filepaths.
Returns:
mapping from data names to formatted dataframes.
"""
formatted_dfs = {}
for name, path in paths_dict.items():
logging.info("Formatting raw data: %s", name)
formatter = _get_raw_data_formatter_by_name(name)
formatted_dfs[name] = formatter(path)
logging.info("Merging online 111 and 111/999 calls")
if "online_111" and "calls_111_999" in formatted_dfs:
formatted_dfs[
"online_111_and_calls_111_999"] = _merge_online_111_and_calls_111_999(
formatted_dfs.pop("online_111"), formatted_dfs.pop("calls_111_999"))
elif "online_111" in formatted_dfs:
formatted_dfs["online_111_and_calls_111_999"] = formatted_dfs.pop(
"online_111")
elif "calls_111_999" in formatted_dfs:
formatted_dfs["online_111_and_calls_111_999"] = formatted_dfs.pop(
"calls_111_999")
return formatted_dfs
def merge_formatted_data(
formatted_data: Dict[str, pd.DataFrame]) -> pd.DataFrame:
"""Concatenates all formatted data into a single dataframe.
Args:
formatted_data: mapping from the data name to its dataframe.
Returns:
a dataframe containing all of the input dataframes.
"""
logging.info("Merging all dataframes")
dfs = []
for name, df in formatted_data.items():
df = df.copy()
df.insert(1, _COLUMNS.OBSERVATION_TYPE.value, name)
dfs.append(df)
df_merged = pd.concat(dfs)
reg_columns = [
c for c in df_merged.columns if c.startswith(constants.REGION_PREFIX)
]
df_merged.sort_values(
[_COLUMNS.DATE.value, _COLUMNS.OBSERVATION_TYPE.value] + reg_columns,
inplace=True)
df_merged.reset_index(drop=True, inplace=True)
return df_merged
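# Illustrative sketch only (not part of the original pipeline): one way the helpers in
# this module could be chained end to end for a single scrape date. The directory and
# scrape-date arguments are hypothetical placeholders supplied by the caller.
def _example_standardize_pipeline(raw_data_directory: str,
                                  scrape_date: str = "latest") -> pd.DataFrame:
  """Example only: format the raw files for one scrape date and merge them."""
  paths_dict, scrape_date_used, missing_names = get_paths_for_given_date(
      raw_data_directory, scrape_date)
  if missing_names:
    logging.warning("No raw data found for %s (scrape %s)",
                    sorted(missing_names), scrape_date_used)
  # Only pass through the sources that have a registered formatter.
  formatter_paths = {
      name: path for name, path in paths_dict.items() if name in _FORMATTER_FUNCTIONS
  }
  formatted_dfs = format_raw_data_files(formatter_paths)
  return merge_formatted_data(formatted_dfs)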
def _load_filename_regexes() -> Dict[str, str]:
"""Gets a mapping from the data name to the regex for that data's filepath."""
path = pathlib.Path(os.path.dirname(
os.path.realpath(__file__))) / _PATH_FILENAME_REGEXES
with open(path) as fid:
return yaml.load(fid, Loader=yaml.SafeLoader)
def get_paths_for_given_date(
raw_data_directory: str,
scrape_date: str) -> Tuple[Dict[str, str], str, Set[str]]:
"""Get the raw data paths for a scrape date and filename regex.
Args:
raw_data_directory: the directory where the raw data is saved.
scrape_date: the scrape date to use, in the form YYYYMMDD, or 'latest'.
Returns:
mapping of data names to filepaths
the scrape date used
names whose data was not found on disk
"""
filename_regexes = _load_filename_regexes()
if scrape_date == "latest":
rx = re.compile("^[0-9]{8}$")
directories = []
for filename in os.listdir(raw_data_directory):
if rx.match(filename) is None:
continue
path = pathlib.Path(raw_data_directory) / filename
if not os.path.isdir(path):
continue
directories.append(path)
if not directories:
raise ValueError("Could not find latest scrape date directory")
directory = max(directories)
scrape_date_dirname = directory.parts[-1]
else:
try:
datetime.datetime.strptime(scrape_date, "%Y%m%d")
except ValueError:
raise ValueError("Date must be formatted: YYYYMMDD")
scrape_date_dirname = scrape_date
directory = pathlib.Path(raw_data_directory) / scrape_date_dirname
paths_dict = collections.defaultdict(lambda: None)
for name, filename_regex in filename_regexes.items():
rx = re.compile(f"^{filename_regex}$")
for filename in os.listdir(directory):
path = directory / filename
if os.path.isdir(path):
continue
match = rx.match(filename)
if match is None:
continue
if paths_dict[name] is not None:
raise ValueError("There should only be 1 file per name")
paths_dict[name] = str(path)
missing_names = set(filename_regexes.keys()) - set(paths_dict.keys())
return dict(paths_dict), scrape_date_dirname, missing_names
def load_population_dataframe(raw_data_directory: str) -> pd.DataFrame:
"""Load population data from disk, and create a dataframe from it.
Args:
raw_data_directory: the directory where the raw data is saved.
Returns:
a dataframe containing population data.
"""
filename = _load_filename_regexes()["population"]
filepath = pathlib.Path(raw_data_directory) / filename
kwargs = dict(header=0, skiprows=(0, 1, 2, 3, 4, 5, 7))
try:
pop_m = pd.read_excel(filepath, sheet_name="Mid-2019 Males", **kwargs)
pop_f = pd.read_excel(filepath, sheet_name="Mid-2019 Females", **kwargs)
except FileNotFoundError:
return None
# Remove lower resolution columns.
columns_to_remove = ("STP20 Code", "STP20 Name", "NHSER20 Code",
"NHSER20 Name", "All Ages")
for col in columns_to_remove:
del pop_m[col]
del pop_f[col]
mapping = {"CCG Code": _COLUMNS.REG_CCG_CODE.value,
"CCG Name": _COLUMNS.REG_CCG_NAME.value,
"90+": 90}
pop_m.rename(columns=mapping, inplace=True)
pop_f.rename(columns=mapping, inplace=True)
# This labels the male and female data uniquely so they can be merged.
pop_m.rename(
columns=lambda x: f"m_{str(x).lower()}" if isinstance(x, int) else x,
inplace=True)
pop_f.rename(
columns=lambda x: f"f_{str(x).lower()}" if isinstance(x, int) else x,
inplace=True)
region_columns = [_COLUMNS.REG_CCG_NAME.value, _COLUMNS.REG_CCG_CODE.value]
df = pd.merge(pop_m, pop_f, how="outer", on=tuple(region_columns))
mapping = {
f"{gender}_{age}":
_COLUMNS.OBS_POPULATION_GENDER_AGE.value.format(gender=gender, age=age)
for gender, age in itertools.product(("m", "f"), range(91))
}
df.rename(columns=mapping, inplace=True)
return df<|fim▁end|>
| |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import URL from 'url';
import * as packageCache from '../../util/cache/package';
import { GitlabHttp } from '../../util/http/gitlab';
import type { GetReleasesConfig, ReleaseResult } from '../types';
import type { GitlabTag } from './types';
const gitlabApi = new GitlabHttp();
export const id = 'gitlab-tags';
export const customRegistrySupport = true;
export const defaultRegistryUrls = ['https://gitlab.com'];
export const registryStrategy = 'first';
const cacheNamespace = 'datasource-gitlab';
function getCacheKey(depHost: string, repo: string): string {
const type = 'tags';
return `${depHost}:${repo}:${type}`;
}
<|fim▁hole|>export async function getReleases({
registryUrl: depHost,
lookupName: repo,
}: GetReleasesConfig): Promise<ReleaseResult | null> {
const cachedResult = await packageCache.get<ReleaseResult>(
cacheNamespace,
getCacheKey(depHost, repo)
);
// istanbul ignore if
if (cachedResult) {
return cachedResult;
}
const urlEncodedRepo = encodeURIComponent(repo);
// tag
const url = URL.resolve(
depHost,
`/api/v4/projects/${urlEncodedRepo}/repository/tags?per_page=100`
);
const gitlabTags = (
await gitlabApi.getJson<GitlabTag[]>(url, {
paginate: true,
})
).body;
const dependency: ReleaseResult = {
sourceUrl: URL.resolve(depHost, repo),
releases: null,
};
dependency.releases = gitlabTags.map(({ name, commit }) => ({
version: name,
gitRef: name,
releaseTimestamp: commit?.created_at,
}));
const cacheMinutes = 10;
await packageCache.set(
cacheNamespace,
getCacheKey(depHost, repo),
dependency,
cacheMinutes
);
return dependency;
}<|fim▁end|>
| |
<|file_name|>audit-log.ts<|end_file_name|><|fim▁begin|>// Copyright (c) 2017 VMware, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*
{
"log_id": 3,
"user_id": 0,
"project_id": 0,
"repo_name": "library/mysql",
"repo_tag": "5.6",
"guid": "",
"operation": "push",
"op_time": "2017-02-14T09:22:58Z",
"username": "admin",<|fim▁hole|> "keywords": "",
"BeginTime": "0001-01-01T00:00:00Z",
"begin_timestamp": 0,
"EndTime": "0001-01-01T00:00:00Z",
"end_timestamp": 0
}
*/
export class AuditLog {
log_id: number | string;
project_id: number | string;
username: string;
repo_name: string;
repo_tag: string;
operation: string;
op_time: Date;
begin_timestamp: number | string;
end_timestamp: number | string;
keywords: string;
page: number | string;
page_size: number | string;
fromTime: string;
toTime: string;
}<|fim▁end|>
| |
<|file_name|>CosRelationships_idl.py<|end_file_name|><|fim▁begin|># Python stubs generated by omniidl from /usr/local/share/idl/omniORB/COS/CosRelationships.idl
# DO NOT EDIT THIS FILE!
import omniORB, _omnipy
from omniORB import CORBA, PortableServer
_0_CORBA = CORBA
_omnipy.checkVersion(4,2, __file__, 1)
try:
property
except NameError:
def property(*args):
return None
# #include "corbaidl.idl"
import corbaidl_idl
_0_CORBA = omniORB.openModule("CORBA")
_0_CORBA__POA = omniORB.openModule("CORBA__POA")
# #include "boxes.idl"
import boxes_idl
_0_CORBA = omniORB.openModule("CORBA")
_0_CORBA__POA = omniORB.openModule("CORBA__POA")
# #include "ir.idl"
import ir_idl
_0_CORBA = omniORB.openModule("CORBA")
_0_CORBA__POA = omniORB.openModule("CORBA__POA")
# #include "CosObjectIdentity.idl"
import CosObjectIdentity_idl
_0_CosObjectIdentity = omniORB.openModule("CosObjectIdentity")
_0_CosObjectIdentity__POA = omniORB.openModule("CosObjectIdentity__POA")
#
# Start of module "CosRelationships"
#
__name__ = "CosRelationships"
_0_CosRelationships = omniORB.openModule("CosRelationships", r"/usr/local/share/idl/omniORB/COS/CosRelationships.idl")
_0_CosRelationships__POA = omniORB.openModule("CosRelationships__POA", r"/usr/local/share/idl/omniORB/COS/CosRelationships.idl")
# forward interface RoleFactory;
_0_CosRelationships._d_RoleFactory = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosRelationships/RoleFactory:1.0", "RoleFactory")
omniORB.typeMapping["IDL:omg.org/CosRelationships/RoleFactory:1.0"] = _0_CosRelationships._d_RoleFactory
# forward interface RelationshipFactory;
_0_CosRelationships._d_RelationshipFactory = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosRelationships/RelationshipFactory:1.0", "RelationshipFactory")
omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipFactory:1.0"] = _0_CosRelationships._d_RelationshipFactory
# forward interface Relationship;
_0_CosRelationships._d_Relationship = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosRelationships/Relationship:1.0", "Relationship")
omniORB.typeMapping["IDL:omg.org/CosRelationships/Relationship:1.0"] = _0_CosRelationships._d_Relationship
# forward interface Role;
_0_CosRelationships._d_Role = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosRelationships/Role:1.0", "Role")
omniORB.typeMapping["IDL:omg.org/CosRelationships/Role:1.0"] = _0_CosRelationships._d_Role
# forward interface RelationshipIterator;
_0_CosRelationships._d_RelationshipIterator = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosRelationships/RelationshipIterator:1.0", "RelationshipIterator")
omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipIterator:1.0"] = _0_CosRelationships._d_RelationshipIterator
# typedef ... RelatedObject
class RelatedObject:
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RelatedObject:1.0"
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_0_CosRelationships.RelatedObject = RelatedObject
_0_CosRelationships._d_RelatedObject = omniORB.typeMapping["IDL:omg.org/CORBA/Object:1.0"]
_0_CosRelationships._ad_RelatedObject = (omniORB.tcInternal.tv_alias, RelatedObject._NP_RepositoryId, "RelatedObject", omniORB.typeMapping["IDL:omg.org/CORBA/Object:1.0"])
_0_CosRelationships._tc_RelatedObject = omniORB.tcInternal.createTypeCode(_0_CosRelationships._ad_RelatedObject)
omniORB.registerType(RelatedObject._NP_RepositoryId, _0_CosRelationships._ad_RelatedObject, _0_CosRelationships._tc_RelatedObject)
del RelatedObject
# typedef ... Roles
class Roles:
_NP_RepositoryId = "IDL:omg.org/CosRelationships/Roles:1.0"
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_0_CosRelationships.Roles = Roles
_0_CosRelationships._d_Roles = (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CosRelationships/Role:1.0"], 0)
_0_CosRelationships._ad_Roles = (omniORB.tcInternal.tv_alias, Roles._NP_RepositoryId, "Roles", (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CosRelationships/Role:1.0"], 0))
_0_CosRelationships._tc_Roles = omniORB.tcInternal.createTypeCode(_0_CosRelationships._ad_Roles)
omniORB.registerType(Roles._NP_RepositoryId, _0_CosRelationships._ad_Roles, _0_CosRelationships._tc_Roles)
del Roles
# typedef ... RoleName
class RoleName:
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RoleName:1.0"
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_0_CosRelationships.RoleName = RoleName
_0_CosRelationships._d_RoleName = (omniORB.tcInternal.tv_string,0)
_0_CosRelationships._ad_RoleName = (omniORB.tcInternal.tv_alias, RoleName._NP_RepositoryId, "RoleName", (omniORB.tcInternal.tv_string,0))
_0_CosRelationships._tc_RoleName = omniORB.tcInternal.createTypeCode(_0_CosRelationships._ad_RoleName)
omniORB.registerType(RoleName._NP_RepositoryId, _0_CosRelationships._ad_RoleName, _0_CosRelationships._tc_RoleName)
del RoleName
# typedef ... RoleNames
class RoleNames:
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RoleNames:1.0"
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_0_CosRelationships.RoleNames = RoleNames
_0_CosRelationships._d_RoleNames = (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CosRelationships/RoleName:1.0"], 0)
_0_CosRelationships._ad_RoleNames = (omniORB.tcInternal.tv_alias, RoleNames._NP_RepositoryId, "RoleNames", (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CosRelationships/RoleName:1.0"], 0))
_0_CosRelationships._tc_RoleNames = omniORB.tcInternal.createTypeCode(_0_CosRelationships._ad_RoleNames)
omniORB.registerType(RoleNames._NP_RepositoryId, _0_CosRelationships._ad_RoleNames, _0_CosRelationships._tc_RoleNames)
del RoleNames
# struct NamedRole
_0_CosRelationships.NamedRole = omniORB.newEmptyClass()
class NamedRole (omniORB.StructBase):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/NamedRole:1.0"
def __init__(self, name, aRole):
self.name = name
self.aRole = aRole
_0_CosRelationships.NamedRole = NamedRole
_0_CosRelationships._d_NamedRole = (omniORB.tcInternal.tv_struct, NamedRole, NamedRole._NP_RepositoryId, "NamedRole", "name", omniORB.typeMapping["IDL:omg.org/CosRelationships/RoleName:1.0"], "aRole", omniORB.typeMapping["IDL:omg.org/CosRelationships/Role:1.0"])
_0_CosRelationships._tc_NamedRole = omniORB.tcInternal.createTypeCode(_0_CosRelationships._d_NamedRole)
omniORB.registerType(NamedRole._NP_RepositoryId, _0_CosRelationships._d_NamedRole, _0_CosRelationships._tc_NamedRole)
del NamedRole
# typedef ... NamedRoles
class NamedRoles:
_NP_RepositoryId = "IDL:omg.org/CosRelationships/NamedRoles:1.0"
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_0_CosRelationships.NamedRoles = NamedRoles
_0_CosRelationships._d_NamedRoles = (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CosRelationships/NamedRole:1.0"], 0)
_0_CosRelationships._ad_NamedRoles = (omniORB.tcInternal.tv_alias, NamedRoles._NP_RepositoryId, "NamedRoles", (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CosRelationships/NamedRole:1.0"], 0))
_0_CosRelationships._tc_NamedRoles = omniORB.tcInternal.createTypeCode(_0_CosRelationships._ad_NamedRoles)
omniORB.registerType(NamedRoles._NP_RepositoryId, _0_CosRelationships._ad_NamedRoles, _0_CosRelationships._tc_NamedRoles)
del NamedRoles
# struct RelationshipHandle
_0_CosRelationships.RelationshipHandle = omniORB.newEmptyClass()
class RelationshipHandle (omniORB.StructBase):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RelationshipHandle:1.0"
def __init__(self, the_relationship, constant_random_id):
self.the_relationship = the_relationship
self.constant_random_id = constant_random_id
_0_CosRelationships.RelationshipHandle = RelationshipHandle
_0_CosRelationships._d_RelationshipHandle = (omniORB.tcInternal.tv_struct, RelationshipHandle, RelationshipHandle._NP_RepositoryId, "RelationshipHandle", "the_relationship", omniORB.typeMapping["IDL:omg.org/CosRelationships/Relationship:1.0"], "constant_random_id", omniORB.typeMapping["IDL:omg.org/CosObjectIdentity/ObjectIdentifier:1.0"])
_0_CosRelationships._tc_RelationshipHandle = omniORB.tcInternal.createTypeCode(_0_CosRelationships._d_RelationshipHandle)
omniORB.registerType(RelationshipHandle._NP_RepositoryId, _0_CosRelationships._d_RelationshipHandle, _0_CosRelationships._tc_RelationshipHandle)
del RelationshipHandle
# typedef ... RelationshipHandles
class RelationshipHandles:
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RelationshipHandles:1.0"
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_0_CosRelationships.RelationshipHandles = RelationshipHandles
_0_CosRelationships._d_RelationshipHandles = (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandle:1.0"], 0)
_0_CosRelationships._ad_RelationshipHandles = (omniORB.tcInternal.tv_alias, RelationshipHandles._NP_RepositoryId, "RelationshipHandles", (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandle:1.0"], 0))
_0_CosRelationships._tc_RelationshipHandles = omniORB.tcInternal.createTypeCode(_0_CosRelationships._ad_RelationshipHandles)
omniORB.registerType(RelationshipHandles._NP_RepositoryId, _0_CosRelationships._ad_RelationshipHandles, _0_CosRelationships._tc_RelationshipHandles)
del RelationshipHandles
# interface RelationshipFactory
_0_CosRelationships._d_RelationshipFactory = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosRelationships/RelationshipFactory:1.0", "RelationshipFactory")
omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipFactory:1.0"] = _0_CosRelationships._d_RelationshipFactory
_0_CosRelationships.RelationshipFactory = omniORB.newEmptyClass()
class RelationshipFactory :
_NP_RepositoryId = _0_CosRelationships._d_RelationshipFactory[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
# struct NamedRoleType
_0_CosRelationships.RelationshipFactory.NamedRoleType = omniORB.newEmptyClass()
class NamedRoleType (omniORB.StructBase):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RelationshipFactory/NamedRoleType:1.0"
_NP_ClassName = "CosRelationships.RelationshipFactory.NamedRoleType"
def __init__(self, name, named_role_type):
self.name = name
self.named_role_type = named_role_type
_d_NamedRoleType = _0_CosRelationships.RelationshipFactory._d_NamedRoleType = (omniORB.tcInternal.tv_struct, NamedRoleType, NamedRoleType._NP_RepositoryId, "NamedRoleType", "name", omniORB.typeMapping["IDL:omg.org/CosRelationships/RoleName:1.0"], "named_role_type", omniORB.typeMapping["IDL:omg.org/CORBA/InterfaceDef:1.0"])
_tc_NamedRoleType = omniORB.tcInternal.createTypeCode(_d_NamedRoleType)
omniORB.registerType(NamedRoleType._NP_RepositoryId, _d_NamedRoleType, _tc_NamedRoleType)
# typedef ... NamedRoleTypes
class NamedRoleTypes:
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RelationshipFactory/NamedRoleTypes:1.0"
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_d_NamedRoleTypes = (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipFactory/NamedRoleType:1.0"], 0)
_ad_NamedRoleTypes = (omniORB.tcInternal.tv_alias, NamedRoleTypes._NP_RepositoryId, "NamedRoleTypes", (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipFactory/NamedRoleType:1.0"], 0))
_tc_NamedRoleTypes = omniORB.tcInternal.createTypeCode(_ad_NamedRoleTypes)
omniORB.registerType(NamedRoleTypes._NP_RepositoryId, _ad_NamedRoleTypes, _tc_NamedRoleTypes)
# exception RoleTypeError
_0_CosRelationships.RelationshipFactory.RoleTypeError = omniORB.newEmptyClass()
class RoleTypeError (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RelationshipFactory/RoleTypeError:1.0"
_NP_ClassName = "CosRelationships.RelationshipFactory.RoleTypeError"
def __init__(self, culprits):
CORBA.UserException.__init__(self, culprits)
self.culprits = culprits
_d_RoleTypeError = (omniORB.tcInternal.tv_except, RoleTypeError, RoleTypeError._NP_RepositoryId, "RoleTypeError", "culprits", omniORB.typeMapping["IDL:omg.org/CosRelationships/NamedRoles:1.0"])
_tc_RoleTypeError = omniORB.tcInternal.createTypeCode(_d_RoleTypeError)
omniORB.registerType(RoleTypeError._NP_RepositoryId, _d_RoleTypeError, _tc_RoleTypeError)
# exception MaxCardinalityExceeded
_0_CosRelationships.RelationshipFactory.MaxCardinalityExceeded = omniORB.newEmptyClass()
class MaxCardinalityExceeded (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RelationshipFactory/MaxCardinalityExceeded:1.0"
_NP_ClassName = "CosRelationships.RelationshipFactory.MaxCardinalityExceeded"
def __init__(self, culprits):
CORBA.UserException.__init__(self, culprits)
self.culprits = culprits
_d_MaxCardinalityExceeded = (omniORB.tcInternal.tv_except, MaxCardinalityExceeded, MaxCardinalityExceeded._NP_RepositoryId, "MaxCardinalityExceeded", "culprits", omniORB.typeMapping["IDL:omg.org/CosRelationships/NamedRoles:1.0"])
_tc_MaxCardinalityExceeded = omniORB.tcInternal.createTypeCode(_d_MaxCardinalityExceeded)
omniORB.registerType(MaxCardinalityExceeded._NP_RepositoryId, _d_MaxCardinalityExceeded, _tc_MaxCardinalityExceeded)
# exception DegreeError
_0_CosRelationships.RelationshipFactory.DegreeError = omniORB.newEmptyClass()
class DegreeError (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RelationshipFactory/DegreeError:1.0"
_NP_ClassName = "CosRelationships.RelationshipFactory.DegreeError"
def __init__(self, required_degree):
CORBA.UserException.__init__(self, required_degree)
self.required_degree = required_degree
_d_DegreeError = (omniORB.tcInternal.tv_except, DegreeError, DegreeError._NP_RepositoryId, "DegreeError", "required_degree", omniORB.tcInternal.tv_ushort)
_tc_DegreeError = omniORB.tcInternal.createTypeCode(_d_DegreeError)
omniORB.registerType(DegreeError._NP_RepositoryId, _d_DegreeError, _tc_DegreeError)
# exception DuplicateRoleName
_0_CosRelationships.RelationshipFactory.DuplicateRoleName = omniORB.newEmptyClass()
class DuplicateRoleName (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RelationshipFactory/DuplicateRoleName:1.0"
_NP_ClassName = "CosRelationships.RelationshipFactory.DuplicateRoleName"
def __init__(self, culprits):
CORBA.UserException.__init__(self, culprits)
self.culprits = culprits
_d_DuplicateRoleName = (omniORB.tcInternal.tv_except, DuplicateRoleName, DuplicateRoleName._NP_RepositoryId, "DuplicateRoleName", "culprits", omniORB.typeMapping["IDL:omg.org/CosRelationships/NamedRoles:1.0"])
_tc_DuplicateRoleName = omniORB.tcInternal.createTypeCode(_d_DuplicateRoleName)
omniORB.registerType(DuplicateRoleName._NP_RepositoryId, _d_DuplicateRoleName, _tc_DuplicateRoleName)
# exception UnknownRoleName
_0_CosRelationships.RelationshipFactory.UnknownRoleName = omniORB.newEmptyClass()
class UnknownRoleName (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RelationshipFactory/UnknownRoleName:1.0"
_NP_ClassName = "CosRelationships.RelationshipFactory.UnknownRoleName"
def __init__(self, culprits):
CORBA.UserException.__init__(self, culprits)
self.culprits = culprits
_d_UnknownRoleName = (omniORB.tcInternal.tv_except, UnknownRoleName, UnknownRoleName._NP_RepositoryId, "UnknownRoleName", "culprits", omniORB.typeMapping["IDL:omg.org/CosRelationships/NamedRoles:1.0"])
_tc_UnknownRoleName = omniORB.tcInternal.createTypeCode(_d_UnknownRoleName)
omniORB.registerType(UnknownRoleName._NP_RepositoryId, _d_UnknownRoleName, _tc_UnknownRoleName)
_0_CosRelationships.RelationshipFactory = RelationshipFactory
_0_CosRelationships._tc_RelationshipFactory = omniORB.tcInternal.createTypeCode(_0_CosRelationships._d_RelationshipFactory)
omniORB.registerType(RelationshipFactory._NP_RepositoryId, _0_CosRelationships._d_RelationshipFactory, _0_CosRelationships._tc_RelationshipFactory)
# RelationshipFactory operations and attributes
RelationshipFactory._d__get_relationship_type = ((),(omniORB.typeMapping["IDL:omg.org/CORBA/InterfaceDef:1.0"],),None)
RelationshipFactory._d__get_degree = ((),(omniORB.tcInternal.tv_ushort,),None)
RelationshipFactory._d__get_named_role_types = ((),(omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipFactory/NamedRoleTypes:1.0"],),None)
RelationshipFactory._d_create = ((omniORB.typeMapping["IDL:omg.org/CosRelationships/NamedRoles:1.0"], ), (omniORB.typeMapping["IDL:omg.org/CosRelationships/Relationship:1.0"], ), {_0_CosRelationships.RelationshipFactory.RoleTypeError._NP_RepositoryId: _0_CosRelationships.RelationshipFactory._d_RoleTypeError, _0_CosRelationships.RelationshipFactory.MaxCardinalityExceeded._NP_RepositoryId: _0_CosRelationships.RelationshipFactory._d_MaxCardinalityExceeded, _0_CosRelationships.RelationshipFactory.DegreeError._NP_RepositoryId: _0_CosRelationships.RelationshipFactory._d_DegreeError, _0_CosRelationships.RelationshipFactory.DuplicateRoleName._NP_RepositoryId: _0_CosRelationships.RelationshipFactory._d_DuplicateRoleName, _0_CosRelationships.RelationshipFactory.UnknownRoleName._NP_RepositoryId: _0_CosRelationships.RelationshipFactory._d_UnknownRoleName})
# RelationshipFactory object reference
class _objref_RelationshipFactory (CORBA.Object):
_NP_RepositoryId = RelationshipFactory._NP_RepositoryId
def __init__(self, obj):
CORBA.Object.__init__(self, obj)
def _get_relationship_type(self, *args):
return self._obj.invoke("_get_relationship_type", _0_CosRelationships.RelationshipFactory._d__get_relationship_type, args)
relationship_type = property(_get_relationship_type)
def _get_degree(self, *args):
return self._obj.invoke("_get_degree", _0_CosRelationships.RelationshipFactory._d__get_degree, args)
degree = property(_get_degree)
def _get_named_role_types(self, *args):
return self._obj.invoke("_get_named_role_types", _0_CosRelationships.RelationshipFactory._d__get_named_role_types, args)
named_role_types = property(_get_named_role_types)
def create(self, *args):
return self._obj.invoke("create", _0_CosRelationships.RelationshipFactory._d_create, args)<|fim▁hole|>del RelationshipFactory, _objref_RelationshipFactory
# RelationshipFactory skeleton
__name__ = "CosRelationships__POA"
class RelationshipFactory (PortableServer.Servant):
_NP_RepositoryId = _0_CosRelationships.RelationshipFactory._NP_RepositoryId
_omni_op_d = {"_get_relationship_type": _0_CosRelationships.RelationshipFactory._d__get_relationship_type, "_get_degree": _0_CosRelationships.RelationshipFactory._d__get_degree, "_get_named_role_types": _0_CosRelationships.RelationshipFactory._d__get_named_role_types, "create": _0_CosRelationships.RelationshipFactory._d_create}
RelationshipFactory._omni_skeleton = RelationshipFactory
_0_CosRelationships__POA.RelationshipFactory = RelationshipFactory
omniORB.registerSkeleton(RelationshipFactory._NP_RepositoryId, RelationshipFactory)
del RelationshipFactory
__name__ = "CosRelationships"
# interface Relationship
_0_CosRelationships._d_Relationship = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosRelationships/Relationship:1.0", "Relationship")
omniORB.typeMapping["IDL:omg.org/CosRelationships/Relationship:1.0"] = _0_CosRelationships._d_Relationship
_0_CosRelationships.Relationship = omniORB.newEmptyClass()
class Relationship (_0_CosObjectIdentity.IdentifiableObject):
_NP_RepositoryId = _0_CosRelationships._d_Relationship[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
# exception CannotUnlink
_0_CosRelationships.Relationship.CannotUnlink = omniORB.newEmptyClass()
class CannotUnlink (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/Relationship/CannotUnlink:1.0"
_NP_ClassName = "CosRelationships.Relationship.CannotUnlink"
def __init__(self, offending_roles):
CORBA.UserException.__init__(self, offending_roles)
self.offending_roles = offending_roles
_d_CannotUnlink = (omniORB.tcInternal.tv_except, CannotUnlink, CannotUnlink._NP_RepositoryId, "CannotUnlink", "offending_roles", omniORB.typeMapping["IDL:omg.org/CosRelationships/Roles:1.0"])
_tc_CannotUnlink = omniORB.tcInternal.createTypeCode(_d_CannotUnlink)
omniORB.registerType(CannotUnlink._NP_RepositoryId, _d_CannotUnlink, _tc_CannotUnlink)
_0_CosRelationships.Relationship = Relationship
_0_CosRelationships._tc_Relationship = omniORB.tcInternal.createTypeCode(_0_CosRelationships._d_Relationship)
omniORB.registerType(Relationship._NP_RepositoryId, _0_CosRelationships._d_Relationship, _0_CosRelationships._tc_Relationship)
# Relationship operations and attributes
Relationship._d__get_named_roles = ((),(omniORB.typeMapping["IDL:omg.org/CosRelationships/NamedRoles:1.0"],),None)
Relationship._d_destroy = ((), (), {_0_CosRelationships.Relationship.CannotUnlink._NP_RepositoryId: _0_CosRelationships.Relationship._d_CannotUnlink})
# Relationship object reference
class _objref_Relationship (_0_CosObjectIdentity._objref_IdentifiableObject):
_NP_RepositoryId = Relationship._NP_RepositoryId
def __init__(self, obj):
_0_CosObjectIdentity._objref_IdentifiableObject.__init__(self, obj)
def _get_named_roles(self, *args):
return self._obj.invoke("_get_named_roles", _0_CosRelationships.Relationship._d__get_named_roles, args)
named_roles = property(_get_named_roles)
def destroy(self, *args):
return self._obj.invoke("destroy", _0_CosRelationships.Relationship._d_destroy, args)
omniORB.registerObjref(Relationship._NP_RepositoryId, _objref_Relationship)
_0_CosRelationships._objref_Relationship = _objref_Relationship
del Relationship, _objref_Relationship
# Relationship skeleton
__name__ = "CosRelationships__POA"
class Relationship (_0_CosObjectIdentity__POA.IdentifiableObject):
_NP_RepositoryId = _0_CosRelationships.Relationship._NP_RepositoryId
_omni_op_d = {"_get_named_roles": _0_CosRelationships.Relationship._d__get_named_roles, "destroy": _0_CosRelationships.Relationship._d_destroy}
_omni_op_d.update(_0_CosObjectIdentity__POA.IdentifiableObject._omni_op_d)
Relationship._omni_skeleton = Relationship
_0_CosRelationships__POA.Relationship = Relationship
omniORB.registerSkeleton(Relationship._NP_RepositoryId, Relationship)
del Relationship
__name__ = "CosRelationships"
# interface Role
_0_CosRelationships._d_Role = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosRelationships/Role:1.0", "Role")
omniORB.typeMapping["IDL:omg.org/CosRelationships/Role:1.0"] = _0_CosRelationships._d_Role
_0_CosRelationships.Role = omniORB.newEmptyClass()
class Role :
_NP_RepositoryId = _0_CosRelationships._d_Role[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
# exception UnknownRoleName
_0_CosRelationships.Role.UnknownRoleName = omniORB.newEmptyClass()
class UnknownRoleName (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/Role/UnknownRoleName:1.0"
_NP_ClassName = "CosRelationships.Role.UnknownRoleName"
def __init__(self):
CORBA.UserException.__init__(self)
_d_UnknownRoleName = (omniORB.tcInternal.tv_except, UnknownRoleName, UnknownRoleName._NP_RepositoryId, "UnknownRoleName")
_tc_UnknownRoleName = omniORB.tcInternal.createTypeCode(_d_UnknownRoleName)
omniORB.registerType(UnknownRoleName._NP_RepositoryId, _d_UnknownRoleName, _tc_UnknownRoleName)
# exception UnknownRelationship
_0_CosRelationships.Role.UnknownRelationship = omniORB.newEmptyClass()
class UnknownRelationship (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/Role/UnknownRelationship:1.0"
_NP_ClassName = "CosRelationships.Role.UnknownRelationship"
def __init__(self):
CORBA.UserException.__init__(self)
_d_UnknownRelationship = (omniORB.tcInternal.tv_except, UnknownRelationship, UnknownRelationship._NP_RepositoryId, "UnknownRelationship")
_tc_UnknownRelationship = omniORB.tcInternal.createTypeCode(_d_UnknownRelationship)
omniORB.registerType(UnknownRelationship._NP_RepositoryId, _d_UnknownRelationship, _tc_UnknownRelationship)
# exception RelationshipTypeError
_0_CosRelationships.Role.RelationshipTypeError = omniORB.newEmptyClass()
class RelationshipTypeError (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/Role/RelationshipTypeError:1.0"
_NP_ClassName = "CosRelationships.Role.RelationshipTypeError"
def __init__(self):
CORBA.UserException.__init__(self)
_d_RelationshipTypeError = (omniORB.tcInternal.tv_except, RelationshipTypeError, RelationshipTypeError._NP_RepositoryId, "RelationshipTypeError")
_tc_RelationshipTypeError = omniORB.tcInternal.createTypeCode(_d_RelationshipTypeError)
omniORB.registerType(RelationshipTypeError._NP_RepositoryId, _d_RelationshipTypeError, _tc_RelationshipTypeError)
# exception CannotDestroyRelationship
_0_CosRelationships.Role.CannotDestroyRelationship = omniORB.newEmptyClass()
class CannotDestroyRelationship (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/Role/CannotDestroyRelationship:1.0"
_NP_ClassName = "CosRelationships.Role.CannotDestroyRelationship"
def __init__(self, offenders):
CORBA.UserException.__init__(self, offenders)
self.offenders = offenders
_d_CannotDestroyRelationship = (omniORB.tcInternal.tv_except, CannotDestroyRelationship, CannotDestroyRelationship._NP_RepositoryId, "CannotDestroyRelationship", "offenders", omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandles:1.0"])
_tc_CannotDestroyRelationship = omniORB.tcInternal.createTypeCode(_d_CannotDestroyRelationship)
omniORB.registerType(CannotDestroyRelationship._NP_RepositoryId, _d_CannotDestroyRelationship, _tc_CannotDestroyRelationship)
# exception ParticipatingInRelationship
_0_CosRelationships.Role.ParticipatingInRelationship = omniORB.newEmptyClass()
class ParticipatingInRelationship (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/Role/ParticipatingInRelationship:1.0"
_NP_ClassName = "CosRelationships.Role.ParticipatingInRelationship"
def __init__(self, the_relationships):
CORBA.UserException.__init__(self, the_relationships)
self.the_relationships = the_relationships
_d_ParticipatingInRelationship = (omniORB.tcInternal.tv_except, ParticipatingInRelationship, ParticipatingInRelationship._NP_RepositoryId, "ParticipatingInRelationship", "the_relationships", omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandles:1.0"])
_tc_ParticipatingInRelationship = omniORB.tcInternal.createTypeCode(_d_ParticipatingInRelationship)
omniORB.registerType(ParticipatingInRelationship._NP_RepositoryId, _d_ParticipatingInRelationship, _tc_ParticipatingInRelationship)
_0_CosRelationships.Role = Role
_0_CosRelationships._tc_Role = omniORB.tcInternal.createTypeCode(_0_CosRelationships._d_Role)
omniORB.registerType(Role._NP_RepositoryId, _0_CosRelationships._d_Role, _0_CosRelationships._tc_Role)
# Role operations and attributes
Role._d__get_related_object = ((),(omniORB.typeMapping["IDL:omg.org/CosRelationships/RelatedObject:1.0"],),None)
Role._d_get_other_related_object = ((omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandle:1.0"], omniORB.typeMapping["IDL:omg.org/CosRelationships/RoleName:1.0"]), (omniORB.typeMapping["IDL:omg.org/CosRelationships/RelatedObject:1.0"], ), {_0_CosRelationships.Role.UnknownRoleName._NP_RepositoryId: _0_CosRelationships.Role._d_UnknownRoleName, _0_CosRelationships.Role.UnknownRelationship._NP_RepositoryId: _0_CosRelationships.Role._d_UnknownRelationship})
Role._d_get_other_role = ((omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandle:1.0"], omniORB.typeMapping["IDL:omg.org/CosRelationships/RoleName:1.0"]), (omniORB.typeMapping["IDL:omg.org/CosRelationships/Role:1.0"], ), {_0_CosRelationships.Role.UnknownRoleName._NP_RepositoryId: _0_CosRelationships.Role._d_UnknownRoleName, _0_CosRelationships.Role.UnknownRelationship._NP_RepositoryId: _0_CosRelationships.Role._d_UnknownRelationship})
Role._d_get_relationships = ((omniORB.tcInternal.tv_ulong, ), (omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandles:1.0"], omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipIterator:1.0"]), None)
Role._d_destroy_relationships = ((), (), {_0_CosRelationships.Role.CannotDestroyRelationship._NP_RepositoryId: _0_CosRelationships.Role._d_CannotDestroyRelationship})
Role._d_destroy = ((), (), {_0_CosRelationships.Role.ParticipatingInRelationship._NP_RepositoryId: _0_CosRelationships.Role._d_ParticipatingInRelationship})
Role._d_check_minimum_cardinality = ((), (omniORB.tcInternal.tv_boolean, ), None)
Role._d_link = ((omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandle:1.0"], omniORB.typeMapping["IDL:omg.org/CosRelationships/NamedRoles:1.0"]), (), {_0_CosRelationships.RelationshipFactory.MaxCardinalityExceeded._NP_RepositoryId: _0_CosRelationships.RelationshipFactory._d_MaxCardinalityExceeded, _0_CosRelationships.Role.RelationshipTypeError._NP_RepositoryId: _0_CosRelationships.Role._d_RelationshipTypeError})
Role._d_unlink = ((omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandle:1.0"], ), (), {_0_CosRelationships.Role.UnknownRelationship._NP_RepositoryId: _0_CosRelationships.Role._d_UnknownRelationship})
# Role object reference
class _objref_Role (CORBA.Object):
_NP_RepositoryId = Role._NP_RepositoryId
def __init__(self, obj):
CORBA.Object.__init__(self, obj)
def _get_related_object(self, *args):
return self._obj.invoke("_get_related_object", _0_CosRelationships.Role._d__get_related_object, args)
related_object = property(_get_related_object)
def get_other_related_object(self, *args):
return self._obj.invoke("get_other_related_object", _0_CosRelationships.Role._d_get_other_related_object, args)
def get_other_role(self, *args):
return self._obj.invoke("get_other_role", _0_CosRelationships.Role._d_get_other_role, args)
def get_relationships(self, *args):
return self._obj.invoke("get_relationships", _0_CosRelationships.Role._d_get_relationships, args)
def destroy_relationships(self, *args):
return self._obj.invoke("destroy_relationships", _0_CosRelationships.Role._d_destroy_relationships, args)
def destroy(self, *args):
return self._obj.invoke("destroy", _0_CosRelationships.Role._d_destroy, args)
def check_minimum_cardinality(self, *args):
return self._obj.invoke("check_minimum_cardinality", _0_CosRelationships.Role._d_check_minimum_cardinality, args)
def link(self, *args):
return self._obj.invoke("link", _0_CosRelationships.Role._d_link, args)
def unlink(self, *args):
return self._obj.invoke("unlink", _0_CosRelationships.Role._d_unlink, args)
omniORB.registerObjref(Role._NP_RepositoryId, _objref_Role)
_0_CosRelationships._objref_Role = _objref_Role
del Role, _objref_Role
# Role skeleton
__name__ = "CosRelationships__POA"
class Role (PortableServer.Servant):
_NP_RepositoryId = _0_CosRelationships.Role._NP_RepositoryId
_omni_op_d = {"_get_related_object": _0_CosRelationships.Role._d__get_related_object, "get_other_related_object": _0_CosRelationships.Role._d_get_other_related_object, "get_other_role": _0_CosRelationships.Role._d_get_other_role, "get_relationships": _0_CosRelationships.Role._d_get_relationships, "destroy_relationships": _0_CosRelationships.Role._d_destroy_relationships, "destroy": _0_CosRelationships.Role._d_destroy, "check_minimum_cardinality": _0_CosRelationships.Role._d_check_minimum_cardinality, "link": _0_CosRelationships.Role._d_link, "unlink": _0_CosRelationships.Role._d_unlink}
Role._omni_skeleton = Role
_0_CosRelationships__POA.Role = Role
omniORB.registerSkeleton(Role._NP_RepositoryId, Role)
del Role
__name__ = "CosRelationships"
# interface RoleFactory
_0_CosRelationships._d_RoleFactory = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosRelationships/RoleFactory:1.0", "RoleFactory")
omniORB.typeMapping["IDL:omg.org/CosRelationships/RoleFactory:1.0"] = _0_CosRelationships._d_RoleFactory
_0_CosRelationships.RoleFactory = omniORB.newEmptyClass()
class RoleFactory :
_NP_RepositoryId = _0_CosRelationships._d_RoleFactory[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
# exception NilRelatedObject
_0_CosRelationships.RoleFactory.NilRelatedObject = omniORB.newEmptyClass()
class NilRelatedObject (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RoleFactory/NilRelatedObject:1.0"
_NP_ClassName = "CosRelationships.RoleFactory.NilRelatedObject"
def __init__(self):
CORBA.UserException.__init__(self)
_d_NilRelatedObject = (omniORB.tcInternal.tv_except, NilRelatedObject, NilRelatedObject._NP_RepositoryId, "NilRelatedObject")
_tc_NilRelatedObject = omniORB.tcInternal.createTypeCode(_d_NilRelatedObject)
omniORB.registerType(NilRelatedObject._NP_RepositoryId, _d_NilRelatedObject, _tc_NilRelatedObject)
# exception RelatedObjectTypeError
_0_CosRelationships.RoleFactory.RelatedObjectTypeError = omniORB.newEmptyClass()
class RelatedObjectTypeError (CORBA.UserException):
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RoleFactory/RelatedObjectTypeError:1.0"
_NP_ClassName = "CosRelationships.RoleFactory.RelatedObjectTypeError"
def __init__(self):
CORBA.UserException.__init__(self)
_d_RelatedObjectTypeError = (omniORB.tcInternal.tv_except, RelatedObjectTypeError, RelatedObjectTypeError._NP_RepositoryId, "RelatedObjectTypeError")
_tc_RelatedObjectTypeError = omniORB.tcInternal.createTypeCode(_d_RelatedObjectTypeError)
omniORB.registerType(RelatedObjectTypeError._NP_RepositoryId, _d_RelatedObjectTypeError, _tc_RelatedObjectTypeError)
# typedef ... InterfaceDefs
class InterfaceDefs:
_NP_RepositoryId = "IDL:omg.org/CosRelationships/RoleFactory/InterfaceDefs:1.0"
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_d_InterfaceDefs = (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CORBA/InterfaceDef:1.0"], 0)
_ad_InterfaceDefs = (omniORB.tcInternal.tv_alias, InterfaceDefs._NP_RepositoryId, "InterfaceDefs", (omniORB.tcInternal.tv_sequence, omniORB.typeMapping["IDL:omg.org/CORBA/InterfaceDef:1.0"], 0))
_tc_InterfaceDefs = omniORB.tcInternal.createTypeCode(_ad_InterfaceDefs)
omniORB.registerType(InterfaceDefs._NP_RepositoryId, _ad_InterfaceDefs, _tc_InterfaceDefs)
_0_CosRelationships.RoleFactory = RoleFactory
_0_CosRelationships._tc_RoleFactory = omniORB.tcInternal.createTypeCode(_0_CosRelationships._d_RoleFactory)
omniORB.registerType(RoleFactory._NP_RepositoryId, _0_CosRelationships._d_RoleFactory, _0_CosRelationships._tc_RoleFactory)
# RoleFactory operations and attributes
RoleFactory._d__get_role_type = ((),(omniORB.typeMapping["IDL:omg.org/CORBA/InterfaceDef:1.0"],),None)
RoleFactory._d__get_max_cardinality = ((),(omniORB.tcInternal.tv_ulong,),None)
RoleFactory._d__get_min_cardinality = ((),(omniORB.tcInternal.tv_ulong,),None)
RoleFactory._d__get_related_object_types = ((),(omniORB.typeMapping["IDL:omg.org/CosRelationships/RoleFactory/InterfaceDefs:1.0"],),None)
RoleFactory._d_create_role = ((omniORB.typeMapping["IDL:omg.org/CosRelationships/RelatedObject:1.0"], ), (omniORB.typeMapping["IDL:omg.org/CosRelationships/Role:1.0"], ), {_0_CosRelationships.RoleFactory.NilRelatedObject._NP_RepositoryId: _0_CosRelationships.RoleFactory._d_NilRelatedObject, _0_CosRelationships.RoleFactory.RelatedObjectTypeError._NP_RepositoryId: _0_CosRelationships.RoleFactory._d_RelatedObjectTypeError})
# RoleFactory object reference
class _objref_RoleFactory (CORBA.Object):
_NP_RepositoryId = RoleFactory._NP_RepositoryId
def __init__(self, obj):
CORBA.Object.__init__(self, obj)
def _get_role_type(self, *args):
return self._obj.invoke("_get_role_type", _0_CosRelationships.RoleFactory._d__get_role_type, args)
role_type = property(_get_role_type)
def _get_max_cardinality(self, *args):
return self._obj.invoke("_get_max_cardinality", _0_CosRelationships.RoleFactory._d__get_max_cardinality, args)
max_cardinality = property(_get_max_cardinality)
def _get_min_cardinality(self, *args):
return self._obj.invoke("_get_min_cardinality", _0_CosRelationships.RoleFactory._d__get_min_cardinality, args)
min_cardinality = property(_get_min_cardinality)
def _get_related_object_types(self, *args):
return self._obj.invoke("_get_related_object_types", _0_CosRelationships.RoleFactory._d__get_related_object_types, args)
related_object_types = property(_get_related_object_types)
def create_role(self, *args):
return self._obj.invoke("create_role", _0_CosRelationships.RoleFactory._d_create_role, args)
omniORB.registerObjref(RoleFactory._NP_RepositoryId, _objref_RoleFactory)
_0_CosRelationships._objref_RoleFactory = _objref_RoleFactory
del RoleFactory, _objref_RoleFactory
# RoleFactory skeleton
__name__ = "CosRelationships__POA"
class RoleFactory (PortableServer.Servant):
_NP_RepositoryId = _0_CosRelationships.RoleFactory._NP_RepositoryId
_omni_op_d = {"_get_role_type": _0_CosRelationships.RoleFactory._d__get_role_type, "_get_max_cardinality": _0_CosRelationships.RoleFactory._d__get_max_cardinality, "_get_min_cardinality": _0_CosRelationships.RoleFactory._d__get_min_cardinality, "_get_related_object_types": _0_CosRelationships.RoleFactory._d__get_related_object_types, "create_role": _0_CosRelationships.RoleFactory._d_create_role}
RoleFactory._omni_skeleton = RoleFactory
_0_CosRelationships__POA.RoleFactory = RoleFactory
omniORB.registerSkeleton(RoleFactory._NP_RepositoryId, RoleFactory)
del RoleFactory
__name__ = "CosRelationships"
# interface RelationshipIterator
_0_CosRelationships._d_RelationshipIterator = (omniORB.tcInternal.tv_objref, "IDL:omg.org/CosRelationships/RelationshipIterator:1.0", "RelationshipIterator")
omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipIterator:1.0"] = _0_CosRelationships._d_RelationshipIterator
_0_CosRelationships.RelationshipIterator = omniORB.newEmptyClass()
class RelationshipIterator :
_NP_RepositoryId = _0_CosRelationships._d_RelationshipIterator[1]
def __init__(self, *args, **kw):
raise RuntimeError("Cannot construct objects of this type.")
_nil = CORBA.Object._nil
_0_CosRelationships.RelationshipIterator = RelationshipIterator
_0_CosRelationships._tc_RelationshipIterator = omniORB.tcInternal.createTypeCode(_0_CosRelationships._d_RelationshipIterator)
omniORB.registerType(RelationshipIterator._NP_RepositoryId, _0_CosRelationships._d_RelationshipIterator, _0_CosRelationships._tc_RelationshipIterator)
# RelationshipIterator operations and attributes
RelationshipIterator._d_next_one = ((), (omniORB.tcInternal.tv_boolean, omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandle:1.0"]), None)
RelationshipIterator._d_next_n = ((omniORB.tcInternal.tv_ulong, ), (omniORB.tcInternal.tv_boolean, omniORB.typeMapping["IDL:omg.org/CosRelationships/RelationshipHandles:1.0"]), None)
RelationshipIterator._d_destroy = ((), (), None)
# RelationshipIterator object reference
class _objref_RelationshipIterator (CORBA.Object):
_NP_RepositoryId = RelationshipIterator._NP_RepositoryId
def __init__(self, obj):
CORBA.Object.__init__(self, obj)
def next_one(self, *args):
return self._obj.invoke("next_one", _0_CosRelationships.RelationshipIterator._d_next_one, args)
def next_n(self, *args):
return self._obj.invoke("next_n", _0_CosRelationships.RelationshipIterator._d_next_n, args)
def destroy(self, *args):
return self._obj.invoke("destroy", _0_CosRelationships.RelationshipIterator._d_destroy, args)
omniORB.registerObjref(RelationshipIterator._NP_RepositoryId, _objref_RelationshipIterator)
_0_CosRelationships._objref_RelationshipIterator = _objref_RelationshipIterator
del RelationshipIterator, _objref_RelationshipIterator
# RelationshipIterator skeleton
__name__ = "CosRelationships__POA"
class RelationshipIterator (PortableServer.Servant):
_NP_RepositoryId = _0_CosRelationships.RelationshipIterator._NP_RepositoryId
_omni_op_d = {"next_one": _0_CosRelationships.RelationshipIterator._d_next_one, "next_n": _0_CosRelationships.RelationshipIterator._d_next_n, "destroy": _0_CosRelationships.RelationshipIterator._d_destroy}
RelationshipIterator._omni_skeleton = RelationshipIterator
_0_CosRelationships__POA.RelationshipIterator = RelationshipIterator
omniORB.registerSkeleton(RelationshipIterator._NP_RepositoryId, RelationshipIterator)
del RelationshipIterator
__name__ = "CosRelationships"
#
# End of module "CosRelationships"
#
__name__ = "CosRelationships_idl"
_exported_modules = ( "CosRelationships", )
# The end.<|fim▁end|>
|
omniORB.registerObjref(RelationshipFactory._NP_RepositoryId, _objref_RelationshipFactory)
_0_CosRelationships._objref_RelationshipFactory = _objref_RelationshipFactory
|
<|file_name|>acquire.py<|end_file_name|><|fim▁begin|>"""
Basic framework for acquiring a roach measurement that includes both sweep(s) and stream(s).
Acquire
-Initialize equipment.
-Initialize roach: preload frequencies, if necessary.
-Create state dictionary containing state from all equipment, including temperatures, if possible.
-Run a coarse sweep, if necessary: create a SweepArray and extract resonance frequencies.
-Run fine sweeps to map out resonance frequencies carefully.
If desired, we can combine the data from coarse and fine sweeps into a single SweepArray.
All streams in these sweeps are created with the same roach state, which should not change during the sweeps.
The sweep(s) are created with the experiment state, which should also not change.
Acquire streams:
-Initialize equipment for stream(s).
-Initialize roach for stream(s).
-Create experiment state dictionary.
-Acquire a StreamArray.
-Repeat the stream acquisition as needed
-Instantiate the final measurement with all data, and save it to disk.
-Clean up equipment.
If instead we want to save data as it is collected, we can do that by writing a blank final measurement to disk, then
writing the sub-measurements as they are acquired.
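A minimal sketch of this flow, using only names defined or used in this module (`ri` is assumed
to be an already-initialized RoachInterface subclass and `tone_banks` an iterable of frequency
arrays; the final write step depends on the chosen IO class and is omitted):
    state = all_metadata()
    sweep = run_sweep(ri, tone_banks, num_tone_samples=2 ** 16, length_seconds=0.1, state=state)
    stream_array = ri.get_measurement(num_seconds=30)
    ncf = new_nc_file(suffix='example')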
"""
from __future__ import division, print_function
import os
import sys
import time
import inspect
import subprocess
import logging
import numpy as np
from kid_readout import settings
from kid_readout.utils import log
from kid_readout.measurement import core, basic
from kid_readout.measurement.io import nc, npy
logger = logging.getLogger(__name__)
# Frequency sweep
def load_baseband_sweep_tones(ri, tone_banks, num_tone_samples):
return ri.set_tone_freqs(freqs=np.vstack(tone_banks), nsamp=num_tone_samples)
def load_heterodyne_sweep_tones(ri, tone_banks, num_tone_samples):
return ri.set_tone_freqs(freqs=np.vstack(tone_banks), nsamp=num_tone_samples)
def run_sweep(ri, tone_banks, num_tone_samples, length_seconds=0, state=None, description='', verbose=False,
wait_for_sync=0.1, **kwargs):
"""
Return a SweepArray acquired using the given tone banks.
Parameters
----------
ri : RoachInterface
An instance of a subclass.
tone_banks : iterable of ndarray (float)
An iterable of arrays (or a 2-D array) of frequencies to use for the sweep.
num_tone_samples : int
The number of samples in the playback buffer; must be a power of two.
length_seconds : float
The duration of each data stream; the default of 0 means the minimum unit of data that can be read out in the
current configuration.
state : dict
The non-roach state to pass to the SweepArray.
description : str
A human-readable description of the measurement.
verbose : bool
If true, print progress messages.
wait_for_sync : float
Sleep for this time in seconds to let the ROACH sync finish.
kwargs
Keyword arguments passed to ri.get_measurement().
Returns
-------
SweepArray
"""
stream_arrays = core.MeasurementList()
if verbose:
print("Measuring bank")
for n, tone_bank in enumerate(tone_banks):
if verbose:
            print(n, end=' ')
sys.stdout.flush()
ri.set_tone_freqs(tone_bank, nsamp=num_tone_samples)
ri.select_fft_bins(np.arange(tone_bank.size))
        # Wait briefly here to let the ROACH2 sync catch up; the delay actually required is still being characterized.
time.sleep(wait_for_sync)
stream_arrays.append(ri.get_measurement(num_seconds=length_seconds, **kwargs))
return basic.SweepArray(stream_arrays, state=state, description=description)
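# Example usage (sketch): build tone banks by stepping a base frequency array through offsets.
# Here `ri` is assumed to be an initialized RoachInterface subclass and `f0` an ndarray of nominal
# resonance frequencies; neither is defined in this module.
#
#     offsets = np.linspace(-100e3, 100e3, 64)
#     sweep = run_sweep(ri, [f0 + offset for offset in offsets], num_tone_samples=2 ** 16,
#                       length_seconds=0.1, verbose=True)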
def run_loaded_sweep(ri, length_seconds=0, state=None, description='', tone_bank_indices=None, bin_indices=None,
verbose=False, **kwargs):
"""
Return a SweepArray acquired using previously-loaded tones.
Parameters
----------
ri : RoachInterface
An instance of a subclass.
length_seconds : float
The duration of each data stream; the default of 0 means the minimum unit of data that can be read out in the
current configuration.
state : dict
The non-roach state to pass to the SweepArray.
description : str
A human-readable description of the measurement.
tone_bank_indices : numpy.ndarray[int]
The indices of the tone banks to use in the sweep; the default is to use all existing.
bin_indices : numpy.ndarray[int]
The indices of the filterbank bins to read out; the default is to read out all bins.
verbose : bool
If true, print progress messages.
kwargs
Keyword arguments passed to ri.get_measurement().
Returns
-------
SweepArray
"""
if tone_bank_indices is None:
tone_bank_indices = np.arange(ri.tone_bins.shape[0])
if bin_indices is None:
bin_indices = np.arange(ri.tone_bins.shape[1])
stream_arrays = core.MeasurementList()
if verbose:
        print("Measuring bank:", end=' ')
for tone_bank_index in tone_bank_indices:
if verbose:
            print(tone_bank_index, end=' ')
sys.stdout.flush()
ri.select_bank(tone_bank_index)
ri.select_fft_bins(bin_indices)
stream_arrays.append(ri.get_measurement(num_seconds=length_seconds, **kwargs))
return basic.SweepArray(stream_arrays, state=state, description=description)
<|fim▁hole|>def run_multipart_sweep(ri, length_seconds=0, state=None, description='', num_tones_read_at_once=32, verbose=False,
**kwargs):
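    """
    Return a SweepArray acquired by reading out the already-loaded tone banks in several steps.

    As read from the code below: the loaded tone bins are split into num_steps interleaved groups
    of roughly num_tones_read_at_once bins each, each group is acquired with run_loaded_sweep(),
    and the resulting stream arrays are combined into a single SweepArray.
    """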
num_tones = ri.tone_bins.shape[1]
num_steps = num_tones // num_tones_read_at_once
if num_steps == 0:
num_steps = 1
indices_to_read = range(num_tones)
parts = []
for step in range(num_steps):
if verbose:
print("running sweep step {} of {}.".format(step,num_steps))
parts.append(run_loaded_sweep(ri, length_seconds=length_seconds, state=state, description=description,
bin_indices=indices_to_read[step::num_steps], **kwargs))
stream_arrays = core.MeasurementList()
for part in parts:
stream_arrays.extend(list(part.stream_arrays))
return basic.SweepArray(stream_arrays, state=state, description=description)
# Metadata
def script_code():
"""
Return the source code of a module running as '__main__'. Acquisition scripts can use this to save their code.
If attempting to load the source code raises an exception, return a string representation of the exception.
Returns
-------
str
The code, with lines separated by newline characters.
"""
try:
return inspect.getsource(sys.modules['__main__'])
except Exception as e:
return str(e)
def git_log():
import kid_readout
kid_readout_directory = os.path.dirname(os.path.abspath(kid_readout.__file__))
try:
return subprocess.check_output(("cd {}; git log -1".format(kid_readout_directory)), shell=True)
except Exception as e:
return str(e)
def git_status():
import kid_readout
kid_readout_directory = os.path.dirname(os.path.abspath(kid_readout.__file__))
try:
return subprocess.check_output(("cd {}; git status --porcelain".format(kid_readout_directory)), shell=True)
except Exception as e:
return str(e)
def all_metadata():
meta = {'script_code': script_code(),
'git_log': git_log(),
'git_status': git_status(),
'cryostat': settings.CRYOSTAT,
'cooldown': settings.COOLDOWN}
return meta
# IO object creation
def new_nc_file(suffix='', directory=settings.BASE_DATA_DIR, metadata=None):
if suffix and not suffix.startswith('_'):
suffix = '_' + suffix
if metadata is None:
metadata = all_metadata()
root_path = os.path.join(directory, time.strftime('%Y-%m-%d_%H%M%S') + suffix + nc.NCFile.EXTENSION)
logger.debug("Creating new NCFile with path %s" % root_path)
return nc.NCFile(root_path, metadata=metadata)
def new_npy_directory(suffix='', directory=settings.BASE_DATA_DIR, metadata=None):
if suffix and not suffix.startswith('_'):
suffix = '_' + suffix
if metadata is None:
metadata = all_metadata()
root_path = os.path.join(directory, time.strftime('%Y-%m-%d_%H%M%S') + suffix + npy.NumpyDirectory.EXTENSION)
logger.debug("Creating new NumpyDirectory with path %s" % root_path)
return npy.NumpyDirectory(root_path, metadata=metadata)
# Interactive checks to be used at the beginning of scripts
def show_settings():
print("cryostat: {}".format(settings.CRYOSTAT))
for k, v in settings.COOLDOWN.items():
print("{}: {}".format(k, v))
raw_input("Press enter to continue or ctrl-C to quit.")
def show_git_status():
print("git status:")
print(git_status())
raw_input("Press enter to continue or ctrl-C to quit.")
# Logging
def get_script_logger(name, level=logging.INFO):
script_logger = logging.getLogger('kid_readout')
script_logger.setLevel(logging.DEBUG)
if log.default_handler not in script_logger.handlers:
stream_handler = log.default_handler
stream_handler.setLevel(level)
script_logger.addHandler(stream_handler)
script_logger.addHandler(log.file_handler(name))
return script_logger<|fim▁end|>
| |
<|file_name|>shl.rs<|end_file_name|><|fim▁begin|>#![feature(core, core_simd)]
extern crate core;
#[cfg(test)]
mod tests {
use core::simd::u64x2;
// #[simd]
// #[derive(Copy, Clone, Debug)]
// #[repr(C)]
// pub struct u64x2(pub u64, pub u64);
#[test]
fn shl_test1() {
let x: u64x2 = u64x2(
0, 1
);
let y: u64x2 = u64x2(
2, 2
);
let z: u64x2 = x << y;
let result: String = format!("{:?}", z);
assert_eq!(result, "u64x2(0, 4)".to_string());<|fim▁hole|> }
}<|fim▁end|>
| |
<|file_name|>matchup.py<|end_file_name|><|fim▁begin|>import os
import sys
import pandas as pd
import numpy as np
from numpy.random import poisson, uniform
from numpy import mean
import time
import math
po = True #Playoff flag: when True, game() settles ties by awarding each team half a win instead of recording a draw
teamsheetpath = sys.path[0] + '/teamcsvs/'
compstat = {'TDF': 'TDA', 'TDA': 'TDF', #Dictionary to use to compare team stats with opponent stats
'FGF': 'FGA', 'FGA': 'FGF',
'SFF': 'SFA', 'SFA': 'SFF',
'PAT1%F': 'PAT1%A', 'PAT1%A': 'PAT1%F',
'PAT2%F': 'PAT2%A', 'PAT2%A': 'PAT2%F'}
def get_opponent_stats(opponent): #Gets summaries of statistics for opponent each week
opponent_stats = {}
global teamsheetpath
opp_stats = pd.DataFrame.from_csv(teamsheetpath + opponent + '.csv')
for stat in opp_stats.columns:
if stat in ['TDF', 'FGF', 'SFF', 'TDA', 'FGA', 'SFA']:
opponent_stats.update({stat: opp_stats[stat].mean()})
try:
opponent_stats.update({'PAT1%F': float(opp_stats['PAT1FS'].sum()) / opp_stats['PAT1FA'].sum()})
except ZeroDivisionError:
opponent_stats.update({'PAT1%F': .99})
try:
opponent_stats.update({'PAT2%F': float(opp_stats['PAT2FS'].sum()) / opp_stats['PAT2FA'].sum()})
except ZeroDivisionError:
opponent_stats.update({'PAT2%F': .5})
try:
opponent_stats.update({'PAT1%A': float(opp_stats['PAT1AS'].sum()) / opp_stats['PAT1AA'].sum()})
except ZeroDivisionError:
opponent_stats.update({'PAT1%A': .99})
try:
opponent_stats.update({'PAT2%A': float(opp_stats['PAT2AS'].sum()) / opp_stats['PAT2AA'].sum()})
except ZeroDivisionError:
opponent_stats.update({'PAT2%A': .5})
return opponent_stats
def get_residual_performance(team): #Get how each team has done compared to the average performance of their opponents
global teamsheetpath
score_df = pd.DataFrame.from_csv(teamsheetpath + team + '.csv')
residual_stats = {}
score_df['PAT1%F'] = np.nan
score_df['PAT2%F'] = np.nan
score_df['PAT1%A'] = np.nan
score_df['PAT2%A'] = np.nan
for week in score_df.index:
try:
score_df['PAT1%F'][week] = float(score_df['PAT1FS'][week]) / score_df['PAT1FA'][week]
except ZeroDivisionError:
score_df['PAT1%F'][week] = 0.99
#print ('For: ' + str(score_df['PAT1%F'][week]))
try:
score_df['PAT2%F'][week] = float(score_df['PAT2FS'][week]) / score_df['PAT2FA'][week]
except ZeroDivisionError:
score_df['PAT2%F'][week] = 0.5
try:
score_df['PAT1%A'][week] = float(score_df['PAT1AS'][week]) / score_df['PAT1AA'][week]
except ZeroDivisionError:
score_df['PAT1%A'][week] = 0.99
#print ('Against: ' + str(score_df['PAT1%F'][week]))
try:
score_df['PAT2%A'][week] = float(score_df['PAT2AS'][week]) / score_df['PAT2AA'][week]
except ZeroDivisionError:
score_df['PAT2%A'][week] = 0.5
opponent_stats = get_opponent_stats(score_df['OPP'][week])
for stat in opponent_stats:
if week == 1:
score_df['OPP_' + stat] = np.nan
score_df['OPP_' + stat][week] = opponent_stats[stat]
for stat in opponent_stats:
score_df['R_' + stat] = score_df[stat] - score_df['OPP_' + compstat[stat]]
if stat in ['TDF', 'FGF', 'SFF', 'TDA', 'FGA', 'SFA']:
residual_stats.update({stat: score_df['R_' + stat].mean()})
elif stat == 'PAT1%F':
residual_stats.update({stat: (score_df['R_PAT1%F'].multiply(score_df['PAT1FA'])).sum() / score_df['PAT1FA'].sum()})
elif stat == 'PAT2%F':
residual_stats.update({stat: (score_df['R_PAT2%F'].multiply(score_df['PAT2FA'])).sum() / score_df['PAT2FA'].sum()})
elif stat == 'PAT1%A':
residual_stats.update({stat: (score_df['R_PAT1%A'].multiply(score_df['PAT1AA'])).sum() / score_df['PAT1AA'].sum()})
elif stat == 'PAT2%A':
residual_stats.update({stat: (score_df['R_PAT2%A'].multiply(score_df['PAT2AA'])).sum() / score_df['PAT2AA'].sum()})
try:
residual_stats.update({'GOFOR2': float(score_df['PAT2FA'].sum()) / score_df['TDF'].sum()})
except ZeroDivisionError:
residual_stats.update({'GOFOR2': .1})
#print team
#print residual_stats
return residual_stats
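#Worked example of a residual (sketch): if a team scores 3 TDs in a week against an opponent that
#allows 2.1 TDs per game on average, that week's R_TDF entry is 3 - 2.1 = +0.9; residual_stats
#holds the mean of these weekly residuals (attempt-weighted for the PAT percentages).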
def get_score(expected_scores): #Get the score for a team based on expected scores
score = 0
if expected_scores['TD'] > 0:
tds = poisson(expected_scores['TD'])
else:
tds = poisson(0.01)
score = score + 6 * tds
if expected_scores['FG'] > 0:
fgs = poisson(expected_scores['FG'])
else:
fgs = poisson(0.01)
score = score + 3 * fgs
if expected_scores['S'] > 0:
sfs = poisson(expected_scores['S'])
else:
sfs = poisson(0.01)
score = score + 2 * sfs
for td in range(tds):
go_for_2_determinant = uniform(0, 1)
if go_for_2_determinant <= expected_scores['GOFOR2']: #Going for 2
successful_pat_determinant = uniform(0, 1)
if successful_pat_determinant <= expected_scores['PAT2PROB']:
score = score + 2
else:
continue
else: #Going for 1
#print(expected_scores['PAT1PROB'])
successful_pat_determinant = uniform(0, 1)
if successful_pat_determinant <= expected_scores['PAT1PROB']:
score = score + 1
else:
continue
return score
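#Worked example (sketch): with expected scores of TD=2.5, FG=1.5, S=0.1 and GOFOR2=0.1, one draw of
#get_score might give tds=3, fgs=1, sfs=0, i.e. 6*3 + 3*1 + 2*0 = 21 points before PATs, plus the
#points added by the three PAT attempts (usually one-point tries here, since GOFOR2 is only 0.1).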
def game(team_1, team_2,
expected_scores_1, expected_scores_2,
playoff): #Get two scores and determine a winner
score_1 = get_score(expected_scores_1)
score_2 = get_score(expected_scores_2)
if score_1 > score_2:
win_1 = 1
win_2 = 0
draw_1 = 0
draw_2 = 0
elif score_2 > score_1:
win_1 = 0
win_2 = 1
draw_1 = 0
draw_2 = 0
else:
if playoff:
win_1 = 0.5
win_2 = 0.5
draw_1 = 0
draw_2 = 0
else:
win_1 = 0
win_2 = 0
draw_1 = 1
draw_2 = 1
summary = {team_1: [win_1, draw_1, score_1]}
summary.update({team_2: [win_2, draw_2, score_2]})
return summary
def get_expected_scores(team_1_stats, team_2_stats, team_1_df, team_2_df): #Get the expected scores for a matchup based on the teams' previous performances
expected_scores = {}
for stat in team_1_stats:
expected_scores.update({'TD': mean([team_1_stats['TDF'] + team_2_df['TDA'].mean(),
team_2_stats['TDA'] + team_1_df['TDF'].mean()])})
expected_scores.update({'FG': mean([team_1_stats['FGF'] + team_2_df['FGA'].mean(),
team_2_stats['FGA'] + team_1_df['FGF'].mean()])})
expected_scores.update({'S': mean([team_1_stats['SFF'] + team_2_df['SFA'].mean(),
team_2_stats['SFA'] + team_1_df['SFF'].mean()])})
#print mean([team_1_stats['PAT1%F'] + team_2_df['PAT1AS'].astype('float').sum() / team_2_df['PAT1AA'].sum(),
# team_2_stats['PAT1%A'] + team_1_df['PAT1FS'].astype('float').sum() / team_1_df['PAT1FA'].sum()])
expected_scores.update({'GOFOR2': team_1_stats['GOFOR2']})
pat1prob = mean([team_1_stats['PAT1%F'] + team_2_df['PAT1AS'].astype('float').sum() / team_2_df['PAT1AA'].sum(),
team_2_stats['PAT1%A'] + team_1_df['PAT1FS'].astype('float').sum() / team_1_df['PAT1FA'].sum()])
if not math.isnan(pat1prob):
expected_scores.update({'PAT1PROB': pat1prob})<|fim▁hole|> pat2prob = mean([team_1_stats['PAT2%F'] + team_2_df['PAT2AS'].astype('float').sum() / team_2_df['PAT2AA'].sum(),
team_2_stats['PAT2%A'] + team_1_df['PAT2FS'].astype('float').sum() / team_1_df['PAT2FA'].sum()])
if not math.isnan(pat2prob):
expected_scores.update({'PAT2PROB': pat2prob})
else:
expected_scores.update({'PAT2PROB': 0.5})
#print(expected_scores)
return expected_scores
def matchup(team_1, team_2):
ts = time.time()
team_1_season = pd.DataFrame.from_csv(teamsheetpath + team_1 + '.csv')
team_2_season = pd.DataFrame.from_csv(teamsheetpath + team_2 + '.csv')
stats_1 = get_residual_performance(team_1)
stats_2 = get_residual_performance(team_2)
expected_scores_1 = get_expected_scores(stats_1, stats_2, team_1_season, team_2_season)
expected_scores_2 = get_expected_scores(stats_2, stats_1, team_2_season, team_1_season)
team_1_wins = 0
team_2_wins = 0
team_1_draws = 0
team_2_draws = 0
team_1_scores = []
team_2_scores = []
i = 0
error = 1
while error > 0.000001 or i < 5000000: #Run until convergence after 5 million iterations
summary = game(team_1, team_2,
expected_scores_1, expected_scores_2,
po)
team_1_prev_wins = team_1_wins
team_1_wins += summary[team_1][0]
team_2_wins += summary[team_2][0]
team_1_draws += summary[team_1][1]
team_2_draws += summary[team_2][1]
team_1_scores.append(summary[team_1][2])
team_2_scores.append(summary[team_2][2])
team_1_prob = float(team_1_wins) / len(team_1_scores)
team_2_prob = float(team_2_wins) / len(team_2_scores)
if i > 0:
team_1_prev_prob = float(team_1_prev_wins) / i
error = team_1_prob - team_1_prev_prob
i = i + 1
if i == 5000000:
print('Probability converged within 5 million iterations')
else:
print('Probability converged after ' + str(i) + ' iterations')
games = pd.DataFrame.from_items([(team_1, team_1_scores), (team_2, team_2_scores)])
summaries = games.describe(percentiles = [0.025, 0.1, 0.25, 0.5, 0.75, 0.9, 0.975])
output = {'ProbWin': {team_1: team_1_prob, team_2: team_2_prob}, 'Scores': summaries}
print(team_1 + '/' + team_2 + ' score distributions computed in ' + str(round(time.time() - ts, 1)) + ' seconds')
return output<|fim▁end|>
|
else:
expected_scores.update({'PAT1PROB': 0.99})
#print(expected_scores['PAT1PROB'])
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var express = require('express');
var request = require('request');
var rp = require('rp');
var config = require('../../config')
var router = express.Router();
var twilio = require('twilio');
var mysql = require('mysql');
var connection = mysql.createConnection(config.mysqlConfig);
/* GET home page. */
router.get('/', function(req, res, next) {
  res.render('index', { title: '네이버 채용 알리미' }); // title: "Naver job opening notifier"
});
router.post('/enlist', function(req, res, next) {
if(req.body['g-recaptcha-response'] === undefined || req.body['g-recaptcha-response'] === '' || req.body['g-recaptcha-response'] === null) {
return res.json({"response" : "Please complete recaptcha."});
}
var regex = /^\d{3}-\d{4}-\d{4}$/;
if(!req.body.phonenumber.match(regex)){
return res.json({"response" : "Please input a correct phone number. (000-0000-0000)"});
}
request.post({url:"https://www.google.com/recaptcha/api/siteverify", form:{"secret" : config.captchasecret, "response" : req.body['g-recaptcha-response']}}, function(error, response, body){
body = JSON.parse(body);
// Success will be true or false depending upon captcha validation.
if(body.success !== undefined && !body.success) {
return res.json({"response" : "Recaptcha validation failed, please try again."})
}
//everything OK, now we add the phone number to the DB.
		connection.query('INSERT INTO `NotifyList`(phonenumber) VALUES(?);', [req.body.phonenumber], function(error, cursor){
if(error==null){
var twclient = new twilio(config.twaccountSid, config.twaccountToken);
twclient.messages.create({
				body: "Welcome to Naver job opening notification service!"+" / 구독취소:gyuhyeonlee.com", // 구독취소 = unsubscribe
to: '+82'+req.body.phonenumber,
from: '+12568184331'
})
.then((message) => console.log(message.sid));
return res.json({"response" : "Success! Please wait for confirmation SMS."});
}
else{
return res.json({"response" : "We're sorry, but either our DB is not working, or you're already subscribed!"});
}
}); //end of insert connection.query
}); //end of request.post (sorry for callback hell!)<|fim▁hole|>
router.post('/unsubscribe', function(req, res, next) {
if(req.body['g-recaptcha-response'] === undefined || req.body['g-recaptcha-response'] === '' || req.body['g-recaptcha-response'] === null) {
return res.json({"response" : "Please complete recaptcha."});
}
var regex = /^\d{3}-\d{4}-\d{4}$/;
if(!req.body.phonenumber.match(regex)){
return res.json({"response" : "Please input a correct phone number. (000-0000-0000)"});
}
request.post({url:"https://www.google.com/recaptcha/api/siteverify", form:{"secret" : config.captchasecret, "response" : req.body['g-recaptcha-response']}}, function(error, response, body){
body = JSON.parse(body);
// Success will be true or false depending upon captcha validation.
if(body.success !== undefined && !body.success) {
return res.json({"response" : "Recaptcha validation failed, please try again."})
}
//everything OK, now we add the phone number to the DB.
connection.query('DELETE FROM `NaverJobs`.`NotifyList` WHERE `phonenumber`="'+req.body.phonenumber+'";', function(error, cursor){
if(error==null){
if(cursor.affectedRows>0){
return res.json({"response" : "Success! Your number has been deleted."});
}
else{
return res.json({"response" : "Your number is not in the database!"});
}
}
else{
return res.json({"response" : "We're sorry, our DB seems to be down right now..."});
}
}); //end of insert connection.query
}); //end of request.post (sorry for callback hell!)
});
// line webhook for receiving sub&unsub events.
router.post('/lineevents', function(req, res, next) {
let insertvalues = [];
let removevalues = [];
if(req.body.events!==null && req.body.events!==undefined){
for (let i = 0; i < req.body.events.length; ++i) {
if (req.body.events[i].type == 'follow') {
insertvalues.push(req.body.events[i].source.userId);
}
else if(req.body.events[i].type == 'unfollow') {
removevalues.push(req.body.events[i].source.userId);
}
}
if (insertvalues.length > 0) {
// don't really care about data consistency. All we need make sure is that removing takes priority over adding.
connection.query('INSERT INTO `NaverJobs`.`LineFriends`(id) VALUES (?);', insertvalues, function(error, cursor){
if(error == null){
let options = {
method: "POST",
uri: "https://api.line.me/v2/bot/message/multicast",
headers: {
'Content-Type':'application/json',
'Authorization':'Bearer {'+config.linetoken+'}'
},
body: {
to: insertvalues,
messages: [{"type":"text", "text": "구독 신청 감사합니다! 변경사항이 있을 경우 바로 알려드릴게요 :)"}]
},
json: true // this encodes our body as json when SENDING POST request.
// in GET requests, this means it will encode RESPONSE in json when we RECEIVE IT.
// pretty confusing...
};
rp(options).catch((e) => console.log(e)); // one way request, don't really need .then() promises. Send greetings to new users.
}
else{
console.log("DB error : "+error);
}
});
}
if (removevalues.length > 0) {
connection.query('DELETE FROM `NaverJobs`.`LineFriends` WHERE `id`=?;', removevalues, function(error){
if(error != null){
console.log("DB error : "+error);
}
});
}
}
res.set('Content-Type', 'text/plain');
res.send("Thanks LINE!");
});
module.exports = router;<|fim▁end|>
|
}) //end of router post handling
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>$(window).scroll(function (e) {
var $w = $(window).width();
var $h = $(window).height();
if ($w > 980 && $h > 400) {
if ($(this).scrollTop() >= 268) {
$("#index_nav.index_nav, #compatible").addClass("index_nav_fixed");
} else {
$("#index_nav.index_nav, #compatible").removeClass("index_nav_fixed");
}
$("#slides").css("height", 300 - ($(this).scrollTop()) + "px");<|fim▁hole|><|fim▁end|>
|
$("#slides img").css("opacity", (((250 - ($(this).scrollTop())) * 100) / 250) / 100);
$(".si-ctitle-c").css("opacity", (((250 - ($(this).scrollTop())) * 100) / 250) / 100);
}
});
|
<|file_name|>deploy_snapshots.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cloudferrylib.base.action import action
from cloudferrylib.os.actions import snap_transfer
from cloudferrylib.os.actions import task_transfer
from cloudferrylib.utils.drivers import ssh_ceph_to_ceph
from cloudferrylib.utils import rbd_util
from cloudferrylib.utils import utils as utl
import copy
OLD_ID = 'old_id'
class DeployVolSnapshots(action.Action):
def run(self, storage_info=None, identity_info=None, **kwargs):
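        """Deploy volumes and their snapshots on the destination cloud.

        Sketch of the flow as read from the code below: for a volume that has snapshots, each
        snapshot is transferred in creation order (SnapTransfer mode 1 for the first one, mode 2
        for the rest) and recreated through the storage resource, the volume head is transferred
        with mode 3, and the temporary rbd snapshots are then removed; volumes without snapshots
        are copied with a plain TaskTransfer.
        """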
storage_info = copy.deepcopy(storage_info)
deploy_info = copy.deepcopy(storage_info)
deploy_info.update(identity_info)
storage_info.update(identity_info)
volume_resource = self.cloud.resources[utl.STORAGE_RESOURCE]
for vol_id, vol in deploy_info[utl.VOLUMES_TYPE].iteritems():
if vol['snapshots']:
vol_info = vol[utl.VOLUME_BODY]
snapshots_list = \
[snap_info for snap_info in vol['snapshots'].values()]
snapshots_list.sort(key=lambda x: x['created_at'])
for snap in snapshots_list:
if snapshots_list.index(snap) == 0:
act_snap_transfer = \
snap_transfer.SnapTransfer(
self.init,
ssh_ceph_to_ceph.SSHCephToCeph,
1)
else:
snap_num = snapshots_list.index(snap)
snap['prev_snapname'] = \
snapshots_list[snap_num - 1]['name']
act_snap_transfer = \
snap_transfer.SnapTransfer(
self.init,
ssh_ceph_to_ceph.SSHCephToCeph,
2)
act_snap_transfer.run(volume=vol_info, snapshot_info=snap)
volume_resource.create_snapshot(
volume_id=vol_id,
display_name=snap['display_name'],
display_description=snap['display_description'])
act_snap_transfer = snap_transfer.SnapTransfer(
self.init,
ssh_ceph_to_ceph.SSHCephToCeph,
3)
act_snap_transfer.run(volume=vol_info,
snapshot_info=snapshots_list[-1])
for snap in snapshots_list:
if volume_resource.config.storage.host:
act_delete_redundant_snap = \
rbd_util.RbdUtil(cloud=self.cloud,
config_migrate=self.cfg.migrate,
host=vol_info[utl.HOST_DST])
act_delete_redundant_snap.snap_rm(
vol_info[utl.PATH_DST],
snap['name'])
else:
act_delete_redundant_snap = \
rbd_util.RbdUtil(cloud=self.cloud,
config_migrate=self.cfg.migrate)
act_delete_redundant_snap.snap_rm(
vol_info[utl.PATH_DST],
snap['name'], vol_info[utl.HOST_DST])
else:
one_volume_info = {
'one_volume_info': {
utl.VOLUMES_TYPE: {
vol_id: vol<|fim▁hole|> }
act_transport_vol_data = \
task_transfer.TaskTransfer(self.init,
'SSHCephToCeph',
input_info='one_volume_info')
act_transport_vol_data.run(**one_volume_info)
return {}<|fim▁end|>
|
}
}
|
<|file_name|>stateless_random_ops_test.py<|end_file_name|><|fim▁begin|># Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for stateless random ops."""
import functools
from absl.testing import parameterized
import numpy as np
from tensorflow.python.compat import compat
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.framework import config
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_stateless_random_ops_v2
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import stateless_random_ops as stateless
from tensorflow.python.platform import test
# Note that in theory each test will reset the eager context and may choose to
# hide some devices, so we shouldn't cache this transient info. Tests in this
# file don't make those config changes, so caching is fine. It provides a good
# speed-up.
_cached_device = None
def get_device():
global _cached_device
if _cached_device is not None:
return _cached_device
# Precedence from high to low
for device_type in ('XLA_GPU', 'GPU', 'XLA_CPU', 'CPU'):
devices = config.list_logical_devices(device_type)
if devices:
_cached_device = devices[0]
return _cached_device
raise ValueError('Cannot find any suitable device. Available devices: %s' %
config.list_logical_devices())
BEFORE_EXPIRE = (2020, 10, 24)
AFTER_EXPIRE = (2020, 10, 26)
def invert_philox(key, value):
"""Invert the Philox bijection."""
key = np.array(key, dtype=np.uint32)
value = np.array(value, dtype=np.uint32)
step = np.array([0x9E3779B9, 0xBB67AE85], dtype=np.uint32)
for n in range(10)[::-1]:
key0, key1 = key + n * step
v0 = value[3] * 0x991a7cdb & 0xffffffff
v2 = value[1] * 0x6d7cae67 & 0xffffffff
hi0 = v0 * 0xD2511F53 >> 32
hi1 = v2 * 0xCD9E8D57 >> 32
v1 = hi1 ^ value[0] ^ key0
v3 = hi0 ^ value[2] ^ key1
value = v0, v1, v2, v3
return np.array(value)
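# Sketch of how invert_philox is used below (see _test_match): given the fixed Philox key used for
# seed scrambling, it recovers a "preseed" whose forward scrambling reproduces the seed that the
# stateful ops see, so stateless and stateful results can be compared directly.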
SEEDS = ((7, 17), (11, 5), (2, 3))
SEED_TYPES = [dtypes.int32, dtypes.int64]
def float_cases(shape_dtypes=(None,)):
cases = (
# Uniform distribution, with and without range
('uniform', stateless.stateless_random_uniform, random_ops.random_uniform,
{}),
('uniform2', stateless.stateless_random_uniform,
random_ops.random_uniform, dict(minval=2.2, maxval=7.1)),
# Normal distribution, with and without mean+stddev
('normal', stateless.stateless_random_normal, random_ops.random_normal,
{}),
('normal2', stateless.stateless_random_normal, random_ops.random_normal,
dict(mean=2, stddev=3)),
# Truncated normal distribution, with and without mean+stddev
('trnorm', stateless.stateless_truncated_normal,
random_ops.truncated_normal, {}),
('trnorm2', stateless.stateless_truncated_normal,
random_ops.truncated_normal, dict(mean=3, stddev=4)),
)
# Explicitly passing in params because capturing cell variable from loop is
# problematic in Python
def wrap(op, dtype, shape, shape_dtype, seed, **kwargs):
device_type = get_device().device_type
# Some dtypes are not supported on some devices
if (dtype == dtypes.float16 and device_type in ('XLA_GPU', 'XLA_CPU') or
dtype == dtypes.bfloat16 and device_type == 'GPU'):
dtype = dtypes.float32
shape_ = (constant_op.constant(shape, dtype=shape_dtype)
if shape_dtype is not None else shape)
return op(seed=seed, shape=shape_, dtype=dtype, **kwargs)
def _name(a):
if hasattr(a, 'name'):
return a.name
else:
return a
for dtype in dtypes.float16, dtypes.bfloat16, dtypes.float32, dtypes.float64:
for shape_dtype in shape_dtypes:
for shape in (), (3,), (2, 5):
for name, stateless_op, stateful_op, kwargs in cases:
yield (('%s_%s_%s_%s' %
(name, _name(dtype), shape, _name(shape_dtype))).replace(
' ', ''),
functools.partial(wrap, stateless_op, dtype, shape,
shape_dtype, **kwargs),
functools.partial(wrap, stateful_op, dtype, shape, shape_dtype,
**kwargs))
def int_cases(shape_dtypes=(None,), minval_maxval=None):
def wrap(op, minval, maxval, shape, shape_dtype, dtype, seed, **kwargs):
shape_ = (constant_op.constant(shape, dtype=shape_dtype)
if shape_dtype is not None else shape)
return op(
seed=seed, shape=shape_, minval=minval, maxval=maxval, dtype=dtype,
**kwargs)
if minval_maxval is None:
minval_maxval = ((2, 11111),)
for minval, maxval in minval_maxval:
for shape_dtype in shape_dtypes:
for shape in (), (3,), (2, 5):
for dtype in dtypes.int32, dtypes.int64:
yield ('uniform_%s_%s' % (minval, maxval),
functools.partial(wrap, stateless.stateless_random_uniform,
minval, maxval, shape, shape_dtype, dtype),
functools.partial(wrap, random_ops.random_uniform, minval,
maxval, shape, shape_dtype, dtype))
def multinomial_cases():
num_samples = 10
def wrap(op, logits, logits_dtype, output_dtype, seed):
return op(seed=seed,
logits=constant_op.constant(logits, dtype=logits_dtype),
num_samples=num_samples, output_dtype=output_dtype)
for logits_dtype in np.float16, np.float32, np.float64:
for output_dtype in dtypes.int32, dtypes.int64:
for logits in ([[0.1, 0.25, 0.5, 0.15]], [[0.5, 0.5], [0.8, 0.2],
[0.25, 0.75]]):
yield ('multinomial',
functools.partial(wrap, stateless.stateless_multinomial, logits,
logits_dtype, output_dtype),
functools.partial(wrap, random_ops.multinomial, logits,
logits_dtype, output_dtype))
def gamma_cases():
def wrap(op, alpha, dtype, shape, seed):
return op(seed=seed, shape=shape,
alpha=constant_op.constant(alpha, dtype=dtype), dtype=dtype)
for dtype in np.float16, np.float32, np.float64:
for alpha in ([[.5, 1., 2.]], [[0.5, 0.5], [0.8, 0.2], [0.25, 0.75]]):
yield ('gamma',
functools.partial(wrap, stateless.stateless_random_gamma, alpha,
dtype, (10,) + tuple(np.shape(alpha))),
functools.partial(wrap, random_ops.random_gamma, alpha, dtype,
(10,)))
def poisson_cases():
def wrap(op, lam, lam_dtype, out_dtype, shape, seed):
return op(seed=seed, shape=shape,
lam=constant_op.constant(lam_dtype(lam), dtype=lam_dtype),
dtype=out_dtype)
for lam_dtype in np.float16, np.float32, np.float64, np.int32, np.int64:
for out_dtype in np.float16, np.float32, np.float64, np.int32, np.int64:
for lam in ([[5.5, 1., 2.]], [[7.5, 10.5], [3.8, 8.2], [1.25, 9.75]]):
yield ('poisson',
functools.partial(wrap, stateless.stateless_random_poisson, lam,
lam_dtype, out_dtype,
(10,) + tuple(np.shape(lam))),
functools.partial(wrap, random_ops.random_poisson, lam,
lam_dtype, out_dtype, (10,)))
@test_util.with_eager_op_as_function
class StatelessOpsTest(test.TestCase, parameterized.TestCase):
def _test_match(self, case, seed):
# Stateless ops should be the same as stateful ops on the first call
# after seed scrambling.<|fim▁hole|> _, stateless_op, stateful_op = case
random_seed.set_random_seed(seed[0])
stateful = stateful_op(seed=seed[1])
pure = stateless_op(seed=preseed)
self.assertAllEqual(stateful, pure)
def _test_match_stateless_cpu_gpu(self, case, seed):
# Stateless ops should produce the same result on CPUs and GPUs.
_, stateless_op, _ = case
with ops.device('CPU'):
result_cpu = stateless_op(seed=seed)
with ops.device(get_device().name):
result_gpu = stateless_op(seed=seed)
self.assertAllClose(result_cpu, result_gpu)
def _test_old_and_new_stateless_match(self, case, seed):
"""Tests that the new stateless ops match the old stateless ones."""
with ops.device(get_device().name):
_, stateless_op, _ = case
with compat.forward_compatibility_horizon(*BEFORE_EXPIRE):
old = stateless_op(seed=seed)
with compat.forward_compatibility_horizon(*AFTER_EXPIRE):
new = stateless_op(seed=seed)
self.assertAllClose(old, new)
def _test_explicit_alg(self, case, seed):
"""Tests that alg=philox and alg=None are the same (on CPU/GPU)."""
with ops.device(get_device().name):
_, stateless_op, _ = case
implicit_alg = stateless_op(seed=seed)
# All device types allowed in this test will result in Philox
explicit_alg = stateless_op(seed=seed, alg='philox')
self.assertAllClose(implicit_alg, explicit_alg)
def _test_determinism(self, case, seed_type):
# Stateless values should be equal iff the seeds are equal (roughly)
seeds = [(x, y) for x in range(5) for y in range(5)] * 3 # pylint: disable=g-complex-comprehension
with self.test_session(), ops.device(get_device().name):
_, stateless_op, _ = case
if context.executing_eagerly():
values = [
(seed, stateless_op(seed=constant_op.constant(seed, seed_type)))
for seed in seeds]
else:
# Have this branch because the above branch is too slow in graph
# mode
seed_t = array_ops.placeholder(seed_type, shape=[2])
pure = stateless_op(seed=seed_t)
values = [
(seed, pure.eval(feed_dict={seed_t: seed})) for seed in seeds
]
for s0, v0 in values:
for s1, v1 in values:
if dtypes.as_dtype(v0.dtype) != dtypes.bfloat16:
self.assertEqual(s0 == s1, np.all(v0 == v1))
elif s0 == s1:
# Skip the s0 != s1 case because v0 and v1 can be either equal or
# unequal in that case due to bfloat16's low precision
self.assertAllEqual(v0, v1)
@parameterized.named_parameters(
('_%s_%s_%s' % (case[0], case_id, seed_id), case, seed) # pylint: disable=g-complex-comprehension
for seed_id, seed in enumerate(SEEDS)
for case_id, case in enumerate(float_cases()))
@test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
def testMatchFloat(self, case, seed):
if get_device().device_type in ('XLA_GPU', 'XLA_CPU'):
# This test was passing before because soft placement silently picked the
# CPU kernels.
self.skipTest('Skip on XLA because XLA kernels do not support int64 '
'seeds needed by this test.')
self._test_match(case, seed)
@parameterized.named_parameters(
('_%s_%s_%s' % (case[0], case_id, seed_id), case, seed) # pylint: disable=g-complex-comprehension
for seed_id, seed in enumerate(SEEDS)
for case_id, case in enumerate(int_cases()))
@test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
def testMatchInt(self, case, seed):
if get_device().device_type in ('XLA_GPU', 'XLA_CPU'):
# This test was passing before because soft placement silently picked the
# CPU kernels.
self.skipTest('Skip on XLA because XLA kernels do not support int64 '
'seeds needed by this test.')
self._test_match(case, seed)
@parameterized.named_parameters(
('_%s_%s_%s' % (case[0], case_id, seed_id), case, seed) # pylint: disable=g-complex-comprehension
for seed_id, seed in enumerate(SEEDS)
for case_id, case in enumerate(multinomial_cases()))
@test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
def testMatchMultinomial(self, case, seed):
if get_device().device_type in ('XLA_GPU', 'XLA_CPU'):
# This test was passing before because soft placement silently picked the
# CPU kernels.
self.skipTest('Lacking XLA kernel')
self._test_match(case, seed)
@parameterized.named_parameters(
('_%s_%s_%s' % (case[0], case_id, seed_id), case, seed) # pylint: disable=g-complex-comprehension
for seed_id, seed in enumerate(SEEDS)
for case_id, case in enumerate(gamma_cases()))
@test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
def testMatchGamma(self, case, seed):
if get_device().device_type == 'GPU':
# This test was passing before because soft placement silently picked the
# CPU kernels.
self.skipTest('Lacking GPU kernel')
if get_device().device_type in ('XLA_GPU', 'XLA_CPU'):
# This test was passing before because soft placement silently picked the
# CPU kernels.
self.skipTest('Lacking XLA kernel')
self._test_match(case, seed)
@parameterized.named_parameters(
('_%s_%s_%s' % (case[0], case_id, seed_id), case, seed) # pylint: disable=g-complex-comprehension
for seed_id, seed in enumerate(SEEDS)
for case_id, case in enumerate(gamma_cases()))
@test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
def testStatelessGammaCpuGpuMatch(self, case, seed):
if get_device().device_type != 'GPU':
# This test compares the numbers produced by the CPU and GPU kernel for
# stateless_random_gamma.
self.skipTest('This test requires GPU')
self._test_match_stateless_cpu_gpu(case, seed)
@parameterized.named_parameters(
('_%s_%s_%s' % (case[0], case_id, seed_id), case, seed) # pylint: disable=g-complex-comprehension
for seed_id, seed in enumerate(SEEDS)
for case_id, case in enumerate(poisson_cases()))
@test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
def testMatchPoisson(self, case, seed):
if get_device().device_type == 'GPU':
# This test was passing before because soft placement silently picked the
# CPU kernels.
self.skipTest('Lacking GPU kernel')
if get_device().device_type in ('XLA_GPU', 'XLA_CPU'):
# This test was passing before because soft placement silently picked the
# CPU kernels.
self.skipTest('Lacking XLA kernel')
self._test_match(case, seed)
@parameterized.named_parameters(
('_%s_%s_%s' % (case[0], case_id, seed_id), case, seed) # pylint: disable=g-complex-comprehension
for seed_id, seed in enumerate(SEEDS)
for case_id, case in enumerate(float_cases()))
@test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
def testOldAndNewStatelessMatchFloat(self, case, seed):
self._test_old_and_new_stateless_match(case, seed)
@parameterized.named_parameters(
('_%s_%s_%s' % (case[0], case_id, seed_id), case, seed) # pylint: disable=g-complex-comprehension
for seed_id, seed in enumerate(SEEDS)
for case_id, case in enumerate(
int_cases(minval_maxval=((2, 11111), (None, None)))))
@test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
def testOldAndNewStatelessMatchInt(self, case, seed):
self._test_old_and_new_stateless_match(case, seed)
@parameterized.named_parameters(
('_%s_%s' % (case[0], case_id), case)
for case_id, case in enumerate(float_cases()))
@test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
def testExplicitAlgFloat(self, case):
seed = (7, 17)
self._test_explicit_alg(case, seed)
@parameterized.named_parameters(
('_%s_%s' % (case[0], case_id), case)
for case_id, case in enumerate(
int_cases(minval_maxval=((2, 11111), (None, None)))))
@test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
def testExplicitAlgInt(self, case):
seed = (7, 17)
self._test_explicit_alg(case, seed)
@parameterized.named_parameters(
('_%s_%s_%s' % (case[0], seed_type.name, case_id), case, seed_type) # pylint: disable=g-complex-comprehension
for seed_type in SEED_TYPES
for case_id, case in enumerate(
float_cases(shape_dtypes=(dtypes.int32, dtypes.int64))))
@test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
def testDeterminismFloat(self, case, seed_type):
if seed_type == dtypes.int64 and get_device().device_type in ('XLA_GPU',
'XLA_CPU'):
# This test was passing before because soft placement silently picked the
# CPU kernels.
self.skipTest(
'Skip on XLA because XLA kernels do not support int64 seeds.')
self._test_determinism(case, seed_type)
@parameterized.named_parameters(
('_%s_%s_%s' % (case[0], seed_type.name, case_id), case, seed_type) # pylint: disable=g-complex-comprehension
for seed_type in SEED_TYPES
for case_id, case in enumerate(
int_cases(shape_dtypes=(dtypes.int32, dtypes.int64))))
@test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
def testDeterminismInt(self, case, seed_type):
if seed_type == dtypes.int64 and get_device().device_type in ('XLA_GPU',
'XLA_CPU'):
# This test was passing before because soft placement silently picked the
# CPU kernels.
self.skipTest(
'Skip on XLA because XLA kernels do not support int64 seeds.')
self._test_determinism(case, seed_type)
@parameterized.named_parameters(
('_%s_%s_%s' % (case[0], seed_type.name, case_id), case, seed_type) # pylint: disable=g-complex-comprehension
for seed_type in SEED_TYPES
for case_id, case in enumerate(multinomial_cases()))
@test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
def testDeterminismMultinomial(self, case, seed_type):
if get_device().device_type in ('XLA_GPU', 'XLA_CPU'):
# This test was passing before because soft placement silently picked the
# CPU kernels.
self.skipTest('Lacking XLA kernel')
self._test_determinism(case, seed_type)
@parameterized.named_parameters(
('_%s_%s_%s' % (case[0], seed_type.name, case_id), case, seed_type) # pylint: disable=g-complex-comprehension
for seed_type in SEED_TYPES
for case_id, case in enumerate(gamma_cases()))
@test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
def testDeterminismGamma(self, case, seed_type):
if get_device().device_type in ('XLA_GPU', 'XLA_CPU'):
# This test was passing before because soft placement silently picked the
# CPU kernels.
self.skipTest('Lacking XLA kernel')
self._test_determinism(case, seed_type)
@parameterized.named_parameters(
('_%s_%s_%s' % (case[0], seed_type.name, case_id), case, seed_type) # pylint: disable=g-complex-comprehension
for seed_type in SEED_TYPES
for case_id, case in enumerate(poisson_cases()))
@test_util.disable_tfrt('tensorflow::DirectSession::Run crashes. b/156187396')
def testDeterminismPoisson(self, case, seed_type):
if get_device().device_type == 'GPU':
# This test was passing before because soft placement silently picked the
# CPU kernels.
self.skipTest('Lacking GPU kernel')
if get_device().device_type in ('XLA_GPU', 'XLA_CPU'):
# This test was passing before because soft placement silently picked the
# CPU kernels.
self.skipTest('Lacking XLA kernel')
self._test_determinism(case, seed_type)
@test_util.run_v2_only
def testGetKeyCounterAlg(self):
seed = [1, 2]
key, counter = gen_stateless_random_ops_v2.stateless_random_get_key_counter(
seed)
self.assertAllEqual(key.shape, [1])
self.assertAllEqual(counter.shape, [2])
alg = gen_stateless_random_ops_v2.stateless_random_get_alg()
self.assertAllEqual(alg.shape, [])
def assertDTypeEqual(self, a, b):
self.assertEqual(dtypes.as_dtype(a), dtypes.as_dtype(b))
def assertNoEqualPair(self, ls):
for i in range(len(ls)):
for j in range(i + 1, len(ls)):
self.assertFalse(math_ops.reduce_all(ls[i] == ls[j]))
@parameterized.parameters(['int32', 'int64'])
@test_util.run_v2_only
def testSplit(self, dtype):
"""Test for `split`."""
seed = constant_op.constant([1, 2], dtype=dtype)
new_seed = stateless.split(seed, 3)
self.assertEqual(new_seed.shape, [3, 2])
self.assertDTypeEqual(new_seed.dtype, dtype)
self.assertNoEqualPair([seed] + array_ops.unstack(new_seed))
@parameterized.parameters(['int32', 'int64'])
@test_util.run_v2_only
def testFoldIn(self, dtype):
"""Test for `fold_in`."""
orig_seed = constant_op.constant([1, 2], dtype='int32')
seed = stateless.fold_in(orig_seed, constant_op.constant(3, dtype=dtype))
new_seeds = []
new_seeds.append(seed)
seed = stateless.fold_in(seed, constant_op.constant(4, dtype=dtype))
new_seeds.append(seed)
for s in new_seeds:
self.assertEqual(s.shape, [2])
self.assertDTypeEqual(s.dtype, dtype)
self.assertNoEqualPair([math_ops.cast(orig_seed, dtype)] + new_seeds)
@test_util.run_v2_only
def testErrors(self):
"""Tests that proper errors are raised.
"""
shape = [2, 3]
with self.assertRaisesWithPredicateMatch(
ValueError,
'minval must be a scalar; got a tensor of shape '):
@def_function.function
def f():
stateless.stateless_random_uniform(
shape=shape, seed=[1, 2], minval=array_ops.zeros(shape, 'int32'),
maxval=100, dtype='int32')
f()
with self.assertRaisesWithPredicateMatch(
ValueError,
'maxval must be a scalar; got a tensor of shape '):
@def_function.function
def f2():
stateless.stateless_random_uniform(
shape=shape, seed=[1, 2], minval=0,
maxval=array_ops.ones(shape, 'int32') * 100,
dtype='int32')
f2()
if __name__ == '__main__':
config.set_soft_device_placement(False)
context.context().enable_xla_devices()
test.main()<|fim▁end|>
|
key = 0x3ec8f720, 0x02461e29
preseed = invert_philox(key, (seed[0], 0, seed[1], 0)).astype(np.uint64)
preseed = preseed[::2] | preseed[1::2] << 32
with ops.device(get_device().name):
|
<|file_name|>raw_mantissa_and_exponent.rs<|end_file_name|><|fim▁begin|>use malachite_base::num::basic::floats::PrimitiveFloat;
use malachite_base::num::float::NiceFloat;
use malachite_base_test_util::bench::bucketers::{pair_1_bit_bucketer, primitive_float_bucketer};
use malachite_base_test_util::bench::{run_benchmark, BenchmarkType};
use malachite_base_test_util::generators::common::{GenConfig, GenMode};
use malachite_base_test_util::generators::{primitive_float_gen, unsigned_pair_gen_var_26};
use malachite_base_test_util::runner::Runner;
pub(crate) fn register(runner: &mut Runner) {
register_primitive_float_demos!(runner, demo_raw_mantissa_and_exponent);
register_primitive_float_demos!(runner, demo_raw_mantissa);
register_primitive_float_demos!(runner, demo_raw_exponent);
register_primitive_float_demos!(runner, demo_from_raw_mantissa_and_exponent);
register_primitive_float_benches!(runner, benchmark_raw_mantissa_and_exponent_algorithms);
register_primitive_float_benches!(runner, benchmark_raw_mantissa_algorithms);
register_primitive_float_benches!(runner, benchmark_raw_exponent_algorithms);
register_primitive_float_benches!(runner, benchmark_from_raw_mantissa_and_exponent);
}
fn demo_raw_mantissa_and_exponent<T: PrimitiveFloat>(gm: GenMode, config: GenConfig, limit: usize) {
for x in primitive_float_gen::<T>().get(gm, &config).take(limit) {
println!(
"raw_mantissa_and_exponent({}) = {:?}",
NiceFloat(x),
x.raw_mantissa_and_exponent()
);
}
}
fn demo_raw_mantissa<T: PrimitiveFloat>(gm: GenMode, config: GenConfig, limit: usize) {
for x in primitive_float_gen::<T>().get(gm, &config).take(limit) {
println!("raw_mantissa({}) = {}", NiceFloat(x), x.raw_mantissa());
}
}
fn demo_raw_exponent<T: PrimitiveFloat>(gm: GenMode, config: GenConfig, limit: usize) {
for x in primitive_float_gen::<T>().get(gm, &config).take(limit) {
println!("raw_exponent({}) = {}", NiceFloat(x), x.raw_exponent());
}
}
fn demo_from_raw_mantissa_and_exponent<T: PrimitiveFloat>(
gm: GenMode,
config: GenConfig,
limit: usize,
) {
for (mantissa, exponent) in unsigned_pair_gen_var_26::<T>().get(gm, &config).take(limit) {
println!(
"{}::from_raw_mantissa_and_exponent({}, {}) = {}",
T::NAME,
mantissa,
exponent,
NiceFloat(T::from_raw_mantissa_and_exponent(mantissa, exponent))
);
}
}
#[allow(clippy::unnecessary_operation)]
fn benchmark_raw_mantissa_and_exponent_algorithms<T: PrimitiveFloat>(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
&format!("{}.raw_mantissa_and_exponent()", T::NAME),
BenchmarkType::Algorithms,
primitive_float_gen::<T>().get(gm, &config),
gm.name(),
limit,
file_name,
&primitive_float_bucketer("f"),
&mut [
("default", &mut |x| no_out!(x.raw_mantissa_and_exponent())),
("alt", &mut |x| {
no_out!((x.raw_mantissa(), x.raw_exponent()))
}),
],
);
}
#[allow(clippy::unnecessary_operation)]
fn benchmark_raw_mantissa_algorithms<T: PrimitiveFloat>(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
&format!("{}.raw_mantissa()", T::NAME),
BenchmarkType::Algorithms,
primitive_float_gen::<T>().get(gm, &config),
gm.name(),
limit,
file_name,
&primitive_float_bucketer("f"),
&mut [
("default", &mut |x| no_out!(x.raw_mantissa())),
("alt", &mut |x| no_out!(x.raw_mantissa_and_exponent().0)),
],
);
}
#[allow(clippy::unnecessary_operation)]
fn benchmark_raw_exponent_algorithms<T: PrimitiveFloat>(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
&format!("{}.raw_exponent()", T::NAME),
BenchmarkType::Algorithms,
primitive_float_gen::<T>().get(gm, &config),
gm.name(),
limit,
file_name,
&primitive_float_bucketer("f"),
&mut [<|fim▁hole|> ("alt", &mut |x| no_out!(x.raw_mantissa_and_exponent().1)),
],
);
}
fn benchmark_from_raw_mantissa_and_exponent<T: PrimitiveFloat>(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
&format!("{}::from_raw_mantissa_and_exponent(u64, u64)", T::NAME,),
BenchmarkType::Single,
unsigned_pair_gen_var_26::<T>().get(gm, &config),
gm.name(),
limit,
file_name,
&pair_1_bit_bucketer("mantissa"),
&mut [("Malachite", &mut |(mantissa, exponent)| {
no_out!(T::from_raw_mantissa_and_exponent(mantissa, exponent))
})],
);
}<|fim▁end|>
|
("default", &mut |x| no_out!(x.raw_exponent())),
|
<|file_name|>blkio_test.go<|end_file_name|><|fim▁begin|>package fs
import (
"testing"
"github.com/dotcloud/docker/pkg/libcontainer/cgroups"
)
const (
sectorsRecursiveContents = `8:0 1024`
serviceBytesRecursiveContents = `8:0 Read 100
8:0 Write 200
8:0 Sync 300
8:0 Async 500
8:0 Total 500
Total 500`
servicedRecursiveContents = `8:0 Read 10
8:0 Write 40
8:0 Sync 20
8:0 Async 30
8:0 Total 50
Total 50`
queuedRecursiveContents = `8:0 Read 1
8:0 Write 4
8:0 Sync 2
8:0 Async 3
8:0 Total 5
Total 5`
)
var actualStats = *cgroups.NewStats()
func appendBlkioStatEntry(blkioStatEntries *[]cgroups.BlkioStatEntry, major, minor, value uint64, op string) {
*blkioStatEntries = append(*blkioStatEntries, cgroups.BlkioStatEntry{Major: major, Minor: minor, Value: value, Op: op})
}
func TestBlkioStats(t *testing.T) {
helper := NewCgroupTestUtil("blkio", t)
defer helper.cleanup()
helper.writeFileContents(map[string]string{
"blkio.io_service_bytes_recursive": serviceBytesRecursiveContents,
"blkio.io_serviced_recursive": servicedRecursiveContents,
"blkio.io_queued_recursive": queuedRecursiveContents,
"blkio.sectors_recursive": sectorsRecursiveContents,
})
blkio := &blkioGroup{}
err := blkio.GetStats(helper.CgroupData, &actualStats)
if err != nil {
t.Fatal(err)
}
// Verify expected stats.
expectedStats := cgroups.BlkioStats{}
appendBlkioStatEntry(&expectedStats.SectorsRecursive, 8, 0, 1024, "")
appendBlkioStatEntry(&expectedStats.IoServiceBytesRecursive, 8, 0, 100, "Read")
appendBlkioStatEntry(&expectedStats.IoServiceBytesRecursive, 8, 0, 200, "Write")
appendBlkioStatEntry(&expectedStats.IoServiceBytesRecursive, 8, 0, 300, "Sync")
appendBlkioStatEntry(&expectedStats.IoServiceBytesRecursive, 8, 0, 500, "Async")
appendBlkioStatEntry(&expectedStats.IoServiceBytesRecursive, 8, 0, 500, "Total")
appendBlkioStatEntry(&expectedStats.IoServicedRecursive, 8, 0, 10, "Read")
appendBlkioStatEntry(&expectedStats.IoServicedRecursive, 8, 0, 40, "Write")
appendBlkioStatEntry(&expectedStats.IoServicedRecursive, 8, 0, 20, "Sync")
appendBlkioStatEntry(&expectedStats.IoServicedRecursive, 8, 0, 30, "Async")
appendBlkioStatEntry(&expectedStats.IoServicedRecursive, 8, 0, 50, "Total")
appendBlkioStatEntry(&expectedStats.IoQueuedRecursive, 8, 0, 1, "Read")
appendBlkioStatEntry(&expectedStats.IoQueuedRecursive, 8, 0, 4, "Write")
appendBlkioStatEntry(&expectedStats.IoQueuedRecursive, 8, 0, 2, "Sync")
appendBlkioStatEntry(&expectedStats.IoQueuedRecursive, 8, 0, 3, "Async")
appendBlkioStatEntry(&expectedStats.IoQueuedRecursive, 8, 0, 5, "Total")
expectBlkioStatsEquals(t, expectedStats, actualStats.BlkioStats)
}
func TestBlkioStatsNoSectorsFile(t *testing.T) {
helper := NewCgroupTestUtil("blkio", t)
defer helper.cleanup()
helper.writeFileContents(map[string]string{
"blkio.io_service_bytes_recursive": serviceBytesRecursiveContents,
"blkio.io_serviced_recursive": servicedRecursiveContents,
"blkio.io_queued_recursive": queuedRecursiveContents,
})
blkio := &blkioGroup{}
err := blkio.GetStats(helper.CgroupData, &actualStats)
if err == nil {
t.Fatal("Expected to fail, but did not")
}
}
func TestBlkioStatsNoServiceBytesFile(t *testing.T) {
helper := NewCgroupTestUtil("blkio", t)
defer helper.cleanup()
helper.writeFileContents(map[string]string{
"blkio.io_serviced_recursive": servicedRecursiveContents,
"blkio.io_queued_recursive": queuedRecursiveContents,
"blkio.sectors_recursive": sectorsRecursiveContents,
})
blkio := &blkioGroup{}
err := blkio.GetStats(helper.CgroupData, &actualStats)
if err == nil {
t.Fatal("Expected to fail, but did not")
}
}
func TestBlkioStatsNoServicedFile(t *testing.T) {
helper := NewCgroupTestUtil("blkio", t)
defer helper.cleanup()
helper.writeFileContents(map[string]string{
"blkio.io_service_bytes_recursive": serviceBytesRecursiveContents,
"blkio.io_queued_recursive": queuedRecursiveContents,
"blkio.sectors_recursive": sectorsRecursiveContents,
})
blkio := &blkioGroup{}
err := blkio.GetStats(helper.CgroupData, &actualStats)
if err == nil {
t.Fatal("Expected to fail, but did not")
}
}
func TestBlkioStatsNoQueuedFile(t *testing.T) {
helper := NewCgroupTestUtil("blkio", t)
defer helper.cleanup()
helper.writeFileContents(map[string]string{
"blkio.io_service_bytes_recursive": serviceBytesRecursiveContents,
"blkio.io_serviced_recursive": servicedRecursiveContents,
"blkio.sectors_recursive": sectorsRecursiveContents,
})
blkio := &blkioGroup{}
err := blkio.GetStats(helper.CgroupData, &actualStats)
if err == nil {
t.Fatal("Expected to fail, but did not")
}
}
func TestBlkioStatsUnexpectedNumberOfFields(t *testing.T) {
helper := NewCgroupTestUtil("blkio", t)
defer helper.cleanup()
helper.writeFileContents(map[string]string{
"blkio.io_service_bytes_recursive": "8:0 Read 100 100",
"blkio.io_serviced_recursive": servicedRecursiveContents,
"blkio.io_queued_recursive": queuedRecursiveContents,
"blkio.sectors_recursive": sectorsRecursiveContents,
})
blkio := &blkioGroup{}
err := blkio.GetStats(helper.CgroupData, &actualStats)
if err == nil {
t.Fatal("Expected to fail, but did not")
}
}
func TestBlkioStatsUnexpectedFieldType(t *testing.T) {
helper := NewCgroupTestUtil("blkio", t)
defer helper.cleanup()
helper.writeFileContents(map[string]string{
"blkio.io_service_bytes_recursive": "8:0 Read Write",
"blkio.io_serviced_recursive": servicedRecursiveContents,
"blkio.io_queued_recursive": queuedRecursiveContents,<|fim▁hole|> blkio := &blkioGroup{}
err := blkio.GetStats(helper.CgroupData, &actualStats)
if err == nil {
t.Fatal("Expected to fail, but did not")
}
}<|fim▁end|>
|
"blkio.sectors_recursive": sectorsRecursiveContents,
})
|
<|file_name|>zenjmx.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
##############################################################################
#
# Copyright (C) Zenoss, Inc. 2008, 2009, all rights reserved.
#
# This content is made available according to terms specified in
# License.zenoss under the directory where your Zenoss product is installed.
#
##############################################################################
__doc__ = """Monitor Java Management eXtension (JMX) mbeans
Dispatches calls to a java server process to collect JMX values for a device.
"""
import logging
import sys
import os
import socket
import Globals
import zope
from twisted.internet.defer import Deferred
from twisted.web import xmlrpc
from twisted.internet.protocol import ProcessProtocol
from twisted.internet import defer, reactor, error
from Products.ZenCollector.daemon import CollectorDaemon
from Products.ZenCollector.interfaces import ICollectorPreferences,\
IDataService,\
IEventService,\
IScheduledTask
from Products.ZenCollector.tasks import SimpleTaskFactory,\
SimpleTaskSplitter,\
TaskStates
from Products.ZenEvents import Event
from Products.ZenHub.XmlRpcService import XmlRpcService
from Products.ZenUtils.NJobs import NJobs
from Products.ZenUtils.Utils import unused
from Products.ZenUtils.observable import ObservableMixin
import ZenPacks.zenoss.ZenJMX
from ZenPacks.zenoss.ZenJMX.services.ZenJMXConfigService import JMXDataSourceConfig
unused(JMXDataSourceConfig)
log = logging.getLogger( "zen.zenjmx" )
DEFAULT_HEARTBEAT_TIME = 5 * 60
WARNING_EVENT = dict(eventClass='/Status/JMX', component='JMX',
device=socket.getfqdn(), severity=Event.Warning)
class ZenJMXPreferences(object):
"""
Configuration values for the zenjmx daemon.
"""
zope.interface.implements(ICollectorPreferences)
def __init__(self):
"""
Construct a new ZenJMXPreferences instance and provide default
values for needed attributes.
"""
self.collectorName = "zenjmx"
self.defaultRRDCreateCommand = None
self.cycleInterval = 5 * 60 # seconds
self.configCycleInterval = 20 # minutes
self.options = None
# the configurationService attribute is the fully qualified class-name
# of our configuration service that runs within ZenHub
self.configurationService = 'ZenPacks.zenoss.ZenJMX.services.ZenJMXConfigService'
def buildOptions(self, parser):
parser.add_option('-j','--zenjmxjavaport',
dest='zenjmxjavaport',
default=9988,
type='int',
help='Port for zenjmxjava process; default 9988. '+\
'Tries 5 consecutive ports if there is a conflict',
)
parser.add_option('--concurrentJMXCalls',
dest='concurrentJMXCalls',
action='store_true', default=False,
help='Enable concurrent calls to a JMX server'
)
parser.add_option('--parallel', dest='parallel',
default=200, type='int',
help='Number of devices to collect from at one time'
)
parser.add_option('--cycleInterval', dest='cycleInterval',
default=300, type='int',
help='Cycle time, in seconds, to run collection'
)
parser.add_option('--portRange', dest='portRange',
default=5, type='int',
                          help='Number of ports to attempt when starting ' +
                          'Java jmx client')
parser.add_option('--javaheap',
dest="maxHeap",type="int", default=512,
help="Max heap, in MB, to use for java process")
def postStartup(self):
pass
def getJavaClientArgs(self):
args = None
if self.options.configfile:
args = ('--configfile', self.options.configfile)
if self.options.logseverity:
args = args + ('-v', str(self.options.logseverity))
if self.options.concurrentJMXCalls:
args = args + ('-concurrentJMXCalls', )
return args
def getStartingPort(self):
return self.options.zenjmxjavaport
def getAttemptedPortRange(self):
return self.options.portRange
class IZenJMXJavaClient(zope.interface.Interface):
listenPort = zope.interface.Attribute("listenPort")
class ZenJMXJavaClientImpl(ProcessProtocol):
"""
Protocol to control the zenjmxjava process
"""
zope.interface.implements(IZenJMXJavaClient)
def __init__(
self,
args,
cycle=True,
zenjmxjavaport=9988,
maxHeap=512
):
"""
Initializer
@param args: argument list for zenjmx
@type args: list of strings
@param cycle: whether to run once or repeat
@type cycle: boolean
@param zenjmxjavaport: port on which java process
will listen for queries
@type zenjmxjavaport: int
"""
self.deferred = Deferred()
self.stopCalled = False
self.process = None
self.outReceived = sys.stdout.write
self.errReceived = sys.stderr.write
self.log = logging.getLogger('zen.ZenJMXJavaClient')
self.args = args
self.cycle = cycle
self.listenPort = zenjmxjavaport
self._maxHeap = maxHeap
self.restartEnabled = False
self._eventService = zope.component.queryUtility(IEventService)
self._preferences = zope.component.queryUtility(ICollectorPreferences,
'zenjmx')
def processEnded(self, reason):
"""
Twisted reactor function called when the process ends.
        @param reason: reason the process ended
        @type reason: twisted.python.failure.Failure
"""
self.process = None
if not self.stopCalled:
procEndEvent = {
'eventClass': '/Status/JMX',
'summary': 'zenjmxjava ended unexpectedly: %s'\
% reason.getErrorMessage(),
'severity': Event.Warning,
'component': 'zenjmx',
'device': self._preferences.options.monitor,
}
self._eventService.sendEvent(procEndEvent)
self.log.warn('processEnded():zenjmxjava process ended %s'
% reason)
if self.deferred:
msg = reason.getErrorMessage()
exitCode = reason.value.exitCode
if exitCode == 10:
msg = 'Could not start up Java web server, '+\
'possible port conflict'
self.deferred.callback((exitCode,msg))
self.deferred = None
elif self.restartEnabled:
self.log.info('processEnded():restarting zenjmxjava')
reactor.callLater(1, self.run)
def stop(self):
"""
Twisted reactor function called when we are shutting down.
"""
import signal
self.log.info('stop():stopping zenjmxjava')
self.stopCalled = True
if not self.process:
self.log.debug('stop():no zenjmxjava process to stop')
return
try:
self.process.signalProcess(signal.SIGKILL)
except error.ProcessExitedAlready:
self.log.info('stop():zenjmxjava process already exited')
pass
try:
self.process.loseConnection()
except Exception:
pass
self.process = None
def connectionMade(self):
"""
Called when the Twisted reactor starts up
"""
self.log.debug('connectionMade():zenjmxjava started')
def doCallback():
"""
doCallback
"""
msg = \
'doCallback(): callback on deferred zenjmxjava proc is up'
self.log.debug(msg)
if self.deferred:
self.deferred.callback((True,'zenjmx java started'))
if self.process:
procStartEvent = {
'eventClass': '/Status/JMX',
'summary': 'zenjmxjava started',
'severity': Event.Clear,
'component': 'zenjmx',
'device': self._preferences.options.monitor,
}
self._eventService.sendEvent(procStartEvent)
self.deferred = None
if self.deferred:
self.log.debug('connectionMade():scheduling callback')
# give the java service a chance to startup
reactor.callLater(3, doCallback)
self.log.debug('connectionMade(): done')
def run(self):
"""
Twisted function called when started
"""
if self.stopCalled:
return
self.log.info('run():starting zenjmxjava')
zenjmxjavacmd = os.path.join(ZenPacks.zenoss.ZenJMX.binDir,
'zenjmxjava')
if self.cycle:
args = ('runjmxenabled', )
else:
# don't want to start up with jmx server to avoid port conflicts
args = ('run', )
args = args + ('-zenjmxjavaport',
str(self.listenPort))
if self.args:
args = args + self.args
cmd = (zenjmxjavacmd, ) + args
self.log.debug('run():spawn process %s' % (cmd, ))
self.deferred = Deferred()
env = dict(os.environ)
env['JVM_MAX_HEAP'] = '-Xmx%sm'%self._maxHeap
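        # JVM_MAX_HEAP is presumably read by the zenjmxjava launcher script to size the JVM heap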
self.process = reactor.spawnProcess(self, zenjmxjavacmd, cmd,
env=env)
return self.deferred
DEFAULT_JMX_JAVA_CLIENT_NAME = 'zenjmxjavaclient'
class ZenJMXJavaClientInitialization(object):
"""
Wrapper that continues to start the Java jmx client until
successful.
"""
def __init__(self,
registeredName=DEFAULT_JMX_JAVA_CLIENT_NAME):
"""
@param registeredName: the name with which this client
will be registered as a utility
"""
self._jmxClient = None
self._clientName = registeredName
def initialize(self):
"""
Begin the first attempt to start the Java jmx client. Note that
this method returns a Deferred that relies on the ZenJMXPreferences
being present when it is finally executed. This is meant to be
the Deferred that is given to the CollectorDaemon for
initialization before the first JMX task is scheduled.
@return the deferred that represents the loading of preferences
and the initial attempt to start the Java jmx client
@rtype defer.Deferred
"""
def loadPrefs():
log.debug( "Retrieving java client startup args")
preferences = zope.component.queryUtility(ICollectorPreferences,
'zenjmx')
self._args = preferences.getJavaClientArgs()
self._cycle = preferences.options.cycle
self._maxHeap = preferences.options.maxHeap
self._startingPort = preferences.getStartingPort()
self._rpcPort = self._startingPort
self._attemptedPortRange = preferences.getAttemptedPortRange()
def printProblem(result):
log.error( str(result) )
sys.exit(1)
d = defer.maybeDeferred( loadPrefs )
d.addCallback( self._startJavaProc )
d.addErrback( printProblem )
return d
def _tryClientOnCurrentPort( self ):
"""
Returns the Deferred for executing an attempt
to start the java jmx client on the current port.
"""
log.debug( 'Attempting java client startup on port %s',
self._rpcPort )
self._jmxClient = ZenJMXJavaClientImpl( self._args, self._cycle, self._rpcPort, self._maxHeap )
zope.component.provideUtility(
self._jmxClient,
IZenJMXJavaClient,
self._clientName
)
return self._jmxClient.run()
def _startJavaProc( self, result=None ):
"""
Checks whether startup of the java jmx client was successful. If
it was unsuccessful due to port conflict, increments the port and
tries to start the client again.
"""
# If the result is not None, that means this was called as a callback
# after an attempt to start the client
if result is not None:
# If result[0] is True, the client process started
if result[0] is True:
log.debug( 'Java jmx client started' )
self._jmxClient.restartEnabled = True
deferred = defer.succeed( True )
# If the result[0] is 10, there was a port conflict
elif result[0] == 10:
log.debug( 'Java client didn\'t start; port %s occupied',
self._rpcPort )
if self._rpcPort < ( self._startingPort +
self._attemptedPortRange ):
self._rpcPort += 1
deferred = self._tryClientOnCurrentPort()
deferred.addCallback( self._startJavaProc )
else:
raise RuntimeError(
"ZenJMXJavaClient could not be started, check ports")
else:
#unknown error
raise RuntimeError('ZenJMXJavaClient could not be started, '+\
'check JVM type and version: %s' % result[1])
# If there was no result passed in, then this is the first attempt
# to start the client
else:
deferred = self._tryClientOnCurrentPort()
deferred.addCallback( self._startJavaProc )
return deferred
class ZenJMXTask(ObservableMixin):
"""
The scheduled task for all the jmx datasources on an individual device.
"""
zope.interface.implements(IScheduledTask)
def __init__(self,
deviceId,
taskName,
scheduleIntervalSeconds,
taskConfig,
clientName=DEFAULT_JMX_JAVA_CLIENT_NAME ):
super( ZenJMXTask, self ).__init__()
self.name = taskName
self.configId = deviceId
self.state = TaskStates.STATE_IDLE
self._taskConfig = taskConfig
self._manageIp = self._taskConfig.manageIp
self._dataService = zope.component.queryUtility( IDataService )
self._eventService = zope.component.queryUtility( IEventService )
self._preferences = zope.component.queryUtility( ICollectorPreferences,
'zenjmx' )
self._client = zope.component.queryUtility( IZenJMXJavaClient,
clientName )
# At this time, do not use the interval passed from the device
# configuration. Use the value pulled from the daemon
# configuration.
unused( scheduleIntervalSeconds )
self.interval = self._preferences.options.cycleInterval
def createEvent(self, errorMap, component=None):
"""
Given an event dictionary, copy it and return the event
        @param errorMap: event fields to copy into the new event
        @type errorMap: dictionary
@param component: component name
@type component: string
@return: updated event
@rtype: dictionary
"""
event = errorMap.copy()
if component:
event['component'] = component
if event.get('datasourceId') and not event.get('eventKey'):
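            # default the eventKey to the datasource id so later clear events can match this event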
event['eventKey'] = event.get('datasourceId')
return event
def sendEvent(self, event, **kw):
self._eventService.sendEvent(event, **kw)
def _collectJMX(self, dsConfigList):
"""
Call Java JMX process to collect JMX values
@param dsConfigList: DataSource configuration
@type dsConfigList: list of JMXDataSourceConfig
@return: Twisted deferred object
@rtype: Twisted deferred object
"""
def toDict(config):
"""
Marshall the fields from the datasource into a dictionary and
ignore everything that is not a primitive
            @param config: JMX datasource configuration to marshall
@type config: string<|fim▁hole|> @rtype: dictionary
"""
vals = {}
for (key, val) in config.__dict__.items():
if key != 'rrdConfig' and type(val)\
in XmlRpcService.PRIMITIVES:
vals[key] = val
rrdConfigs = config.rrdConfig.values()
rrdConfigs.sort(lambda x, y: cmp(x.dataPointId,
y.dataPointId))
vals['dps'] = []
vals['dptypes'] = []
for rrdConfig in rrdConfigs:
vals['dps'].append(rrdConfig.dataPointId)
vals['dptypes'].append(rrdConfig.rrdType)
vals['connectionKey'] = config.getConnectionPropsKey()
return vals
def rpcCall():
"""
Communicate with our local JMX process to collect results.
            Returns a Deferred that fires with the collected results.
"""
port = self._client.listenPort
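            # the companion zenjmxjava process listens on this local port and serves XML-RPC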
xmlRpcProxy = xmlrpc.Proxy('http://localhost:%s/' % port)
d = xmlRpcProxy.callRemote('zenjmx.collect', configMaps)
d.addCallbacks( processResults , processRpcError)
return d
def processRpcError(error):
log.debug("Could not make XML RPC call for device %s; content of call: %s", self._taskConfig, configMaps)
self.sendEvent({}, severity=Event.Error,
eventClass='/Status/JMX',
summary='unexpected error: %s' % error.getErrorMessage(),
eventKey='unexpected_xmlrpc_error',
device=self.configId)
return error
def processResults(jmxResults):
"""
Given the results from JMX, store them or send events.
            @param jmxResults: list of result dictionaries returned by the Java JMX process
            @type jmxResults: list
"""
#Send clear for RPC error
self.sendEvent({}, severity=Event.Clear,
eventClass='/Status/JMX',
summary='unexpected error cleared',
eventKey='unexpected_xmlrpc_error',
device=self.configId)
result = {}
hasConnectionError = False
hasUnexpectedError = False
for result in jmxResults:
log.debug("JMX result -> %s", result)
evtSummary = result.get('summary')
deviceId = result.get('device')
evt = self.createEvent(result)
if not evtSummary:
rrdPath = result.get('rrdPath')
dsId = result.get('datasourceId')
dpId = result.get('dpId')
value = result.get('value')
try:
self.storeRRD(deviceId, rrdPath, dsId, dpId, value)
except ValueError:
pass
self.sendEvent(evt,summary="Clear",severity=Event.Clear)
else:
# send event
log.debug('processResults(): '
+ 'jmx error, sending event for %s'
% result)
if evt.get("eventClass", "") == '/Status/JMX/Connection':
hasConnectionError = True
if evt.get("eventKey", "") == 'unexpected_error':
hasUnexpectedError = True
self.sendEvent(evt, severity=Event.Error)
if not hasConnectionError:
self.sendEvent({}, severity=Event.Clear,
eventClass='/Status/JMX/Connection',
summary='Connection is up',
eventKey=connectionComponentKey,
device=self.configId)
if not hasUnexpectedError:
self.sendEvent({}, severity=Event.Clear,
eventClass='/Status/JMX',
summary='Unexpected error cleared',
eventKey='unexpected_error',
device=self.configId)
return jmxResults
connectionComponentKey = ''
configMaps = []
for config in dsConfigList:
connectionComponentKey = config.getConnectionPropsKey()
configMaps.append(toDict(config))
log.info('collectJMX(): for %s %s' % (config.device,
connectionComponentKey))
return rpcCall()
def storeRRD(
self,
deviceId,
rrdPath,
dataSourceId,
dataPointId,
dpValue,
):
"""
Store a value into an RRD file
        @param deviceId: name of the remote device
        @type deviceId: string
        @param rrdPath: base path of the device's RRD files
        @type rrdPath: string
        @param dataSourceId: name of the data source
        @type dataSourceId: string
        @param dataPointId: name of the data point
        @type dataPointId: string
        @param dpValue: value to write to the data point
        @type dpValue: number
"""
deviceConfig = self._taskConfig
dsConfig = deviceConfig.findDataSource(dataSourceId)
if not dsConfig:
log.info(
'No data source config found for device %s datasource %s' \
% (deviceId, dataSourceId))
return
        rrdConf = dsConfig.rrdConfig.get(dataPointId)
        if not rrdConf:
            log.info(
                'No RRD config found for device %s datasource %s datapoint %s'
                % (deviceId, dataSourceId, dataPointId))
            return
        rrdType = rrdConf.rrdType
        if rrdType in ('COUNTER', 'DERIVE'):
            try:
                # cast to float first because long('100.0') will fail with a
                # ValueError
                dpValue = long(float(dpValue))
            except (TypeError, ValueError):
                log.warning("value %s not valid for derive or counter data points", dpValue)
        else:
            try:
                dpValue = float(dpValue)
            except (TypeError, ValueError):
                log.warning("value %s not valid for data point", dpValue)
dpPath = '/'.join((rrdPath, rrdConf.dpName))
min = rrdConf.min
max = rrdConf.max
self._dataService.writeRRD(dpPath, dpValue, rrdConf.rrdType,
rrdConf.command, min=min, max=max)
def _finished(self, results):
for result in results:
log.debug("Finished with result %s" % str( result ) )
return results
def doTask(self):
log.debug("Scanning device %s [%s]", self.configId, self._manageIp)
d = self._collectCallback()
d.addBoth(self._finished)
# returning a Deferred will keep the framework from assuming the task
# is done until the Deferred actually completes
return d
def _collectCallback(self):
jobs = NJobs(self._preferences.options.parallel,
self._collectJMX,
self._taskConfig.jmxDataSourceConfigs.values())
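        # NJobs caps how many datasource configs are collected at once (the --parallel option)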
deferred = jobs.start()
return deferred
def cleanup(self):
pass
def stopJavaJmxClients():
# Currently only starting/stopping one.
clientName = DEFAULT_JMX_JAVA_CLIENT_NAME
client = zope.component.queryUtility( IZenJMXJavaClient,
clientName )
if client is not None:
log.debug( 'Shutting down JMX Java client %s' % clientName )
client.stop()
if __name__ == '__main__':
myPreferences = ZenJMXPreferences()
initialization = ZenJMXJavaClientInitialization()
myTaskFactory = SimpleTaskFactory(ZenJMXTask)
myTaskSplitter = SimpleTaskSplitter(myTaskFactory)
daemon = CollectorDaemon(myPreferences, myTaskSplitter,
initializationCallback=initialization.initialize,
stoppingCallback=stopJavaJmxClients)
daemon.run()<|fim▁end|>
|
@return: results from remote device
|
<|file_name|>6.py<|end_file_name|><|fim▁begin|># Created by PyCharm Pro Edition
# User: Kaushik Talukdar
# Date: 27-03-2017
# Time: 05:25 PM
fastfood = ["momo", "roll", "chow", "pizza"]
print(fastfood)
print("\n")
#print one element using pop()
#output the popped element
print(fastfood.pop() + "\n")
<|fim▁hole|><|fim▁end|>
|
#print the new list with less elements
print(fastfood)
|
<|file_name|>timer.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The sdl2-rs Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use ffi::stdinc::{SDL_bool, Uint32, Uint64};
use libc::{c_int, c_void};<|fim▁hole|>// SDL_timer.h
pub type SDL_TimerCallback = extern "C" fn(interval: Uint32, param: *c_void) -> Uint32;
pub type SDL_TimerID = c_int;
extern "C" {
pub fn SDL_GetTicks() -> Uint32;
pub fn SDL_GetPerformanceCounter() -> Uint64;
pub fn SDL_GetPerformanceFrequency() -> Uint64;
pub fn SDL_Delay(ms: Uint32);
pub fn SDL_AddTimer(interval: Uint32, callback: SDL_TimerCallback, param: *c_void) -> SDL_TimerID;
pub fn SDL_RemoveTimer(id: SDL_TimerID) -> SDL_bool;
}<|fim▁end|>
| |
<|file_name|>base.py<|end_file_name|><|fim▁begin|># Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Profiler to check if there are any bottlenecks in your code."""
import logging
import os
from abc import ABC, abstractmethod
from contextlib import contextmanager
from pathlib import Path
from typing import Any, Callable, Dict, Generator, Iterable, Optional, TextIO, Union
from pytorch_lightning.utilities.cloud_io import get_filesystem
log = logging.getLogger(__name__)
class AbstractProfiler(ABC):
"""Specification of a profiler."""
@abstractmethod
def start(self, action_name: str) -> None:
"""Defines how to start recording an action."""
@abstractmethod
def stop(self, action_name: str) -> None:
"""Defines how to record the duration once an action is complete."""
@abstractmethod
def summary(self) -> str:
"""Create profiler summary in text format."""
@abstractmethod
def setup(self, **kwargs: Any) -> None:
"""Execute arbitrary pre-profiling set-up steps as defined by subclass."""
@abstractmethod
def teardown(self, **kwargs: Any) -> None:
"""Execute arbitrary post-profiling tear-down steps as defined by subclass."""
class BaseProfiler(AbstractProfiler):
"""
If you wish to write a custom profiler, you should inherit from this class.
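    A minimal subclass sketch (illustrative only)::
        class SimpleProfiler(BaseProfiler):
            def start(self, action_name: str) -> None:
                ...  # record a start time for ``action_name``
            def stop(self, action_name: str) -> None:
                ...  # record the elapsed time for ``action_name``
            def summary(self) -> str:
                return "report"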
"""
def __init__(
self,
dirpath: Optional[Union[str, Path]] = None,
filename: Optional[str] = None,
) -> None:
self.dirpath = dirpath
self.filename = filename
self._output_file: Optional[TextIO] = None
self._write_stream: Optional[Callable] = None
self._local_rank: Optional[int] = None
self._log_dir: Optional[str] = None
self._stage: Optional[str] = None
@contextmanager
def profile(self, action_name: str) -> Generator:
"""
Yields a context manager to encapsulate the scope of a profiled action.
Example::
with self.profile('load training data'):
# load training data code
The profiler will start once you've entered the context and will automatically
stop once you exit the code block.
"""
try:
self.start(action_name)
yield action_name
finally:
self.stop(action_name)
def profile_iterable(self, iterable: Iterable, action_name: str) -> Generator:
iterator = iter(iterable)
while True:
try:
self.start(action_name)
value = next(iterator)
self.stop(action_name)
yield value
except StopIteration:
self.stop(action_name)
break
def _rank_zero_info(self, *args, **kwargs) -> None:
if self._local_rank in (None, 0):
log.info(*args, **kwargs)
def _prepare_filename(
self, action_name: Optional[str] = None, extension: str = ".txt", split_token: str = "-"
) -> str:
args = []
if self._stage is not None:
args.append(self._stage)
if self.filename:
args.append(self.filename)
if self._local_rank is not None:
args.append(str(self._local_rank))
if action_name is not None:
args.append(action_name)
filename = split_token.join(args) + extension
return filename
def _prepare_streams(self) -> None:
if self._write_stream is not None:
return
if self.filename:
filepath = os.path.join(self.dirpath, self._prepare_filename())
fs = get_filesystem(filepath)
file = fs.open(filepath, "a")
self._output_file = file
self._write_stream = file.write
else:
self._write_stream = self._rank_zero_info
def describe(self) -> None:
"""Logs a profile report after the conclusion of run."""
# there are pickling issues with open file handles in Python 3.6
# so to avoid them, we open and close the files within this function
# by calling `_prepare_streams` and `teardown`
self._prepare_streams()
summary = self.summary()
if summary:
self._write_stream(summary)
if self._output_file is not None:
self._output_file.flush()
self.teardown(stage=self._stage)
def _stats_to_str(self, stats: Dict[str, str]) -> str:
stage = f"{self._stage.upper()} " if self._stage is not None else ""
output = [stage + "Profiler Report"]
for action, value in stats.items():
header = f"Profile stats for: {action}"
if self._local_rank is not None:
header += f" rank: {self._local_rank}"
output.append(header)
output.append(value)
return os.linesep.join(output)
def setup(
self, stage: Optional[str] = None, local_rank: Optional[int] = None, log_dir: Optional[str] = None
) -> None:
"""Execute arbitrary pre-profiling set-up steps."""
self._stage = stage
self._local_rank = local_rank
self._log_dir = log_dir
self.dirpath = self.dirpath or log_dir
def teardown(self, stage: Optional[str] = None) -> None:
"""
Execute arbitrary post-profiling tear-down steps.
Closes the currently open file and stream.
"""
self._write_stream = None
if self._output_file is not None:
self._output_file.close()
self._output_file = None # can't pickle TextIOWrapper
def __del__(self) -> None:
self.teardown(stage=self._stage)
def start(self, action_name: str) -> None:
raise NotImplementedError
<|fim▁hole|>
def summary(self) -> str:
raise NotImplementedError
@property
def local_rank(self) -> int:
return 0 if self._local_rank is None else self._local_rank
class PassThroughProfiler(BaseProfiler):
"""
This class should be used when you don't want the (small) overhead of profiling.
The Trainer uses this class by default.
"""
def start(self, action_name: str) -> None:
pass
def stop(self, action_name: str) -> None:
pass
def summary(self) -> str:
return ""<|fim▁end|>
|
def stop(self, action_name: str) -> None:
raise NotImplementedError
|
<|file_name|>CIN.GETLINE-leitura-delimitada.cpp<|end_file_name|><|fim▁begin|>#include <iostream>
using namespace std;
int main(){
char palavra[] = ".";<|fim▁hole|>
    cin.getline(palavra, 10, '\n'); // reads at most 9 characters (10 including the terminator), stopping at the newline delimiter
cout << endl << palavra << endl;
}<|fim▁end|>
| |
<|file_name|>scielo_wos_scieloci_update.py<|end_file_name|><|fim▁begin|># coding: utf-8
'''<|fim▁hole|>import logging
import models
from transform_date import *
from accent_remover import *
logging.basicConfig(filename='logs/scieloci.info.txt', level=logging.INFO)
logger = logging.getLogger(__name__)
# Add SciELO CI indicators for journals
def scieloci(filename):
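    """Read the 'import' sheet of the given XLSX file and merge each row into the
    matching journal document's 'scieloci' indicators in MongoDB."""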
sheet = pyexcel.get_sheet(
file_name=filename,
sheet_name='import',
name_columns_by_row=0)
sheet_json = sheet.to_records()
for rec in sheet_json:
# # remove empty keys
# rec = {k: v for k, v in rec.items() if v or v == 0}
query = models.Scielo.objects.filter(issn_list=rec['issn_scielo'])
if len(query) == 1:
print(query[0]['issn_scielo'])
doc = query[0]
data = {'scieloci': {}}
if 'scieloci' in doc:
data['scieloci'] = doc['scieloci']
data['scieloci'].update(dict(rec))
else:
            print('not found: ' + str(rec['issn_scielo']))
if data:
doc.modify(**data)
def main():
# SciELO docs counts Network xlsx
# scieloci('data/scielo/td_wos_all_downloads.xlsx')
# scieloci('data/wos/td_wos_all.xlsx')
scieloci('data/wos/td_wos_scieloci_2017_2018.xlsx')
if __name__ == "__main__":
main()<|fim▁end|>
|
This script reads data from various sources to process and store in MongoDB.
'''
import pyexcel
|
<|file_name|>dnd_grab.rs<|end_file_name|><|fim▁begin|>use std::{cell::RefCell, ops::Deref as _, rc::Rc};
use wayland_server::{
protocol::{wl_data_device_manager::DndAction, wl_data_offer, wl_data_source, wl_pointer, wl_surface},
Main,
};
use crate::{
utils::{Logical, Point},
wayland::{
seat::{AxisFrame, PointerGrab, PointerGrabStartData, PointerInnerHandle, Seat},
Serial,
},
};
use super::{with_source_metadata, DataDeviceData, SeatData};
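/// Pointer grab driving a drag-and-drop session: it tracks the surface under the
/// pointer, sends wl_data_device enter/leave/motion events and data offers, and
/// performs the drop (or cancels the source) when the last button is released.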
pub(crate) struct DnDGrab {
start_data: PointerGrabStartData,
data_source: Option<wl_data_source::WlDataSource>,
current_focus: Option<wl_surface::WlSurface>,
pending_offers: Vec<wl_data_offer::WlDataOffer>,
offer_data: Option<Rc<RefCell<OfferData>>>,
icon: Option<wl_surface::WlSurface>,
origin: wl_surface::WlSurface,
callback: Rc<RefCell<dyn FnMut(super::DataDeviceEvent)>>,
seat: Seat,
}
impl DnDGrab {
pub(crate) fn new(
start_data: PointerGrabStartData,
source: Option<wl_data_source::WlDataSource>,
origin: wl_surface::WlSurface,
seat: Seat,
icon: Option<wl_surface::WlSurface>,
callback: Rc<RefCell<dyn FnMut(super::DataDeviceEvent)>>,
) -> DnDGrab {
DnDGrab {
start_data,
data_source: source,
current_focus: None,
pending_offers: Vec::with_capacity(1),
offer_data: None,
origin,
icon,
callback,
seat,
}
}
}
impl PointerGrab for DnDGrab {
fn motion(
&mut self,
handle: &mut PointerInnerHandle<'_>,
location: Point<f64, Logical>,
focus: Option<(wl_surface::WlSurface, Point<i32, Logical>)>,
serial: Serial,
time: u32,
) {
// While the grab is active, no client has pointer focus
handle.motion(location, None, serial, time);
let seat_data = self
.seat
.user_data()
.get::<RefCell<SeatData>>()
.unwrap()
.borrow_mut();
if focus.as_ref().map(|&(ref s, _)| s) != self.current_focus.as_ref() {
// focus changed, we need to make a leave if appropriate
if let Some(surface) = self.current_focus.take() {
// only leave if there is a data source or we are on the original client
if self.data_source.is_some() || self.origin.as_ref().same_client_as(surface.as_ref()) {
for device in &seat_data.known_devices {
if device.as_ref().same_client_as(surface.as_ref()) {
device.leave();
}
}
// disable the offers
self.pending_offers.clear();
if let Some(offer_data) = self.offer_data.take() {
offer_data.borrow_mut().active = false;
}
}
}
}
if let Some((surface, surface_location)) = focus {
// early return if the surface is no longer valid
let client = match surface.as_ref().client() {
Some(c) => c,
None => return,
};
let (x, y) = (location - surface_location.to_f64()).into();
if self.current_focus.is_none() {
// We entered a new surface, send the data offer if appropriate
if let Some(ref source) = self.data_source {
let offer_data = Rc::new(RefCell::new(OfferData {
active: true,
dropped: false,
accepted: true,
chosen_action: DndAction::empty(),
}));
for device in seat_data
.known_devices
.iter()
.filter(|d| d.as_ref().same_client_as(surface.as_ref()))
{
let action_choice = device
.as_ref()
.user_data()
.get::<DataDeviceData>()
.unwrap()
.action_choice
.clone();
// create a data offer
let offer = client
.create_resource::<wl_data_offer::WlDataOffer>(device.as_ref().version())
.map(|offer| {
implement_dnd_data_offer(
offer,
source.clone(),
offer_data.clone(),
action_choice,
)
})
.unwrap();
                        // advertise the offer to the client
device.data_offer(&offer);
with_source_metadata(source, |meta| {
for mime_type in meta.mime_types.iter().cloned() {
offer.offer(mime_type);
}
offer.source_actions(meta.dnd_action);
})
.unwrap();
device.enter(serial.into(), &surface, x, y, Some(&offer));
self.pending_offers.push(offer);
}
self.offer_data = Some(offer_data);
} else {
// only send if we are on a surface of the same client
if self.origin.as_ref().same_client_as(surface.as_ref()) {<|fim▁hole|> device.enter(serial.into(), &surface, x, y, None);
}
}
}
}
self.current_focus = Some(surface);
} else {
// make a move
if self.data_source.is_some() || self.origin.as_ref().same_client_as(surface.as_ref()) {
for device in &seat_data.known_devices {
if device.as_ref().same_client_as(surface.as_ref()) {
device.motion(time, x, y);
}
}
}
}
}
}
fn button(
&mut self,
handle: &mut PointerInnerHandle<'_>,
_button: u32,
_state: wl_pointer::ButtonState,
serial: Serial,
time: u32,
) {
if handle.current_pressed().is_empty() {
// the user dropped, proceed to the drop
let seat_data = self
.seat
.user_data()
.get::<RefCell<SeatData>>()
.unwrap()
.borrow_mut();
let validated = if let Some(ref data) = self.offer_data {
let data = data.borrow();
data.accepted && (!data.chosen_action.is_empty())
} else {
false
};
if let Some(ref surface) = self.current_focus {
if self.data_source.is_some() || self.origin.as_ref().same_client_as(surface.as_ref()) {
for device in &seat_data.known_devices {
if device.as_ref().same_client_as(surface.as_ref()) {
if validated {
device.drop();
} else {
device.leave();
}
}
}
}
}
if let Some(ref offer_data) = self.offer_data {
let mut data = offer_data.borrow_mut();
if validated {
data.dropped = true;
} else {
data.active = false;
}
}
if let Some(ref source) = self.data_source {
source.dnd_drop_performed();
if !validated {
source.cancelled();
}
}
(&mut *self.callback.borrow_mut())(super::DataDeviceEvent::DnDDropped {
seat: self.seat.clone(),
});
self.icon = None;
// in all cases abandon the drop
// no more buttons are pressed, release the grab
handle.unset_grab(serial, time);
}
}
fn axis(&mut self, handle: &mut PointerInnerHandle<'_>, details: AxisFrame) {
// we just forward the axis events as is
handle.axis(details);
}
fn start_data(&self) -> &PointerGrabStartData {
&self.start_data
}
}
struct OfferData {
active: bool,
dropped: bool,
accepted: bool,
chosen_action: DndAction,
}
fn implement_dnd_data_offer(
offer: Main<wl_data_offer::WlDataOffer>,
source: wl_data_source::WlDataSource,
offer_data: Rc<RefCell<OfferData>>,
action_choice: Rc<RefCell<dyn FnMut(DndAction, DndAction) -> DndAction + 'static>>,
) -> wl_data_offer::WlDataOffer {
use self::wl_data_offer::Request;
offer.quick_assign(move |offer, req, _| {
let mut data = offer_data.borrow_mut();
match req {
Request::Accept { mime_type, .. } => {
if let Some(mtype) = mime_type {
if let Err(crate::utils::UnmanagedResource) = with_source_metadata(&source, |meta| {
data.accepted = meta.mime_types.contains(&mtype);
}) {
data.accepted = false;
}
} else {
data.accepted = false;
}
}
Request::Receive { mime_type, fd } => {
// check if the source and associated mime type is still valid
let valid = with_source_metadata(&source, |meta| meta.mime_types.contains(&mime_type))
.unwrap_or(false)
&& source.as_ref().is_alive()
&& data.active;
if valid {
source.send(mime_type, fd);
}
let _ = ::nix::unistd::close(fd);
}
Request::Destroy => {}
Request::Finish => {
if !data.active {
offer.as_ref().post_error(
wl_data_offer::Error::InvalidFinish as u32,
"Cannot finish a data offer that is no longer active.".into(),
);
return;
}
if !data.accepted {
offer.as_ref().post_error(
wl_data_offer::Error::InvalidFinish as u32,
"Cannot finish a data offer that has not been accepted.".into(),
);
return;
}
if !data.dropped {
offer.as_ref().post_error(
wl_data_offer::Error::InvalidFinish as u32,
"Cannot finish a data offer that has not been dropped.".into(),
);
return;
}
if data.chosen_action.is_empty() {
offer.as_ref().post_error(
wl_data_offer::Error::InvalidFinish as u32,
"Cannot finish a data offer with no valid action.".into(),
);
return;
}
source.dnd_finished();
data.active = false;
}
Request::SetActions {
dnd_actions,
preferred_action,
} => {
let preferred_action = preferred_action;
// preferred_action must only contain one bitflag at the same time
if ![DndAction::None, DndAction::Move, DndAction::Copy, DndAction::Ask]
.contains(&preferred_action)
{
offer.as_ref().post_error(
wl_data_offer::Error::InvalidAction as u32,
"Invalid preferred action.".into(),
);
return;
}
let source_actions = with_source_metadata(&source, |meta| meta.dnd_action)
.unwrap_or_else(|_| DndAction::empty());
let possible_actions = source_actions & dnd_actions;
data.chosen_action = (&mut *action_choice.borrow_mut())(possible_actions, preferred_action);
// check that the user provided callback respects that one precise action should be chosen
debug_assert!(
[DndAction::None, DndAction::Move, DndAction::Copy, DndAction::Ask]
.contains(&data.chosen_action)
);
offer.action(data.chosen_action);
source.action(data.chosen_action);
}
_ => unreachable!(),
}
});
offer.deref().clone()
}<|fim▁end|>
|
for device in &seat_data.known_devices {
if device.as_ref().same_client_as(surface.as_ref()) {
|
<|file_name|>newmember.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'newmember.ui'
#
# Created: Sat Mar 29 19:36:48 2014
# by: PyQt5 UI code generator 5.2.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_NewMember(object):
def setupUi(self, NewMember):
NewMember.setObjectName("NewMember")
NewMember.resize(370, 540)
self.verticalLayout_2 = QtWidgets.QVBoxLayout(NewMember)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.verticalLayout = QtWidgets.QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
self.formLayout = QtWidgets.QFormLayout()
self.formLayout.setFieldGrowthPolicy(QtWidgets.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout.setObjectName("formLayout")
self.efternamnLabel = QtWidgets.QLabel(NewMember)
self.efternamnLabel.setObjectName("efternamnLabel")
self.formLayout.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.efternamnLabel)
self.surName_fld = QtWidgets.QLineEdit(NewMember)
self.surName_fld.setObjectName("surName_fld")
self.formLayout.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.surName_fld)
self.allaFRnamnLabel = QtWidgets.QLabel(NewMember)
self.allaFRnamnLabel.setObjectName("allaFRnamnLabel")
self.formLayout.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.allaFRnamnLabel)
self.givenNames_fld = QtWidgets.QLineEdit(NewMember)
self.givenNames_fld.setObjectName("givenNames_fld")
self.formLayout.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.givenNames_fld)
self.tilltalsnamnLabel = QtWidgets.QLabel(NewMember)
self.tilltalsnamnLabel.setObjectName("tilltalsnamnLabel")
self.formLayout.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.tilltalsnamnLabel)
self.preferredName_fld = QtWidgets.QLineEdit(NewMember)
self.preferredName_fld.setObjectName("preferredName_fld")
self.formLayout.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.preferredName_fld)
self.kNLabel = QtWidgets.QLabel(NewMember)
self.kNLabel.setObjectName("kNLabel")
self.formLayout.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.kNLabel)
self.gender_fld = QtWidgets.QComboBox(NewMember)
self.gender_fld.setObjectName("gender_fld")
self.formLayout.setWidget(3, QtWidgets.QFormLayout.FieldRole, self.gender_fld)
self.birthDateLabel = QtWidgets.QLabel(NewMember)
self.birthDateLabel.setObjectName("birthDateLabel")
self.formLayout.setWidget(4, QtWidgets.QFormLayout.LabelRole, self.birthDateLabel)
self.birthDate_fld = QtWidgets.QDateEdit(NewMember)
self.birthDate_fld.setObjectName("birthDate_fld")
self.formLayout.setWidget(4, QtWidgets.QFormLayout.FieldRole, self.birthDate_fld)
self.adressLabel = QtWidgets.QLabel(NewMember)
self.adressLabel.setObjectName("adressLabel")
self.formLayout.setWidget(5, QtWidgets.QFormLayout.LabelRole, self.adressLabel)
self.streetAddress_fld = QtWidgets.QLineEdit(NewMember)
self.streetAddress_fld.setObjectName("streetAddress_fld")
self.formLayout.setWidget(5, QtWidgets.QFormLayout.FieldRole, self.streetAddress_fld)
self.postnummerLabel = QtWidgets.QLabel(NewMember)
self.postnummerLabel.setObjectName("postnummerLabel")
self.formLayout.setWidget(6, QtWidgets.QFormLayout.LabelRole, self.postnummerLabel)
self.postalCode_fld = QtWidgets.QLineEdit(NewMember)
self.postalCode_fld.setObjectName("postalCode_fld")
self.formLayout.setWidget(6, QtWidgets.QFormLayout.FieldRole, self.postalCode_fld)
self.postanstaltLabel = QtWidgets.QLabel(NewMember)
self.postanstaltLabel.setObjectName("postanstaltLabel")
self.formLayout.setWidget(7, QtWidgets.QFormLayout.LabelRole, self.postanstaltLabel)
self.city_fld = QtWidgets.QLineEdit(NewMember)
self.city_fld.setObjectName("city_fld")
self.formLayout.setWidget(7, QtWidgets.QFormLayout.FieldRole, self.city_fld)
self.telefonLabel = QtWidgets.QLabel(NewMember)
self.telefonLabel.setObjectName("telefonLabel")
self.formLayout.setWidget(8, QtWidgets.QFormLayout.LabelRole, self.telefonLabel)
self.phone_fld = QtWidgets.QLineEdit(NewMember)
self.phone_fld.setObjectName("phone_fld")
self.formLayout.setWidget(8, QtWidgets.QFormLayout.FieldRole, self.phone_fld)
self.emailLabel = QtWidgets.QLabel(NewMember)
self.emailLabel.setObjectName("emailLabel")
self.formLayout.setWidget(9, QtWidgets.QFormLayout.LabelRole, self.emailLabel)
self.email_fld = QtWidgets.QLineEdit(NewMember)<|fim▁hole|> self.avdelningLabel = QtWidgets.QLabel(NewMember)
self.avdelningLabel.setObjectName("avdelningLabel")
self.formLayout.setWidget(10, QtWidgets.QFormLayout.LabelRole, self.avdelningLabel)
self.department_comboBox = QtWidgets.QComboBox(NewMember)
self.department_comboBox.setEditable(True)
self.department_comboBox.setObjectName("department_comboBox")
self.formLayout.setWidget(10, QtWidgets.QFormLayout.FieldRole, self.department_comboBox)
self.anvNdarnamnLabel = QtWidgets.QLabel(NewMember)
self.anvNdarnamnLabel.setObjectName("anvNdarnamnLabel")
self.formLayout.setWidget(12, QtWidgets.QFormLayout.LabelRole, self.anvNdarnamnLabel)
self.username_fld = QtWidgets.QLineEdit(NewMember)
self.username_fld.setObjectName("username_fld")
self.formLayout.setWidget(12, QtWidgets.QFormLayout.FieldRole, self.username_fld)
self.gRTillPhuxLabel = QtWidgets.QLabel(NewMember)
self.gRTillPhuxLabel.setObjectName("gRTillPhuxLabel")
self.formLayout.setWidget(13, QtWidgets.QFormLayout.LabelRole, self.gRTillPhuxLabel)
self.makePhux_CheckBox = QtWidgets.QCheckBox(NewMember)
self.makePhux_CheckBox.setObjectName("makePhux_CheckBox")
self.formLayout.setWidget(13, QtWidgets.QFormLayout.FieldRole, self.makePhux_CheckBox)
self.studentId_label = QtWidgets.QLabel(NewMember)
self.studentId_label.setObjectName("studentId_label")
self.formLayout.setWidget(11, QtWidgets.QFormLayout.LabelRole, self.studentId_label)
self.studentId_fld = QtWidgets.QLineEdit(NewMember)
self.studentId_fld.setObjectName("studentId_fld")
self.formLayout.setWidget(11, QtWidgets.QFormLayout.FieldRole, self.studentId_fld)
self.verticalLayout.addLayout(self.formLayout)
self.buttonBox = QtWidgets.QDialogButtonBox(NewMember)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.verticalLayout.addWidget(self.buttonBox)
self.verticalLayout_2.addLayout(self.verticalLayout)
self.retranslateUi(NewMember)
self.buttonBox.accepted.connect(NewMember.accept)
self.buttonBox.rejected.connect(NewMember.reject)
QtCore.QMetaObject.connectSlotsByName(NewMember)
NewMember.setTabOrder(self.surName_fld, self.givenNames_fld)
NewMember.setTabOrder(self.givenNames_fld, self.preferredName_fld)
NewMember.setTabOrder(self.preferredName_fld, self.gender_fld)
NewMember.setTabOrder(self.gender_fld, self.birthDate_fld)
NewMember.setTabOrder(self.birthDate_fld, self.streetAddress_fld)
NewMember.setTabOrder(self.streetAddress_fld, self.postalCode_fld)
NewMember.setTabOrder(self.postalCode_fld, self.city_fld)
NewMember.setTabOrder(self.city_fld, self.phone_fld)
NewMember.setTabOrder(self.phone_fld, self.email_fld)
NewMember.setTabOrder(self.email_fld, self.department_comboBox)
NewMember.setTabOrder(self.department_comboBox, self.studentId_fld)
NewMember.setTabOrder(self.studentId_fld, self.username_fld)
NewMember.setTabOrder(self.username_fld, self.makePhux_CheckBox)
NewMember.setTabOrder(self.makePhux_CheckBox, self.buttonBox)
def retranslateUi(self, NewMember):
_translate = QtCore.QCoreApplication.translate
NewMember.setWindowTitle(_translate("NewMember", "Dialog"))
self.efternamnLabel.setText(_translate("NewMember", "Efternamn"))
self.allaFRnamnLabel.setText(_translate("NewMember", "Alla förnamn"))
self.tilltalsnamnLabel.setText(_translate("NewMember", "Tilltalsnamn"))
self.kNLabel.setText(_translate("NewMember", "Kön"))
self.birthDateLabel.setText(_translate("NewMember", "Födelsedatum"))
self.adressLabel.setText(_translate("NewMember", "Gatuadress"))
self.postnummerLabel.setText(_translate("NewMember", "Postnummer"))
self.postanstaltLabel.setText(_translate("NewMember", "Postanstalt"))
self.telefonLabel.setText(_translate("NewMember", "Telefon"))
self.emailLabel.setText(_translate("NewMember", "Email"))
self.avdelningLabel.setText(_translate("NewMember", "Avdelning"))
self.anvNdarnamnLabel.setText(_translate("NewMember", "Användarnamn"))
self.gRTillPhuxLabel.setText(_translate("NewMember", "Gör till Phux"))
self.studentId_label.setText(_translate("NewMember", "Studienummer"))<|fim▁end|>
|
self.email_fld.setObjectName("email_fld")
self.formLayout.setWidget(9, QtWidgets.QFormLayout.FieldRole, self.email_fld)
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Logger for parity executables
extern crate ethcore_util as util;
#[macro_use]
extern crate log as rlog;
extern crate isatty;
extern crate regex;
extern crate env_logger;
extern crate time;
#[macro_use]
extern crate lazy_static;
use std::{env, thread};
use std::sync::Arc;
use std::fs::File;
use std::io::Write;
use isatty::{stderr_isatty, stdout_isatty};<|fim▁hole|>
#[derive(Debug, PartialEq)]
pub struct Config {
pub mode: Option<String>,
pub color: bool,
pub file: Option<String>,
}
impl Default for Config {
fn default() -> Self {
Config {
mode: None,
color: !cfg!(windows),
file: None,
}
}
}
/// Sets up the logger
pub fn setup_log(config: &Config) -> Result<Arc<RotatingLogger>, String> {
use rlog::*;
let mut levels = String::new();
let mut builder = LogBuilder::new();
// Disable ws info logging by default.
builder.filter(Some("ws"), LogLevelFilter::Warn);
// Disable rustls info logging by default.
builder.filter(Some("rustls"), LogLevelFilter::Warn);
builder.filter(None, LogLevelFilter::Info);
if let Ok(lvl) = env::var("RUST_LOG") {
levels.push_str(&lvl);
levels.push_str(",");
builder.parse(&lvl);
}
if let Some(ref s) = config.mode {
levels.push_str(s);
builder.parse(s);
}
let isatty = stderr_isatty();
let enable_color = config.color && isatty;
let logs = Arc::new(RotatingLogger::new(levels));
let logger = logs.clone();
let maybe_file = match config.file.as_ref() {
Some(f) => Some(try!(File::create(f).map_err(|_| format!("Cannot write to log file given: {}", f)))),
None => None,
};
let format = move |record: &LogRecord| {
let timestamp = time::strftime("%Y-%m-%d %H:%M:%S %Z", &time::now()).unwrap();
let with_color = if max_log_level() <= LogLevelFilter::Info {
format!("{} {}", Colour::Black.bold().paint(timestamp), record.args())
} else {
let name = thread::current().name().map_or_else(Default::default, |x| format!("{}", Colour::Blue.bold().paint(x)));
format!("{} {} {} {} {}", Colour::Black.bold().paint(timestamp), name, record.level(), record.target(), record.args())
};
let removed_color = kill_color(with_color.as_ref());
let ret = match enable_color {
true => with_color,
false => removed_color.clone(),
};
if let Some(mut file) = maybe_file.as_ref() {
// ignore errors - there's nothing we can do
let _ = file.write_all(removed_color.as_bytes());
let _ = file.write_all(b"\n");
}
logger.append(removed_color);
if !isatty && record.level() <= LogLevel::Info && stdout_isatty() {
// duplicate INFO/WARN output to console
println!("{}", ret);
}
ret
};
builder.format(format);
builder.init().expect("Logger initialized only once.");
Ok(logs)
}
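/// Strip ANSI colour escape sequences (e.g. "\x1b[31m") from a log line.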
fn kill_color(s: &str) -> String {
lazy_static! {
static ref RE: Regex = Regex::new("\x1b\\[[^m]+m").unwrap();
}
RE.replace_all(s, "")
}
#[test]
fn should_remove_colour() {
let before = "test";
let after = kill_color(&Colour::Red.bold().paint(before));
assert_eq!(after, "test");
}
#[test]
fn should_remove_multiple_colour() {
let t = format!("{} {}", Colour::Red.bold().paint("test"), Colour::White.normal().paint("again"));
let after = kill_color(&t);
assert_eq!(after, "test again");
}<|fim▁end|>
|
use env_logger::LogBuilder;
use regex::Regex;
use util::RotatingLogger;
use util::log::Colour;
|
<|file_name|>fixture.py<|end_file_name|><|fim▁begin|>from __future__ import print_function, division, absolute_import
import difflib
import locale
import os
import pprint
import six
import sys
import tempfile
try:
import unittest2 as unittest
except ImportError:
import unittest
# just log py.warnings (and pygtk warnings in particular)
import logging
try:
# 2.7+
logging.captureWarnings(True)
except AttributeError:
pass
from mock import Mock, MagicMock, NonCallableMock, patch, mock_open
from contextlib import contextmanager
from . import stubs
import subscription_manager.injection as inj
import subscription_manager.managercli
from rhsmlib.services import config
# use instead of the normal pid file based ActionLock
from threading import RLock
if six.PY2:
OPEN_FUNCTION = '__builtin__.open'
else:
OPEN_FUNCTION = 'builtins.open'
@contextmanager
def open_mock(content=None, **kwargs):
content_out = six.StringIO()
m = mock_open(read_data=content)
with patch(OPEN_FUNCTION, m, create=True, **kwargs) as mo:
stream = six.StringIO(content)
rv = mo.return_value
rv.write = lambda x: content_out.write(x)
rv.content_out = lambda: content_out.getvalue()
rv.__iter__ = lambda x: iter(stream.readlines())
yield rv
@contextmanager
def open_mock_many(file_content_map=None, **kwargs):
"""
Mock out access to one or many files opened using the builtin "open".
:param file_content_map: A dictionary of path : file_contents
:type file_content_map: dict[str,str]
    :param kwargs: extra keyword arguments passed through to mock.patch
    :return: the mock object that replaces the builtin open
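    Example (illustrative)::
        with open_mock_many({"/etc/hosts": "127.0.0.1 localhost"}):
            contents = open("/etc/hosts").read()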
"""
file_content_map = file_content_map or {}
for key, value in file_content_map.items():
file_content_map[key] = (mock_open(read_data=value), value, six.StringIO())
def get_file(path, *args, **kwargs):
"""
The side effect that will allow us to "open" the right "file".
Not for use outside open_mock_many.
:param path: The path which is passed in to the built
:param args:
:param kwargs:
:return:
"""
try:
rv, file_contents, content_out = file_content_map[path]
except KeyError:
if six.PY2:
raise IOError(2, 'No such file or directory')
else:
raise OSError(2, 'No such file or directory')
rv = rv.return_value
rv.write = lambda x: content_out.write(x)
rv.content_out = lambda: content_out.getvalue()
return rv
with patch(OPEN_FUNCTION, **kwargs) as mo:
mo.side_effect = get_file
yield mo
@contextmanager
def temp_file(content, *args, **kwargs):
try:
kwargs['delete'] = False
kwargs.setdefault('prefix', 'sub-man-test')
fh = tempfile.NamedTemporaryFile(mode='w+', *args, **kwargs)
fh.write(content)
fh.close()
yield fh.name
finally:
os.unlink(fh.name)
@contextmanager
def locale_context(new_locale, category=None):
old_category = category or locale.LC_CTYPE
old_locale = locale.getlocale(old_category)
category = category or locale.LC_ALL
locale.setlocale(category, new_locale)
try:
yield
finally:
locale.setlocale(category, old_locale)
class FakeLogger(object):
def __init__(self):
self.expected_msg = ""
self.msg = None
self.logged_exception = None
def debug(self, buf, *args, **kwargs):
self.msg = buf
def error(self, buf, *args, **kwargs):
self.msg = buf
def exception(self, e, *args, **kwargs):
self.logged_exception = e
def set_expected_msg(self, msg):
self.expected_msg = msg
def info(self, buf, *args, **kwargs):
self.msg = buf
def warning(self, buf, *args, **kwargs):
self.msg = buf
class FakeException(Exception):
def __init__(self, msg=None):
self.msg = msg
def __str__(self):
return repr(self.msg)
class Matcher(object):
@staticmethod
def set_eq(first, second):
"""Useful for dealing with sets that have been cast to or instantiated as lists."""
return set(first) == set(second)
def __init__(self, compare, some_obj):
self.compare = compare
self.some_obj = some_obj
def __eq__(self, other):
return self.compare(self.some_obj, other)
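# Illustrative use: Matcher(Matcher.set_eq, ['a', 'b']) == ['b', 'a'] evaluates True,
# which allows list arguments to be compared as sets in mock assertions.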
class SubManFixture(unittest.TestCase):
def set_facts(self):
"""Override if you need to set facts for a test."""
return {"mock.facts": "true"}
"""
Can be extended by any subscription manager test case to make
sure nothing on the actual system is read/touched, and appropriate
mocks/stubs are in place.
"""
def setUp(self):
# No matter what, stop all patching (even if we have a failure in setUp itself)
self.addCleanup(patch.stopall)
# Never attempt to use the actual managercli.cfg which points to a
# real file in etc.
self.mock_cfg_parser = stubs.StubConfig()
original_conf = subscription_manager.managercli.conf
def unstub_conf():
subscription_manager.managercli.conf = original_conf
# Mock makes it damn near impossible to mock a module attribute (which we shouldn't be using
# in the first place because it's terrible) so we monkey-patch it ourselves.
# TODO Fix this idiocy by not reading the damn config on module import
subscription_manager.managercli.conf = config.Config(self.mock_cfg_parser)
self.addCleanup(unstub_conf)
facts_host_patcher = patch('rhsmlib.dbus.facts.FactsClient', auto_spec=True)
self.mock_facts_host = facts_host_patcher.start()
self.mock_facts_host.return_value.GetFacts.return_value = self.set_facts()
# By default mock that we are registered. Individual test cases
# can override if they are testing disconnected scenario.
id_mock = NonCallableMock(name='FixtureIdentityMock')
id_mock.exists_and_valid = Mock(return_value=True)
id_mock.uuid = 'fixture_identity_mock_uuid'
id_mock.name = 'fixture_identity_mock_name'
id_mock.cert_dir_path = "/not/a/real/path/to/pki/consumer/"
id_mock.keypath.return_value = "/not/a/real/key/path"
id_mock.certpath.return_value = "/not/a/real/cert/path"<|fim▁hole|>
# Avoid trying to read real /etc/yum.repos.d/redhat.repo
self.mock_repofile_path_exists_patcher = patch('subscription_manager.repolib.YumRepoFile.path_exists')
mock_repofile_path_exists = self.mock_repofile_path_exists_patcher.start()
mock_repofile_path_exists.return_value = True
inj.provide(inj.IDENTITY, id_mock)
inj.provide(inj.PRODUCT_DATE_RANGE_CALCULATOR, self.mock_calc)
inj.provide(inj.ENTITLEMENT_STATUS_CACHE, stubs.StubEntitlementStatusCache())
inj.provide(inj.POOL_STATUS_CACHE, stubs.StubPoolStatusCache())
inj.provide(inj.PROD_STATUS_CACHE, stubs.StubProductStatusCache())
inj.provide(inj.CONTENT_ACCESS_MODE_CACHE, stubs.StubContentAccessModeCache())
inj.provide(inj.SUPPORTED_RESOURCES_CACHE, stubs.StubSupportedResourcesCache())
inj.provide(inj.SYSPURPOSE_VALID_FIELDS_CACHE, stubs.StubSyspurposeValidFieldsCache())
inj.provide(inj.CURRENT_OWNER_CACHE, stubs.StubCurrentOwnerCache)
inj.provide(inj.OVERRIDE_STATUS_CACHE, stubs.StubOverrideStatusCache())
inj.provide(inj.RELEASE_STATUS_CACHE, stubs.StubReleaseStatusCache())
inj.provide(inj.AVAILABLE_ENTITLEMENT_CACHE, stubs.StubAvailableEntitlementsCache())
inj.provide(inj.PROFILE_MANAGER, stubs.StubProfileManager())
# By default set up an empty stub entitlement and product dir.
# Tests need to modify or create their own but nothing should hit
# the system.
self.ent_dir = stubs.StubEntitlementDirectory()
inj.provide(inj.ENT_DIR, self.ent_dir)
self.prod_dir = stubs.StubProductDirectory()
inj.provide(inj.PROD_DIR, self.prod_dir)
# Installed products manager needs PROD_DIR injected first
inj.provide(inj.INSTALLED_PRODUCTS_MANAGER, stubs.StubInstalledProductsManager())
self.stub_cp_provider = stubs.StubCPProvider()
self._release_versions = []
self.stub_cp_provider.content_connection.get_versions = self._get_release_versions
inj.provide(inj.CP_PROVIDER, self.stub_cp_provider)
inj.provide(inj.CERT_SORTER, stubs.StubCertSorter())
# setup and mock the plugin_manager
plugin_manager_mock = MagicMock(name='FixturePluginManagerMock')
plugin_manager_mock.runiter.return_value = iter([])
inj.provide(inj.PLUGIN_MANAGER, plugin_manager_mock)
inj.provide(inj.DBUS_IFACE, Mock(name='FixtureDbusIfaceMock'))
pooltype_cache = Mock()
inj.provide(inj.POOLTYPE_CACHE, pooltype_cache)
# don't use file based locks for tests
inj.provide(inj.ACTION_LOCK, RLock)
self.stub_facts = stubs.StubFacts()
inj.provide(inj.FACTS, self.stub_facts)
content_access_cache_mock = MagicMock(name='ContentAccessCacheMock')
inj.provide(inj.CONTENT_ACCESS_CACHE, content_access_cache_mock)
self.dbus_patcher = patch('subscription_manager.managercli.CliCommand._request_validity_check')
self.dbus_patcher.start()
# No tests should be trying to connect to any configure or test server
# so really, everything needs this mock. May need to be in __init__, or
# better, all test classes need to use SubManFixture
self.is_valid_server_patcher = patch("subscription_manager.managercli.is_valid_server_info")
is_valid_server_mock = self.is_valid_server_patcher.start()
is_valid_server_mock.return_value = True
# No tests should be trying to test the proxy connection
# so really, everything needs this mock. May need to be in __init__, or
# better, all test classes need to use SubManFixture
self.test_proxy_connection_patcher = patch("subscription_manager.managercli.CliCommand.test_proxy_connection")
test_proxy_connection_mock = self.test_proxy_connection_patcher.start()
test_proxy_connection_mock.return_value = True
self.syncedstore_patcher = patch('subscription_manager.syspurposelib.SyncedStore')
syncedstore_mock = self.syncedstore_patcher.start()
set_up_mock_sp_store(syncedstore_mock)
self.files_to_cleanup = []
def tearDown(self):
if not hasattr(self, 'files_to_cleanup'):
return
for f in self.files_to_cleanup:
# Assuming these are tempfile.NamedTemporaryFile, created with
# the write_tempfile() method in this class.
f.close()
def write_tempfile(self, contents):
"""
Write out a tempfile and append it to the list of those to be
cleaned up in tearDown.
"""
fid = tempfile.NamedTemporaryFile(mode='w+', suffix='.tmp')
fid.write(contents)
fid.seek(0)
self.files_to_cleanup.append(fid)
return fid
def set_consumer_auth_cp(self, consumer_auth_cp):
cp_provider = inj.require(inj.CP_PROVIDER)
cp_provider.consumer_auth_cp = consumer_auth_cp
def get_consumer_cp(self):
cp_provider = inj.require(inj.CP_PROVIDER)
consumer_cp = cp_provider.get_consumer_auth_cp()
return consumer_cp
# The ContentConnection used for reading release versions from
# the cdn. The injected one uses this.
def _get_release_versions(self, listing_path):
return self._release_versions
# For changing the injected consumer identity to one that passes "is_valid"
def _inject_mock_valid_consumer(self, uuid=None):
"""For changing injected consumer identity to one that passes is_valid()
Returns the injected identity if it needs to be examined.
"""
identity = NonCallableMock(name='ValidIdentityMock')
identity.uuid = uuid or "VALIDCONSUMERUUID"
identity.is_valid = Mock(return_value=True)
identity.cert_dir_path = "/not/a/real/path/to/pki/consumer/"
inj.provide(inj.IDENTITY, identity)
return identity
def _inject_mock_invalid_consumer(self, uuid=None):
"""For chaining injected consumer identity to one that fails is_valid()
Returns the injected identity if it need to be examined.
"""
invalid_identity = NonCallableMock(name='InvalidIdentityMock')
invalid_identity.is_valid = Mock(return_value=False)
invalid_identity.uuid = uuid or "INVALIDCONSUMERUUID"
invalid_identity.cert_dir_path = "/not/a/real/path/to/pki/consumer/"
inj.provide(inj.IDENTITY, invalid_identity)
return invalid_identity
# use our naming convention here to make it clear
# this is our extension. Note that python 2.7 adds an
# assertMultiLineEqual that assertEqual uses automatically
# for strings
def assert_string_equals(self, expected_str, actual_str, msg=None):
if expected_str != actual_str:
expected_lines = expected_str.splitlines(True)
actual_lines = actual_str.splitlines(True)
delta = difflib.unified_diff(expected_lines, actual_lines, "expected", "actual")
message = ''.join(delta)
if msg:
message += " : " + msg
self.fail("Multi-line strings are unequal:\n" + message)
def assert_equal_dict(self, expected_dict, actual_dict):
mismatches = []
missing_keys = []
extra = []
for key in expected_dict:
if key not in actual_dict:
missing_keys.append(key)
continue
if expected_dict[key] != actual_dict[key]:
mismatches.append((key, expected_dict[key], actual_dict[key]))
for key in actual_dict:
if key not in expected_dict:
extra.append(key)
message = ""
if missing_keys or extra:
message += "Keys in only one dict: \n"
if missing_keys:
for key in missing_keys:
message += "actual_dict: %s\n" % key
if extra:
for key in extra:
message += "expected_dict: %s\n" % key
if mismatches:
message += "Unequal values: \n"
for info in mismatches:
message += "%s: %s != %s\n" % info
# pprint the dicts
message += "\n"
message += "expected_dict:\n"
message += pprint.pformat(expected_dict)
message += "\n"
message += "actual_dict:\n"
message += pprint.pformat(actual_dict)
if mismatches or missing_keys or extra:
self.fail(message)
def assert_items_equals(self, a, b):
"""Assert that two lists contain the same items regardless of order."""
if sorted(a, key=lambda item: str(item)) != sorted(b, key=lambda item: str(item)):
self.fail("%s != %s" % (a, b))
return True
class Capture(object):
class Tee(object):
def __init__(self, stream, silent):
self.buf = six.StringIO()
self.stream = stream
self.silent = silent
def write(self, data):
self.buf.write(data)
if not self.silent:
self.stream.write(data)
def flush(self):
pass
def getvalue(self):
return self.buf.getvalue()
def isatty(self):
return False
def __init__(self, silent=False):
self.silent = silent
def __enter__(self):
self.buffs = (self.Tee(sys.stdout, self.silent), self.Tee(sys.stderr, self.silent))
self.stdout = sys.stdout
self.stderr = sys.stderr
sys.stdout, sys.stderr = self.buffs
return self
@property
def out(self):
return self.buffs[0].getvalue()
@property
def err(self):
return self.buffs[1].getvalue()
def __exit__(self, exc_type, exc_value, traceback):
sys.stdout = self.stdout
sys.stderr = self.stderr
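# Illustrative use of the Capture helper above (the command and strings here
# are hypothetical, not part of the original fixtures): stdout/stderr are
# swapped for Tee buffers inside the "with" block, so a test can assert on
# whatever was printed.
#
#   with Capture(silent=True) as cap:
#       print("some CLI output")
#   assert "some CLI output" in cap.out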
def set_up_mock_sp_store(mock_sp_store):
"""
Sets up the mock syspurpose store with methods that are mock versions of the real deal.
Allows us to test in the absence of the syspurpose module.
This documents the essential expected behaviour of the methods subman relies upon
from the syspurpose codebase.
:return:
"""
contents = {}
mock_sp_store_contents = contents
def set(item, value):
contents[item] = value
def read(path, raise_on_error=False):
return mock_sp_store
def unset(item):
contents[item] = None
def add(item, value):
current = contents.get(item, [])
if value not in current:
current.append(value)
contents[item] = current
def remove(item, value):
current = contents.get(item)
if current is not None and isinstance(current, list) and value in current:
current.remove(value)
def get_local_contents():
return contents
def get_cached_contents():
return contents
def update_local(data):
# 'contents' is a closure variable of set_up_mock_sp_store; a 'global'
# statement would rebind an unrelated module-level name, so mutate the
# shared dict in place instead.
contents.clear()
contents.update(data)
mock_sp_store.return_value.set = Mock(side_effect=set)
mock_sp_store.return_value.read = Mock(side_effect=read)
mock_sp_store.return_value.unset = Mock(side_effect=unset)
mock_sp_store.return_value.add = Mock(side_effect=add)
mock_sp_store.return_value.remove = Mock(side_effect=remove)
mock_sp_store.return_value.local_contents = mock_sp_store_contents
mock_sp_store.return_value.get_local_contents = Mock(side_effect=get_local_contents)
mock_sp_store.return_value.update_local = Mock(side_effect=update_local)
mock_sp_store.return_value.get_cached_contents = Mock(side_effect=get_cached_contents)
return mock_sp_store, mock_sp_store_contents<|fim▁end|>
|
# Don't really care about date ranges here:
self.mock_calc = NonCallableMock()
self.mock_calc.calculate.return_value = None
|
<|file_name|>test_rhythmtreetools_RhythmTreeNode_duration.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
from abjad.tools.durationtools import Duration
from abjad.tools.rhythmtreetools import RhythmTreeContainer, RhythmTreeLeaf
def test_rhythmtreetools_RhythmTreeNode_duration_01():
tree = RhythmTreeContainer(preprolated_duration=1, children=[
RhythmTreeLeaf(preprolated_duration=1),
RhythmTreeContainer(preprolated_duration=2, children=[
RhythmTreeLeaf(preprolated_duration=3),
RhythmTreeLeaf(preprolated_duration=2)
]),
RhythmTreeLeaf(preprolated_duration=2)
])
assert tree.duration == Duration(1)
assert tree[0].duration == Duration(1, 5)
assert tree[1].duration == Duration(2, 5)
assert tree[1][0].duration == Duration(6, 25)
assert tree[1][1].duration == Duration(4, 25)<|fim▁hole|> tree[1].append(tree.pop())
assert tree.duration == Duration(1)
assert tree[0].duration == Duration(1, 3)
assert tree[1].duration == Duration(2, 3)
assert tree[1][0].duration == Duration(2, 7)
assert tree[1][1].duration == Duration(4, 21)
assert tree[1][2].duration == Duration(4, 21)
tree.preprolated_duration = 19
assert tree.duration == Duration(19)
assert tree[0].duration == Duration(19, 3)
assert tree[1].duration == Duration(38, 3)
assert tree[1][0].duration == Duration(38, 7)
assert tree[1][1].duration == Duration(76, 21)
assert tree[1][2].duration == Duration(76, 21)<|fim▁end|>
|
assert tree[2].duration == Duration(2, 5)
|
<|file_name|>goto.py<|end_file_name|><|fim▁begin|># goto_assignments command tests are different in syntax
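# Reading guide for the assertions below: a "#!" comment encodes the expected
# goto_assignments result for the line that follows it, and the optional
# leading integer appears to select the column to query, e.g.
# "#! 0 ['a = definition']" checks column 0 of the next line.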
definition = 3
#! 0 ['a = definition']
a = definition
#! []
b
#! ['a = definition']
a
b = a
c = b
#! ['c = b']
c
cd = 1
#! 1 ['cd = c']
cd = c
#! 0 ['cd = e']
cd = e
#! ['module math']
import math
#! ['import math']
math
#! ['import math']
b = math
#! ['b = math']
b
class C(object):
def b(self):
#! ['b = math']
b
#! ['def b']
self.b
#! 14 ['def b']
self.b()
#! 11 ['self']
self.b
return 1
#! ['def b']
b
#! ['b = math']
b
#! ['def b']
C.b
#! ['def b']
C().b
#! 0 ['class C']
C().b
#! 0 ['class C']
C().b
D = C
#! ['def b']
D.b
#! ['def b']
D().b
#! 0 ['D = C']
D().b
#! 0 ['D = C']
D().b
def c():
return ''
#! ['def c']
c
#! 0 ['def c']
c()
class ClassVar():
x = 3
#! ['x = 3']
ClassVar.x
#! ['x = 3']
ClassVar().x
# before assignments
#! 10 ['x = 3']
ClassVar.x = ''
#! 12 ['x = 3']
ClassVar().x = ''
# Recurring use of the same var name, github #315
def f(t=None):
#! 9 ['t = None']
t = t or 1
# -----------------
# imports
# -----------------
#! ['module import_tree']
import import_tree
#! ["a = ''"]
import_tree.a
#! ['module mod1']
import import_tree.mod1
#! ['a = 1']
import_tree.mod1.a
#! ['module pkg']
import import_tree.pkg
#! ['a = list']
import_tree.pkg.a
#! ['module mod1']
import import_tree.pkg.mod1
#! ['a = 1.0']
import_tree.pkg.mod1.a
#! ["a = ''"]
import_tree.a
#! ['module mod1']
from import_tree.pkg import mod1
#! ['a = 1.0']
mod1.a
#! ['module mod1']
from import_tree import mod1
#! ['a = 1']
mod1.a
#! ['a = 1.0']
from import_tree.pkg.mod1 import a
#! ['import os']<|fim▁hole|>from .imports import os
#! ['some_variable = 1']
from . import some_variable
# -----------------
# anonymous classes
# -----------------
def func():
class A():
def b(self):
return 1
return A()
#! 8 ['def b']
func().b()
# -----------------
# on itself
# -----------------
#! 7 ['class ClassDef']
class ClassDef():
""" abc """
pass
# -----------------
# params
# -----------------
param = ClassDef
#! 8 ['param']
def ab1(param): pass
#! 9 ['param']
def ab2(param): pass
#! 11 ['param = ClassDef']
def ab3(a=param): pass
ab1(ClassDef);ab2(ClassDef);ab3(ClassDef)
# -----------------
# for loops
# -----------------
for i in range(1):
#! ['for i in range(1): i']
i
for key, value in [(1,2)]:
#! ['for key,value in [(1, 2)]: key']
key
for i in []:
#! ['for i in []: i']
i<|fim▁end|>
| |
<|file_name|>config.js<|end_file_name|><|fim▁begin|><|fim▁hole|>module.exports = {
flyers: {
testRail: {
projectId: 6,
},
confluence: {
space: '~adam.petrie'
}
}
}<|fim▁end|>
| |
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>#include "barista_core/barista_core.h"
#include "decafs_types/ip_address.h"
#include "network_fake.h"
#define MIN_ARGS 6
#define STRIPE_SIZE 1
#define CHUNK_SIZE 2
#define METADATA 3
#define NUM_ESPRESSOS 4
#define PORT 5
int main(int argc, char** argv) {
// barista_core_init()
// set_node_up x 4 or whatever you want
<|fim▁hole|> char* args[] = {(char*)"", (char*)"1024", (char*)"512", (char*)"barista_metadata", (char*)"4", (char*)"1024"};
barista_core_init(6, args);
set_node_up(1);
set_node_up(2);
set_node_up(3);
set_node_up(4);
struct ip_address ipaddr1 = ip_address((char*)"192.168.1.1");
ConnectionToClient* ctc = NULL;
struct client client1 = client(ipaddr1, 1, ctc);
//open_dir("hello.txt", client1);
open_file("hello.txt", O_RDWR, client1);
std::string teststr("testwrite");
write_file(1, teststr.c_str(), teststr.size(), client1);
close_file(1, client1);
open_file("hello.txt", O_RDWR, client1);
read_file(2, teststr.size(), client1);
}<|fim▁end|>
| |
<|file_name|>config.py<|end_file_name|><|fim▁begin|>import json
from util import d
import os
__home = os.path.expanduser("~").replace('\\', '/') + "/PixelWeb/"
BASE_SERVER_CONFIG = d({
"id":"server_config",
"display": "server_config",
"preconfig": False,
"presets":[],
"params": [{
"id": "external_access",
"label": "Allow External Access",
"type": "bool",
"default": True,
"help":"On: Other computers on your network can access PixelWeb. Off: LocalHost access only."
},{
"id": "port",
"label": "Server Port",
"type": "int",
"default": 8080,
"help":"Port to listen on."
},{
"id": "load_defaults",
"label": "Load Last Config on Start",
"type": "bool",
"default": False,
"help":"Load last driver/controller configuration on application start."
},
{
"id": "show_debug",
"label": "Show Debug in Console",
"type": "bool",
"default": False,
"help":"Show BiblioPixel debug in server console (not in main UI)."
},{
"id": "mod_dirs",
"label": "Module Directories",
"type": "str_multi",
"default": [],
"help":"Directories from which to load modules (animations, drivers, controllers, pre-configs).",
"replace": {"\\":"/"}
},
{
"id": "off_anim_time",
"label": "All Off Timeout",
"type": "int",
"default": 10,
"min": 0,
"max": 3600,
"help":"Keep display off when not running an animation by actively turning all pixels off every X seconds. Set to 0 to disable."
},]
});
def setHome(home):
global __home
__home = home
def genDefaultConfig(params):
c = {}
for p in params:
p = d(p)
c[p.id] = p.default
return c
def initConfig():
try:
if not os.path.exists(__home):
print "Creating {}".format(__home)
os.makedirs(__home)
except:
print "Failed to initialize PixelWeb config!"
def readConfig(file, key = None, path=None):
if not path:
path = __home
data = {}
try:
with open(path + "/" + file + ".json", "r") as fp:
data = json.load(fp, encoding='utf-8')
if key:
if key in data:
data = data[key]
else:
data = {}
except Exception, e:
pass
return d(data)
def writeConfig(file, data, key = None, path=None):
if not path:
path = __home
base = data
if key:
base = readConfig(file, path=path)
base[key] = data
with open(path + "/" + file + ".json", "w") as fp:
json.dump(base, fp, indent=4, sort_keys=True)
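# Sketch of typical usage (hypothetical values; assumes the home directory is
# writable and that util.d wraps plain dicts as used elsewhere in this file):
#
#   setHome("/tmp/PixelWeb/")
#   initConfig()
#   writeConfig("status", {"running": True}, key="app")
#   cfg = readConfig("status", key="app")   # -> d({"running": True})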
def paramsToDict(params):
data = {}<|fim▁hole|> return data
def readServerConfig():
data = readConfig("config", path=__home)
base = paramsToDict(BASE_SERVER_CONFIG.params)
if len(data.keys()) == 0:
data = paramsToDict(BASE_SERVER_CONFIG.params)
elif len(data.keys()) != len(base.keys()):
data.upgrade(base)
return d(data)
def writeServerConfig(data):
writeConfig("config", data)
def upgradeServerConfig():
b = genDefaultConfig(BASE_SERVER_CONFIG.params)
cfg = readServerConfig()
cfg.upgrade(b)
writeServerConfig(cfg)<|fim▁end|>
|
for p in params:
if "default" not in p:
p.default = None
data[p.id] = p.default
|
<|file_name|>chips.spec.ts<|end_file_name|><|fim▁begin|>import {Component, DebugElement, ViewChild} from '@angular/core';
import {ComponentFixture, TestBed, fakeAsync, flush} from '@angular/core/testing';
import {By} from '@angular/platform-browser';
import {FormsModule} from '@angular/forms';
import {ENTER} from '@angular/cdk/keycodes';
import {dispatchFakeEvent, dispatchMouseEvent} from '@angular-mdc/web/testing';
import {
MdcChipsModule,
MdcChip,
MdcChipSet,
MdcChipInteractionEvent,
MdcChipSelectionEvent,
MdcChipSetChange
} from './index';
import {MdcIcon} from '@angular-mdc/web/icon';
describe('Chips', () => {
let fixture: ComponentFixture<any>;
beforeEach(fakeAsync(() => {
TestBed.configureTestingModule({
imports: [MdcChipsModule, FormsModule],
declarations: [
ChipTest,
ChipValue,
TestChip,
ChipModel,
ChipInput,
ChipChoice,
ChipFilter
]
});
TestBed.compileComponents();
}));
describe('MdcChip', () => {
let testDebugElement: DebugElement;
let testNativeElement: HTMLElement;
let testInstance: MdcChip;
let testComponent: ChipTest;
beforeEach(() => {
fixture = TestBed.createComponent(ChipTest);
fixture.detectChanges();
testDebugElement = fixture.debugElement.query(By.directive(MdcChip));
testNativeElement = testDebugElement.nativeElement;
testInstance = testDebugElement.componentInstance;
testComponent = fixture.debugElement.componentInstance;
});
it('#should have mdc-chip by default', () => {
expect(testDebugElement.nativeElement.classList).toContain('mdc-chip');
});
it('handles transitionend event', fakeAsync(() => {
dispatchFakeEvent(testInstance.elementRef.nativeElement, 'transitionend');
}));
it('#should have leading icon', () => {
expect(testInstance.leadingIcon).toBeDefined();
});
it('#should have trailing icon', () => {
expect(testInstance._icons.last.trailing).toBe(true);
});
// it('should emit icon click event', () => {
// spyOn(testComponent, 'iconInteraction');
// testComponent.trailingIcon.elementRef.nativeElement.click();
// fixture.detectChanges();
// expect(testComponent.iconInteraction).toHaveBeenCalledTimes(1);
// });
it('expect disableRipple to be false', () => {
expect(testInstance.disableRipple).toBe(false);
});
it('handles click event', () => {
testNativeElement.click();
fixture.detectChanges();
});
it('is removable', () => {
testComponent.removable = false;
fixture.detectChanges();
});
});
describe('MdcChipSet', () => {
let testDebugElement: DebugElement;
let testNativeElement: HTMLElement;
let testInstance: MdcChipSet;
let testComponent: ChipTest;
beforeEach(() => {
fixture = TestBed.createComponent(ChipTest);
fixture.detectChanges();
testDebugElement = fixture.debugElement.query(By.directive(MdcChipSet));
testNativeElement = testDebugElement.nativeElement;
testInstance = testDebugElement.componentInstance;
testComponent = fixture.debugElement.componentInstance;
});
it('#should have mdc-chip-set by default', () => {
expect(testDebugElement.nativeElement.classList).toContain('mdc-chip-set');
});
it('#should apply filter class modifier', () => {
expect(testDebugElement.nativeElement.classList.contains('mdc-chip-set--filter')).toBe(true);
expect(testInstance.filter).toBe(true);
});
it('#should have selected chip ids defined', fakeAsync(() => {
testInstance.choice = true;
fixture.detectChanges();
flush();
testInstance.select('newsChip');
fixture.detectChanges();
flush();
expect(testInstance.getSelectedChipIds()).toBeDefined();
}));
it('#should select a chip, and select another chip', fakeAsync(() => {
testInstance.select('newsChip');
fixture.detectChanges();
flush();
testInstance.select('removableChip');
fixture.detectChanges();
flush();
}));
it('#should select chip', fakeAsync(() => {
testInstance.select('newsChip');
fixture.detectChanges();
flush();
}));
});
describe('MdcChip', () => {
let testDebugElement: DebugElement;
let testNativeElement: HTMLElement;
let testInstance: MdcChip;
let testComponent: TestChip;
beforeEach(() => {
fixture = TestBed.createComponent(TestChip);
fixture.detectChanges();
testDebugElement = fixture.debugElement.query(By.directive(MdcChip));
testNativeElement = testDebugElement.nativeElement;
testInstance = testDebugElement.componentInstance;
testComponent = fixture.debugElement.componentInstance;
});
it('#should set primary focus on chip', () => {
testInstance.focusPrimaryAction();
fixture.detectChanges();
});
it('#should set trailing icon focus on chip', () => {
testInstance.focusTrailingAction();
fixture.detectChanges();
});
it('#should default be removable', () => {
expect(testInstance.removable).toBe(true);
});
it('#should not be input', () => {
expect(testInstance.input).toBe(false);
});
it('#should run beginExit', () => {
testInstance.remove();
fixture.detectChanges();
});
it('#should not run beginExit', () => {
testInstance.removable = false;
fixture.detectChanges();
testInstance.remove();
fixture.detectChanges();
});
it('#should focus on chip when focus() is called', () => {
testInstance.focus();
fixture.detectChanges();
});
});
describe('MdcChip value', () => {
let testDebugElement: DebugElement;
let testNativeElement: HTMLElement;
let testInstance: MdcChipSet;
let testComponent: ChipValue;
beforeEach(() => {
fixture = TestBed.createComponent(ChipValue);
fixture.detectChanges();
testDebugElement = fixture.debugElement.query(By.directive(MdcChipSet));
testNativeElement = testDebugElement.nativeElement;
testInstance = testDebugElement.componentInstance;
testComponent = fixture.debugElement.componentInstance;
});
it('#should set value on chip', () => {
testComponent.value = 'directions-1';
fixture.detectChanges();
expect(testInstance.value).toBe('directions-1');
expect(testInstance.chips.first.value).toBe('directions-1');
});
<|fim▁hole|> expect(testInstance.value).toBe('weather-1');
});
it('#should set value on chip with selectByValue', () => {
testInstance.selectByValue('weather-1');
fixture.detectChanges();
expect(testInstance.value).toBe('weather-1');
});
it('#should set value on chip with selectByValue', () => {
testInstance.selectByValue(['weather-1']);
fixture.detectChanges();
expect(testInstance.value).toEqual(['weather-1']);
});
it('should emit changed event', () => {
spyOn(testComponent, 'onChipSetChange');
fixture.detectChanges();
testInstance.chips.first.elementRef.nativeElement.click();
fixture.detectChanges();
expect(testComponent.onChipSetChange).toHaveBeenCalledTimes(1);
});
});
describe('MdcChip Model', () => {
let testDebugElement: DebugElement;
let testNativeElement: HTMLElement;
let testInstance: MdcChipSet;
let testComponent: ChipModel;
beforeEach(() => {
fixture = TestBed.createComponent(ChipModel);
fixture.detectChanges();
testDebugElement = fixture.debugElement.query(By.directive(MdcChipSet));
testNativeElement = testDebugElement.nativeElement;
testInstance = testDebugElement.componentInstance;
testComponent = fixture.debugElement.componentInstance;
});
it('should select first chip', () => {
testInstance.chips.first.elementRef.nativeElement.click();
fixture.detectChanges();
});
it('should set ngModel value', () => {
testComponent.selectedFood = 'steak-1';
fixture.detectChanges();
});
});
describe('MdcChip Input', () => {
let testDebugElement: DebugElement;
let testNativeElement: HTMLElement;
let testInstance: MdcChipSet;
let testComponent: ChipInput;
beforeEach(() => {
fixture = TestBed.createComponent(ChipInput);
fixture.detectChanges();
testDebugElement = fixture.debugElement.query(By.directive(MdcChipSet));
testNativeElement = testDebugElement.nativeElement;
testInstance = testDebugElement.componentInstance;
testComponent = fixture.debugElement.componentInstance;
});
it('#should have mdc-chip by default', () => {
expect(testDebugElement.nativeElement.classList).toContain('mdc-chip-set');
});
it('#should apply input class modifier', () => {
expect(testDebugElement.nativeElement.classList.contains('mdc-chip-set--input')).toBe(true);
expect(testInstance.input).toBe(true);
});
it('#should have textContent as value', () => {
expect(testInstance.chips.first.value).toBeDefined();
});
it('should emit trailingIconInteraction event', () => {
spyOn(testComponent, 'onTrailingIconInteraction');
fixture.detectChanges();
testInstance.chips.first._icons.first.elementRef.nativeElement.click();
fixture.detectChanges();
expect(testComponent.onTrailingIconInteraction).toHaveBeenCalledTimes(1);
});
});
describe('MdcChip Filter', () => {
let testDebugElement: DebugElement;
let testNativeElement: HTMLElement;
let testInstance: MdcChipSet;
let testComponent: ChipFilter;
beforeEach(() => {
fixture = TestBed.createComponent(ChipFilter);
fixture.detectChanges();
testDebugElement = fixture.debugElement.query(By.directive(MdcChipSet));
testNativeElement = testDebugElement.nativeElement;
testInstance = testDebugElement.componentInstance;
testComponent = fixture.debugElement.componentInstance;
});
it('#should have mdc-chip by default', () => {
expect(testDebugElement.nativeElement.classList).toContain('mdc-chip-set');
});
it('#should apply filter class modifier', () => {
expect(testDebugElement.nativeElement.classList.contains('mdc-chip-set--filter')).toBe(true);
expect(testInstance.filter).toBe(true);
expect(testInstance.chips.first.filter).toBe(true);
dispatchMouseEvent(testComponent.icon.elementRef.nativeElement, 'keydown', ENTER);
fixture.detectChanges();
});
});
describe('MdcChip Choice', () => {
let testDebugElement: DebugElement;
let testNativeElement: HTMLElement;
let testInstance: MdcChipSet;
let testComponent: ChipChoice;
beforeEach(() => {
fixture = TestBed.createComponent(ChipChoice);
fixture.detectChanges();
testDebugElement = fixture.debugElement.query(By.directive(MdcChipSet));
testNativeElement = testDebugElement.nativeElement;
testInstance = testDebugElement.componentInstance;
testComponent = fixture.debugElement.componentInstance;
});
it('#should have mdc-chip by default', () => {
expect(testDebugElement.nativeElement.classList).toContain('mdc-chip-set');
});
it('#should apply choice class modifier', () => {
expect(testDebugElement.nativeElement.classList.contains('mdc-chip-set--choice')).toBe(true);
expect(testInstance.choice).toBe(true);
expect(testInstance.chips.first.choice).toBe(true);
});
});
});
@Component({
template: `
<mdc-chip-set filter>
<mdc-chip
[removable]="removable"
[touch]="touch"
(trailingIconInteraction)="iconInteraction()"
(selectionChange)="chipSelectionChange($event)">
<mdc-chip-icon leading>face</mdc-chip-icon>
<mdc-chip-text>Get Directions</mdc-chip-text>
<mdc-chip-icon #trailingIcon trailing>more_vert</mdc-chip-icon>
</mdc-chip>
<mdc-chip id="removableChip" [removable]="removable" [disableRipple]="disableRipple">
<mdc-chip-text>Get Weather</mdc-chip-text>
</mdc-chip>
<mdc-chip id="newsChip">
<mdc-chip-text>Get News</mdc-chip-text>
</mdc-chip>
</mdc-chip-set>`,
})
class ChipTest {
removable: boolean = true;
disableRipple: boolean;
touch: boolean;
@ViewChild('trailingIcon', {static: false}) trailingIcon: MdcIcon;
chipSelectionChange: (event?: MdcChipSelectionEvent) => void = () => {};
iconInteraction: () => void = () => {};
}
@Component({
template: `<mdc-chip-set filter>
<mdc-chip>
<mdc-chip-icon #icon leading>face</mdc-chip-icon>
<mdc-chip-text>Alice</mdc-chip-text>
</mdc-chip>
</mdc-chip-set>`
})
class ChipFilter {
@ViewChild('icon', {static: false}) icon: MdcIcon;
}
@Component({
template: `<mdc-chip-set input>
<mdc-chip label="Alice" (trailingIconInteraction)="onTrailingIconInteraction($event)">
<mdc-chip-icon trailing>cancel</mdc-chip-icon>
</mdc-chip>
</mdc-chip-set>`
})
class ChipInput {
onTrailingIconInteraction: (event?: MdcChipInteractionEvent) => void = () => {};
}
@Component({
template: `<mdc-chip>
<mdc-chip-icon trailing>cancel</mdc-chip-icon>
</mdc-chip>`
})
class TestChip {
}
@Component({
template: `<mdc-chip-set choice>
<mdc-chip>
<mdc-chip-text>Get Directions</mdc-chip-text>
</mdc-chip>
</mdc-chip-set>`
})
class ChipChoice {
chipSelectionChange: (event?: MdcChipSelectionEvent) => void = () => {};
}
@Component({
template: `
<mdc-chip-set [choice]="choice" [value]="value" (change)="onChipSetChange($event)">
<mdc-chip value="directions-1">
<mdc-chip-text>Get Directions</mdc-chip-text>
</mdc-chip>
<mdc-chip value="weather-1">
<mdc-chip-text>Get Weather</mdc-chip-text>
</mdc-chip>
<mdc-chip value="news-1">
<mdc-chip-text>Get News</mdc-chip-text>
</mdc-chip>
</mdc-chip-set>`,
})
class ChipValue {
choice: boolean = true;
value: any;
onChipSetChange: (event?: MdcChipSetChange) => void = () => {};
}
@Component({
template: `<mdc-chip-set [(ngModel)]="selectedFood">
<mdc-chip *ngFor="let food of foods" [value]="food.value" [selected]="food.selected">
{{food.viewValue}}
</mdc-chip>
</mdc-chip-set>`
})
class ChipModel {
selectedFood: any;
foods: any[] = [
{value: 'steak-0', viewValue: 'Steak', selected: false},
{value: 'pizza-1', viewValue: 'Pizza', selected: false},
{value: 'tacos-2', viewValue: 'Tacos', selected: false},
];
}<|fim▁end|>
|
it('#should set value on chip', () => {
testComponent.value = 'weather-1';
fixture.detectChanges();
|
<|file_name|>disk_sub.py<|end_file_name|><|fim▁begin|>import sys
import numpy as np
import pylab
import matplotlib.pyplot as plt
import scipy.integrate
import scipy.optimize
from collections import namedtuple
import geo
import astro_help as ah
import disk_sub as disk
RADIAN=57.29598
C=2.997925e10
MSOL=1.979e33
G=6.670e-8
YR=3.1556925e7
EPSILON=1e-6
PI=3.1416
STEFAN_BOLTZMANN=5.669e-5
def tdisk (m, mdot, r):
t = 3. * G / (8. * PI * STEFAN_BOLTZMANN) * m * mdot / (r * r * r)
t = pow (t, 0.25)
return (t)
def teff (t, x):
q = (1.e0 - (x ** -0.5e0)) / (x * x * x);
q = t * (q ** 0.25e0);
return (q)
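# Note on the two helpers above: tdisk() evaluates the characteristic disc
# temperature T* = (3 G M Mdot / (8 pi sigma_SB R^3))**0.25 and teff() applies
# the radial profile T(x) = T* * ((1 - x**-0.5) / x**3)**0.25 with x = r/rmin,
# i.e. the standard Shakura-Sunyaev scaling; the constants above are CGS.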
def spec_disk (f1,f2,m,mdot,rmin,rmax):
tref=tdisk(m, mdot, rmin)
nfreq=int((f2/f1)*100)
freq=np.linspace(f1,f2,nfreq)
spec=np.zeros(nfreq)   # start from zero so the flux can be accumulated below
dfreq=freq[1]-freq[0]
rtemp=np.logspace(np.log10(rmin),np.log10(rmax),num=100)
rdisk=[]
for j in range(len(rtemp)-1):
rdisk.append((rtemp[j]+rtemp[j+1])/2.0)
r=rdisk[j]/rmin
area=PI*(rtemp[j+1]*rtemp[j+1]-rtemp[j]*rtemp[j])
t=(disk.teff(tref,r))
for i in range(len(freq)):
spec[i]=spec[i]+(ah.planck_nu(t,freq[i])*area*PI*2)
return (freq,spec)
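# Illustrative call (the numbers are hypothetical CGS values, not taken from
# the original source):
#
#   m = 0.8 * MSOL                 # central mass [g]
#   mdot = 1.0e18                  # accretion rate [g/s]
#   rmin, rmax = 7.0e8, 1.0e11     # inner/outer disc radius [cm]
#   freq, spec = spec_disk(1e14, 1e16, m, mdot, rmin, rmax)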
def spec_disk1 (f1,f2,m,mdot,rmin,rmax):
tref=tdisk(m, mdot, rmin)
nfreq=1000
freq=np.logspace(np.log10(f1),np.log10(f2),nfreq)
spec=np.zeros(nfreq)   # start from zero so the flux can be accumulated below
dfreq=freq[1]-freq[0]
rtemp=np.logspace(np.log10(rmin),np.log10(rmax),num=100)
rdisk=[]
for j in range(len(rtemp)-1):
rdisk.append((rtemp[j]+rtemp[j+1])/2.0)
r=rdisk[j]/rmin
area=PI*(rtemp[j+1]*rtemp[j+1]-rtemp[j]*rtemp[j])
t=(disk.teff(tref,r))
for i in range(len(freq)-1):
spec[i]=spec[i]+(ah.planck_nu(t,(freq[i+1]+freq[i])/2.0)*area*PI*2*(freq[i+1]-freq[i]))
return (freq,spec)
def lnu_disk (f,m,mdot,rmin,rmax):
tref=tdisk(m, mdot, rmin)
rtemp=np.logspace(np.log10(rmin),np.log10(rmax),num=100)
rdisk=[]
lnu=0.0
for j in range(len(rtemp)-1):
rdisk.append((rtemp[j]+rtemp[j+1])/2.0)
r=rdisk[j]/rmin
area=PI*(rtemp[j+1]*rtemp[j+1]-rtemp[j]*rtemp[j])
t=(disk.teff(tref,r))
lnu=lnu+(ah.planck_nu(t,f)*area*PI*2.0)
return (lnu)
def llamb_disk (lamb,m,mdot,rmin,rmax):
tref=tdisk(m, mdot, rmin)
rtemp=np.logspace(np.log10(rmin),np.log10(rmax),num=100)
rdisk=[]
llamb=0.0
for j in range(len(rtemp)-1):
rdisk.append((rtemp[j]+rtemp[j+1])/2.0)
r=rdisk[j]/rmin
area=PI*(rtemp[j+1]*rtemp[j+1]-rtemp[j]*rtemp[j])
t=(disk.teff(tref,r))
llamb=llamb+(ah.planck_lamb(t,lamb)*area*PI*2.0)
return (llamb)
def spec_disk2 (f1,f2,m,mdot,rmin,rmax):
tref=tdisk(m, mdot, rmin)
nfreq=10
f1a=10**float(int(np.log10(f1)))
f2a=10**float(int(np.log10(f2))+1)
nrange=int(np.log10((f2a/f1a)))
freq=[]
dfreq=[]
ftemp=f1a
df=f1a/nfreq
for i in range(nrange):
for j in range(nfreq*9):
ftemp=ftemp+df
if ftemp > f2:
break
if ftemp >= f1:
freq.append(ftemp)
df=df*10.0
#print freq[0],freq[len(freq)-1]
spec=np.zeros(len(freq))
rtemp=np.logspace(np.log10(rmin),np.log10(rmax),num=100)<|fim▁hole|> for j in range(len(rtemp)-1):
rdisk.append((rtemp[j]+rtemp[j+1])/2.0)
r=rdisk[j]/rmin
area=PI*(rtemp[j+1]*rtemp[j+1]-rtemp[j]*rtemp[j])
t=(disk.teff(tref,r))
for i in range(len(freq)-1):
spec[i]=spec[i]+(ah.planck_nu(t,freq[i])*area*PI*2)
return (freq,spec)<|fim▁end|>
|
rdisk=[]
|
<|file_name|>upscale_relperm.cpp<|end_file_name|><|fim▁begin|>/*
Copyright 2010 Statoil ASA.
This file is part of The Open Porous Media project (OPM).
OPM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OPM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
/**
@file upscale_relperm.C
@brief Upscales relative permeability as a function of water saturation assuming capillary equilibrium.
Description:
Reads in a lithofacies geometry in Eclipse format, reads in J(S_w)
and relpermcurve(S_w) for each stone type, and calculates upscaled
(three directions) relative permeability curves as a function of Sw.
The relative permeability computation is based on
- Capillary equilibrium, p_c is spatially invariant.
- Optional gravitational effects. If gravity is not specified,
gravity will be assumed to be zero.
Units handling:
- Assumes cornerpoint file reports lengths in cm.
- Input surface tension is in dynes/cm
- Input density is in g/cm^3
- The denominator \sigma * cos(\phi) in J-function scaling
is what we call "surface tension". If angle dependency is to be
included, calculate the "surface tension" yourself.
- Outputted capillary pressure is in Pascals.
Steps in the code:
1: Process command line options.
2: Read Eclipse file
3: Read relperm- and J-function for each stone-type.
4: Tesselate the grid (Sintef code)
5: Find minimum and maximum capillary pressure from the
J-functions in each cell.
6: Upscale water saturation as a function of capillary pressure
7: Upscale single phase permeability.
8: Upscale phase permeability for capillary pressures
that correspond to a uniform saturation grid, and
compute relative permeability.
9: Print output to screen and optionally to file.
*/
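/*
Example invocation (the file names below are placeholders, not files shipped
with the code):

upscale_relperm -bc p -points 30 -output upscaled.out grid.grdecl stone1.txt stone2.txt

This upscales relative permeability with periodic boundary conditions at 30
saturation points, writes the result table to upscaled.out, and uses
stone1.txt/stone2.txt for SATNUM 1 and 2 respectively.
*/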
#include <config.h>
#include <iostream>
#include <fstream>
#include <sstream>
#include <iomanip>
#include <ctime>
#include <cmath>
#include <cfloat> // for DBL_MAX/DBL_MIN
#include <map>
#include <sys/utsname.h>
#include <dune/common/version.hh>
#if DUNE_VERSION_NEWER(DUNE_COMMON, 2, 3)
#include <dune/common/parallel/mpihelper.hh>
#else
#include <dune/common/mpihelper.hh>
#endif
#include <opm/core/utility/Units.hpp>
#include <opm/upscaling/SinglePhaseUpscaler.hpp>
#include <opm/upscaling/ParserAdditions.hpp>
#include <opm/upscaling/RelPermUtils.hpp>
using namespace Opm;
using namespace std;
static void usage()
{
cerr << "Usage: upscale_relperm <options> <eclipsefile> stoneA.txt stoneB.txt ..." << endl <<
"where the options are:" << endl <<
" -bc <string> -- which boundary conditions to use." << endl <<
" Possible values are f (fixed), l (linear)" << endl <<
" and p (periodic). Default f (fixed)." << endl <<
" -points <integer> -- Number of saturation points to upscale for." << endl <<
" Uniformly distributed within saturation endpoints." << endl <<
" Default 30." << endl <<
" -relPermCurve <integer> -- For isotropic input, the column number in the stone-files" << endl <<
" that represents the phase to be upscaled," << endl <<
" typically 2 (default) for water and 3 for oil." << endl <<
" -jFunctionCurve <integer> -- the column number in the stone-files that" << endl <<
" represent the Leverett J-function. Default 4." << endl <<
" -upscaleBothPhases <bool> -- If this is true, relperm for both phases will be upscaled" << endl <<
" and both will be outputted to Eclipse format. Default true." << endl <<
" For isotropic input, relPermCurves is assumed to be 2 and 3," << endl <<
" for anisotropic input, relPermCurves are assumed to be 3-5" << endl <<
" and 6-8 respectively for the two phases" << endl <<
" -gravity <float> -- use 9.81 for standard gravity. Default zero. Unit m/s^2." << endl <<
" -surfaceTension <float> -- Surface tension to use in J-function/Pc conversion." << endl <<
" Default 11 dynes/cm (oil-water systems). In absence of" << endl <<
" a correct value, the surface tension for gas-oil systems " << endl <<
" could be 22.5 dynes/cm." << endl <<
" -waterDensity <float> -- density of water, only applicable to non-zero" << endl <<
" gravity, g/cm³. Default 1" << endl <<
" -oilDensity <float> -- density of oil, only applicable to non-zero" << endl <<
" gravity, g/cm³. Default 0.6" << endl <<
" -output <string> -- filename for where to write upscaled values." << endl <<
" If not supplied, output will only go to " << endl <<
" the terminal (standard out)." << endl <<
" -interpolate <integer> -- If supplied, the output data points will be" << endl <<
" interpolated using monotone cubic interpolation" << endl <<
" on a uniform grid with the specified number of" << endl <<
" points. Suggested value: 1000." << endl <<
" -maxPermContrast <float> -- maximal permeability contrast in model." << endl <<
" Default 10^7" << endl <<
" -minPerm <float> -- Minimum floating point value allowed for" << endl <<
" phase permeability in computations. If set to zero," << endl <<
" some models can end up singular. Default 10^-12" << endl <<
" -maxPerm <float> -- Maximum floating point value allowed for" << endl <<
" permeability. " << endl <<
" Default 100000. Unit Millidarcy." << endl <<
" -fluids <string> -- Either ow for oil/water systems or go for gas/oil systems. Default ow." << endl <<
" In case of go, the waterDensity option should be set to gas density" << endl <<
" Also remember to specify the correct surface tension" << endl <<
" -krowxswirr <float> -- Oil relative permeability in x-direction at Swirr(from SWOF table)." << endl <<
" In case of oil/gas, this value is needed to ensure consistensy" << endl <<
" between SWOF and SGOF tables. Only has affect if fluids is set to go" << endl <<
" and upscaleBothPhases is true." << endl <<
" If not set, the point is not inserted into the final table." << endl <<
" -krowyswirr <float> -- Oil relative permeability in y-direction at Swirr(from SWOF table). See krowxswirr." << endl <<
" -krowzswirr <float> -- Oil relative permeability in z-direction at Swirr(from SWOF table). See krowxswirr." << endl <<
" -doEclipseCheck <bool> -- Default true. Check that input relperm curves includes relperms at critical" << endl <<
" saturation points, i.e. that krw(swcrit)=0 and krow(swmax) = 0 and similar for oil/gas." << endl <<
" -critRelpermThresh <float> -- If minimum relperm values are less than this threshold, they are set to zero" << endl <<
" and will pass the EclipseCheck. Default 10^-6" << endl <<
"If only one stone-file is supplied, it is used for all stone-types defined" << endl <<
"in the geometry. If more than one, it corresponds to the SATNUM-values." << endl;
// "minPoro" intentionally left undocumented
// "saturationThreshold" also
}
static void usageandexit() {
usage();
exit(1);
}
//! \brief Parse command line arguments into string map.
//! \param[in,out] options The map of options. Should be filled with default values on entry.
//! \param[in] varnum Number of arguments
//! \param[in] vararg The arguments
//! \param[in] verbose Whether or not to print parsed command line arguments
//! \returns index of input file if positive, negated index to offending argument on failure.
static int parseCommandLine(std::map<std::string,std::string>& options,
int varnum, char** vararg, bool verbose)
{
int argeclindex = 0;
for (int argidx = 1; argidx < varnum; argidx += 2) {
if (string(vararg[argidx]).substr(0,1) == "-") {
string searchfor = string(vararg[argidx]).substr(1); // Chop off leading '-'
/* Check if it is a match */
if (options.count(searchfor) == 1) {
options[searchfor] = string(vararg[argidx+1]);
if (verbose)
cout << "Parsed command line option: "
<< searchfor << " := " << vararg[argidx+1] << endl;
argeclindex = argidx + 2;
}
else
return -argidx;
}
else {
// if vararg[argidx] does not start in '-',
// assume we have found the position of the Eclipse-file.
argeclindex = argidx;
break; // out of for-loop,
}
}
return argeclindex;
}
//! \brief Return eclipse-style output filename.
//! \param[in] opfname Base output file name.
//! \param[in] comp Component (X, Y, Z).
//! \param[in] sat Fluid system type.
//! \return Eclipse-style filename for requested component/fluid system combination.
static std::string getEclipseOutputFile(const std::string& opfname, char comp, char sat)
{
string fnbase = opfname.substr(0,opfname.find_first_of('.'));
return fnbase + "-" +comp + ".S" + sat + "OF";
}
//! \brief Write eclipse-style output file.
//! \param[in] RelPermValues RelPerm values to write.
//! \param[in] Satvalues Saturation values to write.
//! \param[in] Pvalues Pressure values to write.
//! \param[in] options Option structure.
//! \param[in] component Component to write (0..2).
//! \param[in] owsystem Fluid system type.
template<class Lazy>
static void writeEclipseOutput(Lazy& RelPermValues,
const std::vector<double>& Satvalues,
const std::vector<double>& Pvalues,
std::map<std::string,std::string>& options,
int component, bool owsystem)
{
std::stringstream swof;
char sat = (owsystem?'W':'G');
char comp = 'x'+component;
std::string krowstring = std::string("krow") + comp + "swirr";
double krowswirr = atof(options[krowstring].c_str());
const int outputprecision = atoi(options["outputprecision"].c_str());
const int fieldwidth = outputprecision + 8;
// x-direction
swof << "-- This file is based on the results in " << endl
<< "-- " << options["output"] << endl
<< "-- for relperm in " << comp << "-direction." << endl
<< "-- Pressure values (Pc) given in bars." << endl
<< "-- S" << (char)std::tolower(sat) << " Kr"
<< (char)std::tolower(sat) << comp << comp
<< " Kro" << (char)std::tolower(sat) << comp << comp
<< " Pc(bar)" << endl
<< "--S" << sat << "OF" << endl;
if (krowswirr > 0) {
swof << showpoint << setw(fieldwidth) << setprecision(outputprecision) << 0
<< showpoint << setw(fieldwidth) << setprecision(outputprecision) << 0
<< showpoint << setw(fieldwidth) << setprecision(outputprecision) << krowswirr
<< showpoint << setw(fieldwidth) << setprecision(outputprecision) << 0 << endl;
}
for (size_t i=0; i < Satvalues.size(); ++i) {
swof << showpoint << setw(fieldwidth) << setprecision(outputprecision) << Satvalues[i]
<< showpoint << setw(fieldwidth) << setprecision(outputprecision) << RelPermValues[0][component][i]
<< showpoint << setw(fieldwidth) << setprecision(outputprecision) << RelPermValues[1][component][i]
<< showpoint << setw(fieldwidth) << setprecision(outputprecision) << Pvalues[i]/100000.0 << endl;
}
swof << "/" << endl;
std::ofstream file;
file.open(getEclipseOutputFile(options["output"], std::toupper(comp), sat),
std::ios::out | std::ios::trunc);
file << swof.str();
file.close();
}
int main(int varnum, char** vararg)
try
{
// Variables used for timing/profiling:
clock_t start, finish;
double timeused = 0.0, timeused_tesselation = 0.0;
double timeused_upscale_wallclock = 0.0;
/******************************************************************************
* Step 1:
* Process command line options
*/
Dune::MPIHelper& mpi=Dune::MPIHelper::instance(varnum, vararg);
const int mpi_rank = mpi.rank();
#ifdef HAVE_MPI
const int mpi_nodecount = mpi.size();
#endif
if (varnum == 1) { /* If no arguments supplied ("upscale_relperm" is the first "argument") */
usage();
exit(1);
}
/*
Populate options-map with default values
*/
map<string,string> options =
{{"bc", "f"}, // Fixed boundary conditions
{"points", "30"}, // Number of saturation points (uniformly distributed within saturation endpoints)
{"relPermCurve", "2"}, // Which column in the rock types are upscaled
{"upscaleBothPhases", "true"}, // Whether to upscale for both phases in the same run. Default true.
{"jFunctionCurve", "4"}, // Which column in the rock type file is the J-function curve
{"surfaceTension", "11"}, // Surface tension given in dynes/cm
{"output", ""}, // If this is set, output goes to screen and to this file.
{"gravity", "0.0"}, // default is no gravitational effects
{"waterDensity", "1.0"}, // default density of water, only applicable to gravity
{"oilDensity", "0.6"}, // ditto
{"interpolate", "0"}, // default is not to interpolate
{"maxpoints", "1000"}, // maximal number of saturation points.
{"outputprecision", "4"}, // number of significant numbers to print
{"maxPermContrast", "1e7"}, // maximum allowed contrast in each single-phase computation
{"minPerm", "1e-12"}, // absolute minimum for allowed cell permeability
{"maxPerm", "100000"}, // maximal allowed cell permeability
{"minPoro", "0.0001"}, // this limit is necessary for pcmin/max computation
{"saturationThreshold", "0.00001"}, // accuracy threshold for saturation, we ignore Pc values that
// give so small contributions near endpoints.
{"linsolver_tolerance", "1e-12"}, // residual tolerance for linear solver
{"linsolver_verbosity", "0"}, // verbosity level for linear solver
{"linsolver_max_iterations", "0"}, // Maximum number of iterations allow, specify 0 for default
{"linsolver_type", "3"}, // Type of linear solver: 0 = ILU0/CG, 1 = AMG/CG, 2 KAMG/CG, 3 FAST_AMG/CG
{"linsolver_prolongate_factor", "1.0"}, // Prolongation factor in AMG
{"linsolver_smooth_steps", "1"}, // Number of smoothing steps in AMG
{"fluids", "ow"}, // Whether upscaling for oil/water (ow) or gas/oil (go)
{"krowxswirr", "-1"}, // Relative permeability in x direction of oil in corresponding oil/water system
{"krowyswirr", "-1"}, // Relative permeability in y direction of oil in corresponding oil/water system
{"krowzswirr", "-1"}, // Relative permeability in z direction of oil in corresponding oil/water system
{"doEclipseCheck", "true"}, // Check if minimum relpermvalues in input are zero (specify critical saturations)
{"critRelpermThresh", "1e-6"}};// Threshold for setting minimum relperm to 0 (thus specify critical saturations)
/* Check first if there is anything on the command line to look for */
if (varnum == 1) {
if (mpi_rank == 0)
cout << "Error: No Eclipsefile or stonefiles found on command line." << endl;
usageandexit();
}
/*
'argeclindex' is so that vararg[argeclindex] = the eclipse filename.
*/
int argeclindex = parseCommandLine(options, varnum, vararg, mpi_rank == 0);
if (argeclindex < 0) {
if (mpi_rank == 0)
cout << "Option -" << vararg[-argeclindex] << " unrecognized." << endl;
usageandexit();
}
RelPermUpscaleHelper helper(mpi_rank, options);
bool owsystem = helper.saturationstring == "Sw";
// argeclindex should now point to the eclipse file
static char* ECLIPSEFILENAME(vararg[argeclindex]);
argeclindex += 1; // argeclindex jumps to next input argument, now it points to the stone files.
// argeclindex now points to the first J-function. This index is not
// to be touched now.
static int rockfileindex = argeclindex;
/* Check if at least one J-function is supplied on command line */
if (varnum <= rockfileindex)
throw std::runtime_error("Error: No J-functions found on command line.");
/* Check validity of boundary conditions chosen, and make booleans
for boundary conditions, this allows more readable code later. */
helper.setupBoundaryConditions();
bool isFixed = helper.boundaryCondition == SinglePhaseUpscaler::Fixed,
isLinear = helper.boundaryCondition == SinglePhaseUpscaler::Linear,
isPeriodic = helper.boundaryCondition == SinglePhaseUpscaler::Periodic;
// If this number is 1 or higher, the output will be interpolated, if not
// the computed data is untouched.
const int interpolationPoints = atoi(options["interpolate"].c_str());
bool doInterpolate = false;
if (interpolationPoints > 1) {
doInterpolate = true;
}
/***********************************************************************
* Step 2:
* Load geometry and data from Eclipse file
*/
// Read data from the Eclipse file and
// populate our vectors with data from the file
// Test if filename exists and is readable
ifstream eclipsefile(ECLIPSEFILENAME, ios::in);
if (eclipsefile.fail()) {
std::stringstream str;
str << "Error: Filename " << ECLIPSEFILENAME << " not found or not readable.";
throw std::runtime_error(str.str());
}
eclipsefile.close();
if (helper.isMaster) cout << "Parsing Eclipse file <" << ECLIPSEFILENAME << "> ... ";
flush(cout); start = clock();
Opm::ParseMode parseMode;
Opm::ParserPtr parser(new Opm::Parser());
Opm::addNonStandardUpscalingKeywords(parser);
Opm::DeckConstPtr deck(parser->parseFile(ECLIPSEFILENAME , parseMode));
finish = clock(); timeused = (double(finish)-double(start))/CLOCKS_PER_SEC;
if (helper.isMaster) cout << " (" << timeused <<" secs)" << endl;
Opm::DeckRecordConstPtr specgridRecord = deck->getKeyword("SPECGRID")->getRecord(0);
std::array<int,3> res;
res[0] = specgridRecord->getItem("NX")->getInt(0);
res[1] = specgridRecord->getItem("NY")->getInt(0);
res[2] = specgridRecord->getItem("NZ")->getInt(0);
const double minPerm = atof(options["minPerm"].c_str());
const double maxPerm = atof(options["maxPerm"].c_str());
const double minPoro = atof(options["minPoro"].c_str());
helper.sanityCheckInput(deck, minPerm, maxPerm, minPoro);
/***************************************************************************
* Step 3:
* Load relperm- and J-function-curves for the stone types.
* We read columns from text-files, syntax allowed is determined
* by MonotCubicInterpolator which actually opens and parses the
* text files.
*
* If a standard eclipse data file is given as input, the data columns
* should be:
* Sw Krw Kro J-func
* (In this case, the option -relPermCurve determines which of Krw or Kro is used)
*
* If output from this very program is given as input, then the data columns read
* Pc Sw Krx Kry Krz
*
* (and the option -relPermCurve and -jFunctionCurve are ignored)
*
* How do we determine which mode of operation?
* - If PERMY and PERMZ are present in grdecl-file, we are in the anisotropic mode
*
*/
// Number of stone-types is max(satnums):
// If there is only one J-function supplied on the command line,
// use that for all stone types.
int stone_types = int(*(max_element(helper.satnums.begin(), helper.satnums.end())));
std::vector<string> JfunctionNames; // Placeholder for the names of the loaded J-functions.
// This decides whether we are upscaling water or oil relative permeability
const int relPermCurve = atoi(options["relPermCurve"].c_str());
// This decides whether we are upscaling both phases in this run or only one
helper.upscaleBothPhases = (options["upscaleBothPhases"] == "true");
const int jFunctionCurve = atoi(options["jFunctionCurve"].c_str());
helper.points = atoi(options["points"].c_str());
const double gravity = atof(options["gravity"].c_str());
// Input for surfaceTension is dynes/cm
// SI units are Joules/square metre
const double surfaceTension = atof(options["surfaceTension"].c_str()) * 1e-3; // multiply with 10^-3 to obtain SI units
const bool includeGravity = (fabs(gravity) > DBL_MIN); // true for non-zero gravity
const int outputprecision = atoi(options["outputprecision"].c_str());
// Handle two command line input formats, either one J-function for all stone types
// or one each. If there is only one stone type, both code blocks below are equivalent.
if (varnum != rockfileindex + stone_types && varnum != rockfileindex + 1)
throw std::runtime_error("Error: Wrong number of stone-functions provided.");
for (int i=0 ; i < stone_types; ++i) {
const char* ROCKFILENAME = vararg[rockfileindex+stone_types==varnum?rockfileindex+i:rockfileindex];
// Check if rock file exists and is readable:
ifstream rockfile(ROCKFILENAME, ios::in);
if (rockfile.fail()) {
std::stringstream str;
str << "Error: Filename " << ROCKFILENAME << " not found or not readable.";
throw std::runtime_error(str.str());
}
rockfile.close();
if (! helper.anisotropic_input) {
MonotCubicInterpolator Jtmp;
try {
Jtmp = MonotCubicInterpolator(ROCKFILENAME, 1, jFunctionCurve);
}
catch (const char * errormessage) {
std::stringstream str;
str << "Error: " << errormessage << endl
<< "Check filename and -jFunctionCurve" << endl;
throw std::runtime_error(str.str());
}
// Invert J-function, now we get saturation as a function of pressure:
if (Jtmp.isStrictlyMonotone()) {
helper.InvJfunctions.push_back(MonotCubicInterpolator(Jtmp.get_fVector(), Jtmp.get_xVector()));
JfunctionNames.push_back(ROCKFILENAME);
if (helper.upscaleBothPhases) {
helper.Krfunctions[0][0].push_back(MonotCubicInterpolator(ROCKFILENAME, 1, 2));
helper.Krfunctions[0][1].push_back(MonotCubicInterpolator(ROCKFILENAME, 1, 3));
}
else {
helper.Krfunctions[0][0].push_back(MonotCubicInterpolator(ROCKFILENAME, 1, relPermCurve));
}
}
else {
std::stringstream str;
str << "Error: Jfunction " << i+1 << " in rock file " << ROCKFILENAME << " was not invertible.";
throw std::runtime_error(str.str());
}
}
else { // If input is anisotropic, then we are in second mode with different input file format
MonotCubicInterpolator Pctmp;
try {
Pctmp = MonotCubicInterpolator(ROCKFILENAME, 2, 1);
}
catch (const char * errormessage) {
std::stringstream str;
str << "Error: " << errormessage << endl
<< "Check filename and columns 1 and 2 (Pc and " << helper.saturationstring <<")";
throw std::runtime_error(str.str());
}
// Invert Pc(Sw) curve into Sw(Pc):
if (Pctmp.isStrictlyMonotone()) {
helper.SwPcfunctions.push_back(MonotCubicInterpolator(Pctmp.get_fVector(), Pctmp.get_xVector()));
JfunctionNames.push_back(ROCKFILENAME);
helper.Krfunctions[0][0].push_back(MonotCubicInterpolator(ROCKFILENAME, 2, 3));
helper.Krfunctions[1][0].push_back(MonotCubicInterpolator(ROCKFILENAME, 2, 4));
helper.Krfunctions[2][0].push_back(MonotCubicInterpolator(ROCKFILENAME, 2, 5));
if (helper.upscaleBothPhases) {
helper.Krfunctions[0][1].push_back(MonotCubicInterpolator(ROCKFILENAME, 2, 6));
helper.Krfunctions[1][1].push_back(MonotCubicInterpolator(ROCKFILENAME, 2, 7));
helper.Krfunctions[2][1].push_back(MonotCubicInterpolator(ROCKFILENAME, 2, 8));
}
}
else {
std::stringstream str;
str << "Error: Pc(" << helper.saturationstring << ") curve " << i+1 << " in rock file " << ROCKFILENAME << " was not invertible.";
throw std::runtime_error(str.str());
}
}
}
// Check if input relperm curves satisfy Eclipse requirement of specifying critical saturations
if (helper.doEclipseCheck)
helper.checkCriticalSaturations();
/*****************************************************************************
* Step 4:
* Generate tesselated grid:
* This is a step needed for the later discretization code to figure out which
* cells are connected to which. Each cornerpoint-cell is tesselated into 8 tetrahedrons.
*
* In case of non-zero gravity, calculate z-values of every cell:
* 1) Compute height of model by averaging z-values of the top layer corners.
* 2) Calculate density difference between phases in SI-units
* 3) Go through each cell and find the z-values of the eight corners of the cell.
* Set height of cell equal to average of z-values of the corners minus half of
* model height. Now the cell height is relative to model centre.
* Set pressure difference for the cell equal to density difference times gravity
* constant times cell height times factor 10^-7 to obtain bars (same as p_c)
*/
timeused_tesselation = helper.tesselateGrid(deck);
/* If gravity is to be included, calculate z-values of every cell: */
if (includeGravity)
helper.calculateCellPressureGradients(res);
/******************************************************************************
* Step 5:
* Go through each cell and calculate the minimum and
* maximum capillary pressure possible in the cell, given poro,
* perm and the J-function for the cell. This depends on the
* J-function in that they represent all possible saturations,
* ie. we do not want to extrapolate the J-functions (but we might
* have to do that later in the computations).
*
* The user-supplied surface tension is ignored until
* the final output of results.
*/
helper.calculateMinMaxCapillaryPressure();
/***************************************************************************
* Step 6:
* Upscale capillary pressure function.
*
* This is upscaled in advance in order to be able to have uniformly distributed
* saturation points for which upscaling is performed.
*
* Capillary pressure points are chosen heuristically in order to
* ensure largest saturation interval between two saturation points
* is 1/500 of the saturation interval. Monotone cubic interpolation
* will be used afterwards for accessing the tabulated values.
*/
helper.upscaleCapillaryPressure();
/*****************************************************************************
* Step 7:
* Upscale single phase permeability
* This uses the PERMX in the eclipse file as data, and upscales using
* fixed boundary (no-flow) conditions
*
* In an MPI-environment, this is only done on the master node.
*/
helper.upscaleSinglePhasePermeability();
/*****************************************************************
* Step 8:
*
* Loop through a given number of uniformly distributed saturation points
* and upscale relative permeability for each of them.
* a: Make vector of capillary pressure points corresponding to uniformly
* distributed water saturation points between saturation endpoints.
* b: Loop over capillary pressure points
* 1) Loop over all cells to find the saturation value given the
* capillary pressure found in (a). Given the saturation value, find the
* phase permeability in the cell given input relperm curve and input
* permeability values.
* 2) Upscale phase permeability for the geometry.
* c: Calculate relperm tensors from all the phase perm tensors.
*/
double avg_upscaling_time_pr_point;
std::tie(timeused_upscale_wallclock, avg_upscaling_time_pr_point) =
helper.upscalePermeability(mpi_rank);
/*
* Step 8c: Make relperm values from phaseperms
* (only master node can do this)
*/
std::array<vector<vector<double>>,2> RelPermValues;
RelPermValues[0] = helper.getRelPerm(0);
if (helper.upscaleBothPhases)
RelPermValues[1] = helper.getRelPerm(1);
/*********************************************************************************
* Step 9
*
* Output results to stdout and optionally to file. Note, we only output to
 * file if the '-output' option has been set, as this option is an
* empty string by default.
*/
if (helper.isMaster) {
stringstream outputtmp;
// Print a table of all computed values:
outputtmp << "######################################################################" << endl;
outputtmp << "# Results from upscaling relative permeability."<< endl;
outputtmp << "#" << endl;
#if HAVE_MPI
outputtmp << "# (MPI-version)" << endl;
#endif
time_t now = std::time(NULL);
outputtmp << "# Finished: " << asctime(localtime(&now));
utsname hostname; uname(&hostname);
outputtmp << "# Hostname: " << hostname.nodename << endl;
outputtmp << "#" << endl;
outputtmp << "# Eclipse file: " << ECLIPSEFILENAME << endl;
outputtmp << "# cells: " << helper.tesselatedCells << endl;
outputtmp << "# Pore volume: " << helper.poreVolume << endl;
outputtmp << "# volume: " << helper.volume << endl;
outputtmp << "# Porosity: " << helper.poreVolume/helper.volume << endl;
outputtmp << "#" << endl;
if (! helper.anisotropic_input) {
for (int i=0; i < stone_types ; ++i) {
outputtmp << "# Stone " << i+1 << ": " << JfunctionNames[i] << " (" << helper.InvJfunctions[i].getSize() << " points)" << endl;
}
outputtmp << "# jFunctionCurve: " << options["jFunctionCurve"] << endl;
if (!helper.upscaleBothPhases) outputtmp << "# relPermCurve: " << options["relPermCurve"] << endl;
}
        else { // anisotropic input: the curve files supplied on the command line are not J-functions (but the vector JfunctionNames is still used)
for (int i=0; i < stone_types ; ++i) {
outputtmp << "# Stone " << i+1 << ": " << JfunctionNames[i] << " (" << helper.Krfunctions[0][0][i].getSize() << " points)" << endl;
}
}
outputtmp << "#" << endl;
outputtmp << "# Timings: Tesselation: " << timeused_tesselation << " secs" << endl;
outputtmp << "# Upscaling: " << timeused_upscale_wallclock << " secs";
#ifdef HAVE_MPI
outputtmp << " (wallclock time)" << endl;
outputtmp << "# " << avg_upscaling_time_pr_point << " secs pr. saturation point" << endl;
outputtmp << "# MPI-nodes: " << mpi_nodecount << endl;
// Single phase upscaling time is included here, in possibly a hairy way.
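        // Estimated serial time = avg time per saturation point * (points + 1 extra point for the
        // single-phase solve) + tesselation time, divided by the measured parallel wallclock plus
        // the parts that still run serially (one point for single-phase upscaling + tesselation).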
double speedup = (avg_upscaling_time_pr_point * (helper.points + 1) + timeused_tesselation)/(timeused_upscale_wallclock + avg_upscaling_time_pr_point + timeused_tesselation);
outputtmp << "# Speedup: " << speedup << ", efficiency: " << speedup/mpi_nodecount << endl;
#else
outputtmp << ", " << avg_upscaling_time_pr_point << " secs avg for " << helper.points << " runs" << endl;
#endif
outputtmp << "# " << endl;
outputtmp << "# Options used:" << endl;
outputtmp << "# Boundary conditions: ";
if (isFixed) outputtmp << "Fixed (no-flow)" << endl;<|fim▁hole|> if (isPeriodic) outputtmp << "Periodic" << endl;
if (isLinear) outputtmp << "Linear" << endl;
outputtmp << "# points: " << options["points"] << endl;
outputtmp << "# maxPermContrast: " << options["maxPermContrast"] << endl;
outputtmp << "# minPerm: " << options["minPerm"] << endl;
outputtmp << "# minPoro: " << options["minPoro"] << endl;
outputtmp << "# surfaceTension: " << options["surfaceTension"] << " dynes/cm" << endl;
if (includeGravity) {
outputtmp << "# gravity: " << options["gravity"] << " m/s²" << endl;
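            // Note: the same command-line option ("waterDensity") holds the density value in both
            // cases; only the label printed below changes to gasDensity for gas-oil systems.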
if (owsystem) outputtmp << "# waterDensity: " << options["waterDensity"] << " g/cm³" << endl;
else outputtmp << "# gasDensity: " << options["waterDensity"] << " g/cm³" << endl;
outputtmp << "# oilDensity: " << options["oilDensity"] << " g/cm³" << endl;
}
else {
outputtmp << "# gravity: 0" << endl;
}
if (doInterpolate) {
outputtmp << "# interpolate: " << options["interpolate"] << " points" << endl;
}
outputtmp << "# " << endl;
outputtmp << "# Single phase permeability" << endl;
outputtmp << "# |Kxx Kxy Kxz| = " << helper.permTensor(0,0) << " " << helper.permTensor(0,1) << " " << helper.permTensor(0,2) << endl;
outputtmp << "# |Kyx Kyy Kyz| = " << helper.permTensor(1,0) << " " << helper.permTensor(1,1) << " " << helper.permTensor(1,2) << endl;
outputtmp << "# |Kzx Kzy Kzz| = " << helper.permTensor(2,0) << " " << helper.permTensor(2,1) << " " << helper.permTensor(2,2) << endl;
outputtmp << "# " << endl;
if (doInterpolate) {
outputtmp << "# NB: Data points shown are interpolated." << endl;
}
outputtmp << "######################################################################" << endl;
if (helper.upscaleBothPhases) {
string phase1, phase2;
if (owsystem) phase1="w"; else phase1="g";
phase2="o";
if (isFixed) {
outputtmp << "# Pc (Pa) " << helper.saturationstring << " Kr" << phase1 << "xx Kr" << phase1 << "yy Kr" << phase1 << "zz"
<< " Kr" << phase2 << "xx Kr" << phase2 << "yy Kr" << phase2 << "zz" << endl;
}
else if (isPeriodic || isLinear) {
outputtmp << "# Pc (Pa) " << helper.saturationstring << " Kr" << phase1 << "xx Kr" << phase1 << "yy Kr" << phase1 << "zz Kr"
<< phase1 << "yz Kr" << phase1 << "xz Kr" << phase1 << "xy Kr" << phase1 << "zy Kr" << phase1 << "zx Kr" << phase1 << "yx"
<< " Kr" << phase2 << "xx Kr" << phase2 << "yy Kr" << phase2 << "zz Kr"
<< phase2 << "yz Kr" << phase2 << "xz Kr" << phase2 << "xy Kr" << phase2 << "zy Kr" << phase2 << "zx Kr" << phase2 << "yx" << endl;
}
}
else {
if (isFixed) {
outputtmp << "# Pc (Pa) " << helper.saturationstring << " Krxx Kryy Krzz" << endl;
}
else if (isPeriodic || isLinear) {
outputtmp << "# Pc (Pa) " << helper.saturationstring << " Krxx Kryy Krzz Kryz Krxz Krxy Krzy Krzx Kryx" << endl;
}
}
vector<double> Pvalues = helper.pressurePoints;
// Multiply all pressures with the surface tension (potentially) supplied
// at the command line. This multiplication has been postponed to here
// to avoid division by zero and to avoid special handling of negative
// capillary pressure in the code above.
std::transform(Pvalues.begin(), Pvalues.end(), Pvalues.begin(),
std::bind1st(std::multiplies<double>(), surfaceTension));
vector<double> Satvalues = helper.WaterSaturation; //.get_fVector();
// If user wants interpolated output, do monotone cubic interpolation
// by modifying the data vectors that are to be printed
if (doInterpolate) {
// Find min and max for saturation values
double xmin = +DBL_MAX;
double xmax = -DBL_MAX;
for (unsigned int i = 0; i < Satvalues.size(); ++i) {
if (Satvalues[i] < xmin) {
xmin = Satvalues[i];
}
if (Satvalues[i] > xmax) {
xmax = Satvalues[i];
}
}
// Make uniform grid in saturation axis
vector<double> SatvaluesInterp;
for (int i = 0; i < interpolationPoints; ++i) {
SatvaluesInterp.push_back(xmin + ((double)i)/((double)interpolationPoints-1)*(xmax-xmin));
}
// Now capillary pressure and computed relperm-values must be viewed as functions
// of saturation, and then interpolated on the uniform saturation grid.
// Now overwrite existing Pvalues and relperm-data with interpolated data:
MonotCubicInterpolator PvaluesVsSaturation(Satvalues, Pvalues);
Pvalues.clear();
for (int i = 0; i < interpolationPoints; ++i) {
Pvalues.push_back(PvaluesVsSaturation.evaluate(SatvaluesInterp[i]));
}
for (int voigtIdx = 0; voigtIdx < helper.tensorElementCount; ++voigtIdx) {
MonotCubicInterpolator RelPermVsSaturation(Satvalues, RelPermValues[0][voigtIdx]);
RelPermValues[0][voigtIdx].clear();
for (int i=0; i < interpolationPoints; ++i) {
RelPermValues[0][voigtIdx].push_back(RelPermVsSaturation.evaluate(SatvaluesInterp[i]));
}
}
if (helper.upscaleBothPhases) {
for (int voigtIdx = 0; voigtIdx < helper.tensorElementCount; ++voigtIdx) {
MonotCubicInterpolator RelPermVsSaturation(Satvalues, RelPermValues[1][voigtIdx]);
RelPermValues[1][voigtIdx].clear();
for (int i=0; i < interpolationPoints; ++i) {
RelPermValues[1][voigtIdx].push_back(RelPermVsSaturation.evaluate(SatvaluesInterp[i]));
}
}
}
// Now also overwrite Satvalues
Satvalues.clear();
Satvalues = SatvaluesInterp;
}
// The code below does not care whether the data is interpolated or not.
const int fieldwidth = outputprecision + 8;
for (unsigned int i=0; i < Satvalues.size(); ++i) {
outputtmp << showpoint << setw(fieldwidth) << setprecision(outputprecision) << Pvalues[i];
outputtmp << showpoint << setw(fieldwidth) << setprecision(outputprecision) << Satvalues[i];
for (int voigtIdx = 0; voigtIdx < helper.tensorElementCount; ++voigtIdx) {
outputtmp << showpoint << setw(fieldwidth) << setprecision(outputprecision)
<< RelPermValues[0][voigtIdx][i];
}
if (helper.upscaleBothPhases) {
for (int voigtIdx = 0; voigtIdx < helper.tensorElementCount; ++voigtIdx) {
outputtmp << showpoint << setw(fieldwidth) << setprecision(outputprecision)
<< RelPermValues[1][voigtIdx][i];
}
}
outputtmp << endl;
}
cout << outputtmp.str();
if (options["output"] != "") {
cout << "Writing results to " << options["output"] << endl;
ofstream outfile;
outfile.open(options["output"].c_str(), ios::out | ios::trunc);
outfile << outputtmp.str();
outfile.close();
}
        // If both phases are upscaled and output is specified, create SWOF or SGOF files for Eclipse
if (options["output"] != "" && helper.upscaleBothPhases) {
cout << "Writing Eclipse compatible files to "
<< getEclipseOutputFile(options["output"],'X',owsystem?'W':'G')
<< ", " << getEclipseOutputFile(options["output"],'Y',owsystem?'W':'G')
<< " and " << getEclipseOutputFile(options["output"],'Z',owsystem?'W':'G')<< endl;
            for (int comp = 0; comp < 3; ++comp)
writeEclipseOutput(RelPermValues, Satvalues, Pvalues, options, comp, owsystem);
}
}
return 0;
}
catch (const std::exception &e) {
std::cerr << e.what() << "\n";
usageandexit();
}<|fim▁end|>
| |
<|file_name|>step_f32_relaxed.rs<|end_file_name|><|fim▁begin|>#include "step_f32.rs"<|fim▁hole|><|fim▁end|>
|
#pragma rs_fp_relaxed
|
<|file_name|>src.js<|end_file_name|><|fim▁begin|>'use strict';
module.exports = {
<|fim▁hole|> this.setProperty('src', v);
},
get: function () {
return this.getPropertyValue('src');
},
enumerable: true
};<|fim▁end|>
|
set: function (v) {
|
<|file_name|>codegen_does_not_add_save_changes_method_on_structs_without_primary_key.rs<|end_file_name|><|fim▁begin|>extern crate diesel;
use diesel::*;
table! {
users {
id -> Integer,
name -> VarChar,
hair_color -> VarChar,
}
}
#[derive(Queryable, AsChangeset)]
#[table_name = "users"]
pub struct User {
name: String,
hair_color: String,
}
<|fim▁hole|>fn main() {
let connection = PgConnection::establish("").unwrap();
let mut user = User {
name: "Sean".to_string(),
hair_color: "black".to_string(),
};
user.save_changes(&connection);
}<|fim▁end|>
| |
<|file_name|>auto_restart_configuration.py<|end_file_name|><|fim▁begin|>from bitmovin.utils import Serializable
class AutoRestartConfiguration(Serializable):
def __init__(self, segments_written_timeout: float = None, bytes_written_timeout: float = None,
frames_written_timeout: float = None, hls_manifests_update_timeout: float = None,
dash_manifests_update_timeout: float = None, schedule_expression: str = None):
super().__init__()
self.segmentsWrittenTimeout = segments_written_timeout
self.bytesWrittenTimeout = bytes_written_timeout<|fim▁hole|> self.scheduleExpression = schedule_expression<|fim▁end|>
|
self.framesWrittenTimeout = frames_written_timeout
self.hlsManifestsUpdateTimeout = hls_manifests_update_timeout
self.dashManifestsUpdateTimeout = dash_manifests_update_timeout
|
<|file_name|>charset.py<|end_file_name|><|fim▁begin|># uncompyle6 version 2.9.10
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10)
# [GCC 6.2.0 20161005]
# Embedded file name: charset.py
__all__ = [
'Charset',
'add_alias',
'add_charset',
'add_codec']
import codecs
import email.base64mime
import email.quoprimime
from email import errors
from email.encoders import encode_7or8bit
QP = 1
BASE64 = 2
SHORTEST = 3
MISC_LEN = 7
DEFAULT_CHARSET = 'us-ascii'
CHARSETS = {'iso-8859-1': (
QP, QP, None),
'iso-8859-2': (
QP, QP, None),
'iso-8859-3': (
QP, QP, None),
'iso-8859-4': (
QP, QP, None),
'iso-8859-9': (
QP, QP, None),
'iso-8859-10': (
QP, QP, None),
'iso-8859-13': (
QP, QP, None),
'iso-8859-14': (
QP, QP, None),
'iso-8859-15': (
QP, QP, None),
'iso-8859-16': (
QP, QP, None),
'windows-1252': (
QP, QP, None),
'viscii': (
QP, QP, None),
'us-ascii': (None, None, None),
'big5': (
BASE64, BASE64, None),
'gb2312': (
BASE64, BASE64, None),
'euc-jp': (
BASE64, None, 'iso-2022-jp'),
'shift_jis': (
BASE64, None, 'iso-2022-jp'),
'iso-2022-jp': (
BASE64, None, None),
'koi8-r': (
BASE64, BASE64, None),
'utf-8': (
SHORTEST, BASE64, 'utf-8'),
'8bit': (
None, BASE64, 'utf-8')
}
ALIASES = {'latin_1': 'iso-8859-1',
'latin-1': 'iso-8859-1',
'latin_2': 'iso-8859-2',
'latin-2': 'iso-8859-2',
'latin_3': 'iso-8859-3',
'latin-3': 'iso-8859-3',
'latin_4': 'iso-8859-4',
'latin-4': 'iso-8859-4',
'latin_5': 'iso-8859-9',
'latin-5': 'iso-8859-9',
'latin_6': 'iso-8859-10',
'latin-6': 'iso-8859-10',
'latin_7': 'iso-8859-13',
'latin-7': 'iso-8859-13',
'latin_8': 'iso-8859-14',
'latin-8': 'iso-8859-14',
'latin_9': 'iso-8859-15',
'latin-9': 'iso-8859-15',
'latin_10': 'iso-8859-16',
'latin-10': 'iso-8859-16',
'cp949': 'ks_c_5601-1987',
'euc_jp': 'euc-jp',
'euc_kr': 'euc-kr',
'ascii': 'us-ascii'
}
CODEC_MAP = {'gb2312': 'eucgb2312_cn',
'big5': 'big5_tw',
'us-ascii': None
}
def add_charset(charset, header_enc=None, body_enc=None, output_charset=None):
"""Add character set properties to the global registry.
charset is the input character set, and must be the canonical name of a
character set.
Optional header_enc and body_enc is either Charset.QP for
quoted-printable, Charset.BASE64 for base64 encoding, Charset.SHORTEST for
the shortest of qp or base64 encoding, or None for no encoding. SHORTEST
is only valid for header_enc. It describes how message headers and
message bodies in the input charset are to be encoded. Default is no
encoding.
Optional output_charset is the character set that the output should be
in. Conversions will proceed from input charset, to Unicode, to the
output charset when the method Charset.convert() is called. The default
is to output in the same character set as the input.
Both input_charset and output_charset must have Unicode codec entries in
the module's charset-to-codec mapping; use add_codec(charset, codecname)
to add codecs the module does not know about. See the codecs module's
documentation for more information.
"""
if body_enc == SHORTEST:
raise ValueError('SHORTEST not allowed for body_enc')
CHARSETS[charset] = (
header_enc, body_enc, output_charset)
def add_alias(alias, canonical):
"""Add a character set alias.
alias is the alias name, e.g. latin-1
canonical is the character set's canonical name, e.g. iso-8859-1
"""
ALIASES[alias] = canonical
def add_codec(charset, codecname):
"""Add a codec that map characters in the given charset to/from Unicode.
charset is the canonical name of a character set. codecname is the name
of a Python codec, as appropriate for the second argument to the unicode()
built-in, or to the encode() method of a Unicode string.
"""
CODEC_MAP[charset] = codecname
class Charset:
"""Map character sets to their email properties.
This class provides information about the requirements imposed on email
for a specific character set. It also provides convenience routines for
converting between character sets, given the availability of the
applicable codecs. Given a character set, it will do its best to provide
information on how to use that character set in an email in an
RFC-compliant way.
Certain character sets must be encoded with quoted-printable or base64
when used in email headers or bodies. Certain character sets must be
converted outright, and are not allowed in email. Instances of this
module expose the following information about a character set:
input_charset: The initial character set specified. Common aliases
are converted to their `official' email names (e.g. latin_1
is converted to iso-8859-1). Defaults to 7-bit us-ascii.
header_encoding: If the character set must be encoded before it can be
used in an email header, this attribute will be set to
Charset.QP (for quoted-printable), Charset.BASE64 (for
base64 encoding), or Charset.SHORTEST for the shortest of
QP or BASE64 encoding. Otherwise, it will be None.
body_encoding: Same as header_encoding, but describes the encoding for the
mail message's body, which indeed may be different than the
header encoding. Charset.SHORTEST is not allowed for
body_encoding.
output_charset: Some character sets must be converted before the can be
used in email headers or bodies. If the input_charset is
one of them, this attribute will contain the name of the
charset output will be converted to. Otherwise, it will
be None.
input_codec: The name of the Python codec used to convert the
input_charset to Unicode. If no conversion codec is
necessary, this attribute will be None.
output_codec: The name of the Python codec used to convert Unicode
to the output_charset. If no conversion codec is necessary,
this attribute will have the same value as the input_codec.
"""
def __init__(self, input_charset=DEFAULT_CHARSET):
try:
if isinstance(input_charset, unicode):
input_charset.encode('ascii')
else:
input_charset = unicode(input_charset, 'ascii')
except UnicodeError:
raise errors.CharsetError(input_charset)
input_charset = input_charset.lower().encode('ascii')
if not (input_charset in ALIASES or input_charset in CHARSETS):
try:
input_charset = codecs.lookup(input_charset).name
except LookupError:
pass
self.input_charset = ALIASES.get(input_charset, input_charset)
henc, benc, conv = CHARSETS.get(self.input_charset, (
SHORTEST, BASE64, None))
if not conv:
conv = self.input_charset
self.header_encoding = henc
self.body_encoding = benc
self.output_charset = ALIASES.get(conv, conv)
self.input_codec = CODEC_MAP.get(self.input_charset, self.input_charset)
self.output_codec = CODEC_MAP.get(self.output_charset, self.output_charset)
return
def __str__(self):
return self.input_charset.lower()
__repr__ = __str__
def __eq__(self, other):
return str(self) == str(other).lower()
def __ne__(self, other):
return not self.__eq__(other)
def get_body_encoding(self):
"""Return the content-transfer-encoding used for body encoding.
This is either the string `quoted-printable' or `base64' depending on
the encoding used, or it is a function in which case you should call
the function with a single argument, the Message object being
encoded. The function should then set the Content-Transfer-Encoding
header itself to whatever is appropriate.
Returns "quoted-printable" if self.body_encoding is QP.
Returns "base64" if self.body_encoding is BASE64.
Returns "7bit" otherwise.
"""
if self.body_encoding == QP:
return 'quoted-printable'
else:
if self.body_encoding == BASE64:
return 'base64'
return encode_7or8bit
def convert(self, s):
"""Convert a string from the input_codec to the output_codec."""
if self.input_codec != self.output_codec:
return unicode(s, self.input_codec).encode(self.output_codec)
else:
return s
def to_splittable(self, s):
"""Convert a possibly multibyte string to a safely splittable format.
Uses the input_codec to try and convert the string to Unicode, so it
can be safely split on character boundaries (even for multibyte
characters).
Returns the string as-is if it isn't known how to convert it to
Unicode with the input_charset.
Characters that could not be converted to Unicode will be replaced
with the Unicode replacement character U+FFFD.
"""
if isinstance(s, unicode) or self.input_codec is None:
return s
else:
try:
return unicode(s, self.input_codec, 'replace')
except LookupError:
return s
return
def from_splittable(self, ustr, to_output=True):
"""Convert a splittable string back into an encoded string.
Uses the proper codec to try and convert the string from Unicode back
into an encoded format. Return the string as-is if it is not Unicode,
or if it could not be converted from Unicode.
Characters that could not be converted from Unicode will be replaced
with an appropriate character (usually '?').
If to_output is True (the default), uses output_codec to convert to an
encoded format. If to_output is False, uses input_codec.
"""
if to_output:
codec = self.output_codec
else:
codec = self.input_codec
if not isinstance(ustr, unicode) or codec is None:
return ustr
else:
try:
return ustr.encode(codec, 'replace')
except LookupError:
return ustr
return
def get_output_charset(self):
"""Return the output character set.
This is self.output_charset if that is not None, otherwise it is
self.input_charset.
"""
return self.output_charset or self.input_charset
def encoded_header_len(self, s):
"""Return the length of the encoded header string."""
cset = self.get_output_charset()
if self.header_encoding == BASE64:
return email.base64mime.base64_len(s) + len(cset) + MISC_LEN
else:
if self.header_encoding == QP:
return email.quoprimime.header_quopri_len(s) + len(cset) + MISC_LEN
if self.header_encoding == SHORTEST:
lenb64 = email.base64mime.base64_len(s)
lenqp = email.quoprimime.header_quopri_len(s)
return min(lenb64, lenqp) + len(cset) + MISC_LEN
return len(s)
def header_encode(self, s, convert=False):
"""Header-encode a string, optionally converting it to output_charset.
If convert is True, the string will be converted from the input
charset to the output charset automatically. This is not useful for
multibyte character sets, which have line length issues (multibyte
characters must be split on a character, not a byte boundary); use the
high-level Header class to deal with these issues. convert defaults
to False.
The type of encoding (base64 or quoted-printable) will be based on
self.header_encoding.
"""
cset = self.get_output_charset()
if convert:
s = self.convert(s)
if self.header_encoding == BASE64:
return email.base64mime.header_encode(s, cset)
else:
if self.header_encoding == QP:
return email.quoprimime.header_encode(s, cset, maxlinelen=None)
if self.header_encoding == SHORTEST:
lenb64 = email.base64mime.base64_len(s)
lenqp = email.quoprimime.header_quopri_len(s)
if lenb64 < lenqp:
return email.base64mime.header_encode(s, cset)
else:
return email.quoprimime.header_encode(s, cset, maxlinelen=None)
else:
return s
return None
def body_encode(self, s, convert=True):
"""Body-encode a string and convert it to output_charset.
If convert is True (the default), the string will be converted from
the input charset to output charset automatically. Unlike
header_encode(), there are no issues with byte boundaries and
multibyte charsets in email bodies, so this is usually pretty safe.
<|fim▁hole|> self.body_encoding.
"""
if convert:
s = self.convert(s)
if self.body_encoding is BASE64:
return email.base64mime.body_encode(s)
else:
if self.body_encoding is QP:
return email.quoprimime.body_encode(s)
return s<|fim▁end|>
|
The type of encoding (base64 or quoted-printable) will be based on
|
<|file_name|>1dlbp_tests.py<|end_file_name|><|fim▁begin|>import numpy as np
import matplotlib.pylab as plt
from numba import cuda, uint8, int32, uint32, jit
from timeit import default_timer as timer
@cuda.jit('void(uint8[:], int32, int32[:], int32[:])')<|fim▁hole|> i += neighborhood
for j in range(i - neighborhood, i):
if input[j] >= input[i]:
r += powers[j - i + neighborhood]
for j in range(i + 1, i + neighborhood + 1):
if input[j] >= input[i]:
r += powers[j - i + neighborhood - 1]
cuda.atomic.add(h, r, 1)
def extract_1dlbp_gpu(input, neighborhood, d_powers):
maxThread = 512
blockDim = maxThread
d_input = cuda.to_device(input)
hist = np.zeros(2 ** (2 * neighborhood), dtype='int32')
gridDim = (len(input) - 2 * neighborhood + blockDim) / blockDim
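    # enough thread blocks to cover every interior pixel (Python 2 integer division)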
d_hist = cuda.to_device(hist)
lbp_kernel[gridDim, blockDim](d_input, neighborhood, d_powers, d_hist)
d_hist.to_host()
return hist
def extract_1dlbp_gpu_debug(input, neighborhood, powers, res):
maxThread = 512
blockDim = maxThread
gridDim = (len(input) - 2 * neighborhood + blockDim) / blockDim
for block in range(0, gridDim):
for thread in range(0, blockDim):
r = 0
i = blockDim * block + thread
if i < input.shape[0] - 2 * neighborhood:
i += neighborhood
for j in range(i - neighborhood, i):
if input[j] >= input[i]:
r += powers[j - i + neighborhood]
for j in range(i + 1, i + neighborhood + 1):
if input[j] >= input[i]:
r += powers[j - i + neighborhood - 1]
res[r] += 1
return res
@jit("int32[:](uint8[:], int64, int32[:], int32[:])", nopython=True)
def extract_1dlbp_cpu_jit(input, neighborhood, powers, res):
maxThread = 512
blockDim = maxThread
gridDim = (len(input) - 2 * neighborhood + blockDim) / blockDim
for block in range(0, gridDim):
for thread in range(0, blockDim):
r = 0
i = blockDim * block + thread
if i < input.shape[0] - 2 * neighborhood:
i += neighborhood
for j in range(i - neighborhood, i):
if input[j] >= input[i]:
r += powers[j - i + neighborhood]
for j in range(i + 1, i + neighborhood + 1):
if input[j] >= input[i]:
r += powers[j - i + neighborhood - 1]
res[r] += 1
return res
def extract_1dlbp_cpu(input, neighborhood, p):
"""
Extract the 1d lbp pattern on CPU
"""
res = np.zeros(1 << (2 * neighborhood))
for i in range(neighborhood, len(input) - neighborhood):
left = input[i - neighborhood : i]
right = input[i + 1 : i + neighborhood + 1]
both = np.r_[left, right]
        res[np.sum(p[both >= input[i]])] += 1
return res
X = np.arange(3, 7)
X = 10 ** X
neighborhood = 4
cpu_times = np.zeros(X.shape[0])
cpu_times_simple = cpu_times.copy()
cpu_times_jit = cpu_times.copy()
gpu_times = np.zeros(X.shape[0])
p = 1 << np.array(range(0, 2 * neighborhood), dtype='int32')
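# powers of two used to encode each neighbour comparison as one bit of the local binary pattern code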
d_powers = cuda.to_device(p)
for i, x in enumerate(X):
input = np.random.randint(0, 256, size = x).astype(np.uint8)
print "Length: {0}".format(x)
print "--------------"
start = timer()
h_cpu = extract_1dlbp_cpu(input, neighborhood, p)
cpu_times[i] = timer() - start
print "Finished on CPU: time: {0:3.5f}s".format(cpu_times[i])
res = np.zeros(1 << (2 * neighborhood), dtype='int32')
start = timer()
h_cpu_simple = extract_1dlbp_gpu_debug(input, neighborhood, p, res)
cpu_times_simple[i] = timer() - start
print "Finished on CPU (simple): time: {0:3.5f}s".format(cpu_times_simple[i])
res = np.zeros(1 << (2 * neighborhood), dtype='int32')
start = timer()
h_cpu_jit = extract_1dlbp_cpu_jit(input, neighborhood, p, res)
cpu_times_jit[i] = timer() - start
print "Finished on CPU (numba: jit): time: {0:3.5f}s".format(cpu_times_jit[i])
start = timer()
h_gpu = extract_1dlbp_gpu(input, neighborhood, d_powers)
gpu_times[i] = timer() - start
print "Finished on GPU: time: {0:3.5f}s".format(gpu_times[i])
print "All h_cpu == h_gpu: ", (h_cpu_jit == h_gpu).all() and (h_cpu_simple == h_cpu_jit).all() and (h_cpu == h_cpu_jit).all()
print ''
f = plt.figure(figsize=(10, 5))
plt.plot(X, cpu_times, label = "CPU")
plt.plot(X, cpu_times_simple, label = "CPU non-vectorized")
plt.plot(X, cpu_times_jit, label = "CPU jit")
plt.plot(X, gpu_times, label = "GPU")
plt.yscale('log')
plt.xscale('log')
plt.xlabel('input length')
plt.ylabel('time, sec')
plt.legend()
plt.show()<|fim▁end|>
|
def lbp_kernel(input, neighborhood, powers, h):
i = cuda.grid(1)
r = 0
if i < input.shape[0] - 2 * neighborhood:
|
<|file_name|>reception_data.py<|end_file_name|><|fim▁begin|>import glob
import operational_instruments
from astropy.io import fits
from numpy.fft import fft2, ifft2
import sewpy
from astropy import wcs
from astropy.table import Table
from astropy.io import ascii
from astropy.time import Time
import pytz
import numpy as np
import os
import time
import log_utilities
import datetime
import rome_telescopes_dict
import rome_filters_dict
import shutil
import api_tools
import socket
import config_parser
import pwd
# Django modules had to be removed to make API compatible and run outside
# the docker container. Timezone applied at the API endpoint.
#from django.utils import timezone
class QuantityLimits(object):
def __init__(self):
self.sky_background_median_limit = 10000.0
self.sky_background_std_limit = 200
self.sky_background_minimum = 100
self.sky_background_maximum = 5000
self.minimum_moon_sep = 10
self.minimum_number_of_stars = {'gp': 1000, 'rp': 2000, 'ip' : 4000}
self.maximum_ellipticity = 0.4
self.maximum_seeing = 2.0
class Image(object):
def __init__(self, image_directory, image_output_origin_directory,
image_name, logger ):
self.image_directory = image_directory
self.image_name = image_name
self.origin_directory = image_output_origin_directory
self.logger = logger
self.banzai_bpm = None
self.banzai_catalog = None
try:
images = fits.open(os.path.join(self.image_directory,self.image_name))
for image in images:
try :
if image.header['EXTNAME'] == 'BPM':
self.banzai_bpm = image
logger.info('Loaded the bad pixel mask')
if image.header['EXTNAME'] == 'SCI':
science_image = image
self.data = science_image.data
self.header = science_image.header
self.oldheader = science_image.header.copy()
logger.info('Loaded the science data')
if image.header['EXTNAME'] == 'CAT':
self.banzai_catalog = image
logger.info('Loaded the BANZAI catalogue')
except :
pass
except:
logger.error('I cannot load the image!')
# self.data = science_image.data
# self.header = science_image.header
# self.oldheader = science_image.header.copy()
self.camera = None
self.sky_level = None
self.sky_level_std = None
self.sky_minimum_level = None
self.sky_maximum_level = None
self.number_of_stars = None
self.ellipticity = None
self.seeing = None
self.quality_flags = []
self.thumbnail_box_size = 60
self.field_name = None
self.x_shift = 0
self.y_shift = 0
self.header_date_obs = '1986-04-04T00:00:00.00' #dummy value
self.header_telescope_site = None
self.header_dome_id = None
self.header_group_id = ''
self.header_track_id = ''
self.header_request_id = ''
self.header_object_name = None
self.header_moon_distance = None
self.header_moon_status = False
self.header_moon_fraction = None
self.header_airmass = None
self.header_seeing = None
self.header_ccd_temp = None
self.header_ellipticity = None
self.header_sky_level = None
self.header_sky_temperature = None
self.header_sky_measured_mag = None
self.find_camera()
self.find_object_and_field_name()
self.quantity_limits = QuantityLimits()
def process_the_image(self):
#self.extract_header_statistics()
#self.find_wcs_template()
#self.generate_sextractor_catalog()
#self.
#self.update_image_wcs()
#self.move_frame()
pass
def update_image_wcs(self):
try:
hdutemplate = fits.open(os.path.join(self.template_directory,self.template_name))
templateheader=hdutemplate[0].header
hdutemplate.close()
imageheader=self.header
#STORE OLD FITSHEADER AND ADJUST BASED ON TEMPLATE
imageheader['DPXCORR'] = self.x_shift
imageheader['DPYCORR'] = self.y_shift
imageheader['WCSRFCAT'] = templateheader['WCSRFCAT']
imageheader['RA'] = templateheader['RA']
imageheader['DEC'] = templateheader['DEC']
imageheader['CRPIX1'] = templateheader['CRPIX1']
imageheader['CRPIX2'] = templateheader['CRPIX2']
imageheader['CRVAL1'] = templateheader['CRVAL1']
imageheader['CRVAL2'] = templateheader['CRVAL2']
imageheader['CD1_1'] = templateheader['CD1_1']
imageheader['CD1_2'] = templateheader['CD1_2']
imageheader['CD2_1'] = templateheader['CD2_1']
imageheader['CD2_2'] = templateheader['CD2_2']
imageheader['CRPIX1'] = self.x_new_center
imageheader['CRPIX2'] = self.y_new_center
imageheader['CDELT1'] = templateheader['CDELT1']
imageheader['CDELT2'] = templateheader['CDELT2']
imageheader['CROTA1'] = templateheader['CROTA1']
imageheader['CROTA2'] = templateheader['CROTA2']
imageheader['SECPIX1'] = templateheader['SECPIX1']
imageheader['SECPIX2'] = templateheader['SECPIX2']
imageheader['WCSSEP'] = templateheader['WCSSEP']
self.logger.info('WCS header successfully updated')
except:
            self.logger.error('Failed to update the WCS header')
def find_wcs_template(self):
field_name = self.field_name.replace('ROME-','')
template_name = 'WCS_template_' + field_name + '.fits'
thumbnail_name = 'WCS_template_' + field_name + '.thumbnail'
origin_directory = self.origin_directory
template_directory = origin_directory + 'wcs_templates/'
self.template_name = template_name
self.template_directory = template_directory
try:
            coord = np.loadtxt(os.path.join(self.template_directory, thumbnail_name))
self.x_center_thumbnail_world=coord[0]
self.y_center_thumbnail_world=coord[1]
except:
self.x_center_thumbnail_world=self.header['CRVAL1']
self.y_center_thumbnail_world=self.header['CRVAL2']
self.logger.info('Extracted WCS information')
def find_camera(self):
try:
self.camera_name = self.image_name[9:13]
self.camera = operational_instruments.define_instrument(self.camera_name)
self.filter = self.header[self.camera.header_dictionnary['filter']]
self.logger.info('Successfully identified the associated camera, '+str(self.camera_name))
except:
self.logger.error('I do not recognise camera '+str(self.camera_name))
def find_object_and_field_name(self):
try:
self.object_name = self.header[self.camera.header_dictionnary['object']]
self.field_name = self.object_name
self.logger.info('Object name is : '+self.object_name)
self.logger.info('And so the assiocated field : '+self.field_name)
except:
            self.logger.error('I cannot recognize the object name and/or field name!')
def determine_the_output_directory(self):
try:
origin_directory = self.origin_directory
if len(self.quality_flags) == 0:
quality_directory = 'good/'
else:
quality_directory = 'bad/'
if 'ROME' in self.header_group_id:
mode_directory = 'rome/'
else:
mode_directory = 'rea/'
site_directory = self.header_telescope_site +'/'
the_filter = self.camera.filter_convention[self.filter]
filter_directory = the_filter +'/'
camera_directory = self.camera.name +'/'
field_directory = self.field_name +'/'
output_directory = os.path.join(origin_directory,quality_directory,
mode_directory,site_directory,
camera_directory, filter_directory,
field_directory)
self.output_directory = output_directory
self.catalog_directory = origin_directory.replace('images','catalog0')
if os.path.isdir(self.output_directory) == False:
os.makedirs(self.output_directory)
if os.path.isdir(self.catalog_directory) == False:
os.makedirs(self.catalog_directory)
self.logger.info('Successfully built the output directory : '+self.output_directory)
self.logger.info('Successfully built the catalog directory : '+self.catalog_directory)
except:
self.logger.error('I can not construct the output directory!')
def find_or_construct_the_output_directory(self):
try :
flag = os.path.isdir(self.output_directory)
if flag == True:
self.logger.info('Successfully found the output directory : '+self.output_directory)
else :
os.makedirs(self.output_directory)
self.logger.info('Successfully mkdir the output directory : '+self.output_directory)
except:
self.logger.error('I cannot find or mkdir the output directory!')
def find_WCS_offset(self):
try:
self.x_new_center,self.y_new_center,self.x_shift,self.y_shift = xycorr(os.path.join(self.template_directory,self.template_name), self.data, 0.4)
self.update_image_wcs()
self.x_shift = int(self.x_shift)
self.y_shift = int(self.y_shift)
self.logger.info('Successfully found the WCS correction')
except:
self.x_shift = 0
self.y_shift = 0
self.logger.error('I failed to find a WCS correction')
def generate_sextractor_catalog(self, config):
"""
        Extract a catalog from a WCS-recalibrated (!) image by running
        SExtractor through sewpy, so that an astropy-compliant output
        table is obtained together with logging.
"""
try:
extractor_parameters=['X_IMAGE','Y_IMAGE','BACKGROUND',
'ELLIPTICITY','FWHM_WORLD','X_WORLD',
'Y_WORLD','MAG_APER','MAGERR_APER']
extractor_config={'DETECT_THRESH':2.5,
'ANALYSIS_THRESH':2.5,
'FILTER':'Y',
'DEBLEND_NTHRESH':32,
'DEBLEND_MINCOUNT':0.005,
'CLEAN':'Y',
'CLEAN_PARAM':1.0,
'PIXEL_SCALE':self.camera.pix_scale,
'SATUR_LEVEL':self.camera.ADU_high,
'PHOT_APERTURES':10,
'DETECT_MINAREA':7,
'GAIN':self.camera.gain,
'SEEING_FWHM':self.header_seeing,
'BACK_FILTERSIZE':3}
sew = sewpy.SEW(workdir=os.path.join(self.image_directory,'sewpy'),
sexpath=config['sextractor_path'],
params=extractor_parameters,
config=extractor_config)
sewoutput = sew(os.path.join(self.image_directory,self.image_name))
#APPEND JD, ATTEMPTING TO CALIBRATE MAGNITUDES..
catalog=sewoutput['table']
tobs=Time([self.header['DATE-OBS']],format='isot',scale='utc')
calibration_pars={'gp':[1.0281267,29.315002],'ip':[1.0198562,28.13711],'rp':[1.020762,28.854443]}
if self.filter!=None:
calmag=catalog['MAG_APER']*calibration_pars[self.filter][0]+calibration_pars[self.filter][1]
calmag[np.where(catalog['MAG_APER']==99.)]=99.
catalog['MAG_APER_CAL']=calmag
catalog['FILTER']=[self.filter]*len(calmag)
catalog['JD']=np.ones(len(catalog))*tobs.jd
#APPEND JD AND CALIBRATED MAGNITUDES...
#ROUGH CALIBRATION TO VPHAS+
#gmag=instmag*1.0281267+29.315002
#imag=instmag*1.0198562+28.13711
#rmag=instmag*1.020762+28.854443
self.compute_stats_from_catalog(catalog)
self.catalog = catalog
#ascii.write(catalog,os.path.join('./',catname))
self.logger.info('Sextractor catalog successfully produced')
except:
self.logger.error('I cannot produce the Sextractor catalog!')
def create_image_control_region(self):
w = wcs.WCS(self.header)
py,px = w.wcs_world2pix(self.x_center_thumbnail_world,
self.y_center_thumbnail_world,1)
py = int(py)
px = int(px)
try:
self.thumbnail=self.data[px-self.thumbnail_box_size/2:px+self.thumbnail_box_size/2,py-self.thumbnail_box_size/2:py+self.thumbnail_box_size/2]
            self.logger.info('Thumbnail successfully produced around the expected position')
except:
self.thumbnail=np.zeros((self.thumbnail_box_size,self.thumbnail_box_size))
            self.logger.info('Could not extract thumbnail data; using an empty thumbnail instead')
def compute_stats_from_catalog(self,catalog):
try:
self.sky_level=np.median(catalog['BACKGROUND'])
self.sky_level_std=np.std(catalog['BACKGROUND'])
self.sky_minimum_level=np.percentile(catalog['BACKGROUND'],1)
self.sky_maximum_level=np.max(catalog['BACKGROUND'])
self.number_of_stars=len(catalog)
self.ellipticity=np.median(catalog['ELLIPTICITY'])
self.seeing=np.median(catalog['FWHM_WORLD']*3600)
self.logger.info('Image quality statistics well updated')
except:
self.logger.error('For some reason, I can not update the image quality statistics!')
def extract_header_statistics(self):
desired_quantities = [ key for key,value in self.__dict__.items() if 'header' in key]
for quantity in desired_quantities :
try:
dictionnary_key = quantity.replace('header_','')
setattr(self, quantity, self.header[self.camera.header_dictionnary[dictionnary_key]])
except:
pass
self.logger.info('Successfully obtained image header_quality statistics')
def assess_image_quality(self):
try:
self.check_background()
self.check_Moon()
self.check_Nstars()
self.check_ellipticity()
self.check_seeing()
self.logger.info('Quality flags well produced')
except:
self.logger.error('I can not assess the image quality, no quality flags produced!')
def check_background(self):
if self.sky_level:
if self.sky_level > self.quantity_limits.sky_background_median_limit:
self.quality_flags.append('High sky background')
else:
self.quality_flags.append('No sky level measured!')
if self.sky_level_std :
if self.sky_level_std > self.quantity_limits.sky_background_std_limit:
self.quality_flags.append('High sky background variations')
else:
self.quality_flags.append('No sky level variations measured!')
if self.sky_minimum_level:
if self.sky_minimum_level < self.quantity_limits.sky_background_minimum:
self.quality_flags.append('Low minimum background')
else:
self.quality_flags.append('No minimum sky level measured!')
        if self.sky_maximum_level:
if self.sky_maximum_level > self.quantity_limits.sky_background_maximum:
self.quality_flags.append('High maximum background')
else:
self.quality_flags.append('No maximum sky level measured!')
def check_Moon(self):
if self.header_moon_distance:
if self.header_moon_distance < self.quantity_limits.minimum_moon_sep:
self.quality_flags.append('Moon too close')
else:
self.quality_flags.append('No Moon distance measured!')
def check_Nstars(self):
if self.number_of_stars:
if self.number_of_stars < self.quantity_limits.minimum_number_of_stars[self.filter]:
self.quality_flags.append('Low number of stars')
else:
self.quality_flags.append('No stars measured!')
def check_ellipticity(self):
if self.ellipticity:
if self.ellipticity > self.quantity_limits.maximum_ellipticity:
self.quality_flags.append('High ellipticity')
else:
self.quality_flags.append('No ellipticity measured!')
def check_seeing(self):
if self.seeing:
if self.seeing > self.quantity_limits.maximum_seeing:
self.quality_flags.append('Bad seeing')
else:
self.quality_flags.append('No seeing measured!')
def check_if_image_in_database(self):
return None
def ingest_the_image_in_the_database(self,config):
quality_flag = ' ; '.join(self.quality_flags)
observing_date = datetime.datetime.strptime(self.header_date_obs,'%Y-%m-%dT%H:%M:%S.%f')
observing_date = observing_date.replace(tzinfo=pytz.UTC)
observing_date = observing_date.strftime("%Y-%m-%dT%H:%M:%S")
try:
telescope = self.header_telescope_site + self.header_dome_id
telescope_name = rome_telescopes_dict.telescope_dict[telescope]
except:
telescope_name = ''
try:
camera_filter = rome_filters_dict.filter_dict[self.filter]
except:
camera_filter = ''
try:
moon_status_dictionnary = {'UP':True,'DOWN':False}
moon_status = moon_status_dictionnary[self.header_moon_status]
except:
moon_status = False
params = {'field_name': self.field_name,
'image_name': self.image_name,
'date_obs': observing_date,
'timestamp': datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S"),
'tel': telescope_name,
'inst': self.camera_name,
'filt': camera_filter,
'grp_id': self.header_group_id,
'track_id': self.header_track_id,
'req_id': self.header_request_id,
'airmass': self.header_airmass,
'avg_fwhm': self.seeing,
'avg_sky': self.sky_level,
'avg_sigsky': self.sky_level_std,
'moon_sep': self.header_moon_distance,
'moon_phase': self.header_moon_fraction,
'moon_up': moon_status,
'elongation': self.ellipticity,
'nstars': self.number_of_stars,
'ztemp': self.header_ccd_temp,
'shift_x': int(self.x_shift),
'shift_y': int(self.y_shift),
'quality': quality_flag}
try :
message = api_tools.check_image_in_db(config,params,
testing=config['testing'],
verbose=config['verbose'])
self.logger.info('Image known to the DB? '+str(message))
if 'true' in str(message).lower():
response = api_tools.submit_image_record(config,params,
testing=config['testing'],
verbose=config['verbose'])
if 'success' in str(response).lower():
ingest_success = True
self.logger.info('Image successfully updated in the DB')
else:
ingest_success = False
self.logger.warning('ERROR during update of image data in the DB:')
self.logger.warning(str(response))
else:
response = api_tools.submit_image_record(config,params,
testing=config['testing'],
verbose=config['verbose'])
if 'success' in str(response).lower():
ingest_success = True
self.logger.info('Image successfully ingested into the DB')
else:
ingest_success = False
self.logger.warning('ERROR while ingesting a new image into the DB:')
self.logger.warning(str(response))
except:
ingest_success = False
self.logger.warning('Image NOT ingested or updated in the DB, something went really wrong!')
return ingest_success
def class_the_image_in_the_directory(self):
#import pdb; pdb.set_trace()
try :
new_hdul = fits.HDUList()
calibrated_image = fits.ImageHDU(self.data, header=self.header, name='calibrated')
thumbnail_image = fits.ImageHDU(self.thumbnail, header=self.header, name='thumbnail')
original_header = fits.PrimaryHDU(header=self.oldheader)
new_hdul.append(calibrated_image)
new_hdul.append(thumbnail_image)
new_hdul.append(original_header)
if self.banzai_catalog:
new_hdul.append(self.banzai_catalog)
if self.banzai_bpm:
new_hdul.append(self.banzai_bpm)
new_hdul.writeto(self.output_directory+self.image_name, overwrite=True)
self.logger.info('Image '+self.image_name+' successfully place in the directory '+self.output_directory)
sorting_success = True
except :
            self.logger.error('Something went wrong when moving the image to the directory!')
sorting_success = False
return sorting_success
def class_the_catalog_in_the_directory(self):
try:
catname=self.image_name.replace('.fits','.cat')
ascii.write(self.catalog,os.path.join(self.catalog_directory,catname),overwrite=True)
self.logger.info('Catalog successfully moved to the catalog directory')
except:
self.logger.error('The catalog cannot be written to the good directory!')
def find_frames_to_process(new_frames_directory, logger):
IncomingList = [i for i in os.listdir(new_frames_directory) if ('.fits' in i) and ('.fz' not in i)]
if len(IncomingList) == 0 :
return
else :
logger.info('I found '+str(len(IncomingList))+' frames to process')
return IncomingList
def read_config():
"""Function to read the XML configuration file for Obs_Control"""
host_name = socket.gethostname()
userid = pwd.getpwuid( os.getuid() ).pw_name
if 'rachel' in str(host_name).lower():
config_file_path = os.path.join('/Users/rstreet/.robonet_site/reception_config.xml')
elif 'einstein' in str(host_name).lower() and userid == 'robouser':
config_file_path = '/data/romerea/configs/reception_config.xml'
else:
config_file_path = os.path.join('/home/',userid,'.robonet_site','reception_config.xml')
if os.path.isfile(config_file_path) == False:
raise IOError('Cannot find configuration file, looking for:'+config_file_path)
config = config_parser.read_config(config_file_path)
for key, value in config.items():
if str(value).lower() == 'true':
config[key] = True
elif str(value).lower() == 'false':
config[key] = False
return config
def process_new_images():
"""Main driver routine for reception_data
Designed to process all incoming images and extract information from the
image header as well as quick photometry of the image, for later use
in quality assessment.
"""
config = read_config()
    logger = log_utilities.start_day_log(config, 'reception', console=False)
logger.info("Testing mode: "+repr(config['testing']))
logger.info("Verbosity mode: "+repr(config['verbose']))
NewFrames = find_frames_to_process(config['new_frames_directory'], logger)
if os.path.isdir(os.path.join(config['new_frames_directory'],'Processed')) == False:
os.makedirs(os.path.join(config['new_frames_directory'],'Processed'))
if NewFrames :
for newframe in NewFrames :
start = time.time()
newframe = newframe.replace(config['new_frames_directory'], '')
logger.info('')
logger.info('Working on frame: '+newframe)
image = Image(config['new_frames_directory'],
config['image_output_origin_directory'],
newframe, logger)
image.extract_header_statistics()
image.find_wcs_template()
image.create_image_control_region()
image.find_WCS_offset()
image.generate_sextractor_catalog(config)
image.assess_image_quality()
image.determine_the_output_directory()
image.find_or_construct_the_output_directory()
success = image.ingest_the_image_in_the_database(config)
if success == True:
image.class_the_catalog_in_the_directory()
sorting_success = image.class_the_image_in_the_directory()
if sorting_success == True:
src = os.path.join(config['new_frames_directory'],newframe)
dest = os.path.join(config['new_frames_directory'],'Processed',newframe)
shutil.move(src,dest)
logger.info('Successfully moved the frame in the Processed directory!')
else:
logger.info('NOT successfully moved the frame in the Processed directory!')
pass
if success == False:
logger.warning('The image cannot be update/ingest in the DB, aborting this frame! ')
log_utilities.end_day_log(logger)
else :
logger.info('')
logger.info('No frames to treat, halting!')
log_utilities.end_day_log(logger)
def convolve(image, psf, ft_psf=None, ft_image=None, no_ft=None, correlate=None, auto_correlation=None):
"""
NAME:
CONVOLVE
PURPOSE:
Convolution of an image with a Point Spread Function (PSF)
EXPLANATION:
The default is to compute the convolution using a product of
Fourier transforms (for speed).
CALLING SEQUENCE:
imconv = convolve( image1, psf, FT_PSF = psf_FT )
or:
correl = convolve( image1, image2, /CORREL )
or:
correl = convolve( image, /AUTO )
INPUTS:
image = 2-D np.array (matrix) to be convolved with psf
psf = the Point Spread Function, (size < or = to size of image).
OPTIONAL INPUT KEYWORDS:
FT_PSF = passes out/in the Fourier transform of the PSF,
(so that it can be re-used the next time function is called).
FT_IMAGE = passes out/in the Fourier transform of image.
/CORRELATE uses the np.conjugate of the Fourier transform of PSF,
to compute the cross-correlation of image and PSF,
(equivalent to IDL function convol() with NO rotation of PSF)
/AUTO_CORR computes the auto-correlation function of image using FFT.
/NO_FT overrides the use of FFT, using IDL function convol() instead.
(then PSF is rotated by 180 degrees to give same result)
METHOD:
When using FFT, PSF is centered & expanded to size of image.
HISTORY:
written, Frank Varosi, NASA/GSFC 1992.
Appropriate precision type for result depending on input image
Markus Hundertmark February 2006
Fix the bug causing the recomputation of FFT(psf) and/or FFT(image)
Sergey Koposov December 2006
"""
n_params = 2
psf_ft = ft_psf
imft = ft_image
noft = no_ft
auto = auto_correlation
sp = np.array(np.shape(psf_ft))
sif = np.array(np.shape(imft))
sim = np.array(np.shape(image))
sc = sim / 2
npix = np.array(image, copy=0).size
if image.ndim!=2 or noft!=None:
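        # NOTE: this non-FFT fallback still calls IDL-style helpers (message, convol, rotate)
        # that are not defined in this module, so it only works if they are provided elsewhere.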
if (auto is not None):
message("auto-correlation only for images with FFT", inf=True)
return image
else:
if (correlate is not None):
return convol(image, psf)
else:
return convol(image, rotate(psf, 2))
    if imft is None or imft.ndim != 2 or imft.shape != image.shape: #add the type check
imft = ifft2(image)
if (auto is not None):
return np.roll(np.roll(npix * np.real(fft2(imft * np.conjugate(imft))), sc[0], 0),sc[1],1)
    if (ft_psf is None or ft_psf.ndim != 2 or ft_psf.shape != image.shape or
        ft_psf.dtype != image.dtype):
sp = np.array(np.shape(psf))
loc = np.maximum((sc - sp / 2), 0) #center PSF in new np.array,
s = np.maximum((sp / 2 - sc), 0) #handle all cases: smaller or bigger
l = np.minimum((s + sim - 1), (sp - 1))
psf_ft = np.conjugate(image) * 0 #initialise with correct size+type according
#to logic of conj and set values to 0 (type of ft_psf is conserved)
psf_ft[loc[1]:loc[1]+l[1]-s[1]+1,loc[0]:loc[0]+l[0]-s[0]+1] = \
psf[s[1]:(l[1])+1,s[0]:(l[0])+1]
psf_ft = ifft2(psf_ft)
if (correlate is not None):
conv = npix * np.real(fft2(imft * np.conjugate(psf_ft)))
else:
conv = npix * np.real(fft2(imft * psf_ft))
sc = sc + (sim % 2) #shift correction for odd size images.
return np.roll(np.roll(conv, sc[0],0), sc[1],1)
def correl_shift(reference, image):
"""This function calculates the revised central pixel coordinate for
imgdata based on a cross correlation with refdata
"""
xcen = np.shape(reference)[0] / 2
ycen = np.shape(reference)[1] / 2
correl = convolve(np.matrix(reference),
np.matrix(image), correlate=1)
xshift, yshift = np.unravel_index(np.argmax(correl), np.shape(correl))
half = np.shape(correl)[0] / 2
return yshift-ycen,xshift-xcen
def xycorr(pathref, image, edgefraction):
"""
For a given reference image path pathref and a given image path pathimg
the central part with an edge length of edgefraction times full edge
<|fim▁hole|> template_data = hduref[0].data
noff = np.shape(template_data)
if noff != np.shape(image):
hduref.close()
return 0, 0, 0, 0
xcen = np.shape(template_data)[0] / 2
ycen = np.shape(template_data)[1] / 2
halfx = int(edgefraction * float(noff[0]))/2
halfy = int(edgefraction * float(noff[1]))/2
reduce_template = template_data[
xcen - halfx:xcen + halfx, ycen - halfy:ycen + halfy]
reduce_image = image[
xcen - halfx:xcen + halfx, ycen - halfy:ycen + halfy]
xc, yc = correl_shift(reduce_template, reduce_image)
hduref.close()
return -xc + xcen , -yc + ycen, xc, yc
if __name__ == '__main__':
process_new_images()<|fim▁end|>
|
length is used to correlate a revised central pixel position
"""
hduref = fits.open(pathref)
|
<|file_name|>0022_auto_20161212_0008.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-12-12 07:08
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):<|fim▁hole|> ]
operations = [
migrations.RenameField(
model_name='learningword',
old_name='updated_date',
new_name='update_date',
),
]<|fim▁end|>
|
dependencies = [
('mords_api', '0021_learningword'),
|
<|file_name|>simple.py<|end_file_name|><|fim▁begin|># Copyright (C) 2015 Custodia Project Contributors - see LICENSE file
import json
from six import string_types<|fim▁hole|>from custodia.message.common import InvalidMessage
from custodia.message.common import MessageHandler
class SimpleKey(MessageHandler):
"""Handles 'simple' messages"""
def parse(self, msg, name):
"""Parses a simple message
:param req: ignored
:param msg: the json-decoded value
:raises UnknownMessageType: if the type is not 'simple'
:raises InvalidMessage: if the message cannot be parsed or validated
"""
# On requests we imply 'simple' if there is no input message
if msg is None:
return
if not isinstance(msg, string_types):
raise InvalidMessage("The 'value' attribute is not a string")
self.name = name
self.payload = msg
def reply(self, output):
if self.name.endswith('/'):
# directory listings are pass-through with simple messages
return output
return json.dumps({'type': 'simple', 'value': output},
separators=(',', ':'))<|fim▁end|>
| |
<|file_name|>fabfile.py<|end_file_name|><|fim▁begin|>from fabric.api import local
<|fim▁hole|><|fim▁end|>
|
def html():
local('hovercraft -t ./sixfeetup_hovercraft formation_flask.rst ./build/')
|
<|file_name|>launch.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
import app from './app';
app.run(process.argv);
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// main.rs
// Copyright 2016 Alexander Altman
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::{collections, env, io};
extern crate konane;
use konane::{Game, Occupancy};
use konane::Position as Pos;
extern crate uuid;
use uuid::*;
#[macro_use]
extern crate error_chain;
extern crate image as piston_image;
extern crate gfx_core;
extern crate gfx_device_gl;
extern crate piston_window;
extern crate sprite;
extern crate drag_controller;
use piston_image::GenericImage;
use gfx_device_gl::{Factory as GLFactory, Resources as GLResources};
use piston_window::*;
use sprite::*;
use drag_controller::*;
#[macro_use]
extern crate clap;
extern crate rand;
use rand::{Rng, StdRng};
const TILE_SIZE: u32 = 75;
const WHITE_PIECE_DATA: &'static [u8] = include_bytes!("../resources/white_piece.png");
const BLACK_PIECE_DATA: &'static [u8] = include_bytes!("../resources/black_piece.png");
const EMPTY_PIECE_DATA: &'static [u8] = include_bytes!("../resources/empty_piece.png");<|fim▁hole|> .author(crate_authors!())
.about("The ancient polynesian game of kōnane")
.arg(clap::Arg::with_name("generate bash completions")
.short("G")
.long("gen-bash-completions")
.help("Generate a bash completion file to standard output"))
.setting(clap::AppSettings::ColoredHelp);
let matches = clap_app.clone().get_matches();
if matches.is_present("generate bash completions") {
clap_app.gen_completions_to(env::args().nth(0).expect("no executable name found"),
clap::Shell::Bash,
&mut io::stdout());
} else {
setup(matches).expect("kōnane encountered an error");
}
}
mod errors {
error_chain! {
types {
Error, ErrorKind, ChainErr, Result;
}
links {
::konane::errors::Error, ::konane::errors::ErrorKind, Game;
}
foreign_links {
::clap::Error, Clap, "clap error";
::uuid::ParseError, UUIDParse, "UUID parse error";
::std::io::Error, IO, "I/O error";
::piston_image::ImageError, PistonImage, "Piston engine image error";
::gfx_core::factory::CombinedError, GFXCombined, "GFX engine combined error";
}
errors {
PistonGlyph(inner: ::piston_window::GlyphError) {
description("Piston engine glyph error")
display("Piston engine glyph error: {:?}", inner)
}
}
}
impl From<::piston_window::GlyphError> for Error {
fn from(inner: ::piston_window::GlyphError) -> Error { ErrorKind::PistonGlyph(inner).into() }
}
}
struct GameContext<'a> {
args: clap::ArgMatches<'a>,
textures: SpriteTextures,
window: &'a mut PistonWindow,
drag_ctrl: &'a mut DragController,
scene: &'a mut Scene<Texture<GLResources>>,
sprite_map: &'a mut collections::HashMap<Pos, Uuid>,
game: &'a mut Game,
rng: &'a mut StdRng,
}
struct SpriteTextures {
white_piece: Texture<GLResources>,
black_piece: Texture<GLResources>,
empty_piece: Texture<GLResources>,
}
fn setup(matches: clap::ArgMatches) -> errors::Result<()> {
let mut window: PistonWindow =
try!(WindowSettings::new("kōnane", [TILE_SIZE * 10, TILE_SIZE * 10]).exit_on_esc(true).build());
let textures = SpriteTextures {
white_piece: try!(load_texture(WHITE_PIECE_DATA, &mut window.factory)),
black_piece: try!(load_texture(BLACK_PIECE_DATA, &mut window.factory)),
empty_piece: try!(load_texture(EMPTY_PIECE_DATA, &mut window.factory)),
};
let mut rng = try!(StdRng::new());
let cxt = GameContext {
args: matches,
textures: textures,
window: &mut window,
drag_ctrl: &mut DragController::new(),
scene: &mut Scene::new(),
sprite_map: &mut collections::HashMap::new(),
game: &mut if rng.gen() { Game::new_white() } else { Game::new_black() },
rng: &mut rng,
};
setup_scene(cxt).and_then(run)
}
fn load_texture(texture_data: &[u8], factory: &mut GLFactory) -> errors::Result<Texture<GLResources>> {
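  // Decode the embedded PNG, scale it to a single board tile, and upload it as a GPU texture.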
let texture_image = try!(piston_image::load_from_memory_with_format(texture_data,
piston_image::ImageFormat::PNG))
.resize(TILE_SIZE, TILE_SIZE, piston_image::Nearest);
let texture_buffer = texture_image.as_rgba8().cloned().unwrap_or_else(|| texture_image.to_rgba());
Ok(try!(Texture::from_image(factory, &texture_buffer, &TextureSettings::new())))
}
fn setup_scene(cxt: GameContext) -> errors::Result<GameContext> {
for x in 0..10u8 {
for y in 0..10u8 {
if (x + y) % 2 == 0 {
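        // left unimplemented in this snippet; presumably a white piece sprite would be placed on this tile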
} else {
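        // left unimplemented in this snippet; presumably a black piece sprite would be placed on this tile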
}
}
}
Ok(cxt)
}
fn run(cxt: GameContext) -> errors::Result<()> {
let mut events = cxt.window.events();
while let Some(event) = events.next(cxt.window) {
cxt.scene.event(&event);
}
Ok(())
}<|fim▁end|>
|
fn main() {
let mut clap_app = clap::App::new("kōnane")
.version(crate_version!())
|
<|file_name|>viewport_rule.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The [`@viewport`][at] at-rule and [`meta`][meta] element.
//!
//! [at]: https://drafts.csswg.org/css-device-adapt/#atviewport-rule
//! [meta]: https://drafts.csswg.org/css-device-adapt/#viewport-meta
use app_units::Au;
use context::QuirksMode;
use cssparser::CowRcStr;
use cssparser::{parse_important, AtRuleParser, DeclarationListParser, DeclarationParser, Parser};
use error_reporting::ContextualParseError;
use euclid::TypedSize2D;
use font_metrics::get_metrics_provider_for_product;
use media_queries::Device;
use parser::ParserContext;
use properties::StyleBuilder;
use rule_cache::RuleCacheConditions;
use selectors::parser::SelectorParseErrorKind;
use shared_lock::{SharedRwLockReadGuard, StylesheetGuards, ToCssWithGuard};
use std::borrow::Cow;
use std::cell::RefCell;
use std::fmt::{self, Write};
use std::iter::Enumerate;
use std::str::Chars;
use str::CssStringWriter;
use style_traits::viewport::{Orientation, UserZoom, ViewportConstraints, Zoom};
use style_traits::{CssWriter, ParseError, PinchZoomFactor, StyleParseErrorKind, ToCss};
use stylesheets::{Origin, StylesheetInDocument};
use values::computed::{Context, ToComputedValue};
use values::specified::{LengthOrPercentageOrAuto, NoCalcLength, ViewportPercentageLength};
/// Whether parsing and processing of `@viewport` rules is enabled.
#[cfg(feature = "servo")]
pub fn enabled() -> bool {
use servo_config::prefs::PREFS;
PREFS
.get("layout.viewport.enabled")
.as_boolean()
.unwrap_or(false)
}
/// Whether parsing and processing of `@viewport` rules is enabled.
#[cfg(not(feature = "servo"))]
pub fn enabled() -> bool {
false // Gecko doesn't support @viewport.
}
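// Builds the `ViewportDescriptor` enum, its discriminant values, and its `ToCss` impl from the descriptor list below.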
macro_rules! declare_viewport_descriptor {
( $( $variant_name: expr => $variant: ident($data: ident), )+ ) => {
declare_viewport_descriptor_inner!([] [ $( $variant_name => $variant($data), )+ ] 0);
};
}
macro_rules! declare_viewport_descriptor_inner {
(
[ $( $assigned_variant_name: expr =>
$assigned_variant: ident($assigned_data: ident) = $assigned_discriminant: expr, )* ]
[
$next_variant_name: expr => $next_variant: ident($next_data: ident),
$( $variant_name: expr => $variant: ident($data: ident), )*
]
$next_discriminant: expr
) => {
declare_viewport_descriptor_inner! {
[
$( $assigned_variant_name => $assigned_variant($assigned_data) = $assigned_discriminant, )*
$next_variant_name => $next_variant($next_data) = $next_discriminant,
]
[ $( $variant_name => $variant($data), )* ]
$next_discriminant + 1
}
};
(
[ $( $assigned_variant_name: expr =>
$assigned_variant: ident($assigned_data: ident) = $assigned_discriminant: expr, )* ]
[ ]
$number_of_variants: expr
) => {
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
#[allow(missing_docs)]
pub enum ViewportDescriptor {
$(
$assigned_variant($assigned_data),
)+
}
const VIEWPORT_DESCRIPTOR_VARIANTS: usize = $number_of_variants;
impl ViewportDescriptor {
#[allow(missing_docs)]
pub fn discriminant_value(&self) -> usize {
match *self {
$(
ViewportDescriptor::$assigned_variant(..) => $assigned_discriminant,
)*
}
}
}
impl ToCss for ViewportDescriptor {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
match *self {
$(
ViewportDescriptor::$assigned_variant(ref val) => {
dest.write_str($assigned_variant_name)?;
dest.write_str(": ")?;
val.to_css(dest)?;
},
)*
}
dest.write_str(";")
}
}
};
}
declare_viewport_descriptor! {
"min-width" => MinWidth(ViewportLength),
"max-width" => MaxWidth(ViewportLength),
"min-height" => MinHeight(ViewportLength),
"max-height" => MaxHeight(ViewportLength),
"zoom" => Zoom(Zoom),
"min-zoom" => MinZoom(Zoom),
"max-zoom" => MaxZoom(Zoom),
"user-zoom" => UserZoom(UserZoom),
"orientation" => Orientation(Orientation),
}
trait FromMeta: Sized {
fn from_meta(value: &str) -> Option<Self>;
}
/// ViewportLength is a length | percentage | auto | extend-to-zoom
/// See:
/// * http://dev.w3.org/csswg/css-device-adapt/#min-max-width-desc
/// * http://dev.w3.org/csswg/css-device-adapt/#extend-to-zoom
#[allow(missing_docs)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
#[derive(Clone, Debug, PartialEq, ToCss)]
pub enum ViewportLength {
Specified(LengthOrPercentageOrAuto),
ExtendToZoom,
}
impl FromMeta for ViewportLength {
fn from_meta(value: &str) -> Option<ViewportLength> {
macro_rules! specified {
($value:expr) => {
ViewportLength::Specified(LengthOrPercentageOrAuto::Length($value))
};
}
Some(match value {
v if v.eq_ignore_ascii_case("device-width") => specified!(
NoCalcLength::ViewportPercentage(ViewportPercentageLength::Vw(100.))
),
v if v.eq_ignore_ascii_case("device-height") => specified!(
NoCalcLength::ViewportPercentage(ViewportPercentageLength::Vh(100.))
),
_ => match value.parse::<f32>() {
Ok(n) if n >= 0. => specified!(NoCalcLength::from_px(n.max(1.).min(10000.))),
Ok(_) => return None,
Err(_) => specified!(NoCalcLength::from_px(1.)),
},
})
}
}
impl ViewportLength {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
// we explicitly do not accept 'extend-to-zoom', since it is a UA
// internal value for <META> viewport translation
LengthOrPercentageOrAuto::parse_non_negative(context, input).map(ViewportLength::Specified)
}
}
impl FromMeta for Zoom {
fn from_meta(value: &str) -> Option<Zoom> {
Some(match value {
v if v.eq_ignore_ascii_case("yes") => Zoom::Number(1.),
v if v.eq_ignore_ascii_case("no") => Zoom::Number(0.1),
v if v.eq_ignore_ascii_case("device-width") => Zoom::Number(10.),
v if v.eq_ignore_ascii_case("device-height") => Zoom::Number(10.),
_ => match value.parse::<f32>() {
Ok(n) if n >= 0. => Zoom::Number(n.max(0.1).min(10.)),
Ok(_) => return None,
Err(_) => Zoom::Number(0.1),
},
})
}
}
impl FromMeta for UserZoom {
fn from_meta(value: &str) -> Option<UserZoom> {
Some(match value {
v if v.eq_ignore_ascii_case("yes") => UserZoom::Zoom,
v if v.eq_ignore_ascii_case("no") => UserZoom::Fixed,
v if v.eq_ignore_ascii_case("device-width") => UserZoom::Zoom,
v if v.eq_ignore_ascii_case("device-height") => UserZoom::Zoom,
_ => match value.parse::<f32>() {
Ok(n) if n >= 1. || n <= -1. => UserZoom::Zoom,
_ => UserZoom::Fixed,
},
})
}
}
struct ViewportRuleParser<'a, 'b: 'a> {
context: &'a ParserContext<'b>,
}
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
#[allow(missing_docs)]
pub struct ViewportDescriptorDeclaration {
pub origin: Origin,
pub descriptor: ViewportDescriptor,
pub important: bool,
}
impl ViewportDescriptorDeclaration {
#[allow(missing_docs)]
pub fn new(
origin: Origin,
descriptor: ViewportDescriptor,
important: bool,
) -> ViewportDescriptorDeclaration {
ViewportDescriptorDeclaration {
origin: origin,
descriptor: descriptor,
important: important,
}
}
}
impl ToCss for ViewportDescriptorDeclaration {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
self.descriptor.to_css(dest)?;
if self.important {
dest.write_str(" !important")?;
}
dest.write_str(";")
}
}
fn parse_shorthand<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<(ViewportLength, ViewportLength), ParseError<'i>> {
let min = ViewportLength::parse(context, input)?;
match input.try(|i| ViewportLength::parse(context, i)) {
Err(_) => Ok((min.clone(), min)),
Ok(max) => Ok((min, max)),
}
}
impl<'a, 'b, 'i> AtRuleParser<'i> for ViewportRuleParser<'a, 'b> {
type PreludeNoBlock = ();
type PreludeBlock = ();
type AtRule = Vec<ViewportDescriptorDeclaration>;
type Error = StyleParseErrorKind<'i>;
}
impl<'a, 'b, 'i> DeclarationParser<'i> for ViewportRuleParser<'a, 'b> {
type Declaration = Vec<ViewportDescriptorDeclaration>;
type Error = StyleParseErrorKind<'i>;
fn parse_value<'t>(
&mut self,
name: CowRcStr<'i>,
input: &mut Parser<'i, 't>,
) -> Result<Vec<ViewportDescriptorDeclaration>, ParseError<'i>> {
macro_rules! declaration {
($declaration:ident($parse:expr)) => {
declaration!($declaration(value: try!($parse(input)),
important: input.try(parse_important).is_ok()))
};
($declaration:ident(value: $value:expr, important: $important:expr)) => {
ViewportDescriptorDeclaration::new(
self.context.stylesheet_origin,
ViewportDescriptor::$declaration($value),
$important)
}
}
macro_rules! ok {
($declaration:ident($parse:expr)) => {
Ok(vec![declaration!($declaration($parse))])
};
(shorthand -> [$min:ident, $max:ident]) => {{
let shorthand = parse_shorthand(self.context, input)?;
let important = input.try(parse_important).is_ok();
Ok(vec![
declaration!($min(value: shorthand.0, important: important)),
declaration!($max(value: shorthand.1, important: important)),
])
}};
}
match_ignore_ascii_case! { &*name,
"min-width" => ok!(MinWidth(|i| ViewportLength::parse(self.context, i))),
"max-width" => ok!(MaxWidth(|i| ViewportLength::parse(self.context, i))),
"width" => ok!(shorthand -> [MinWidth, MaxWidth]),
"min-height" => ok!(MinHeight(|i| ViewportLength::parse(self.context, i))),
"max-height" => ok!(MaxHeight(|i| ViewportLength::parse(self.context, i))),
"height" => ok!(shorthand -> [MinHeight, MaxHeight]),
"zoom" => ok!(Zoom(Zoom::parse)),
"min-zoom" => ok!(MinZoom(Zoom::parse)),
"max-zoom" => ok!(MaxZoom(Zoom::parse)),
"user-zoom" => ok!(UserZoom(UserZoom::parse)),
"orientation" => ok!(Orientation(Orientation::parse)),
_ => Err(input.new_custom_error(SelectorParseErrorKind::UnexpectedIdent(name.clone()))),
}
}
}
/// A `@viewport` rule.
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
pub struct ViewportRule {
/// The declarations contained in this @viewport rule.
pub declarations: Vec<ViewportDescriptorDeclaration>,
}
/// Whitespace as defined by DEVICE-ADAPT § 9.2
// TODO: should we just use whitespace as defined by HTML5?
const WHITESPACE: &'static [char] = &['\t', '\n', '\r', ' '];
/// Separators as defined by DEVICE-ADAPT § 9.2
// need to use \x2c instead of ',' due to test-tidy
const SEPARATOR: &'static [char] = &['\x2c', ';'];
#[inline]
fn is_whitespace_separator_or_equals(c: &char) -> bool {
WHITESPACE.contains(c) || SEPARATOR.contains(c) || *c == '='
}
impl ViewportRule {
/// Parse a single @viewport rule.
///
/// TODO(emilio): This could use the `Parse` trait now.
pub fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
let parser = ViewportRuleParser { context };
let mut cascade = Cascade::new();
let mut parser = DeclarationListParser::new(input, parser);
while let Some(result) = parser.next() {
match result {
Ok(declarations) => {
for declarations in declarations {
cascade.add(Cow::Owned(declarations))
}
},
Err((error, slice)) => {
let location = error.location;
let error = ContextualParseError::UnsupportedViewportDescriptorDeclaration(
slice, error,
);
context.log_css_error(location, error);
},
}
}
Ok(ViewportRule {
declarations: cascade.finish(),
})
}
}
impl ViewportRule {
#[allow(missing_docs)]
pub fn from_meta(content: &str) -> Option<ViewportRule> {
let mut declarations = vec![None; VIEWPORT_DESCRIPTOR_VARIANTS];
macro_rules! push_descriptor {
($descriptor:ident($value:expr)) => {{
let descriptor = ViewportDescriptor::$descriptor($value);
let discriminant = descriptor.discriminant_value();
declarations[discriminant] = Some(ViewportDescriptorDeclaration::new(
Origin::Author,
descriptor,
false,
));
}};
}
let mut has_width = false;
let mut has_height = false;
let mut has_zoom = false;
let mut iter = content.chars().enumerate();
macro_rules! start_of_name {
($iter:ident) => {
$iter
.by_ref()
.skip_while(|&(_, c)| is_whitespace_separator_or_equals(&c))
.next()
};
}
while let Some((start, _)) = start_of_name!(iter) {
let property = ViewportRule::parse_meta_property(content, &mut iter, start);
if let Some((name, value)) = property {
macro_rules! push {
($descriptor:ident($translate:path)) => {
if let Some(value) = $translate(value) {
push_descriptor!($descriptor(value));
}
};
}
match name {
n if n.eq_ignore_ascii_case("width") => {
if let Some(value) = ViewportLength::from_meta(value) {
push_descriptor!(MinWidth(ViewportLength::ExtendToZoom));
push_descriptor!(MaxWidth(value));
has_width = true;
}
},
n if n.eq_ignore_ascii_case("height") => {
if let Some(value) = ViewportLength::from_meta(value) {
push_descriptor!(MinHeight(ViewportLength::ExtendToZoom));
push_descriptor!(MaxHeight(value));
has_height = true;
}
},
n if n.eq_ignore_ascii_case("initial-scale") => {
if let Some(value) = Zoom::from_meta(value) {
push_descriptor!(Zoom(value));
has_zoom = true;
}
},
n if n.eq_ignore_ascii_case("minimum-scale") => push!(MinZoom(Zoom::from_meta)),
n if n.eq_ignore_ascii_case("maximum-scale") => push!(MaxZoom(Zoom::from_meta)),
n if n.eq_ignore_ascii_case("user-scalable") => {
push!(UserZoom(UserZoom::from_meta))
},
_ => {},
}
}
}
// DEVICE-ADAPT § 9.4 - The 'width' and 'height' properties
// http://dev.w3.org/csswg/css-device-adapt/#width-and-height-properties
if !has_width && has_zoom {
if has_height {
push_descriptor!(MinWidth(ViewportLength::Specified(
LengthOrPercentageOrAuto::Auto
)));
push_descriptor!(MaxWidth(ViewportLength::Specified(
LengthOrPercentageOrAuto::Auto
)));
} else {
push_descriptor!(MinWidth(ViewportLength::ExtendToZoom));
push_descriptor!(MaxWidth(ViewportLength::ExtendToZoom));
}
}
let declarations: Vec<_> = declarations.into_iter().filter_map(|entry| entry).collect();
if !declarations.is_empty() {
Some(ViewportRule {
declarations: declarations,
})
} else {
None
}
}
fn parse_meta_property<'a>(
content: &'a str,
iter: &mut Enumerate<Chars<'a>>,
start: usize,
) -> Option<(&'a str, &'a str)> {
fn end_of_token(iter: &mut Enumerate<Chars>) -> Option<(usize, char)> {
iter.by_ref()
.skip_while(|&(_, c)| !is_whitespace_separator_or_equals(&c))
.next()
}
fn skip_whitespace(iter: &mut Enumerate<Chars>) -> Option<(usize, char)> {
iter.by_ref()
.skip_while(|&(_, c)| WHITESPACE.contains(&c))
.next()
}
// <name> <whitespace>* '='
let end = match end_of_token(iter) {
Some((end, c)) if WHITESPACE.contains(&c) => match skip_whitespace(iter) {
Some((_, c)) if c == '=' => end,
_ => return None,
},
Some((end, c)) if c == '=' => end,
_ => return None,
};
let name = &content[start..end];
// <whitespace>* <value>
let start = match skip_whitespace(iter) {
Some((start, c)) if !SEPARATOR.contains(&c) => start,
_ => return None,
};
let value = match end_of_token(iter) {
Some((end, _)) => &content[start..end],
_ => &content[start..],
};
Some((name, value))
}
}
impl ToCssWithGuard for ViewportRule {
// Serialization of ViewportRule is not specced.
fn to_css(&self, _guard: &SharedRwLockReadGuard, dest: &mut CssStringWriter) -> fmt::Result {
dest.write_str("@viewport { ")?;
let mut iter = self.declarations.iter();
iter.next().unwrap().to_css(&mut CssWriter::new(dest))?;
for declaration in iter {
dest.write_str(" ")?;
declaration.to_css(&mut CssWriter::new(dest))?;
}
dest.write_str(" }")
}
}
/// Computes the cascade precedence according to
/// <http://dev.w3.org/csswg/css-cascade/#cascade-origin>
fn cascade_precendence(origin: Origin, important: bool) -> u8 {
match (origin, important) {
(Origin::UserAgent, true) => 1,
(Origin::User, true) => 2,
(Origin::Author, true) => 3,
(Origin::Author, false) => 4,
(Origin::User, false) => 5,
(Origin::UserAgent, false) => 6,
}
}
impl ViewportDescriptorDeclaration {
fn higher_or_equal_precendence(&self, other: &ViewportDescriptorDeclaration) -> bool {
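        // A lower numeric value from cascade_precendence() means higher priority, hence the `<=` comparison below.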
let self_precedence = cascade_precendence(self.origin, self.important);
let other_precedence = cascade_precendence(other.origin, other.important);
self_precedence <= other_precedence
}
}
#[allow(missing_docs)]
pub struct Cascade {
declarations: Vec<Option<(usize, ViewportDescriptorDeclaration)>>,
count_so_far: usize,
}
#[allow(missing_docs)]
impl Cascade {
pub fn new() -> Self {
Cascade {
declarations: vec![None; VIEWPORT_DESCRIPTOR_VARIANTS],
count_so_far: 0,
}
}
pub fn from_stylesheets<'a, I, S>(
stylesheets: I,
guards: &StylesheetGuards,
device: &Device,
) -> Self
where
I: Iterator<Item = (&'a S, Origin)>,
S: StylesheetInDocument + 'static,
{
let mut cascade = Self::new();
for (stylesheet, origin) in stylesheets {
stylesheet.effective_viewport_rules(device, guards.for_origin(origin), |rule| {
for declaration in &rule.declarations {
cascade.add(Cow::Borrowed(declaration))
}
})
}
cascade
}
pub fn add(&mut self, declaration: Cow<ViewportDescriptorDeclaration>) {
let descriptor = declaration.descriptor.discriminant_value();
match self.declarations[descriptor] {
Some((ref mut order_of_appearance, ref mut entry_declaration)) => {
if declaration.higher_or_equal_precendence(entry_declaration) {
*entry_declaration = declaration.into_owned();
*order_of_appearance = self.count_so_far;
}
},
ref mut entry @ None => {
*entry = Some((self.count_so_far, declaration.into_owned()));
},
}
self.count_so_far += 1;
}
pub fn finish(mut self) -> Vec<ViewportDescriptorDeclaration> {
// sort the descriptors by order of appearance
self.declarations
.sort_by_key(|entry| entry.as_ref().map(|&(index, _)| index));
self.declarations
.into_iter()
.filter_map(|entry| entry.map(|(_, decl)| decl))
.collect()
}
}
/// Just a helper trait to be able to implement methods on ViewportConstraints.
pub trait MaybeNew {
/// Create a ViewportConstraints from a viewport size and a `@viewport`
/// rule.
fn maybe_new(
device: &Device,
rule: &ViewportRule,
quirks_mode: QuirksMode,
) -> Option<ViewportConstraints>;
}
impl MaybeNew for ViewportConstraints {
fn maybe_new(
device: &Device,
rule: &ViewportRule,
quirks_mode: QuirksMode,
) -> Option<ViewportConstraints> {
use std::cmp;
if rule.declarations.is_empty() {
return None;
}
let mut min_width = None;
let mut max_width = None;
let mut min_height = None;
let mut max_height = None;
let mut initial_zoom = None;
let mut min_zoom = None;
let mut max_zoom = None;
let mut user_zoom = UserZoom::Zoom;
let mut orientation = Orientation::Auto;
// collapse the list of declarations into descriptor values
for declaration in &rule.declarations {
match declaration.descriptor {
ViewportDescriptor::MinWidth(ref value) => min_width = Some(value),
ViewportDescriptor::MaxWidth(ref value) => max_width = Some(value),
ViewportDescriptor::MinHeight(ref value) => min_height = Some(value),
ViewportDescriptor::MaxHeight(ref value) => max_height = Some(value),
ViewportDescriptor::Zoom(value) => initial_zoom = value.to_f32(),
ViewportDescriptor::MinZoom(value) => min_zoom = value.to_f32(),
ViewportDescriptor::MaxZoom(value) => max_zoom = value.to_f32(),
ViewportDescriptor::UserZoom(value) => user_zoom = value,
ViewportDescriptor::Orientation(value) => orientation = value,
}
}
// TODO: return `None` if all descriptors are either absent or initial value
macro_rules! choose {
($op:ident, $opta:expr, $optb:expr) => {
match ($opta, $optb) {
(None, None) => None,
(a, None) => a,
(None, b) => b,
(Some(a), Some(b)) => Some(a.$op(b)),
}
};
}
macro_rules! min {
($opta:expr, $optb:expr) => {
choose!(min, $opta, $optb)
};
}
macro_rules! max {
($opta:expr, $optb:expr) => {
choose!(max, $opta, $optb)
};
}
// DEVICE-ADAPT § 6.2.1 Resolve min-zoom and max-zoom values
if min_zoom.is_some() && max_zoom.is_some() {
max_zoom = Some(min_zoom.unwrap().max(max_zoom.unwrap()))
}
// DEVICE-ADAPT § 6.2.2 Constrain zoom value to the [min-zoom, max-zoom] range
if initial_zoom.is_some() {
initial_zoom = max!(min_zoom, min!(max_zoom, initial_zoom));
}
// DEVICE-ADAPT § 6.2.3 Resolve non-auto lengths to pixel lengths
let initial_viewport = device.au_viewport_size();
let provider = get_metrics_provider_for_product();
let mut conditions = RuleCacheConditions::default();
let context = Context {
is_root_element: false,
// Note: DEVICE-ADAPT § 5. states that relative length values are
// resolved against initial values
builder: StyleBuilder::for_inheritance(device, None, None),
font_metrics_provider: &provider,
cached_system_font: None,
in_media_query: false,
quirks_mode: quirks_mode,
for_smil_animation: false,
for_non_inherited_property: None,
rule_cache_conditions: RefCell::new(&mut conditions),
};
// DEVICE-ADAPT § 9.3 Resolving 'extend-to-zoom'
let extend_width;
let extend_height;
if let Some(extend_zoom) = max!(initial_zoom, max_zoom) {
let scale_factor = 1. / extend_zoom;
extend_width = Some(initial_viewport.width.scale_by(scale_factor));
extend_height = Some(initial_viewport.height.scale_by(scale_factor));
} else {
extend_width = None;
extend_height = None;
}
macro_rules! to_pixel_length {
($value:ident, $dimension:ident, $extend_to:ident => $auto_extend_to:expr) => {
if let Some($value) = $value {
match *$value {
ViewportLength::Specified(ref length) => match *length {
LengthOrPercentageOrAuto::Length(ref value) => {
Some(Au::from(value.to_computed_value(&context)))
},
LengthOrPercentageOrAuto::Percentage(value) => {
Some(initial_viewport.$dimension.scale_by(value.0))
},
LengthOrPercentageOrAuto::Auto => None,
LengthOrPercentageOrAuto::Calc(ref calc) => calc
.to_computed_value(&context)
.to_used_value(Some(initial_viewport.$dimension)),
},
ViewportLength::ExtendToZoom => {
// $extend_to will be 'None' if 'extend-to-zoom' is 'auto'
match ($extend_to, $auto_extend_to) {
(None, None) => None,
(a, None) => a,
(None, b) => b,
(a, b) => cmp::max(a, b),
}
},
}
} else {
None
}
};
}
// DEVICE-ADAPT § 9.3 states that max-descriptors need to be resolved
// before min-descriptors.
// http://dev.w3.org/csswg/css-device-adapt/#resolve-extend-to-zoom
let max_width = to_pixel_length!(max_width, width, extend_width => None);
let max_height = to_pixel_length!(max_height, height, extend_height => None);
let min_width = to_pixel_length!(min_width, width, extend_width => max_width);
let min_height = to_pixel_length!(min_height, height, extend_height => max_height);<|fim▁hole|> ($min:ident, $max:ident, $initial:expr) => {
if $min.is_some() || $max.is_some() {
let max = match $max {
Some(max) => cmp::min(max, $initial),
None => $initial,
};
Some(match $min {
Some(min) => cmp::max(min, max),
None => max,
})
} else {
None
};
};
}
let width = resolve!(min_width, max_width, initial_viewport.width);
let height = resolve!(min_height, max_height, initial_viewport.height);
// DEVICE-ADAPT § 6.2.5 Resolve width value
let width = if width.is_none() && height.is_none() {
Some(initial_viewport.width)
} else {
width
};
let width = width.unwrap_or_else(|| match initial_viewport.height {
Au(0) => initial_viewport.width,
initial_height => {
let ratio = initial_viewport.width.to_f32_px() / initial_height.to_f32_px();
Au::from_f32_px(height.unwrap().to_f32_px() * ratio)
},
});
// DEVICE-ADAPT § 6.2.6 Resolve height value
let height = height.unwrap_or_else(|| match initial_viewport.width {
Au(0) => initial_viewport.height,
initial_width => {
let ratio = initial_viewport.height.to_f32_px() / initial_width.to_f32_px();
Au::from_f32_px(width.to_f32_px() * ratio)
},
});
Some(ViewportConstraints {
size: TypedSize2D::new(width.to_f32_px(), height.to_f32_px()),
// TODO: compute a zoom factor for 'auto' as suggested by DEVICE-ADAPT § 10.
initial_zoom: PinchZoomFactor::new(initial_zoom.unwrap_or(1.)),
min_zoom: min_zoom.map(PinchZoomFactor::new),
max_zoom: max_zoom.map(PinchZoomFactor::new),
user_zoom: user_zoom,
orientation: orientation,
})
}
}<|fim▁end|>
|
// DEVICE-ADAPT § 6.2.4 Resolve initial width and height from min/max descriptors
macro_rules! resolve {
|
<|file_name|>app.js<|end_file_name|><|fim▁begin|>'use strict';
angular
.module('personalWebsiteApp', [
'ngResource',
'ngRoute',
'ngAnimate',
'animatedBirdsDirective',
'scrollAnimDirective',
'swfTemplateDirective'
])
.config(['$routeProvider', function ($routeProvider) {
$routeProvider<|fim▁hole|> templateUrl: 'views/main.html',
controller: 'MainCtrl'
})
.when('/about', {
templateUrl: 'views/about.html'
})
.when('/resume', {
templateUrl: 'views/resume.html'
})
.when('/work', {
templateUrl: 'views/portfolio.html'
})
.when('/identity-design-case-study-rangleio', {
templateUrl: 'views/portfolio/identity-design-case-study-rangleio.html'
})
.when('/aid85', {
templateUrl: 'views/portfolio/aid85.html'
})
.otherwise({
redirectTo: '/'
});
}]);<|fim▁end|>
|
.when('/', {
|
<|file_name|>example_crbm.py<|end_file_name|><|fim▁begin|>from models.sampler import DynamicBlockGibbsSampler
from models.distribution import DynamicBernoulli
from models.optimizer import DynamicSGD
from utils.utils import prepare_frames
from scipy import io as matio<|fim▁hole|>
SIZE_BATCH = 10
EPOCHS = 100
SIZE_HIDDEN = 50
SIZE_VISIBLE = 150
# CRBM Constants
M_LAG_VISIBLE = 2
N_LAG_HIDDEN = 2
SIZE_LAG = max(M_LAG_VISIBLE, N_LAG_HIDDEN)+1
# load and prepare dataset from .mat
mat = matio.loadmat(MOCAP_SAMPLE)
dataset = mat['batchdatabinary']
# generate batches
batch_idx_list = prepare_frames(len(dataset), SIZE_LAG, SIZE_BATCH)
# load distribution
bernoulli = DynamicBernoulli(SIZE_VISIBLE, SIZE_HIDDEN, m_lag_visible=M_LAG_VISIBLE, n_lag_hidden=N_LAG_HIDDEN)
gibbs_sampler = DynamicBlockGibbsSampler(bernoulli, sampling_steps=1)
sgd = DynamicSGD(bernoulli)
for epoch in range(EPOCHS):
error = 0.0
for chunk_idx_list in batch_idx_list:
# get batch data set
data = np.zeros(shape=(SIZE_BATCH, SIZE_VISIBLE, SIZE_LAG))
for idx, (start, end) in enumerate(chunk_idx_list):
data[idx, :, :] = dataset[start:end, :].T
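        # One block-Gibbs sampling step (CD-1 style): data[:, :, 0] is the current frame, data[:, :, 1:] the conditioning history.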
hidden_0_probs, hidden_0_states, \
hidden_k_probs, hidden_k_states, \
visible_k_probs, visible_k_states = gibbs_sampler.sample(data[:, :, 0], data[:, :, 1:])
# compute deltas
d_weight_update, d_bias_hidden_update, \
d_bias_visible_update, d_vis_vis, d_vis_hid = sgd.optimize(data[:, :, 0], hidden_0_states, hidden_0_probs, hidden_k_probs,
hidden_k_states, visible_k_probs, visible_k_states, data[:, :, 1:])
# update model values
bernoulli.weights += d_weight_update
bernoulli.bias_hidden += d_bias_hidden_update
bernoulli.bias_visible += d_bias_visible_update
bernoulli.vis_vis_weights += d_vis_vis
bernoulli.vis_hid_weights += d_vis_hid
# compute reconstruction error
_, _, \
_, _, \
_, visible_k_states = gibbs_sampler.sample(data[:, :, 0], data[:, :, 1:])
error += np.mean(np.abs(visible_k_states - data[:, :, 0]))
    error = 1. / len(batch_idx_list) * error
    print(error)<|fim▁end|>
|
from data.gwtaylor.path import *
import ipdb
import numpy as np
|
<|file_name|>test_iwords.py<|end_file_name|><|fim▁begin|>"""
Test functions in iwords.py. To run with pytest.
"""
import os
from tempfile import NamedTemporaryFile
import iwords
def test_get_freqs():
with NamedTemporaryFile() as fp:
fp.write(b'Hello world!')
fp.flush()
assert iwords.get_freqs([fp.name]) == {'hello': 0.5, 'world': 0.5}
def test_clean():
assert iwords.clean('Cómo estás') == 'como estas'
def test_run():
assert os.system('./iwords.py --help') == 0
with NamedTemporaryFile() as fp:
fp.write(b'Hello world!')
fp.flush()
assert os.system(f'./iwords.py {fp.name}') == 0<|fim▁hole|><|fim▁end|>
|
assert os.system(f'./iwords.py {fp.name} --show-score') == 0
assert os.system(f'./iwords.py {fp.name} --limit 1') == 0
assert os.system(f'./iwords.py {fp.name} --learn /usr/share/dict/words') == 0
|
<|file_name|>tuples.rs<|end_file_name|><|fim▁begin|>extern crate itertools;
use itertools::Itertools;
#[test]
fn tuples() {
let v = [1, 2, 3, 4, 5];
let mut iter = v.iter().cloned().tuples();
assert_eq!(Some((1,)), iter.next());
assert_eq!(Some((2,)), iter.next());
assert_eq!(Some((3,)), iter.next());
assert_eq!(Some((4,)), iter.next());
assert_eq!(Some((5,)), iter.next());
assert_eq!(None, iter.next());
assert_eq!(None, iter.into_buffer().next());
let mut iter = v.iter().cloned().tuples();
assert_eq!(Some((1, 2)), iter.next());
assert_eq!(Some((3, 4)), iter.next());
assert_eq!(None, iter.next());
itertools::assert_equal(vec![5], iter.into_buffer());
let mut iter = v.iter().cloned().tuples();
assert_eq!(Some((1, 2, 3)), iter.next());
assert_eq!(None, iter.next());
itertools::assert_equal(vec![4, 5], iter.into_buffer());
let mut iter = v.iter().cloned().tuples();
assert_eq!(Some((1, 2, 3, 4)), iter.next());<|fim▁hole|>}
#[test]
fn tuple_windows() {
let v = [1, 2, 3, 4, 5];
let mut iter = v.iter().cloned().tuple_windows();
assert_eq!(Some((1,)), iter.next());
assert_eq!(Some((2,)), iter.next());
assert_eq!(Some((3,)), iter.next());
let mut iter = v.iter().cloned().tuple_windows();
assert_eq!(Some((1, 2)), iter.next());
assert_eq!(Some((2, 3)), iter.next());
assert_eq!(Some((3, 4)), iter.next());
assert_eq!(Some((4, 5)), iter.next());
assert_eq!(None, iter.next());
let mut iter = v.iter().cloned().tuple_windows();
assert_eq!(Some((1, 2, 3)), iter.next());
assert_eq!(Some((2, 3, 4)), iter.next());
assert_eq!(Some((3, 4, 5)), iter.next());
assert_eq!(None, iter.next());
let mut iter = v.iter().cloned().tuple_windows();
assert_eq!(Some((1, 2, 3, 4)), iter.next());
assert_eq!(Some((2, 3, 4, 5)), iter.next());
assert_eq!(None, iter.next());
let v = [1, 2, 3];
let mut iter = v.iter().cloned().tuple_windows::<(_, _, _, _)>();
assert_eq!(None, iter.next());
}
#[test]
fn next_tuple() {
let v = [1, 2, 3, 4, 5];
let mut iter = v.iter();
assert_eq!(iter.next_tuple().map(|(&x, &y)| (x, y)), Some((1, 2)));
assert_eq!(iter.next_tuple().map(|(&x, &y)| (x, y)), Some((3, 4)));
assert_eq!(iter.next_tuple::<(_, _)>(), None);
}<|fim▁end|>
|
assert_eq!(None, iter.next());
itertools::assert_equal(vec![5], iter.into_buffer());
|
<|file_name|>test_mixed_modulestore.py<|end_file_name|><|fim▁begin|>"""
Unit tests for the Mixed Modulestore, with DDT for the various stores (Split, Draft, XML)
"""
import datetime
import itertools
import logging
import mimetypes
from collections import namedtuple
from contextlib import contextmanager
from shutil import rmtree
from tempfile import mkdtemp
from uuid import uuid4
import ddt
import pymongo
import pytest
import six
# Mixed modulestore depends on django, so we'll manually configure some django settings
# before importing the module
# TODO remove this import and the configuration -- xmodule should not depend on django!
from django.conf import settings
from mock import Mock, call, patch
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locator import BlockUsageLocator, CourseLocator, LibraryLocator
from pytz import UTC
from six.moves import range
from web_fragments.fragment import Fragment
from xblock.core import XBlockAside
from xblock.fields import Scope, ScopeIds, String
from xblock.runtime import DictKeyValueStore, KvsFieldData
from xblock.test.tools import TestRuntime
from openedx.core.lib.tests import attr
from xmodule.contentstore.content import StaticContent
from xmodule.exceptions import InvalidVersionError
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.draft_and_published import DIRECT_ONLY_CATEGORIES, UnsupportedRevisionError
from xmodule.modulestore.edit_info import EditInfoMixin
from xmodule.modulestore.exceptions import (
DuplicateCourseError,
ItemNotFoundError,
NoPathToItem,
ReferentialIntegrityError
)
from xmodule.modulestore.inheritance import InheritanceMixin
from xmodule.modulestore.mixed import MixedModuleStore
from xmodule.modulestore.search import navigation_index, path_to_location
from xmodule.modulestore.split_mongo.split import SplitMongoModuleStore
from xmodule.modulestore.store_utilities import DETACHED_XBLOCK_TYPES
from xmodule.modulestore.tests.factories import check_exact_number_of_calls, check_mongo_calls, mongo_uses_error_check
from xmodule.modulestore.tests.mongo_connection import MONGO_HOST, MONGO_PORT_NUM
from xmodule.modulestore.tests.test_asides import AsideTestType
from xmodule.modulestore.tests.utils import (
LocationMixin,
MongoContentstoreBuilder,
create_modulestore_instance,
mock_tab_from_json
)
from xmodule.modulestore.xml_exporter import export_course_to_xml
from xmodule.modulestore.xml_importer import import_course_from_xml
from xmodule.tests import DATA_DIR, CourseComparisonTest
from xmodule.x_module import XModuleMixin
if not settings.configured:
settings.configure()
log = logging.getLogger(__name__)
class CommonMixedModuleStoreSetup(CourseComparisonTest):
"""
Quasi-superclass which tests Location based apps against both split and mongo dbs (Locator and
Location-based dbs)
"""
HOST = MONGO_HOST
PORT = MONGO_PORT_NUM
DB = 'test_mongo_%s' % uuid4().hex[:5]
COLLECTION = 'modulestore'
ASSET_COLLECTION = 'assetstore'
FS_ROOT = DATA_DIR
DEFAULT_CLASS = 'xmodule.raw_module.RawDescriptor'
RENDER_TEMPLATE = lambda t_n, d, ctx=None, nsp='main': ''
MONGO_COURSEID = 'MITx/999/2013_Spring'
modulestore_options = {
'default_class': DEFAULT_CLASS,
'fs_root': DATA_DIR,
'render_template': RENDER_TEMPLATE,
'xblock_mixins': (EditInfoMixin, InheritanceMixin, LocationMixin, XModuleMixin),
}
DOC_STORE_CONFIG = {
'host': HOST,
'port': PORT,
'db': DB,
'collection': COLLECTION,
'asset_collection': ASSET_COLLECTION,
}
OPTIONS = {
'stores': [
{
'NAME': ModuleStoreEnum.Type.mongo,
'ENGINE': 'xmodule.modulestore.mongo.draft.DraftModuleStore',
'DOC_STORE_CONFIG': DOC_STORE_CONFIG,
'OPTIONS': modulestore_options
},
{
'NAME': ModuleStoreEnum.Type.split,
'ENGINE': 'xmodule.modulestore.split_mongo.split_draft.DraftVersioningModuleStore',
'DOC_STORE_CONFIG': DOC_STORE_CONFIG,
'OPTIONS': modulestore_options
},
],
'xblock_mixins': modulestore_options['xblock_mixins'],
}
def _compare_ignore_version(self, loc1, loc2, msg=None):
"""
AssertEqual replacement for CourseLocator
"""
if loc1.for_branch(None) != loc2.for_branch(None):
self.fail(self._formatMessage(msg, u"{} != {}".format(six.text_type(loc1), six.text_type(loc2))))
def setUp(self):
"""
Set up the database for testing
"""
super(CommonMixedModuleStoreSetup, self).setUp() # lint-amnesty, pylint: disable=super-with-arguments
self.exclude_field(None, 'wiki_slug')
self.exclude_field(None, 'xml_attributes')
self.exclude_field(None, 'parent')
self.ignore_asset_key('_id')
self.ignore_asset_key('uploadDate')
self.ignore_asset_key('content_son')
self.ignore_asset_key('thumbnail_location')
self.options = getattr(self, 'options', self.OPTIONS)
self.connection = pymongo.MongoClient(
host=self.HOST,
port=self.PORT,
tz_aware=True,
)
self.connection.drop_database(self.DB)
self.addCleanup(self.connection.drop_database, self.DB)
self.addCleanup(self.connection.close)
self.addTypeEqualityFunc(BlockUsageLocator, '_compare_ignore_version')
self.addTypeEqualityFunc(CourseLocator, '_compare_ignore_version')
# define attrs which get set in initdb to quell pylint
self.writable_chapter_location = self.store = self.fake_location = None
self.course_locations = {}
self.user_id = ModuleStoreEnum.UserID.test
def _create_course(self, course_key, asides=None):
"""
Create a course w/ one item in the persistence store using the given course & item location.
"""
# create course
with self.store.bulk_operations(course_key):
self.course = self.store.create_course(course_key.org, course_key.course, course_key.run, self.user_id) # lint-amnesty, pylint: disable=attribute-defined-outside-init
if isinstance(self.course.id, CourseLocator):
self.course_locations[self.MONGO_COURSEID] = self.course.location
else:
assert self.course.id == course_key
# create chapter
chapter = self.store.create_child(self.user_id, self.course.location, 'chapter',
block_id='Overview', asides=asides)
self.writable_chapter_location = chapter.location
def _create_block_hierarchy(self):
"""
Creates a hierarchy of blocks for testing
Each block's (version_agnostic) location is assigned as a field of the class and can be easily accessed
"""
BlockInfo = namedtuple('BlockInfo', 'field_name, category, display_name, sub_tree')
trees = [
BlockInfo(
'chapter_x', 'chapter', 'Chapter_x', [
BlockInfo(
'sequential_x1', 'sequential', 'Sequential_x1', [
BlockInfo(
'vertical_x1a', 'vertical', 'Vertical_x1a', [
BlockInfo('problem_x1a_1', 'problem', 'Problem_x1a_1', []),
BlockInfo('problem_x1a_2', 'problem', 'Problem_x1a_2', []),
BlockInfo('problem_x1a_3', 'problem', 'Problem_x1a_3', []),
BlockInfo('html_x1a_1', 'html', 'HTML_x1a_1', []),
]
),
BlockInfo(
'vertical_x1b', 'vertical', 'Vertical_x1b', []
)
]
),
BlockInfo(
'sequential_x2', 'sequential', 'Sequential_x2', []
)
]
),
BlockInfo(
'chapter_y', 'chapter', 'Chapter_y', [
BlockInfo(
'sequential_y1', 'sequential', 'Sequential_y1', [
BlockInfo(
'vertical_y1a', 'vertical', 'Vertical_y1a', [
BlockInfo('problem_y1a_1', 'problem', 'Problem_y1a_1', []),
BlockInfo('problem_y1a_2', 'problem', 'Problem_y1a_2', []),
BlockInfo('problem_y1a_3', 'problem', 'Problem_y1a_3', []),
]
)
]
)
]
)
]
def create_sub_tree(parent, block_info):
"""
recursive function that creates the given block and its descendants
"""
block = self.store.create_child(
self.user_id, parent.location,
block_info.category, block_id=block_info.display_name,
fields={'display_name': block_info.display_name},
)
for tree in block_info.sub_tree:
create_sub_tree(block, tree)
setattr(self, block_info.field_name, block.location)
with self.store.bulk_operations(self.course.id):
for tree in trees:
create_sub_tree(self.course, tree)
def _course_key_from_string(self, string):
"""
Get the course key for the given course string
"""
return self.course_locations[string].course_key
def _has_changes(self, location):
"""
Helper function that loads the item before calling has_changes
"""
return self.store.has_changes(self.store.get_item(location))
def _initialize_mixed(self, mappings=None, contentstore=None):
"""
initializes the mixed modulestore.
"""
mappings = mappings or {}
self.store = MixedModuleStore(
contentstore, create_modulestore_instance=create_modulestore_instance,
mappings=mappings,
**self.options
)
self.addCleanup(self.store.close_all_connections)
def initdb(self, default):
"""
Initialize the database and create one test course in it
"""
# set the default modulestore
store_configs = self.options['stores']
for index in range(len(store_configs)): # lint-amnesty, pylint: disable=consider-using-enumerate
if store_configs[index]['NAME'] == default:
if index > 0:
store_configs[index], store_configs[0] = store_configs[0], store_configs[index]
break
self._initialize_mixed()
test_course_key = CourseLocator.from_string(self.MONGO_COURSEID)
test_course_key = test_course_key.make_usage_key('course', test_course_key.run).course_key
self.fake_location = self.store.make_course_key(
test_course_key.org,
test_course_key.course,
test_course_key.run
).make_usage_key('vertical', 'fake')
self._create_course(test_course_key)
assert default == self.store.get_modulestore_type(self.course.id)
class AsideFoo(XBlockAside):
"""
Test xblock aside class
"""
FRAG_CONTENT = u"<p>Aside Foo rendered</p>"
field11 = String(default="aside1_default_value1", scope=Scope.content)
field12 = String(default="aside1_default_value2", scope=Scope.settings)
@XBlockAside.aside_for('student_view')
def student_view_aside(self, block, context): # pylint: disable=unused-argument
"""Add to the student view"""
return Fragment(self.FRAG_CONTENT)
class AsideBar(XBlockAside):
"""
Test xblock aside class
"""
FRAG_CONTENT = u"<p>Aside Bar rendered</p>"
field21 = String(default="aside2_default_value1", scope=Scope.content)
field22 = String(default="aside2_default_value2", scope=Scope.settings)
@XBlockAside.aside_for('student_view')
def student_view_aside(self, block, context): # pylint: disable=unused-argument
"""Add to the student view"""
return Fragment(self.FRAG_CONTENT)
@ddt.ddt
@attr('mongo')
class TestMixedModuleStore(CommonMixedModuleStoreSetup):
"""
Tests of the MixedModulestore interface methods.
"""
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_get_modulestore_type(self, default_ms):
"""
Make sure we get back the store type we expect for given mappings
"""
self.initdb(default_ms)
assert self.store.get_modulestore_type(self._course_key_from_string(self.MONGO_COURSEID)) == default_ms
# try an unknown mapping, it should be the 'default' store
assert self.store.get_modulestore_type(CourseKey.from_string('foo/bar/2012_Fall')) == default_ms
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_get_modulestore_cache(self, default_ms):
"""
Make sure we cache discovered course mappings
"""
self.initdb(default_ms)
# unset mappings
self.store.mappings = {}
course_key = self.course_locations[self.MONGO_COURSEID].course_key
with check_exact_number_of_calls(self.store.default_modulestore, 'has_course', 1):
assert self.store.default_modulestore == self.store._get_modulestore_for_courselike(course_key) # pylint: disable=protected-access, line-too-long
assert course_key in self.store.mappings
assert self.store.default_modulestore == self.store._get_modulestore_for_courselike(course_key) # pylint: disable=protected-access, line-too-long
@ddt.data(*itertools.product(
(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split),
(True, False)
))
@ddt.unpack
def test_duplicate_course_error(self, default_ms, reset_mixed_mappings):
"""
        Make sure creating the same course twice raises a DuplicateCourseError.
"""
self._initialize_mixed(mappings={})
with self.store.default_store(default_ms):
self.store.create_course('org_x', 'course_y', 'run_z', self.user_id)
if reset_mixed_mappings:
self.store.mappings = {}
with pytest.raises(DuplicateCourseError):
self.store.create_course('org_x', 'course_y', 'run_z', self.user_id)
@ddt.data(ModuleStoreEnum.Type.split, ModuleStoreEnum.Type.mongo)
def test_duplicate_course_error_with_different_case_ids(self, default_store):
"""
        Verify that a course cannot be created with the same course_id in a different case.
"""
self._initialize_mixed(mappings={})
with self.store.default_store(default_store):
self.store.create_course('org_x', 'course_y', 'run_z', self.user_id)
with pytest.raises(DuplicateCourseError):
self.store.create_course('ORG_X', 'COURSE_Y', 'RUN_Z', self.user_id)
# Draft:
# problem: One lookup to locate an item that exists
# fake: one w/ wildcard version
# split has one lookup for the course and then one for the course items
@ddt.data((ModuleStoreEnum.Type.mongo, [1, 1], 0), (ModuleStoreEnum.Type.split, [2, 2], 0))
@ddt.unpack
def test_has_item(self, default_ms, max_find, max_send):
self.initdb(default_ms)
self._create_block_hierarchy()
with check_mongo_calls(max_find.pop(0), max_send):
assert self.store.has_item(self.problem_x1a_1) # lint-amnesty, pylint: disable=no-member
# try negative cases
with check_mongo_calls(max_find.pop(0), max_send):
assert not self.store.has_item(self.fake_location)
# verify that an error is raised when the revision is not valid
with pytest.raises(UnsupportedRevisionError):
self.store.has_item(self.fake_location, revision=ModuleStoreEnum.RevisionOption.draft_preferred)
# draft queries:
# problem: find draft item, find all items pertinent to inheritance computation, find parent
# non-existent problem: find draft, find published
# split:
# problem: active_versions, structure
# non-existent problem: ditto
@ddt.data((ModuleStoreEnum.Type.mongo, [3, 2], 0), (ModuleStoreEnum.Type.split, [2, 2], 0))
@ddt.unpack
def test_get_item(self, default_ms, max_find, max_send):
self.initdb(default_ms)
self._create_block_hierarchy()
with check_mongo_calls(max_find.pop(0), max_send):
assert self.store.get_item(self.problem_x1a_1) is not None # lint-amnesty, pylint: disable=no-member
# try negative cases
with check_mongo_calls(max_find.pop(0), max_send):
with pytest.raises(ItemNotFoundError):
self.store.get_item(self.fake_location)
# verify that an error is raised when the revision is not valid
with pytest.raises(UnsupportedRevisionError):
self.store.get_item(self.fake_location, revision=ModuleStoreEnum.RevisionOption.draft_preferred)
# Draft:
# wildcard query, 6! load pertinent items for inheritance calls, load parents, course root fetch (why)
# Split:
# active_versions (with regex), structure, and spurious active_versions refetch
@ddt.data((ModuleStoreEnum.Type.mongo, 14, 0), (ModuleStoreEnum.Type.split, 4, 0))
@ddt.unpack
def test_get_items(self, default_ms, max_find, max_send):
self.initdb(default_ms)
self._create_block_hierarchy()
course_locn = self.course_locations[self.MONGO_COURSEID]
with check_mongo_calls(max_find, max_send):
modules = self.store.get_items(course_locn.course_key, qualifiers={'category': 'problem'})
assert len(modules) == 6
# verify that an error is raised when the revision is not valid
with pytest.raises(UnsupportedRevisionError):
self.store.get_items(
self.course_locations[self.MONGO_COURSEID].course_key,
revision=ModuleStoreEnum.RevisionOption.draft_preferred
)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_course_version_on_block(self, default_ms):
self.initdb(default_ms)
self._create_block_hierarchy()
course = self.store.get_course(self.course.id)
course_version = course.course_version
if default_ms == ModuleStoreEnum.Type.split:
assert course_version is not None
else:
assert course_version is None
blocks = self.store.get_items(self.course.id, qualifiers={'category': 'problem'})
blocks.append(self.store.get_item(self.problem_x1a_1)) # lint-amnesty, pylint: disable=no-member
assert len(blocks) == 7
for block in blocks:
assert block.course_version == course_version
# ensure that when the block is retrieved from the runtime cache,
# the course version is still present
cached_block = course.runtime.load_item(block.location)
assert cached_block.course_version == block.course_version
@ddt.data((ModuleStoreEnum.Type.split, 2, False), (ModuleStoreEnum.Type.mongo, 3, True))
@ddt.unpack
def test_get_items_include_orphans(self, default_ms, expected_items_in_tree, orphan_in_items):
"""
        Test that the `include_orphans` option returns only those items that are present in the course tree.
        It also tests that orphans are not fetched when calling `get_items` with `include_orphans`.
        Params:
            expected_items_in_tree:
                Number of items returned when `get_items` is called with `include_orphans`.
                In split, orphan items are not returned.
                In mongo, orphan items are still returned because `include_orphans` has no impact on the mongo
                modulestore, which returns the same number of items as a call without the `include_orphans` kwarg.
            orphan_in_items:
                Whether an orphan is expected in the result when `get_items` is called with the `include_orphans` kwarg.
                False for the split modulestore, because split is now expected not to retrieve orphans when
                `include_orphans` is used.
                True for the mongo modulestore, because `include_orphans` has no effect on mongo.
"""
self.initdb(default_ms)
test_course = self.store.create_course('testx', 'GreekHero', 'test_run', self.user_id)
course_key = test_course.id
items = self.store.get_items(course_key)
# Check items found are either course or about type
assert set(['course', 'about']).issubset(set([item.location.block_type for item in items])) # pylint: disable=consider-using-set-comprehension, line-too-long
# Assert that about is a detached category found in get_items
assert [item.location.block_type for item in items if item.location.block_type == 'about'][0]\
in DETACHED_XBLOCK_TYPES
assert len(items) == 2
# Check that orphans are not found
orphans = self.store.get_orphans(course_key)
assert len(orphans) == 0
# Add an orphan to test course
orphan = course_key.make_usage_key('chapter', 'OrphanChapter')
self.store.create_item(self.user_id, orphan.course_key, orphan.block_type, block_id=orphan.block_id)
# Check that now an orphan is found
orphans = self.store.get_orphans(course_key)
assert orphan in orphans
assert len(orphans) == 1
# Check now `get_items` retrieves an extra item added above which is an orphan.
items = self.store.get_items(course_key)
assert orphan in [item.location for item in items]
assert len(items) == 3
# Check now `get_items` with `include_orphans` kwarg does not retrieves an orphan block.
items_in_tree = self.store.get_items(course_key, include_orphans=False)
# Check that course and about blocks are found in get_items
assert set(['course', 'about']).issubset({item.location.block_type for item in items_in_tree})
# Check orphan is found or not - this is based on mongo/split modulestore. It should be found in mongo.
assert (orphan in [item.location for item in items_in_tree]) == orphan_in_items
assert len(items_in_tree) == expected_items_in_tree
# draft: get draft, get ancestors up to course (2-6), compute inheritance
# sends: update problem and then each ancestor up to course (edit info)
# split: active_versions, definitions (calculator field), structures
# 2 sends to update index & structure (note, it would also be definition if a content field changed)
@ddt.data((ModuleStoreEnum.Type.mongo, 7, 5), (ModuleStoreEnum.Type.split, 3, 2))
@ddt.unpack
def test_update_item(self, default_ms, max_find, max_send):
"""
Update should succeed for r/w dbs
"""
self.initdb(default_ms)
self._create_block_hierarchy()
problem = self.store.get_item(self.problem_x1a_1) # lint-amnesty, pylint: disable=no-member
# if following raised, then the test is really a noop, change it
assert problem.max_attempts != 2, 'Default changed making test meaningless'
problem.max_attempts = 2
with check_mongo_calls(max_find, max_send):
problem = self.store.update_item(problem, self.user_id)
assert problem.max_attempts == 2, "Update didn't persist"
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_has_changes_direct_only(self, default_ms):
"""
Tests that has_changes() returns false when a new xblock in a direct only category is checked
"""
self.initdb(default_ms)
test_course = self.store.create_course('testx', 'GreekHero', 'test_run', self.user_id)
# Create dummy direct only xblocks
chapter = self.store.create_item(
self.user_id,
test_course.id,
'chapter',
block_id='vertical_container'
)
# Check that neither xblock has changes
assert not self.store.has_changes(test_course)
assert not self.store.has_changes(chapter)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_has_changes(self, default_ms):
"""
Tests that has_changes() only returns true when changes are present
"""
self.initdb(default_ms)
test_course = self.store.create_course('testx', 'GreekHero', 'test_run', self.user_id)
# Create a dummy component to test against
xblock = self.store.create_item(
self.user_id,
test_course.id,
'vertical',
block_id='test_vertical'
)
# Not yet published, so changes are present
assert self.store.has_changes(xblock)
# Publish and verify that there are no unpublished changes
newXBlock = self.store.publish(xblock.location, self.user_id)
assert not self.store.has_changes(newXBlock)
# Change the component, then check that there now are changes
component = self.store.get_item(xblock.location)
component.display_name = 'Changed Display Name'
component = self.store.update_item(component, self.user_id)
assert self.store.has_changes(component)
# Publish and verify again
component = self.store.publish(component.location, self.user_id)
assert not self.store.has_changes(component)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_unit_stuck_in_draft_mode(self, default_ms):
"""
        After revert_to_published(), has_changes() should return False if the draft has no changes
"""
self.initdb(default_ms)
test_course = self.store.create_course('testx', 'GreekHero', 'test_run', self.user_id)
# Create a dummy component to test against
xblock = self.store.create_item(
self.user_id,
test_course.id,
'vertical',
block_id='test_vertical'
)
# Not yet published, so changes are present
assert self.store.has_changes(xblock)
# Publish and verify that there are no unpublished changes
component = self.store.publish(xblock.location, self.user_id)
assert not self.store.has_changes(component)
self.store.revert_to_published(component.location, self.user_id)
component = self.store.get_item(component.location)
assert not self.store.has_changes(component)
# Publish and verify again
component = self.store.publish(component.location, self.user_id)
assert not self.store.has_changes(component)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_unit_stuck_in_published_mode(self, default_ms):
"""
        After revert_to_published(), has_changes() should return True if the draft has changes
"""
self.initdb(default_ms)
test_course = self.store.create_course('testx', 'GreekHero', 'test_run', self.user_id)
# Create a dummy component to test against
xblock = self.store.create_item(
self.user_id,
test_course.id,
'vertical',
block_id='test_vertical'
)
# Not yet published, so changes are present
assert self.store.has_changes(xblock)
# Publish and verify that there are no unpublished changes
component = self.store.publish(xblock.location, self.user_id)
assert not self.store.has_changes(component)
# Discard changes and verify that there are no changes
self.store.revert_to_published(component.location, self.user_id)
component = self.store.get_item(component.location)
assert not self.store.has_changes(component)
# Change the component, then check that there now are changes
component = self.store.get_item(component.location)
component.display_name = 'Changed Display Name'
self.store.update_item(component, self.user_id)
# Verify that changes are present
assert self.store.has_changes(component)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_unit_stuck_in_published_mode_after_delete(self, default_ms):
"""
Test that a unit does not get stuck in published mode
        after discarding a component's changes and then deleting that component
"""
self.initdb(default_ms)
test_course = self.store.create_course('testx', 'GreekHero', 'test_run', self.user_id)
# Create a dummy vertical & html component to test against
vertical = self.store.create_item(
self.user_id,
test_course.id,
'vertical',
block_id='test_vertical'
)
component = self.store.create_child(
self.user_id,
vertical.location,
'html',
block_id='html_component'
)
# publish vertical changes
self.store.publish(vertical.location, self.user_id)
assert not self._has_changes(vertical.location)
# Change a component, then check that there now are changes
component = self.store.get_item(component.location)
component.display_name = 'Changed Display Name'
self.store.update_item(component, self.user_id)
assert self._has_changes(vertical.location)
# Discard changes and verify that there are no changes
self.store.revert_to_published(vertical.location, self.user_id)
assert not self._has_changes(vertical.location)
# Delete the component and verify that the unit has changes
self.store.delete_item(component.location, self.user_id)
vertical = self.store.get_item(vertical.location)
assert self._has_changes(vertical.location)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_publish_automatically_after_delete_unit(self, default_ms):
"""
        Check that a sequential is published automatically after deleting a unit
"""
self.initdb(default_ms)
test_course = self.store.create_course('test_org', 'test_course', 'test_run', self.user_id)
# create sequential and vertical to test against
sequential = self.store.create_child(self.user_id, test_course.location, 'sequential', 'test_sequential')
vertical = self.store.create_child(self.user_id, sequential.location, 'vertical', 'test_vertical')
# publish sequential changes
self.store.publish(sequential.location, self.user_id)
assert not self._has_changes(sequential.location)
# delete vertical and check sequential has no changes
self.store.delete_item(vertical.location, self.user_id)
assert not self._has_changes(sequential.location)
def setup_has_changes(self, default_ms):
"""
Common set up for has_changes tests below.
Returns a dictionary of useful location maps for testing.
"""
self.initdb(default_ms)
self._create_block_hierarchy()
locations = {
'grandparent': self.chapter_x, # lint-amnesty, pylint: disable=no-member
'parent_sibling': self.sequential_x2, # lint-amnesty, pylint: disable=no-member
'parent': self.sequential_x1, # lint-amnesty, pylint: disable=no-member
'child_sibling': self.vertical_x1b, # lint-amnesty, pylint: disable=no-member
'child': self.vertical_x1a, # lint-amnesty, pylint: disable=no-member
}
# Publish the vertical units
self.store.publish(locations['parent_sibling'], self.user_id)
self.store.publish(locations['parent'], self.user_id)
return locations
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_has_changes_ancestors(self, default_ms):
"""
Tests that has_changes() returns true on ancestors when a child is changed
"""
locations = self.setup_has_changes(default_ms)
# Verify that there are no unpublished changes
for key in locations:
assert not self._has_changes(locations[key])
# Change the child
child = self.store.get_item(locations['child'])
child.display_name = 'Changed Display Name'
self.store.update_item(child, self.user_id)
# All ancestors should have changes, but not siblings
assert self._has_changes(locations['grandparent'])
assert self._has_changes(locations['parent'])
assert self._has_changes(locations['child'])
assert not self._has_changes(locations['parent_sibling'])
assert not self._has_changes(locations['child_sibling'])
# Publish the unit with changes
self.store.publish(locations['parent'], self.user_id)
# Verify that there are no unpublished changes
for key in locations:
assert not self._has_changes(locations[key])
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_has_changes_publish_ancestors(self, default_ms):
"""
        Tests that has_changes() returns False on ancestors only after all changed children have been published
"""
locations = self.setup_has_changes(default_ms)
# Verify that there are no unpublished changes
for key in locations:
assert not self._has_changes(locations[key])
# Change both children
child = self.store.get_item(locations['child'])
child_sibling = self.store.get_item(locations['child_sibling'])
child.display_name = 'Changed Display Name'
child_sibling.display_name = 'Changed Display Name'
self.store.update_item(child, user_id=self.user_id)
self.store.update_item(child_sibling, user_id=self.user_id)
# Verify that ancestors have changes
assert self._has_changes(locations['grandparent'])
assert self._has_changes(locations['parent'])
# Publish one child
self.store.publish(locations['child_sibling'], self.user_id)
# Verify that ancestors still have changes
assert self._has_changes(locations['grandparent'])
assert self._has_changes(locations['parent'])
# Publish the other child
self.store.publish(locations['child'], self.user_id)
# Verify that ancestors now have no changes
assert not self._has_changes(locations['grandparent'])
assert not self._has_changes(locations['parent'])
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_has_changes_add_remove_child(self, default_ms):
"""
Tests that has_changes() returns true for the parent when a child with changes is added
and false when that child is removed.
"""
locations = self.setup_has_changes(default_ms)
# Test that the ancestors don't have changes
assert not self._has_changes(locations['grandparent'])
assert not self._has_changes(locations['parent'])
# Create a new child and attach it to parent
self.store.create_child(
self.user_id,
locations['parent'],
'vertical',
block_id='new_child',
)
# Verify that the ancestors now have changes
assert self._has_changes(locations['grandparent'])
assert self._has_changes(locations['parent'])
# Remove the child from the parent
parent = self.store.get_item(locations['parent'])
parent.children = [locations['child'], locations['child_sibling']]
self.store.update_item(parent, user_id=self.user_id)
# Verify that ancestors now have no changes
assert not self._has_changes(locations['grandparent'])
assert not self._has_changes(locations['parent'])
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_has_changes_non_direct_only_children(self, default_ms):
"""
Tests that has_changes() returns true after editing the child of a vertical (both not direct only categories).
"""
self.initdb(default_ms)
parent = self.store.create_item(
self.user_id,
self.course.id,
'vertical',
block_id='parent',
)
child = self.store.create_child(
self.user_id,
parent.location,
'html',
block_id='child',
)
self.store.publish(parent.location, self.user_id)
# Verify that there are no changes
assert not self._has_changes(parent.location)
assert not self._has_changes(child.location)
# Change the child
child.display_name = 'Changed Display Name'
self.store.update_item(child, user_id=self.user_id)
# Verify that both parent and child have changes
assert self._has_changes(parent.location)
assert self._has_changes(child.location)
@ddt.data(*itertools.product(
(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split),
(ModuleStoreEnum.Branch.draft_preferred, ModuleStoreEnum.Branch.published_only)
))
@ddt.unpack
def test_has_changes_missing_child(self, default_ms, default_branch):
"""
Tests that has_changes() does not throw an exception when a child doesn't exist.
"""
self.initdb(default_ms)
with self.store.branch_setting(default_branch, self.course.id):
# Create the parent and point it to a fake child
parent = self.store.create_item(
self.user_id,
self.course.id,
'vertical',
block_id='parent',
)
parent.children += [self.course.id.make_usage_key('vertical', 'does_not_exist')]
parent = self.store.update_item(parent, self.user_id)
            # Checking the parent for changes should return True and not throw an exception
assert self.store.has_changes(parent)
# Draft
# Find: find parents (definition.children query), get parent, get course (fill in run?),
# find parents of the parent (course), get inheritance items,
# get item (to delete subtree), get inheritance again.
# Sends: delete item, update parent
# Split
# Find: active_versions, 2 structures (published & draft), definition (unnecessary)
# Sends: updated draft and published structures and active_versions
@ddt.data((ModuleStoreEnum.Type.mongo, 7, 2), (ModuleStoreEnum.Type.split, 3, 3))
@ddt.unpack
def test_delete_item(self, default_ms, max_find, max_send):
"""
        Delete should be rejected on a read-only db and work on a read/write one
"""
self.initdb(default_ms)
if default_ms == ModuleStoreEnum.Type.mongo and mongo_uses_error_check(self.store):
max_find += 1
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, self.writable_chapter_location.course_key): # lint-amnesty, pylint: disable=line-too-long
with check_mongo_calls(max_find, max_send):
self.store.delete_item(self.writable_chapter_location, self.user_id)
# verify it's gone
with pytest.raises(ItemNotFoundError):
self.store.get_item(self.writable_chapter_location)
# verify it's gone from published too
with pytest.raises(ItemNotFoundError):
self.store.get_item(self.writable_chapter_location, revision=ModuleStoreEnum.RevisionOption.published_only)
# Draft:
# queries: find parent (definition.children), count versions of item, get parent, count grandparents,
# inheritance items, draft item, draft child, inheritance
# sends: delete draft vertical and update parent
# Split:
# queries: active_versions, draft and published structures, definition (unnecessary)
# sends: update published (why?), draft, and active_versions
@ddt.data((ModuleStoreEnum.Type.mongo, 9, 2), (ModuleStoreEnum.Type.split, 4, 3))
@ddt.unpack
def test_delete_private_vertical(self, default_ms, max_find, max_send):
"""
        Because old Mongo treated verticals as the first layer that could be a draft, it has some interesting
        behavioral properties which this deletion test exercises.
"""
self.initdb(default_ms)
if default_ms == ModuleStoreEnum.Type.mongo and mongo_uses_error_check(self.store):
max_find += 1
# create and delete a private vertical with private children
private_vert = self.store.create_child(
# don't use course_location as it may not be the repr
self.user_id, self.course_locations[self.MONGO_COURSEID],
'vertical', block_id='private'
)
private_leaf = self.store.create_child(
# don't use course_location as it may not be the repr
self.user_id, private_vert.location, 'html', block_id='private_leaf'
)
# verify pre delete state (just to verify that the test is valid)
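        # Split locators carry a version_guid attribute; old Mongo locations do not.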
if hasattr(private_vert.location, 'version_guid'):
# change to the HEAD version
vert_loc = private_vert.location.for_version(private_leaf.location.version_guid)
else:
vert_loc = private_vert.location
assert self.store.has_item(vert_loc)
assert self.store.has_item(private_leaf.location)
course = self.store.get_course(self.course_locations[self.MONGO_COURSEID].course_key, 0)
assert vert_loc in course.children
# delete the vertical and ensure the course no longer points to it
with check_mongo_calls(max_find, max_send):
self.store.delete_item(vert_loc, self.user_id)
course = self.store.get_course(self.course_locations[self.MONGO_COURSEID].course_key, 0)
if hasattr(private_vert.location, 'version_guid'):
# change to the HEAD version
vert_loc = private_vert.location.for_version(course.location.version_guid)
leaf_loc = private_leaf.location.for_version(course.location.version_guid)
else:
vert_loc = private_vert.location
leaf_loc = private_leaf.location
assert not self.store.has_item(vert_loc)
assert not self.store.has_item(leaf_loc)
assert vert_loc not in course.children
# Draft:
# find: find parent (definition.children) 2x, find draft item, get inheritance items
# send: one delete query for specific item
# Split:
# find: active_version & structure (cached)
# send: update structure and active_versions
@ddt.data((ModuleStoreEnum.Type.mongo, 4, 1), (ModuleStoreEnum.Type.split, 2, 2))
@ddt.unpack
def test_delete_draft_vertical(self, default_ms, max_find, max_send):
"""
Test deleting a draft vertical which has a published version.
"""
self.initdb(default_ms)
# reproduce bug STUD-1965
# create and delete a private vertical with private children
private_vert = self.store.create_child(
# don't use course_location as it may not be the repr
self.user_id, self.course_locations[self.MONGO_COURSEID], 'vertical', block_id='publish'
)
private_leaf = self.store.create_child(
self.user_id, private_vert.location, 'html', block_id='bug_leaf'
)
# verify that an error is raised when the revision is not valid
with pytest.raises(UnsupportedRevisionError):
self.store.delete_item(
private_leaf.location,
self.user_id,
revision=ModuleStoreEnum.RevisionOption.draft_preferred
)
self.store.publish(private_vert.location, self.user_id)
private_leaf.display_name = 'change me'
private_leaf = self.store.update_item(private_leaf, self.user_id)
# test succeeds if delete succeeds w/o error
if default_ms == ModuleStoreEnum.Type.mongo and mongo_uses_error_check(self.store):
max_find += 1
with check_mongo_calls(max_find, max_send):
self.store.delete_item(private_leaf.location, self.user_id)
# Draft:
# 1) find all courses (wildcard),
# 2) get each course 1 at a time (1 course),
# 3) wildcard split if it has any (1) but it doesn't
# Split:
# 1) wildcard split search,
# 2-4) active_versions, structure, definition (s/b lazy; so, unnecessary)
# 5) wildcard draft mongo which has none
@ddt.data((ModuleStoreEnum.Type.mongo, 3, 0), (ModuleStoreEnum.Type.split, 6, 0))
@ddt.unpack
def test_get_courses(self, default_ms, max_find, max_send):
self.initdb(default_ms)
# we should have one course across all stores
with check_mongo_calls(max_find, max_send):
courses = self.store.get_courses()
course_ids = [course.location for course in courses]
assert len(courses) == 1, 'Not one course: {}'.format(course_ids)
assert self.course_locations[self.MONGO_COURSEID] in course_ids
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred):
draft_courses = self.store.get_courses(remove_branch=True)
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only):
published_courses = self.store.get_courses(remove_branch=True)
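        # With the branch stripped from the returned course keys, the draft-preferred
        # and published-only course listings should contain the same course ids.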
assert [c.id for c in draft_courses] == [c.id for c in published_courses]
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_create_child_detached_tabs(self, default_ms):
"""
        Test the 'create_child' method with a detached category ('static_tab')
        to check that the new static tab is not added as a direct child of the course
"""
self.initdb(default_ms)
mongo_course = self.store.get_course(self.course_locations[self.MONGO_COURSEID].course_key)
assert len(mongo_course.children) == 1
# create a static tab of the course
self.store.create_child(
self.user_id,
self.course.location,
'static_tab'
)
# now check that the course has same number of children
mongo_course = self.store.get_course(self.course_locations[self.MONGO_COURSEID].course_key)
assert len(mongo_course.children) == 1
# draft is 2: find out which ms owns course, get item
# split: active_versions, structure, definition (to load course wiki string)
@ddt.data((ModuleStoreEnum.Type.mongo, 2, 0), (ModuleStoreEnum.Type.split, 3, 0))
@ddt.unpack
def test_get_course(self, default_ms, max_find, max_send):
"""
        This test is here for performance comparison, not functionality. It measures the cost
        of getting an item whose scope.content fields are accessed.
"""
self.initdb(default_ms)
with check_mongo_calls(max_find, max_send):
course = self.store.get_item(self.course_locations[self.MONGO_COURSEID])
assert course.id == self.course_locations[self.MONGO_COURSEID].course_key
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_get_library(self, default_ms):
"""
Test that create_library and get_library work regardless of the default modulestore.
Other tests of MixedModulestore support are in test_libraries.py but this one must
be done here so we can test the configuration where Draft/old is the first modulestore.
"""
self.initdb(default_ms)
with self.store.default_store(ModuleStoreEnum.Type.split): # The CMS also wraps create_library like this
library = self.store.create_library("org", "lib", self.user_id, {"display_name": "Test Library"})
library_key = library.location.library_key
assert isinstance(library_key, LibraryLocator)
# Now load with get_library and make sure it works:
library = self.store.get_library(library_key)
assert library.location.library_key == library_key
# Clear the mappings so we can test get_library code path without mapping set:
self.store.mappings.clear()
library = self.store.get_library(library_key)
assert library.location.library_key == library_key
    # note that this doesn't test getting a published item via draft_preferred, for which draft would have 2 hits
    # (split would still have only 2)
# Draft: get_parent
# Split: active_versions, structure
@ddt.data((ModuleStoreEnum.Type.mongo, 1, 0), (ModuleStoreEnum.Type.split, 2, 0))
@ddt.unpack
def test_get_parent_locations(self, default_ms, max_find, max_send):
"""
        Test a simple get_parent_location for a direct-only category (i.e., always published)
"""
self.initdb(default_ms)
self._create_block_hierarchy()
with check_mongo_calls(max_find, max_send):
parent = self.store.get_parent_location(self.problem_x1a_1) # lint-amnesty, pylint: disable=no-member
assert parent == self.vertical_x1a # lint-amnesty, pylint: disable=no-member
def verify_get_parent_locations_results(self, expected_results):
"""
Verifies the results of calling get_parent_locations matches expected_results.
"""
for child_location, parent_location, revision in expected_results:
assert parent_location == self.store.get_parent_location(child_location, revision=revision)
def verify_item_parent(self, item_location, expected_parent_location, old_parent_location, is_reverted=False):
"""
Verifies that item is placed under expected parent.
Arguments:
item_location (BlockUsageLocator) : Locator of item.
expected_parent_location (BlockUsageLocator) : Expected parent block locator.
old_parent_location (BlockUsageLocator) : Old parent block locator.
is_reverted (Boolean) : A flag to notify that item was reverted.
"""
with self.store.bulk_operations(self.course.id):
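            # Group the lookups below inside one bulk operation so the store can batch/cache its reads.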
source_item = self.store.get_item(item_location)
old_parent = self.store.get_item(old_parent_location)
expected_parent = self.store.get_item(expected_parent_location)
assert expected_parent_location == source_item.get_parent().location
            # If the item was reverted, its actual (published) parent is the one that is the current parent now,
            # i.e. expected_parent_location; otherwise it is old_parent_location.
published_parent_location = expected_parent_location if is_reverted else old_parent_location
# Check parent locations wrt branches
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred):
assert expected_parent_location == self.store.get_item(item_location).get_parent().location
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only):
assert published_parent_location == self.store.get_item(item_location).get_parent().location
# Make location specific to published branch for verify_get_parent_locations_results call.
published_parent_location = published_parent_location.for_branch(ModuleStoreEnum.BranchName.published)
# Verify expected item parent locations
self.verify_get_parent_locations_results([
(item_location, expected_parent_location, None),
(item_location, expected_parent_location, ModuleStoreEnum.RevisionOption.draft_preferred),
(item_location, published_parent_location, ModuleStoreEnum.RevisionOption.published_only),
])
# Also verify item.parent has correct parent location set.
assert source_item.parent == expected_parent_location
assert source_item.parent == self.store.get_parent_location(item_location)
# Item should be present in new parent's children list but not in old parent's children list.
assert item_location in expected_parent.children
assert item_location not in old_parent.children
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_update_item_parent(self, store_type):
"""
Test that when we move an item from old to new parent, the item should be present in new parent.
"""
self.initdb(store_type)
self._create_block_hierarchy()
# Publish the course.
self.course = self.store.publish(self.course.location, self.user_id) # lint-amnesty, pylint: disable=attribute-defined-outside-init
# Move child problem_x1a_1 to vertical_y1a.
item_location = self.problem_x1a_1 # lint-amnesty, pylint: disable=no-member
new_parent_location = self.vertical_y1a # lint-amnesty, pylint: disable=no-member
old_parent_location = self.vertical_x1a # lint-amnesty, pylint: disable=no-member
updated_item_location = self.store.update_item_parent(
item_location, new_parent_location, old_parent_location, self.user_id
)
assert updated_item_location == item_location
self.verify_item_parent(
item_location=item_location,
expected_parent_location=new_parent_location,
old_parent_location=old_parent_location
)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_move_revert(self, store_type):
"""
        Test that when we move an item to a new parent and then discard changes on the original parent, the item
        is placed back in the original parent.
"""
self.initdb(store_type)
self._create_block_hierarchy()
# Publish the course
self.course = self.store.publish(self.course.location, self.user_id) # lint-amnesty, pylint: disable=attribute-defined-outside-init
# Move child problem_x1a_1 to vertical_y1a.
item_location = self.problem_x1a_1 # lint-amnesty, pylint: disable=no-member
new_parent_location = self.vertical_y1a # lint-amnesty, pylint: disable=no-member
old_parent_location = self.vertical_x1a # lint-amnesty, pylint: disable=no-member
updated_item_location = self.store.update_item_parent(
item_location, new_parent_location, old_parent_location, self.user_id
)
assert updated_item_location == item_location
self.verify_item_parent(
item_location=item_location,
expected_parent_location=new_parent_location,
old_parent_location=old_parent_location
)
# Now discard changes in old_parent_location i.e original parent.
self.store.revert_to_published(old_parent_location, self.user_id)
self.verify_item_parent(
item_location=item_location,
expected_parent_location=old_parent_location,
old_parent_location=new_parent_location,
is_reverted=True
)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_move_delete_revert(self, store_type):
"""
        Test that when we move an item, delete it, and then discard changes for the original parent, the item is
        placed back in the original parent.
"""
self.initdb(store_type)
self._create_block_hierarchy()
# Publish the course
self.course = self.store.publish(self.course.location, self.user_id) # lint-amnesty, pylint: disable=attribute-defined-outside-init
# Move child problem_x1a_1 to vertical_y1a.
item_location = self.problem_x1a_1 # lint-amnesty, pylint: disable=no-member
new_parent_location = self.vertical_y1a # lint-amnesty, pylint: disable=no-member
old_parent_location = self.vertical_x1a # lint-amnesty, pylint: disable=no-member
updated_item_location = self.store.update_item_parent(
item_location, new_parent_location, old_parent_location, self.user_id
)
assert updated_item_location == item_location
self.verify_item_parent(
item_location=item_location,
expected_parent_location=new_parent_location,
old_parent_location=old_parent_location
)
# Now delete the item.
self.store.delete_item(item_location, self.user_id)
# Now discard changes in old_parent_location i.e original parent.
self.store.revert_to_published(old_parent_location, self.user_id)
self.verify_item_parent(
item_location=item_location,
expected_parent_location=old_parent_location,
old_parent_location=new_parent_location,
is_reverted=True
)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_move_revert_move(self, store_type):
"""
        Test that when we move an item to a new parent and discard changes for the old parent, the item reappears
        in the old parent, and that moving it from the old parent to the new parent again places it under the
        new parent.
"""
self.initdb(store_type)
self._create_block_hierarchy()
# Publish the course
self.course = self.store.publish(self.course.location, self.user_id) # lint-amnesty, pylint: disable=attribute-defined-outside-init
# Move child problem_x1a_1 to vertical_y1a.
item_location = self.problem_x1a_1 # lint-amnesty, pylint: disable=no-member
new_parent_location = self.vertical_y1a # lint-amnesty, pylint: disable=no-member
old_parent_location = self.vertical_x1a # lint-amnesty, pylint: disable=no-member
updated_item_location = self.store.update_item_parent(
item_location, new_parent_location, old_parent_location, self.user_id
)
assert updated_item_location == item_location
self.verify_item_parent(
item_location=item_location,
expected_parent_location=new_parent_location,
old_parent_location=old_parent_location
)
# Now discard changes in old_parent_location i.e original parent.
self.store.revert_to_published(old_parent_location, self.user_id)
self.verify_item_parent(
item_location=item_location,
expected_parent_location=old_parent_location,
old_parent_location=new_parent_location,
is_reverted=True
)
# Again try to move from x1 to y1
updated_item_location = self.store.update_item_parent(
item_location, new_parent_location, old_parent_location, self.user_id
)
assert updated_item_location == item_location
self.verify_item_parent(
item_location=item_location,
expected_parent_location=new_parent_location,
old_parent_location=old_parent_location
)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_move_edited_revert(self, store_type):
"""
        Test that when we move an edited item from the old parent to a new parent and then discard changes in the
        old parent, the item is placed back under the original parent in its initial state.
"""
self.initdb(store_type)
self._create_block_hierarchy()
# Publish the course.
self.course = self.store.publish(self.course.location, self.user_id) # lint-amnesty, pylint: disable=attribute-defined-outside-init
# Move child problem_x1a_1 to vertical_y1a.
item_location = self.problem_x1a_1 # lint-amnesty, pylint: disable=no-member
new_parent_location = self.vertical_y1a # lint-amnesty, pylint: disable=no-member
old_parent_location = self.vertical_x1a # lint-amnesty, pylint: disable=no-member
problem = self.store.get_item(self.problem_x1a_1) # lint-amnesty, pylint: disable=no-member
orig_display_name = problem.display_name
# Change display name of problem and update just it.
problem.display_name = 'updated'
self.store.update_item(problem, self.user_id)
updated_problem = self.store.get_item(self.problem_x1a_1) # lint-amnesty, pylint: disable=no-member
assert updated_problem.display_name == 'updated'
# Now, move from x1 to y1.
updated_item_location = self.store.update_item_parent(
item_location, new_parent_location, old_parent_location, self.user_id
)
assert updated_item_location == item_location
self.verify_item_parent(
item_location=item_location,
expected_parent_location=new_parent_location,
old_parent_location=old_parent_location
)
# Now discard changes in old_parent_location i.e original parent.
self.store.revert_to_published(old_parent_location, self.user_id)
# Check that problem has the original name back.
reverted_problem = self.store.get_item(self.problem_x1a_1) # lint-amnesty, pylint: disable=no-member
assert orig_display_name == reverted_problem.display_name
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_move_1_moved_1_unchanged(self, store_type):
"""
        Test that when we move an item out of an old parent which has multiple items, only the moved item's parent
        changes while the other items remain inside the old parent.
"""
self.initdb(store_type)
self._create_block_hierarchy()
# Create some children in vertical_x1a
problem_item2 = self.store.create_child(self.user_id, self.vertical_x1a, 'problem', 'Problem_Item2') # lint-amnesty, pylint: disable=no-member
# Publish the course.
self.course = self.store.publish(self.course.location, self.user_id) # lint-amnesty, pylint: disable=attribute-defined-outside-init
item_location = self.problem_x1a_1 # lint-amnesty, pylint: disable=no-member
new_parent_location = self.vertical_y1a # lint-amnesty, pylint: disable=no-member
old_parent_location = self.vertical_x1a # lint-amnesty, pylint: disable=no-member
# Move problem_x1a_1 from x1 to y1.
updated_item_location = self.store.update_item_parent(
item_location, new_parent_location, old_parent_location, self.user_id
)
assert updated_item_location == item_location
self.verify_item_parent(
item_location=item_location,
expected_parent_location=new_parent_location,
old_parent_location=old_parent_location
)
# Check that problem_item2 is still present in vertical_x1a
problem_item2 = self.store.get_item(problem_item2.location)
assert problem_item2.parent == self.vertical_x1a # lint-amnesty, pylint: disable=no-member
assert problem_item2.location in problem_item2.get_parent().children
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_move_1_moved_1_edited(self, store_type):
"""
        Test that with an old parent holding multiple items, where we edit one item and move another from the old
        parent to the new parent, discarding changes in the old parent reverts the edited item and moves the
        moved item back to its old location.
"""
self.initdb(store_type)
self._create_block_hierarchy()
# Create some children in vertical_x1a
problem_item2 = self.store.create_child(self.user_id, self.vertical_x1a, 'problem', 'Problem_Item2') # lint-amnesty, pylint: disable=no-member
orig_display_name = problem_item2.display_name
# Publish the course.
self.course = self.store.publish(self.course.location, self.user_id) # lint-amnesty, pylint: disable=attribute-defined-outside-init
# Edit problem_item2.
problem_item2.display_name = 'updated'
self.store.update_item(problem_item2, self.user_id)
updated_problem2 = self.store.get_item(problem_item2.location)
assert updated_problem2.display_name == 'updated'
item_location = self.problem_x1a_1 # lint-amnesty, pylint: disable=no-member
new_parent_location = self.vertical_y1a # lint-amnesty, pylint: disable=no-member
old_parent_location = self.vertical_x1a # lint-amnesty, pylint: disable=no-member
# Move problem_x1a_1 from x1 to y1.
updated_item_location = self.store.update_item_parent(
item_location, new_parent_location, old_parent_location, self.user_id
)
assert updated_item_location == item_location
self.verify_item_parent(
item_location=item_location,
expected_parent_location=new_parent_location,
old_parent_location=old_parent_location
)
# Now discard changes in old_parent_location i.e original parent.
self.store.revert_to_published(old_parent_location, self.user_id)
# Check that problem_item2 has the original name back.
reverted_problem2 = self.store.get_item(problem_item2.location)
assert orig_display_name == reverted_problem2.display_name
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_move_1_moved_1_deleted(self, store_type):
"""
        Test that with an old parent holding multiple items, where we delete one item and move another from the
        old parent to the new parent, discarding changes in the old parent restores the deleted item and moves
        the moved item back to its old location.
"""
self.initdb(store_type)
self._create_block_hierarchy()
# Create some children in vertical_x1a
problem_item2 = self.store.create_child(self.user_id, self.vertical_x1a, 'problem', 'Problem_Item2') # lint-amnesty, pylint: disable=no-member
orig_display_name = problem_item2.display_name # lint-amnesty, pylint: disable=unused-variable
# Publish the course.
self.course = self.store.publish(self.course.location, self.user_id) # lint-amnesty, pylint: disable=attribute-defined-outside-init
# Now delete other problem problem_item2.
self.store.delete_item(problem_item2.location, self.user_id)
# Move child problem_x1a_1 to vertical_y1a.
item_location = self.problem_x1a_1 # lint-amnesty, pylint: disable=no-member
new_parent_location = self.vertical_y1a # lint-amnesty, pylint: disable=no-member
old_parent_location = self.vertical_x1a # lint-amnesty, pylint: disable=no-member
# Move problem_x1a_1 from x1 to y1.
updated_item_location = self.store.update_item_parent(
item_location, new_parent_location, old_parent_location, self.user_id
)
assert updated_item_location == item_location
self.verify_item_parent(
item_location=item_location,
expected_parent_location=new_parent_location,
old_parent_location=old_parent_location
)
# Now discard changes in old_parent_location i.e original parent.
self.store.revert_to_published(old_parent_location, self.user_id)
# Check that problem_item2 is also back in vertical_x1a
problem_item2 = self.store.get_item(problem_item2.location)
assert problem_item2.parent == self.vertical_x1a # lint-amnesty, pylint: disable=no-member
assert problem_item2.location in problem_item2.get_parent().children
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_get_parent_locations_moved_child(self, default_ms):
self.initdb(default_ms)
self._create_block_hierarchy()
# publish the course
self.course = self.store.publish(self.course.location, self.user_id) # lint-amnesty, pylint: disable=attribute-defined-outside-init
with self.store.bulk_operations(self.course.id):
# make drafts of verticals
self.store.convert_to_draft(self.vertical_x1a, self.user_id) # lint-amnesty, pylint: disable=no-member
self.store.convert_to_draft(self.vertical_y1a, self.user_id) # lint-amnesty, pylint: disable=no-member
# move child problem_x1a_1 to vertical_y1a
child_to_move_location = self.problem_x1a_1 # lint-amnesty, pylint: disable=no-member
new_parent_location = self.vertical_y1a # lint-amnesty, pylint: disable=no-member
old_parent_location = self.vertical_x1a # lint-amnesty, pylint: disable=no-member
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred):
old_parent = self.store.get_item(child_to_move_location).get_parent()
assert old_parent_location == old_parent.location
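                # Map the child's locator into the old parent's course key so the contextualized
                # key can be removed from the old parent's children list below.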
child_to_move_contextualized = child_to_move_location.map_into_course(old_parent.location.course_key)
old_parent.children.remove(child_to_move_contextualized)
self.store.update_item(old_parent, self.user_id)
new_parent = self.store.get_item(new_parent_location)
new_parent.children.append(child_to_move_location)
self.store.update_item(new_parent, self.user_id)
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred):
assert new_parent_location == self.store.get_item(child_to_move_location).get_parent().location
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only):
assert old_parent_location == self.store.get_item(child_to_move_location).get_parent().location
old_parent_published_location = old_parent_location.for_branch(ModuleStoreEnum.BranchName.published)
self.verify_get_parent_locations_results([
(child_to_move_location, new_parent_location, None),
(child_to_move_location, new_parent_location, ModuleStoreEnum.RevisionOption.draft_preferred),
(child_to_move_location, old_parent_published_location, ModuleStoreEnum.RevisionOption.published_only),
])
# publish the course again
self.store.publish(self.course.location, self.user_id)
new_parent_published_location = new_parent_location.for_branch(ModuleStoreEnum.BranchName.published)
self.verify_get_parent_locations_results([
(child_to_move_location, new_parent_location, None),
(child_to_move_location, new_parent_location, ModuleStoreEnum.RevisionOption.draft_preferred),
(child_to_move_location, new_parent_published_location, ModuleStoreEnum.RevisionOption.published_only),
])
@ddt.data(ModuleStoreEnum.Type.mongo)
def test_get_parent_locations_deleted_child(self, default_ms):
self.initdb(default_ms)
self._create_block_hierarchy()
# publish the course
self.store.publish(self.course.location, self.user_id)
# make draft of vertical
self.store.convert_to_draft(self.vertical_y1a, self.user_id) # lint-amnesty, pylint: disable=no-member
# delete child problem_y1a_1
child_to_delete_location = self.problem_y1a_1 # lint-amnesty, pylint: disable=no-member
old_parent_location = self.vertical_y1a # lint-amnesty, pylint: disable=no-member
self.store.delete_item(child_to_delete_location, self.user_id)
self.verify_get_parent_locations_results([
(child_to_delete_location, old_parent_location, None),
# Note: The following could be an unexpected result, but we want to avoid an extra database call
(child_to_delete_location, old_parent_location, ModuleStoreEnum.RevisionOption.draft_preferred),
(child_to_delete_location, old_parent_location, ModuleStoreEnum.RevisionOption.published_only),
])
# publish the course again
self.store.publish(self.course.location, self.user_id)
self.verify_get_parent_locations_results([
(child_to_delete_location, None, None),
(child_to_delete_location, None, ModuleStoreEnum.RevisionOption.draft_preferred),
(child_to_delete_location, None, ModuleStoreEnum.RevisionOption.published_only),
])
@ddt.data(ModuleStoreEnum.Type.mongo)
def test_get_parent_location_draft(self, default_ms):
"""
        Test that the "get_parent_location" method returns the first published parent
        for a draft component if it has many possible parents (including
        draft parents).
"""
self.initdb(default_ms)
course_id = self.course_locations[self.MONGO_COURSEID].course_key
# create parented children
self._create_block_hierarchy()
self.store.publish(self.course.location, self.user_id)
mongo_store = self.store._get_modulestore_for_courselike(course_id) # pylint: disable=protected-access
# add another parent (unit) "vertical_x1b" for problem "problem_x1a_1"
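        # Note: this writes directly to the underlying Mongo collection, bypassing the
        # modulestore API, so the problem ends up with more than one parent.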
mongo_store.collection.update_one(
self.vertical_x1b.to_deprecated_son('_id.'), # lint-amnesty, pylint: disable=no-member
{'$push': {'definition.children': six.text_type(self.problem_x1a_1)}} # lint-amnesty, pylint: disable=no-member
)
# convert first parent (unit) "vertical_x1a" of problem "problem_x1a_1" to draft
self.store.convert_to_draft(self.vertical_x1a, self.user_id) # lint-amnesty, pylint: disable=no-member
item = self.store.get_item(self.vertical_x1a) # lint-amnesty, pylint: disable=no-member
assert self.store.has_published_version(item)
# now problem "problem_x1a_1" has 3 parents [vertical_x1a (draft),
# vertical_x1a (published), vertical_x1b (published)]
# check that "get_parent_location" method of draft branch returns first
# published parent "vertical_x1a" without raising "AssertionError" for
# problem location revision
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, course_id):
parent = mongo_store.get_parent_location(self.problem_x1a_1) # lint-amnesty, pylint: disable=no-member
assert parent == self.vertical_x1a # lint-amnesty, pylint: disable=no-member
# Draft:
# Problem path:
# 1. Get problem
# 2-6. get parent and rest of ancestors up to course
# 7-8. get sequential, compute inheritance
# 8-9. get vertical, compute inheritance
# 10-11. get other vertical_x1b (why?) and compute inheritance
# Split: active_versions & structure
@ddt.data((ModuleStoreEnum.Type.mongo, [12, 3], 0), (ModuleStoreEnum.Type.split, [3, 2], 0))
@ddt.unpack
def test_path_to_location(self, default_ms, num_finds, num_sends):
"""
Make sure that path_to_location works
"""
self.initdb(default_ms)
course_key = self.course_locations[self.MONGO_COURSEID].course_key
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only, course_key):
self._create_block_hierarchy()
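        # Each entry pairs a starting block with the path tuple path_to_location is expected
        # to return: (course key, chapter, sequential, vertical, position, block itself).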
should_work = (
(self.problem_x1a_2, # lint-amnesty, pylint: disable=no-member
(course_key, u"Chapter_x", u"Sequential_x1", u'Vertical_x1a', '1', self.problem_x1a_2)), # lint-amnesty, pylint: disable=no-member
(self.chapter_x, # lint-amnesty, pylint: disable=no-member
(course_key, "Chapter_x", None, None, None, self.chapter_x)), # lint-amnesty, pylint: disable=no-member
)
for location, expected in should_work:
# each iteration has different find count, pop this iter's find count
with check_mongo_calls(num_finds.pop(0), num_sends):
path = path_to_location(self.store, location)
assert path == expected
not_found = (<|fim▁hole|> course_key.make_usage_key('course', 'NotHome'),
)
for location in not_found:
with pytest.raises(ItemNotFoundError):
path_to_location(self.store, location)
# Orphaned items should not be found.
orphan = course_key.make_usage_key('chapter', 'OrphanChapter')
self.store.create_item(
self.user_id,
orphan.course_key,
orphan.block_type,
block_id=orphan.block_id
)
with pytest.raises(NoPathToItem):
path_to_location(self.store, orphan)
def test_navigation_index(self):
"""
Make sure that navigation_index correctly parses the various position values that we might get from calls to
path_to_location
"""
assert 1 == navigation_index('1')
assert 10 == navigation_index('10')
assert navigation_index(None) is None
assert 1 == navigation_index('1_2')
assert 5 == navigation_index('5_2')
assert 7 == navigation_index('7_3_5_6_')
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_revert_to_published_root_draft(self, default_ms):
"""
Test calling revert_to_published on draft vertical.
"""
self.initdb(default_ms)
self._create_block_hierarchy()
vertical = self.store.get_item(self.vertical_x1a) # lint-amnesty, pylint: disable=no-member
vertical_children_num = len(vertical.children)
self.store.publish(self.course.location, self.user_id)
assert not self._has_changes(self.vertical_x1a) # lint-amnesty, pylint: disable=no-member
# delete leaf problem (will make parent vertical a draft)
self.store.delete_item(self.problem_x1a_1, self.user_id) # lint-amnesty, pylint: disable=no-member
assert self._has_changes(self.vertical_x1a) # lint-amnesty, pylint: disable=no-member
draft_parent = self.store.get_item(self.vertical_x1a) # lint-amnesty, pylint: disable=no-member
assert (vertical_children_num - 1) == len(draft_parent.children)
published_parent = self.store.get_item(
self.vertical_x1a, # lint-amnesty, pylint: disable=no-member
revision=ModuleStoreEnum.RevisionOption.published_only
)
assert vertical_children_num == len(published_parent.children)
self.store.revert_to_published(self.vertical_x1a, self.user_id) # lint-amnesty, pylint: disable=no-member
reverted_parent = self.store.get_item(self.vertical_x1a) # lint-amnesty, pylint: disable=no-member
        assert vertical_children_num == len(reverted_parent.children)
self.assertBlocksEqualByFields(reverted_parent, published_parent)
assert not self._has_changes(self.vertical_x1a) # lint-amnesty, pylint: disable=no-member
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_revert_to_published_root_published(self, default_ms):
"""
Test calling revert_to_published on a published vertical with a draft child.
"""
self.initdb(default_ms)
self._create_block_hierarchy()
self.store.publish(self.course.location, self.user_id)
problem = self.store.get_item(self.problem_x1a_1) # lint-amnesty, pylint: disable=no-member
orig_display_name = problem.display_name
# Change display name of problem and update just it (so parent remains published)
problem.display_name = "updated before calling revert"
self.store.update_item(problem, self.user_id)
self.store.revert_to_published(self.vertical_x1a, self.user_id) # lint-amnesty, pylint: disable=no-member
reverted_problem = self.store.get_item(self.problem_x1a_1) # lint-amnesty, pylint: disable=no-member
assert orig_display_name == reverted_problem.display_name
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_revert_to_published_no_draft(self, default_ms):
"""
Test calling revert_to_published on vertical with no draft content does nothing.
"""
self.initdb(default_ms)
self._create_block_hierarchy()
self.store.publish(self.course.location, self.user_id)
orig_vertical = self.store.get_item(self.vertical_x1a) # lint-amnesty, pylint: disable=no-member
self.store.revert_to_published(self.vertical_x1a, self.user_id) # lint-amnesty, pylint: disable=no-member
reverted_vertical = self.store.get_item(self.vertical_x1a) # lint-amnesty, pylint: disable=no-member
self.assertBlocksEqualByFields(orig_vertical, reverted_vertical)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_revert_to_published_no_published(self, default_ms):
"""
Test calling revert_to_published on vertical with no published version errors.
"""
self.initdb(default_ms)
self._create_block_hierarchy()
with pytest.raises(InvalidVersionError):
self.store.revert_to_published(self.vertical_x1a, self.user_id) # lint-amnesty, pylint: disable=no-member
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_revert_to_published_direct_only(self, default_ms):
"""
Test calling revert_to_published on a direct-only item is a no-op.
"""
self.initdb(default_ms)
self._create_block_hierarchy()
num_children = len(self.store.get_item(self.sequential_x1).children) # lint-amnesty, pylint: disable=no-member
self.store.revert_to_published(self.sequential_x1, self.user_id) # lint-amnesty, pylint: disable=no-member
reverted_parent = self.store.get_item(self.sequential_x1) # lint-amnesty, pylint: disable=no-member
# It does not discard the child vertical, even though that child is a draft (with no published version)
assert num_children == len(reverted_parent.children)
def test_reset_course_to_version(self):
"""
        Test calling `DraftVersioningModuleStore.reset_course_to_version`.
"""
# Set up test course.
self.initdb(ModuleStoreEnum.Type.split) # Old Mongo does not support this operation.
self._create_block_hierarchy()
self.store.publish(self.course.location, self.user_id)
# Get children of a vertical as a set.
        # We will use this set as a basis for content comparison in this test.
original_vertical = self.store.get_item(self.vertical_x1a) # lint-amnesty, pylint: disable=no-member
original_vertical_children = set(original_vertical.children)
# Find the version_guid of our course by diving into Split Mongo.
split = self._get_split_modulestore()
course_index = split.get_course_index(self.course.location.course_key)
original_version_guid = course_index["versions"]["published-branch"]
# Reset course to currently-published version.
# This should be a no-op.
self.store.reset_course_to_version(
self.course.location.course_key,
original_version_guid,
self.user_id,
)
noop_reset_vertical = self.store.get_item(self.vertical_x1a) # lint-amnesty, pylint: disable=no-member
assert set(noop_reset_vertical.children) == original_vertical_children
# Delete a problem from the vertical and publish.
# Vertical should have one less problem than before.
self.store.delete_item(self.problem_x1a_1, self.user_id) # lint-amnesty, pylint: disable=no-member
self.store.publish(self.course.location, self.user_id)
modified_vertical = self.store.get_item(self.vertical_x1a) # lint-amnesty, pylint: disable=no-member
assert set(modified_vertical.children) == (
original_vertical_children - {self.problem_x1a_1} # lint-amnesty, pylint: disable=no-member
)
# Add a couple more children to the vertical.
# and publish a couple more times.
# We want to make sure we can restore from something a few versions back.
self.store.create_child(
self.user_id,
self.vertical_x1a, # lint-amnesty, pylint: disable=no-member
'problem',
block_id='new_child1',
)
self.store.publish(self.course.location, self.user_id)
self.store.create_child(
self.user_id,
self.vertical_x1a, # lint-amnesty, pylint: disable=no-member
'problem',
block_id='new_child2',
)
self.store.publish(self.course.location, self.user_id)
# Add another child, but don't publish.
# We want to make sure that this works with a dirty draft branch.
self.store.create_child(
self.user_id,
self.vertical_x1a, # lint-amnesty, pylint: disable=no-member
'problem',
block_id='new_child3',
)
# Reset course to original version.
# The restored vertical should have the same children as it did originally.
self.store.reset_course_to_version(
self.course.location.course_key,
original_version_guid,
self.user_id,
)
restored_vertical = self.store.get_item(self.vertical_x1a) # lint-amnesty, pylint: disable=no-member
assert set(restored_vertical.children) == original_vertical_children
def _get_split_modulestore(self):
"""
Grab the SplitMongo modulestore instance from within the Mixed modulestore.
Assumption: There is a SplitMongo modulestore within the Mixed modulestore.
        This assumption is hacky, but it seems OK because we're removing the
Old (non-Split) Mongo modulestores soon.
Returns: SplitMongoModuleStore
"""
for store in self.store.modulestores:
if isinstance(store, SplitMongoModuleStore):
return store
assert False, "SplitMongoModuleStore was not found in MixedModuleStore"
# Draft: get all items which can be or should have parents
# Split: active_versions, structure
@ddt.data((ModuleStoreEnum.Type.mongo, 1, 0), (ModuleStoreEnum.Type.split, 2, 0))
@ddt.unpack
def test_get_orphans(self, default_ms, max_find, max_send):
"""
Test finding orphans.
"""
self.initdb(default_ms)
course_id = self.course_locations[self.MONGO_COURSEID].course_key
# create parented children
self._create_block_hierarchy()
# orphans
orphan_locations = [
course_id.make_usage_key('chapter', 'OrphanChapter'),
course_id.make_usage_key('vertical', 'OrphanVertical'),
course_id.make_usage_key('problem', 'OrphanProblem'),
course_id.make_usage_key('html', 'OrphanHTML'),
]
# detached items (not considered as orphans)
detached_locations = [
course_id.make_usage_key('static_tab', 'StaticTab'),
course_id.make_usage_key('course_info', 'updates'),
]
for location in orphan_locations + detached_locations:
self.store.create_item(
self.user_id,
location.course_key,
location.block_type,
block_id=location.block_id
)
with check_mongo_calls(max_find, max_send):
found_orphans = self.store.get_orphans(self.course_locations[self.MONGO_COURSEID].course_key)
six.assertCountEqual(self, found_orphans, orphan_locations)
@ddt.data(ModuleStoreEnum.Type.mongo)
def test_get_non_orphan_parents(self, default_ms):
"""
Test finding non orphan parents from many possible parents.
"""
self.initdb(default_ms)
course_id = self.course_locations[self.MONGO_COURSEID].course_key
# create parented children
self._create_block_hierarchy()
self.store.publish(self.course.location, self.user_id)
# test that problem "problem_x1a_1" has only one published parent
mongo_store = self.store._get_modulestore_for_courselike(course_id) # pylint: disable=protected-access
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only, course_id):
parent = mongo_store.get_parent_location(self.problem_x1a_1) # lint-amnesty, pylint: disable=no-member
assert parent == self.vertical_x1a # lint-amnesty, pylint: disable=no-member
# add some published orphans
orphan_sequential = course_id.make_usage_key('sequential', 'OrphanSequential')
orphan_vertical = course_id.make_usage_key('vertical', 'OrphanVertical')
orphan_locations = [orphan_sequential, orphan_vertical]
for location in orphan_locations:
self.store.create_item(
self.user_id,
location.course_key,
location.block_type,
block_id=location.block_id
)
self.store.publish(location, self.user_id)
found_orphans = mongo_store.get_orphans(course_id)
assert set(found_orphans) == set(orphan_locations)
assert len(set(found_orphans)) == 2
        # add orphan vertical and sequential as additional parents of problem "problem_x1a_1"
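        # As in test_get_parent_location_draft above, write directly to the Mongo collection
        # so these orphan blocks list the problem among their children.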
mongo_store.collection.update_one(
orphan_sequential.to_deprecated_son('_id.'),
{'$push': {'definition.children': six.text_type(self.problem_x1a_1)}} # lint-amnesty, pylint: disable=no-member
)
mongo_store.collection.update_one(
orphan_vertical.to_deprecated_son('_id.'),
{'$push': {'definition.children': six.text_type(self.problem_x1a_1)}} # lint-amnesty, pylint: disable=no-member
)
# test that "get_parent_location" method of published branch still returns the correct non-orphan parent for
# problem "problem_x1a_1" since the two other parents are orphans
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only, course_id):
parent = mongo_store.get_parent_location(self.problem_x1a_1) # lint-amnesty, pylint: disable=no-member
assert parent == self.vertical_x1a # lint-amnesty, pylint: disable=no-member
# now add valid published vertical as another parent of problem
mongo_store.collection.update_one(self.sequential_x1.to_deprecated_son('_id.'), {'$push': {'definition.children': six.text_type(self.problem_x1a_1)}}) # lint-amnesty, pylint: disable=no-member, line-too-long
# now check that "get_parent_location" method of published branch raises "ReferentialIntegrityError" for
        # problem "problem_x1a_1" since it now has 2 valid published parents
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only, course_id):
assert self.store.has_item(self.problem_x1a_1) # lint-amnesty, pylint: disable=no-member
with pytest.raises(ReferentialIntegrityError):
self.store.get_parent_location(self.problem_x1a_1) # lint-amnesty, pylint: disable=no-member
@ddt.data(ModuleStoreEnum.Type.mongo)
def test_create_item_from_parent_location(self, default_ms):
"""
Test a code path missed by the above: passing an old-style location as parent but no
new location for the child
"""
self.initdb(default_ms)
self.store.create_child(
self.user_id,
self.course_locations[self.MONGO_COURSEID],
'problem',
block_id='orphan'
)
orphans = self.store.get_orphans(self.course_locations[self.MONGO_COURSEID].course_key)
assert len(orphans) == 0, 'unexpected orphans: {}'.format(orphans)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_create_item_populates_edited_info(self, default_ms):
self.initdb(default_ms)
block = self.store.create_item(
self.user_id,
self.course.location.course_key,
'problem'
)
assert self.user_id == block.edited_by
assert datetime.datetime.now(UTC) > block.edited_on
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_create_item_populates_subtree_edited_info(self, default_ms):
self.initdb(default_ms)
block = self.store.create_item(
self.user_id,
self.course.location.course_key,
'problem'
)
assert self.user_id == block.subtree_edited_by
assert datetime.datetime.now(UTC) > block.subtree_edited_on
# Draft: wildcard search of draft and split
# Split: wildcard search of draft and split
@ddt.data((ModuleStoreEnum.Type.mongo, 2, 0), (ModuleStoreEnum.Type.split, 2, 0))
@ddt.unpack
def test_get_courses_for_wiki(self, default_ms, max_find, max_send):
"""
Test the get_courses_for_wiki method
"""
self.initdb(default_ms)
# Test Mongo wiki
with check_mongo_calls(max_find, max_send):
wiki_courses = self.store.get_courses_for_wiki('999')
assert len(wiki_courses) == 1
assert self.course_locations[self.MONGO_COURSEID].course_key.replace(branch=None) in wiki_courses
assert len(self.store.get_courses_for_wiki('edX.simple.2012_Fall')) == 0
assert len(self.store.get_courses_for_wiki('no_such_wiki')) == 0
# Draft:
# Find: find vertical, find children
# Sends:
# 1. delete all of the published nodes in subtree
# 2. insert vertical as published (deleted in step 1) w/ the deleted problems as children
# 3-6. insert the 3 problems and 1 html as published
# Split: active_versions, 2 structures (pre & post published?)
# Sends:
# - insert structure
# - write index entry
@ddt.data((ModuleStoreEnum.Type.mongo, 2, 6), (ModuleStoreEnum.Type.split, 3, 2))
@ddt.unpack
def test_unpublish(self, default_ms, max_find, max_send):
"""
Test calling unpublish
"""
self.initdb(default_ms)
if default_ms == ModuleStoreEnum.Type.mongo and mongo_uses_error_check(self.store):
max_find += 1
self._create_block_hierarchy()
# publish
self.store.publish(self.course.location, self.user_id)
published_xblock = self.store.get_item(
self.vertical_x1a, # lint-amnesty, pylint: disable=no-member
revision=ModuleStoreEnum.RevisionOption.published_only
)
assert published_xblock is not None
# unpublish
with check_mongo_calls(max_find, max_send):
self.store.unpublish(self.vertical_x1a, self.user_id) # lint-amnesty, pylint: disable=no-member
with pytest.raises(ItemNotFoundError):
self.store.get_item(
self.vertical_x1a, # lint-amnesty, pylint: disable=no-member
revision=ModuleStoreEnum.RevisionOption.published_only
)
# make sure draft version still exists
draft_xblock = self.store.get_item(
self.vertical_x1a, # lint-amnesty, pylint: disable=no-member
revision=ModuleStoreEnum.RevisionOption.draft_only
)
assert draft_xblock is not None
# Draft: specific query for revision None
# Split: active_versions, structure
@ddt.data((ModuleStoreEnum.Type.mongo, 1, 0), (ModuleStoreEnum.Type.split, 2, 0))
@ddt.unpack
def test_has_published_version(self, default_ms, max_find, max_send):
"""
Test the has_published_version method
"""
self.initdb(default_ms)
self._create_block_hierarchy()
# start off as Private
item = self.store.create_child(self.user_id, self.writable_chapter_location, 'problem', 'test_compute_publish_state') # lint-amnesty, pylint: disable=line-too-long
item_location = item.location
with check_mongo_calls(max_find, max_send):
assert not self.store.has_published_version(item)
# Private -> Public
self.store.publish(item_location, self.user_id)
item = self.store.get_item(item_location)
assert self.store.has_published_version(item)
# Public -> Private
self.store.unpublish(item_location, self.user_id)
item = self.store.get_item(item_location)
assert not self.store.has_published_version(item)
# Private -> Public
self.store.publish(item_location, self.user_id)
item = self.store.get_item(item_location)
assert self.store.has_published_version(item)
# Public -> Draft with NO changes
self.store.convert_to_draft(item_location, self.user_id)
item = self.store.get_item(item_location)
assert self.store.has_published_version(item)
# Draft WITH changes
item.display_name = 'new name'
item = self.store.update_item(item, self.user_id)
assert self.store.has_changes(item)
assert self.store.has_published_version(item)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_update_edit_info_ancestors(self, default_ms):
"""
Tests that edited_on, edited_by, subtree_edited_on, and subtree_edited_by are set correctly during update
"""
self.initdb(default_ms)
test_course = self.store.create_course('testx', 'GreekHero', 'test_run', self.user_id)
def check_node(location_key, after, before, edited_by, subtree_after, subtree_before, subtree_by):
"""
Checks that the node given by location_key matches the given edit_info constraints.
"""
node = self.store.get_item(location_key)
if after:
assert after < node.edited_on
assert node.edited_on < before
assert node.edited_by == edited_by
if subtree_after:
assert subtree_after < node.subtree_edited_on
assert node.subtree_edited_on < subtree_before
assert node.subtree_edited_by == subtree_by
with self.store.bulk_operations(test_course.id):
# Create a dummy vertical & html to test against
component = self.store.create_child(
self.user_id,
test_course.location,
'vertical',
block_id='test_vertical'
)
child = self.store.create_child(
self.user_id,
component.location,
'html',
block_id='test_html'
)
sibling = self.store.create_child(
self.user_id,
component.location,
'html',
block_id='test_html_no_change'
)
after_create = datetime.datetime.now(UTC)
# Verify that all nodes were last edited in the past by create_user
for block in [component, child, sibling]:
check_node(block.location, None, after_create, self.user_id, None, after_create, self.user_id)
# Change the component, then check that there now are changes
component.display_name = 'Changed Display Name'
editing_user = self.user_id - 2
with self.store.bulk_operations(test_course.id): # TNL-764 bulk ops disabled ancestor updates
component = self.store.update_item(component, editing_user)
after_edit = datetime.datetime.now(UTC)
check_node(component.location, after_create, after_edit, editing_user, after_create, after_edit, editing_user)
# but child didn't change
check_node(child.location, None, after_create, self.user_id, None, after_create, self.user_id)
# Change the child
child = self.store.get_item(child.location)
child.display_name = 'Changed Display Name'
self.store.update_item(child, user_id=editing_user)
after_edit = datetime.datetime.now(UTC)
# Verify that child was last edited between after_create and after_edit by edit_user
check_node(child.location, after_create, after_edit, editing_user, after_create, after_edit, editing_user)
# Verify that ancestors edit info is unchanged, but their subtree edit info matches child
check_node(test_course.location, None, after_create, self.user_id, after_create, after_edit, editing_user)
# Verify that others have unchanged edit info
check_node(sibling.location, None, after_create, self.user_id, None, after_create, self.user_id)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_update_edit_info(self, default_ms):
"""
Tests that edited_on and edited_by are set correctly during an update
"""
self.initdb(default_ms)
test_course = self.store.create_course('testx', 'GreekHero', 'test_run', self.user_id)
# Create a dummy component to test against
component = self.store.create_child(
self.user_id,
test_course.location,
'vertical',
)
# Store the current edit time and verify that user created the component
assert component.edited_by == self.user_id
old_edited_on = component.edited_on
edit_user = self.user_id - 2
# Change the component
component.display_name = 'Changed'
self.store.update_item(component, edit_user)
updated_component = self.store.get_item(component.location)
# Verify the ordering of edit times and that dummy_user made the edit
assert old_edited_on < updated_component.edited_on
assert updated_component.edited_by == edit_user
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_update_published_info(self, default_ms):
"""
Tests that published_on and published_by are set correctly
"""
self.initdb(default_ms)
test_course = self.store.create_course('testx', 'GreekHero', 'test_run', self.user_id)
publish_user = 456
# Create a dummy component to test against
component = self.store.create_child(
self.user_id,
test_course.location,
'vertical',
)
# Store the current time, then publish
old_time = datetime.datetime.now(UTC)
self.store.publish(component.location, publish_user)
updated_component = self.store.get_item(component.location)
# Verify the time order and that publish_user caused publication
assert old_time <= updated_component.published_on
assert updated_component.published_by == publish_user
# Verify that changing the item doesn't unset the published info
updated_component.display_name = 'changed'
self.store.update_item(updated_component, self.user_id)
updated_component = self.store.get_item(updated_component.location)
assert old_time <= updated_component.published_on
assert updated_component.published_by == publish_user
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_auto_publish(self, default_ms):
"""
Test that the correct things have been published automatically
Assumptions:
* we auto-publish courses, chapters, sequentials
* we don't auto-publish problems
"""
self.initdb(default_ms)
# test create_course to make sure we are autopublishing
test_course = self.store.create_course('testx', 'GreekHero', 'test_run', self.user_id)
assert self.store.has_published_version(test_course)
test_course_key = test_course.id
# test create_item of direct-only category to make sure we are autopublishing
chapter = self.store.create_child(self.user_id, test_course.location, 'chapter', 'Overview')
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only):
assert chapter.location in self.store.get_item(test_course.location).children
assert self.store.has_published_version(chapter)
chapter_location = chapter.location
# test create_child of direct-only category to make sure we are autopublishing
sequential = self.store.create_child(self.user_id, chapter_location, 'sequential', 'Sequence')
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only):
assert sequential.location in self.store.get_item(chapter_location).children
assert self.store.has_published_version(sequential)
# test update_item of direct-only category to make sure we are autopublishing
sequential.display_name = 'sequential1'
sequential = self.store.update_item(sequential, self.user_id)
assert self.store.has_published_version(sequential)
# test delete_item of direct-only category to make sure we are autopublishing
self.store.delete_item(sequential.location, self.user_id, revision=ModuleStoreEnum.RevisionOption.all)
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only):
assert sequential.location not in self.store.get_item(chapter_location).children
chapter = self.store.get_item(chapter.location.for_branch(None))
assert self.store.has_published_version(chapter)
# test create_child of NOT direct-only category to make sure we aren't autopublishing
problem_child = self.store.create_child(self.user_id, chapter_location, 'problem', 'Problem_Child')
assert not self.store.has_published_version(problem_child)
# test create_item of NOT direct-only category to make sure we aren't autopublishing
problem_item = self.store.create_item(self.user_id, test_course_key, 'problem', 'Problem_Item')
assert not self.store.has_published_version(problem_item)
# test update_item of NOT direct-only category to make sure we aren't autopublishing
problem_item.display_name = 'Problem_Item1'
problem_item = self.store.update_item(problem_item, self.user_id)
assert not self.store.has_published_version(problem_item)
# test delete_item of NOT direct-only category to make sure we aren't autopublishing
self.store.delete_item(problem_child.location, self.user_id)
chapter = self.store.get_item(chapter.location.for_branch(None))
assert self.store.has_published_version(chapter)
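# Illustrative sketch (not part of the original suite): auto-publish applies only to the
# "direct-only" block types. A hypothetical helper mirroring that rule could look like this;
# DIRECT_ONLY_CATEGORIES is the same list used by the signal-firing tests below.
def _example_is_auto_published(self, block_type):
    """Hypothetical helper: whether creating or updating this block type auto-publishes it."""
    return block_type in DIRECT_ONLY_CATEGORIES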
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_get_courses_for_wiki_shared(self, default_ms):
"""
Test two courses sharing the same wiki
"""
self.initdb(default_ms)
# verify initial state - initially, we should have a wiki for the Mongo course
wiki_courses = self.store.get_courses_for_wiki('999')
assert self.course_locations[self.MONGO_COURSEID].course_key.replace(branch=None) in wiki_courses
# set Mongo course to share the wiki with simple course
mongo_course = self.store.get_course(self.course_locations[self.MONGO_COURSEID].course_key)
mongo_course.wiki_slug = 'simple'
self.store.update_item(mongo_course, self.user_id)
# now mongo_course should not be retrievable with old wiki_slug
wiki_courses = self.store.get_courses_for_wiki('999')
assert len(wiki_courses) == 0
# but there should be one course with wiki_slug 'simple'
wiki_courses = self.store.get_courses_for_wiki('simple')
assert len(wiki_courses) == 1
assert self.course_locations[self.MONGO_COURSEID].course_key.replace(branch=None) in wiki_courses
# configure mongo course to use unique wiki_slug.
mongo_course = self.store.get_course(self.course_locations[self.MONGO_COURSEID].course_key)
mongo_course.wiki_slug = 'MITx.999.2013_Spring'
self.store.update_item(mongo_course, self.user_id)
# it should be retrievable with its new wiki_slug
wiki_courses = self.store.get_courses_for_wiki('MITx.999.2013_Spring')
assert len(wiki_courses) == 1
assert self.course_locations[self.MONGO_COURSEID].course_key.replace(branch=None) in wiki_courses
# and NOT retrievable with its old wiki_slug
wiki_courses = self.store.get_courses_for_wiki('simple')
assert len(wiki_courses) == 0
assert self.course_locations[self.MONGO_COURSEID].course_key.replace(branch=None) not in wiki_courses
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_branch_setting(self, default_ms):
"""
Test the branch_setting context manager
"""
self.initdb(default_ms)
self._create_block_hierarchy()
problem_location = self.problem_x1a_1.for_branch(None) # lint-amnesty, pylint: disable=no-member
problem_original_name = 'Problem_x1a_1'
course_key = problem_location.course_key
problem_new_name = 'New Problem Name'
def assertNumProblems(display_name, expected_number):
"""
Asserts the number of problems with the given display name is the given expected number.
"""
assert len(self.store.get_items(course_key.for_branch(None), settings={'display_name': display_name})) ==\
expected_number
def assertProblemNameEquals(expected_display_name):
"""
Asserts the display_name of the xblock at problem_location matches the given expected value.
"""
# check the display_name of the problem
problem = self.store.get_item(problem_location)
assert problem.display_name == expected_display_name
# there should be only 1 problem with the expected_display_name
assertNumProblems(expected_display_name, 1)
# verify Draft problem
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, course_key):
assert self.store.has_item(problem_location)
assertProblemNameEquals(problem_original_name)
# verify Published problem doesn't exist
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only, course_key):
assert not self.store.has_item(problem_location)
with pytest.raises(ItemNotFoundError):
self.store.get_item(problem_location)
# PUBLISH the problem
self.store.publish(self.vertical_x1a, self.user_id) # lint-amnesty, pylint: disable=no-member
self.store.publish(problem_location, self.user_id)
# verify Published problem
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only, course_key):
assert self.store.has_item(problem_location)
assertProblemNameEquals(problem_original_name)
# verify Draft-preferred
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, course_key):
assertProblemNameEquals(problem_original_name)
# EDIT name
problem = self.store.get_item(problem_location)
problem.display_name = problem_new_name
self.store.update_item(problem, self.user_id)
# verify Draft problem has new name
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, course_key):
assertProblemNameEquals(problem_new_name)
# verify Published problem still has old name
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only, course_key):
assertProblemNameEquals(problem_original_name)
# there should be no published problems with the new name
assertNumProblems(problem_new_name, 0)
# PUBLISH the problem
self.store.publish(problem_location, self.user_id)
# verify Published problem has new name
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only, course_key):
assertProblemNameEquals(problem_new_name)
# there should be no published problems with the old name
assertNumProblems(problem_original_name, 0)
# verify branch setting is published-only in manager
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only):
assert self.store.get_branch_setting() == ModuleStoreEnum.Branch.published_only
# verify branch setting is draft-preferred in manager
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred):
assert self.store.get_branch_setting() == ModuleStoreEnum.Branch.draft_preferred
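# Illustrative sketch (not part of the original suite): the typical read pattern with
# branch_setting - draft_preferred for authoring reads, published_only for learner-facing
# reads. `usage_key` is a placeholder for any block location in a course.
def _example_read_both_branches(self, usage_key):
    """Hypothetical helper reading the same block from both branches."""
    with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, usage_key.course_key):
        draft_block = self.store.get_item(usage_key)
    with self.store.branch_setting(ModuleStoreEnum.Branch.published_only, usage_key.course_key):
        # raises ItemNotFoundError if the block has never been published
        published_block = self.store.get_item(usage_key)
    return draft_block, published_block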
def verify_default_store(self, store_type):
"""
Verifies the default_store property
"""
assert self.store.default_modulestore.get_modulestore_type() == store_type
# verify internal helper method
store = self.store._get_modulestore_for_courselike() # pylint: disable=protected-access
assert store.get_modulestore_type() == store_type
# verify store used for creating a course
course = self.store.create_course("org", "course{}".format(uuid4().hex[:5]), "run", self.user_id)
assert course.system.modulestore.get_modulestore_type() == store_type
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_default_store(self, default_ms):
"""
Test the default store context manager
"""
# initialize the mixed modulestore
self._initialize_mixed(mappings={})
with self.store.default_store(default_ms):
self.verify_default_store(default_ms)
def test_default_store_nested(self):
"""
Test the default store context manager, nested within one another
"""
# initialize the mixed modulestore
self._initialize_mixed(mappings={})
with self.store.default_store(ModuleStoreEnum.Type.mongo):
self.verify_default_store(ModuleStoreEnum.Type.mongo)
with self.store.default_store(ModuleStoreEnum.Type.split):
self.verify_default_store(ModuleStoreEnum.Type.split)
self.verify_default_store(ModuleStoreEnum.Type.mongo)
def test_default_store_fake(self):
"""
Test the default store context manager, asking for a fake store
"""
# initialize the mixed modulestore
self._initialize_mixed(mappings={})
fake_store = "fake"
with self.assertRaisesRegex(Exception, "Cannot find store of type {}".format(fake_store)):
with self.store.default_store(fake_store):
pass # pragma: no cover
def save_asset(self, asset_key):
"""
Load and save the given file. (taken from test_contentstore)
"""
with open("{}/static/{}".format(DATA_DIR, asset_key.block_id), "rb") as f:
content = StaticContent(
asset_key, "Funky Pix", mimetypes.guess_type(asset_key.block_id)[0], f.read(),
)
self.store.contentstore.save(content)
@ddt.data(
[ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.mongo],
[ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split],
[ModuleStoreEnum.Type.split, ModuleStoreEnum.Type.split]
)
@ddt.unpack
def test_clone_course(self, source_modulestore, destination_modulestore):
"""
Test clone course
"""
with MongoContentstoreBuilder().build() as contentstore:
# initialize the mixed modulestore
self._initialize_mixed(contentstore=contentstore, mappings={})
with self.store.default_store(source_modulestore):
source_course_key = self.store.make_course_key("org.source", "course.source", "run.source")
self._create_course(source_course_key)
self.save_asset(source_course_key.make_asset_key('asset', 'picture1.jpg'))
with self.store.default_store(destination_modulestore):
dest_course_id = self.store.make_course_key("org.other", "course.other", "run.other")
self.store.clone_course(source_course_key, dest_course_id, self.user_id)
# pylint: disable=protected-access
source_store = self.store._get_modulestore_by_type(source_modulestore)
dest_store = self.store._get_modulestore_by_type(destination_modulestore)
self.assertCoursesEqual(source_store, source_course_key, dest_store, dest_course_id)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_bulk_operations_signal_firing(self, default):
""" Signals should be fired right before bulk_operations() exits. """
with MongoContentstoreBuilder().build() as contentstore:
signal_handler = Mock(name='signal_handler')
self.store = MixedModuleStore(
contentstore=contentstore,
create_modulestore_instance=create_modulestore_instance,
mappings={},
signal_handler=signal_handler,
**self.OPTIONS
)
self.addCleanup(self.store.close_all_connections)
with self.store.default_store(default):
signal_handler.send.assert_not_called()
# Course creation and publication should fire the signal
course = self.store.create_course('org_x', 'course_y', 'run_z', self.user_id)
signal_handler.send.assert_called_with('course_published', course_key=course.id)
signal_handler.reset_mock()
course_key = course.id
def _clear_bulk_ops_record(course_key): # pylint: disable=unused-argument
"""
Check if the signal has been fired.
The course_published signal fires before the _clear_bulk_ops_record.
"""
signal_handler.send.assert_called_with('course_published', course_key=course.id)
with patch.object(
self.store.thread_cache.default_store, '_clear_bulk_ops_record', wraps=_clear_bulk_ops_record
) as mock_clear_bulk_ops_record:
with self.store.bulk_operations(course_key):
categories = DIRECT_ONLY_CATEGORIES
for block_type in categories:
self.store.create_item(self.user_id, course_key, block_type)
signal_handler.send.assert_not_called()
assert mock_clear_bulk_ops_record.call_count == 1
signal_handler.send.assert_called_with('course_published', course_key=course.id)
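# Illustrative sketch (not part of the original suite): inside bulk_operations the
# course_published signal is deferred and fires once when the outermost block exits.
# `course_key` and `signal_handler` stand for the objects built in the test above.
def _example_deferred_publish_signal(self, course_key, signal_handler):
    """Hypothetical helper demonstrating the deferred-signal pattern."""
    with self.store.bulk_operations(course_key):
        self.store.create_item(self.user_id, course_key, 'chapter')
        signal_handler.send.assert_not_called()  # still buffered inside the bulk operation
    signal_handler.send.assert_called_with('course_published', course_key=course_key)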
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_course_publish_signal_direct_firing(self, default):
with MongoContentstoreBuilder().build() as contentstore:
signal_handler = Mock(name='signal_handler')
self.store = MixedModuleStore(
contentstore=contentstore,
create_modulestore_instance=create_modulestore_instance,
mappings={},
signal_handler=signal_handler,
**self.OPTIONS
)
self.addCleanup(self.store.close_all_connections)
with self.store.default_store(default):
assert self.store.thread_cache.default_store.signal_handler is not None
signal_handler.send.assert_not_called()
# Course creation and publication should fire the signal
course = self.store.create_course('org_x', 'course_y', 'run_z', self.user_id)
signal_handler.send.assert_called_with('course_published', course_key=course.id)
course_key = course.id
# Test non-draftable block types. The block should be published with every change.
categories = DIRECT_ONLY_CATEGORIES
for block_type in categories:
log.debug('Testing with block type %s', block_type)
signal_handler.reset_mock()
block = self.store.create_item(self.user_id, course_key, block_type)
signal_handler.send.assert_called_with('course_published', course_key=course.id)
signal_handler.reset_mock()
block.display_name = block_type
self.store.update_item(block, self.user_id)
signal_handler.send.assert_called_with('course_published', course_key=course.id)
signal_handler.reset_mock()
self.store.publish(block.location, self.user_id)
signal_handler.send.assert_called_with('course_published', course_key=course.id)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_course_publish_signal_rerun_firing(self, default):
with MongoContentstoreBuilder().build() as contentstore:
signal_handler = Mock(name='signal_handler')
self.store = MixedModuleStore(
contentstore=contentstore,
create_modulestore_instance=create_modulestore_instance,
mappings={},
signal_handler=signal_handler,
**self.OPTIONS
)
self.addCleanup(self.store.close_all_connections)
with self.store.default_store(default):
assert self.store.thread_cache.default_store.signal_handler is not None
signal_handler.send.assert_not_called()
# Course creation and publication should fire the signal
course = self.store.create_course('org_x', 'course_y', 'run_z', self.user_id)
signal_handler.send.assert_called_with('course_published', course_key=course.id)
course_key = course.id
# Test course re-runs
signal_handler.reset_mock()
dest_course_id = self.store.make_course_key("org.other", "course.other", "run.other")
self.store.clone_course(course_key, dest_course_id, self.user_id)
signal_handler.send.assert_called_with('course_published', course_key=dest_course_id)
@patch('xmodule.tabs.CourseTab.from_json', side_effect=mock_tab_from_json)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_course_publish_signal_import_firing(self, default, _from_json):
with MongoContentstoreBuilder().build() as contentstore:
signal_handler = Mock(name='signal_handler')
self.store = MixedModuleStore(
contentstore=contentstore,
create_modulestore_instance=create_modulestore_instance,
mappings={},
signal_handler=signal_handler,
**self.OPTIONS
)
self.addCleanup(self.store.close_all_connections)
with self.store.default_store(default):
assert self.store.thread_cache.default_store.signal_handler is not None
signal_handler.send.assert_not_called()
# Test course imports
# Note: The signal is fired once when the course is created and
# a second time after the actual data import.
import_course_from_xml(
self.store, self.user_id, DATA_DIR, ['toy'], load_error_modules=False,
static_content_store=contentstore,
create_if_not_present=True,
)
signal_handler.send.assert_has_calls([
call('pre_publish', course_key=self.store.make_course_key('edX', 'toy', '2012_Fall')),
call('course_published', course_key=self.store.make_course_key('edX', 'toy', '2012_Fall')),
call('pre_publish', course_key=self.store.make_course_key('edX', 'toy', '2012_Fall')),
call('course_published', course_key=self.store.make_course_key('edX', 'toy', '2012_Fall')),
])
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_course_publish_signal_publish_firing(self, default):
with MongoContentstoreBuilder().build() as contentstore:
signal_handler = Mock(name='signal_handler')
self.store = MixedModuleStore(
contentstore=contentstore,
create_modulestore_instance=create_modulestore_instance,
mappings={},
signal_handler=signal_handler,
**self.OPTIONS
)
self.addCleanup(self.store.close_all_connections)
with self.store.default_store(default):
assert self.store.thread_cache.default_store.signal_handler is not None
signal_handler.send.assert_not_called()
# Course creation and publication should fire the signal
course = self.store.create_course('org_x', 'course_y', 'run_z', self.user_id)
signal_handler.send.assert_called_with('course_published', course_key=course.id)
# Test a draftable block type, which needs to be explicitly published, and nest it within the
# normal structure - this is important because some implementors change the parent when adding a
# non-published child; if parent is in DIRECT_ONLY_CATEGORIES then this should not fire the event
signal_handler.reset_mock()
section = self.store.create_item(self.user_id, course.id, 'chapter')
signal_handler.send.assert_called_with('course_published', course_key=course.id)
signal_handler.reset_mock()
subsection = self.store.create_child(self.user_id, section.location, 'sequential')
signal_handler.send.assert_called_with('course_published', course_key=course.id)
# 'units' and 'blocks' are draftable types
signal_handler.reset_mock()
unit = self.store.create_child(self.user_id, subsection.location, 'vertical')
signal_handler.send.assert_not_called()
block = self.store.create_child(self.user_id, unit.location, 'problem')
signal_handler.send.assert_not_called()
self.store.update_item(block, self.user_id)
signal_handler.send.assert_not_called()
signal_handler.reset_mock()
self.store.publish(unit.location, self.user_id)
signal_handler.send.assert_called_with('course_published', course_key=course.id)
signal_handler.reset_mock()
self.store.unpublish(unit.location, self.user_id)
signal_handler.send.assert_called_with('course_published', course_key=course.id)
signal_handler.reset_mock()
self.store.delete_item(unit.location, self.user_id)
signal_handler.send.assert_called_with('course_published', course_key=course.id)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_bulk_course_publish_signal_direct_firing(self, default):
with MongoContentstoreBuilder().build() as contentstore:
signal_handler = Mock(name='signal_handler')
self.store = MixedModuleStore(
contentstore=contentstore,
create_modulestore_instance=create_modulestore_instance,
mappings={},
signal_handler=signal_handler,
**self.OPTIONS
)
self.addCleanup(self.store.close_all_connections)
with self.store.default_store(default):
assert self.store.thread_cache.default_store.signal_handler is not None
signal_handler.send.assert_not_called()
# Course creation and publication should fire the signal
course = self.store.create_course('org_x', 'course_y', 'run_z', self.user_id)
signal_handler.send.assert_called_with('course_published', course_key=course.id)
course_key = course.id
# Test non-draftable block types. No signals should be received until the bulk operation completes.
signal_handler.reset_mock()
with self.store.bulk_operations(course_key):
categories = DIRECT_ONLY_CATEGORIES
for block_type in categories:
log.debug('Testing with block type %s', block_type)
block = self.store.create_item(self.user_id, course_key, block_type)
signal_handler.send.assert_not_called()
block.display_name = block_type
self.store.update_item(block, self.user_id)
signal_handler.send.assert_not_called()
self.store.publish(block.location, self.user_id)
signal_handler.send.assert_not_called()
signal_handler.send.assert_called_with('course_published', course_key=course.id)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_bulk_course_publish_signal_publish_firing(self, default):
with MongoContentstoreBuilder().build() as contentstore:
signal_handler = Mock(name='signal_handler')
self.store = MixedModuleStore(
contentstore=contentstore,
create_modulestore_instance=create_modulestore_instance,
mappings={},
signal_handler=signal_handler,
**self.OPTIONS
)
self.addCleanup(self.store.close_all_connections)
with self.store.default_store(default):
assert self.store.thread_cache.default_store.signal_handler is not None
signal_handler.send.assert_not_called()
# Course creation and publication should fire the signal
course = self.store.create_course('org_x', 'course_y', 'run_z', self.user_id)
signal_handler.send.assert_called_with('course_published', course_key=course.id)
course_key = course.id
# Test a draftable block type, which needs to be explicitly published, and nest it within the
# normal structure - this is important because some implementors change the parent when adding a
# non-published child; if parent is in DIRECT_ONLY_CATEGORIES then this should not fire the event
signal_handler.reset_mock()
with self.store.bulk_operations(course_key):
section = self.store.create_item(self.user_id, course_key, 'chapter')
signal_handler.send.assert_not_called()
subsection = self.store.create_child(self.user_id, section.location, 'sequential')
signal_handler.send.assert_not_called()
# 'units' and 'blocks' are draftable types
unit = self.store.create_child(self.user_id, subsection.location, 'vertical')
signal_handler.send.assert_not_called()
block = self.store.create_child(self.user_id, unit.location, 'problem')
signal_handler.send.assert_not_called()
self.store.update_item(block, self.user_id)
signal_handler.send.assert_not_called()
self.store.publish(unit.location, self.user_id)
signal_handler.send.assert_not_called()
signal_handler.send.assert_called_with('course_published', course_key=course.id)
# Test editing draftable block type without publish
signal_handler.reset_mock()
with self.store.bulk_operations(course_key):
unit = self.store.create_child(self.user_id, subsection.location, 'vertical')
signal_handler.send.assert_not_called()
block = self.store.create_child(self.user_id, unit.location, 'problem')
signal_handler.send.assert_not_called()
self.store.publish(unit.location, self.user_id)
signal_handler.send.assert_not_called()
signal_handler.send.assert_called_with('course_published', course_key=course.id)
signal_handler.reset_mock()
with self.store.bulk_operations(course_key):
signal_handler.send.assert_not_called()
unit.display_name = "Change this unit"
self.store.update_item(unit, self.user_id)
signal_handler.send.assert_not_called()
signal_handler.send.assert_not_called()
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_course_deleted_signal(self, default):
with MongoContentstoreBuilder().build() as contentstore:
signal_handler = Mock(name='signal_handler')
self.store = MixedModuleStore(
contentstore=contentstore,
create_modulestore_instance=create_modulestore_instance,
mappings={},
signal_handler=signal_handler,
**self.OPTIONS
)
self.addCleanup(self.store.close_all_connections)
with self.store.default_store(default):
assert self.store.thread_cache.default_store.signal_handler is not None
signal_handler.send.assert_not_called()
# Create a course
course = self.store.create_course('org_x', 'course_y', 'run_z', self.user_id)
course_key = course.id
# Delete the course
course = self.store.delete_course(course_key, self.user_id)
# Verify that the signal was emitted
signal_handler.send.assert_called_with('course_deleted', course_key=course_key)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_delete_published_item_orphans(self, default_store):
"""
Tests that deleting a published item doesn't create any orphans in the course
"""
self.initdb(default_store)
course_locator = self.course.id
chapter = self.store.create_child(
self.user_id, self.course.location, 'chapter', block_id='section_one'
)
sequential = self.store.create_child(
self.user_id, chapter.location, 'sequential', block_id='subsection_one'
)
vertical = self.store.create_child(
self.user_id, sequential.location, 'vertical', block_id='moon_unit'
)
problem = self.store.create_child(
self.user_id, vertical.location, 'problem', block_id='problem'
)
self.store.publish(chapter.location, self.user_id)
# Verify that there are no changes
assert not self._has_changes(chapter.location)
assert not self._has_changes(sequential.location)
assert not self._has_changes(vertical.location)
assert not self._has_changes(problem.location)
# No orphans in course
course_orphans = self.store.get_orphans(course_locator)
assert len(course_orphans) == 0
self.store.delete_item(vertical.location, self.user_id)
# No orphans in course after delete, except
# in old mongo, which still creates orphans
course_orphans = self.store.get_orphans(course_locator)
if default_store == ModuleStoreEnum.Type.mongo:
assert len(course_orphans) == 1
else:
assert len(course_orphans) == 0
course_locator_publish = course_locator.for_branch(ModuleStoreEnum.BranchName.published)
# No published orphans after delete, except
# in old mongo, which still creates orphans
course_publish_orphans = self.store.get_orphans(course_locator_publish)
if default_store == ModuleStoreEnum.Type.mongo:
assert len(course_publish_orphans) == 1
else:
assert len(course_publish_orphans) == 0
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_delete_draft_item_orphans(self, default_store):
"""
Tests that deleting a draft item creates no orphans in the course
"""
self.initdb(default_store)
course_locator = self.course.id
chapter = self.store.create_child(
self.user_id, self.course.location, 'chapter', block_id='section_one'
)
sequential = self.store.create_child(
self.user_id, chapter.location, 'sequential', block_id='subsection_one'
)
vertical = self.store.create_child(
self.user_id, sequential.location, 'vertical', block_id='moon_unit'
)
problem = self.store.create_child(
self.user_id, vertical.location, 'problem', block_id='problem'
)
self.store.publish(chapter.location, self.user_id)
# Verify that there are no changes
assert not self._has_changes(chapter.location)
assert not self._has_changes(sequential.location)
assert not self._has_changes(vertical.location)
assert not self._has_changes(problem.location)
# No orphans in course
course_orphans = self.store.get_orphans(course_locator)
assert len(course_orphans) == 0
problem.display_name = 'changed'
problem = self.store.update_item(problem, self.user_id)
assert self._has_changes(vertical.location)
assert self._has_changes(problem.location)
self.store.delete_item(vertical.location, self.user_id)
# No orphans in course after delete, except
# in old mongo, which still creates them
course_orphans = self.store.get_orphans(course_locator)
if default_store == ModuleStoreEnum.Type.mongo:
assert len(course_orphans) == 1
else:
assert len(course_orphans) == 0
course_locator_publish = course_locator.for_branch(ModuleStoreEnum.BranchName.published)
# No published orphans after delete, except
# in old mongo, which still creates them
course_publish_orphans = self.store.get_orphans(course_locator_publish)
if default_store == ModuleStoreEnum.Type.mongo:
assert len(course_publish_orphans) == 1
else:
assert len(course_publish_orphans) == 0
@ddt.ddt
@attr('mongo')
class TestPublishOverExportImport(CommonMixedModuleStoreSetup):
"""
Tests which publish (or don't publish) items - and then export/import the course,
checking the state of the imported items.
"""
def setUp(self):
"""
Set up the database for testing
"""
super(TestPublishOverExportImport, self).setUp() # lint-amnesty, pylint: disable=super-with-arguments
self.user_id = ModuleStoreEnum.UserID.test
self.export_dir = mkdtemp()
self.addCleanup(rmtree, self.export_dir, ignore_errors=True)
def _export_import_course_round_trip(self, modulestore, contentstore, source_course_key, export_dir):
"""
Export the course from a modulestore and then re-import the course.
"""
top_level_export_dir = 'exported_source_course'
export_course_to_xml(
modulestore,
contentstore,
source_course_key,
export_dir,
top_level_export_dir,
)
import_course_from_xml(
modulestore,
'test_user',
export_dir,
source_dirs=[top_level_export_dir],
static_content_store=contentstore,
target_id=source_course_key,
create_if_not_present=True,
raise_on_failure=True,
)
@contextmanager
def _build_store(self, default_ms):
"""
Perform the modulestore-building and course creation steps for a mixed modulestore test.
"""
with MongoContentstoreBuilder().build() as contentstore:
# initialize the mixed modulestore
self._initialize_mixed(contentstore=contentstore, mappings={})
with self.store.default_store(default_ms):
source_course_key = self.store.make_course_key("org.source", "course.source", "run.source")
self._create_course(source_course_key)
yield contentstore, source_course_key
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_draft_has_changes_before_export_and_after_import(self, default_ms):
"""
Tests that an unpublished unit keeps its draft changes across export and re-import.
"""
with self._build_store(default_ms) as (contentstore, source_course_key):
# Create a dummy component to test against and don't publish it.
draft_xblock = self.store.create_item(
self.user_id,
self.course.id,
'vertical',
block_id='test_vertical'
)
# Not yet published, so changes are present
assert self._has_changes(draft_xblock.location)
self._export_import_course_round_trip(
self.store, contentstore, source_course_key, self.export_dir
)
# Verify that the imported block still is a draft, i.e. has changes.
assert self._has_changes(draft_xblock.location)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_published_has_changes_before_export_and_after_import(self, default_ms):
"""
Tests that a published unit remains published across export and re-import.
"""
with self._build_store(default_ms) as (contentstore, source_course_key):
# Create a dummy component to test against and publish it.
published_xblock = self.store.create_item(
self.user_id,
self.course.id,
'vertical',
block_id='test_vertical'
)
self.store.publish(published_xblock.location, self.user_id)
# Retrieve the published block and make sure it's published.
assert not self._has_changes(published_xblock.location)
self._export_import_course_round_trip(
self.store, contentstore, source_course_key, self.export_dir
)
# Get the published xblock from the imported course.
# Verify that it still is published, i.e. has no changes.
assert not self._has_changes(published_xblock.location)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_changed_published_has_changes_before_export_and_after_import(self, default_ms):
"""
Tests that a published unit with an unpublished draft remains published across export and re-import.
"""
with self._build_store(default_ms) as (contentstore, source_course_key):
# Create a dummy component to test against and publish it.
published_xblock = self.store.create_item(
self.user_id,
self.course.id,
'vertical',
block_id='test_vertical'
)
self.store.publish(published_xblock.location, self.user_id)
# Retrieve the published block and make sure it's published.
assert not self._has_changes(published_xblock.location)
updated_display_name = 'Changed Display Name'
component = self.store.get_item(published_xblock.location)
component.display_name = updated_display_name
component = self.store.update_item(component, self.user_id)
assert self.store.has_changes(component)
self._export_import_course_round_trip(
self.store, contentstore, source_course_key, self.export_dir
)
# Get the published xblock from the imported course.
# Verify that the published block still has a draft block, i.e. has changes.
assert self._has_changes(published_xblock.location)
# Verify that the changes in the draft vertical still exist.
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, source_course_key):
component = self.store.get_item(published_xblock.location)
assert component.display_name == updated_display_name
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_seq_with_unpublished_vertical_has_changes_before_export_and_after_import(self, default_ms):
"""
Tests that a published chapter and sequential with an unpublished vertical child still report changes across export and re-import.
"""
with self._build_store(default_ms) as (contentstore, source_course_key):
# create chapter
chapter = self.store.create_child(
self.user_id, self.course.location, 'chapter', block_id='section_one'
)
self.store.publish(chapter.location, self.user_id)
# create sequential
sequential = self.store.create_child(
self.user_id, chapter.location, 'sequential', block_id='subsection_one'
)
self.store.publish(sequential.location, self.user_id)
# create vertical - don't publish it!
vertical = self.store.create_child(
self.user_id, sequential.location, 'vertical', block_id='moon_unit'
)
# Retrieve the published block and make sure it's published.
# Chapter is published - but the changes in vertical below means it "has_changes".
assert self._has_changes(chapter.location)
# Sequential is published - but the changes in vertical below means it "has_changes".
assert self._has_changes(sequential.location)
# Vertical is unpublished - so it "has_changes".
assert self._has_changes(vertical.location)
self._export_import_course_round_trip(
self.store, contentstore, source_course_key, self.export_dir
)
# Get the published xblock from the imported course.
# Verify that the published block still has a draft block, i.e. has changes.
assert self._has_changes(chapter.location)
assert self._has_changes(sequential.location)
assert self._has_changes(vertical.location)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_vertical_with_draft_and_published_unit_has_changes_before_export_and_after_import(self, default_ms):
"""
Tests that a published vertical whose unit has unpublished draft changes keeps those changes across export and re-import.
"""
with self._build_store(default_ms) as (contentstore, source_course_key):
# create chapter
chapter = self.store.create_child(
self.user_id, self.course.location, 'chapter', block_id='section_one'
)
self.store.publish(chapter.location, self.user_id)
# create sequential
sequential = self.store.create_child(
self.user_id, chapter.location, 'sequential', block_id='subsection_one'
)
self.store.publish(sequential.location, self.user_id)
# create vertical
vertical = self.store.create_child(
self.user_id, sequential.location, 'vertical', block_id='moon_unit'
)
# Vertical has changes until it is actually published.
assert self._has_changes(vertical.location)
self.store.publish(vertical.location, self.user_id)
assert not self._has_changes(vertical.location)
# create unit
unit = self.store.create_child(
self.user_id, vertical.location, 'html', block_id='html_unit'
)
# Vertical has a new child -and- unit is unpublished. So both have changes.
assert self._has_changes(vertical.location)
assert self._has_changes(unit.location)
# Publishing the vertical also publishes its unit child.
self.store.publish(vertical.location, self.user_id)
assert not self._has_changes(vertical.location)
assert not self._has_changes(unit.location)
# Publishing the unit separately has no effect on whether it has changes - it's already published.
self.store.publish(unit.location, self.user_id)
assert not self._has_changes(vertical.location)
assert not self._has_changes(unit.location)
# Retrieve the published block and make sure it's published.
self.store.publish(chapter.location, self.user_id)
assert not self._has_changes(chapter.location)
assert not self._has_changes(sequential.location)
assert not self._has_changes(vertical.location)
assert not self._has_changes(unit.location)
# Now make changes to the unit - but don't publish them.
component = self.store.get_item(unit.location)
updated_display_name = 'Changed Display Name'
component.display_name = updated_display_name
component = self.store.update_item(component, self.user_id)
assert self._has_changes(component.location)
# Export the course - then import the course export.
self._export_import_course_round_trip(
self.store, contentstore, source_course_key, self.export_dir
)
# Get the published xblock from the imported course.
# Verify that the published block still has a draft block, i.e. has changes.
assert self._has_changes(chapter.location)
assert self._has_changes(sequential.location)
assert self._has_changes(vertical.location)
assert self._has_changes(unit.location)
# Verify that the changes in the draft unit still exist.
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, source_course_key):
component = self.store.get_item(unit.location)
assert component.display_name == updated_display_name
# Verify that the draft changes don't exist in the published unit - it still uses the default name.
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only, source_course_key):
component = self.store.get_item(unit.location)
assert component.display_name == 'Text'
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_vertical_with_published_unit_remains_published_before_export_and_after_import(self, default_ms):
"""
Tests that a published unit remains published across export and re-import.
"""
with self._build_store(default_ms) as (contentstore, source_course_key):
# create chapter
chapter = self.store.create_child(
self.user_id, self.course.location, 'chapter', block_id='section_one'
)
self.store.publish(chapter.location, self.user_id)
# create sequential
sequential = self.store.create_child(
self.user_id, chapter.location, 'sequential', block_id='subsection_one'
)
self.store.publish(sequential.location, self.user_id)
# create vertical
vertical = self.store.create_child(
self.user_id, sequential.location, 'vertical', block_id='moon_unit'
)
# Vertical has changes until it is actually published.
assert self._has_changes(vertical.location)
self.store.publish(vertical.location, self.user_id)
assert not self._has_changes(vertical.location)
# create unit
unit = self.store.create_child(
self.user_id, vertical.location, 'html', block_id='html_unit'
)
# Now make changes to the unit.
updated_display_name = 'Changed Display Name'
unit.display_name = updated_display_name
unit = self.store.update_item(unit, self.user_id)
assert self._has_changes(unit.location)
# Publishing the vertical also publishes its unit child.
self.store.publish(vertical.location, self.user_id)
assert not self._has_changes(vertical.location)
assert not self._has_changes(unit.location)
# Export the course - then import the course export.
self._export_import_course_round_trip(
self.store, contentstore, source_course_key, self.export_dir
)
# Get the published xblock from the imported course.
# Verify that the published block still has a draft block, i.e. has changes.
assert not self._has_changes(chapter.location)
assert not self._has_changes(sequential.location)
assert not self._has_changes(vertical.location)
assert not self._has_changes(unit.location)
# Verify that the published changes exist in the published unit.
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only, source_course_key):
component = self.store.get_item(unit.location)
assert component.display_name == updated_display_name
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
@XBlockAside.register_temp_plugin(AsideTestType, 'test_aside')
@patch('xmodule.modulestore.split_mongo.caching_descriptor_system.CachingDescriptorSystem.applicable_aside_types',
lambda self, block: ['test_aside'])
def test_aside_crud(self, default_store):
"""
Check that asides can be imported from XML and that the modulestore handles aside CRUD operations
"""
if default_store == ModuleStoreEnum.Type.mongo:
pytest.skip("asides not supported in old mongo")
with MongoContentstoreBuilder().build() as contentstore:
self.store = MixedModuleStore(
contentstore=contentstore,
create_modulestore_instance=create_modulestore_instance,
mappings={},
**self.OPTIONS
)
self.addCleanup(self.store.close_all_connections)
with self.store.default_store(default_store):
dest_course_key = self.store.make_course_key('edX', "aside_test", "2012_Fall")
courses = import_course_from_xml(
self.store, self.user_id, DATA_DIR, ['aside'],
load_error_modules=False,
static_content_store=contentstore,
target_id=dest_course_key,
create_if_not_present=True,
)
# check that the imported blocks have the right asides and values
def check_block(block):
"""
Check whether block has the expected aside w/ its fields and then recurse to the block's children
"""
asides = block.runtime.get_asides(block)
assert len(asides) == 1, 'Found {} asides but expected only test_aside'.format(asides)
assert isinstance(asides[0], AsideTestType)
category = block.scope_ids.block_type
assert asides[0].data_field == '{} aside data'.format(category)
assert asides[0].content == '{} Aside'.format(category.capitalize())
for child in block.get_children():
check_block(child)
check_block(courses[0])
# create a new block and ensure its aside magically appears with the right fields
new_chapter = self.store.create_child(self.user_id, courses[0].location, 'chapter', 'new_chapter')
asides = new_chapter.runtime.get_asides(new_chapter)
assert len(asides) == 1, 'Found {} asides but expected only test_aside'.format(asides)
chapter_aside = asides[0]
assert isinstance(chapter_aside, AsideTestType)
assert not chapter_aside.fields['data_field'].is_set_on(chapter_aside), \
f"data_field says it's assigned to {chapter_aside.data_field}"
assert not chapter_aside.fields['content'].is_set_on(chapter_aside), \
f"content says it's assigned to {chapter_aside.content}"
# now update the values
chapter_aside.data_field = 'new value'
self.store.update_item(new_chapter, self.user_id, asides=[chapter_aside])
new_chapter = self.store.get_item(new_chapter.location)
chapter_aside = new_chapter.runtime.get_asides(new_chapter)[0]
assert 'new value' == chapter_aside.data_field
# update the values the second time
chapter_aside.data_field = 'another one value'
self.store.update_item(new_chapter, self.user_id, asides=[chapter_aside])
new_chapter2 = self.store.get_item(new_chapter.location)
chapter_aside2 = new_chapter2.runtime.get_asides(new_chapter2)[0]
assert 'another one value' == chapter_aside2.data_field
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
@XBlockAside.register_temp_plugin(AsideTestType, 'test_aside')
@patch('xmodule.modulestore.split_mongo.caching_descriptor_system.CachingDescriptorSystem.applicable_aside_types',
lambda self, block: ['test_aside'])
def test_export_course_with_asides(self, default_store):
if default_store == ModuleStoreEnum.Type.mongo:
pytest.skip("asides not supported in old mongo")
with MongoContentstoreBuilder().build() as contentstore:
self.store = MixedModuleStore(
contentstore=contentstore,
create_modulestore_instance=create_modulestore_instance,
mappings={},
**self.OPTIONS
)
self.addCleanup(self.store.close_all_connections)
with self.store.default_store(default_store):
dest_course_key = self.store.make_course_key('edX', "aside_test", "2012_Fall")
dest_course_key2 = self.store.make_course_key('edX', "aside_test_2", "2012_Fall_2")
courses = import_course_from_xml(
self.store,
self.user_id,
DATA_DIR,
['aside'],
load_error_modules=False,
static_content_store=contentstore,
target_id=dest_course_key,
create_if_not_present=True,
)
def update_block_aside(block):
"""
Check whether block has the expected aside w/ its fields and then recurse to the block's children
"""
asides = block.runtime.get_asides(block)
asides[0].data_field = ''.join(['Exported data_field ', asides[0].data_field])
asides[0].content = ''.join(['Exported content ', asides[0].content])
self.store.update_item(block, self.user_id, asides=[asides[0]])
for child in block.get_children():
update_block_aside(child)
update_block_aside(courses[0])
# export course to xml
top_level_export_dir = 'exported_source_course_with_asides'
export_course_to_xml(
self.store,
contentstore,
dest_course_key,
self.export_dir,
top_level_export_dir,
)
# and restore the new one from the exported xml
courses2 = import_course_from_xml(
self.store,
self.user_id,
self.export_dir,
source_dirs=[top_level_export_dir],
static_content_store=contentstore,
target_id=dest_course_key2,
create_if_not_present=True,
raise_on_failure=True,
)
assert 1 == len(courses2)
# check that the imported blocks have the right asides and values
def check_block(block):
"""
Check whether block has the expected aside w/ its fields and then recurse to the block's children
"""
asides = block.runtime.get_asides(block)
assert len(asides) == 1, 'Found {} asides but expected only test_aside'.format(asides)
assert isinstance(asides[0], AsideTestType)
category = block.scope_ids.block_type
assert asides[0].data_field == 'Exported data_field {} aside data'.format(category)
assert asides[0].content == 'Exported content {} Aside'.format(category.capitalize())
for child in block.get_children():
check_block(child)
check_block(courses2[0])
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
@XBlockAside.register_temp_plugin(AsideTestType, 'test_aside')
@patch('xmodule.modulestore.split_mongo.caching_descriptor_system.CachingDescriptorSystem.applicable_aside_types',
lambda self, block: ['test_aside'])
def test_export_course_after_creating_new_items_with_asides(self, default_store): # pylint: disable=too-many-statements
if default_store == ModuleStoreEnum.Type.mongo:
pytest.skip("asides not supported in old mongo")
with MongoContentstoreBuilder().build() as contentstore:
self.store = MixedModuleStore(
contentstore=contentstore,
create_modulestore_instance=create_modulestore_instance,
mappings={},
**self.OPTIONS
)
self.addCleanup(self.store.close_all_connections)
with self.store.default_store(default_store):
dest_course_key = self.store.make_course_key('edX', "aside_test", "2012_Fall")
dest_course_key2 = self.store.make_course_key('edX', "aside_test_2", "2012_Fall_2")
courses = import_course_from_xml(
self.store,
self.user_id,
DATA_DIR,
['aside'],
load_error_modules=False,
static_content_store=contentstore,
target_id=dest_course_key,
create_if_not_present=True,
)
# create new chapter and modify aside for it
new_chapter_display_name = 'New Chapter'
new_chapter = self.store.create_child(self.user_id, courses[0].location, 'chapter', 'new_chapter')
new_chapter.display_name = new_chapter_display_name
asides = new_chapter.runtime.get_asides(new_chapter)
assert len(asides) == 1, 'Found {} asides but expected only test_aside'.format(asides)
chapter_aside = asides[0]
assert isinstance(chapter_aside, AsideTestType)
chapter_aside.data_field = 'new value'
self.store.update_item(new_chapter, self.user_id, asides=[chapter_aside])
# create new problem and modify aside for it
sequence = courses[0].get_children()[0].get_children()[0]
new_problem_display_name = 'New Problem'
new_problem = self.store.create_child(self.user_id, sequence.location, 'problem', 'new_problem')
new_problem.display_name = new_problem_display_name
asides = new_problem.runtime.get_asides(new_problem)
assert len(asides) == 1, 'Found {} asides but expected only test_aside'.format(asides)
problem_aside = asides[0]
assert isinstance(problem_aside, AsideTestType)
problem_aside.data_field = 'new problem value'
problem_aside.content = 'new content value'
self.store.update_item(new_problem, self.user_id, asides=[problem_aside])
# export course to xml
top_level_export_dir = 'exported_source_course_with_asides'
export_course_to_xml(
self.store,
contentstore,
dest_course_key,
self.export_dir,
top_level_export_dir,
)
# and restore the new one from the exported xml
courses2 = import_course_from_xml(
self.store,
self.user_id,
self.export_dir,
source_dirs=[top_level_export_dir],
static_content_store=contentstore,
target_id=dest_course_key2,
create_if_not_present=True,
raise_on_failure=True,
)
assert 1 == len(courses2)
# check that aside for the new chapter was exported/imported properly
chapters = courses2[0].get_children()
assert 2 == len(chapters)
assert new_chapter_display_name in [item.display_name for item in chapters]
found = False
for child in chapters:
if new_chapter.display_name == child.display_name:
found = True
asides = child.runtime.get_asides(child)
assert len(asides) == 1
child_aside = asides[0]
assert isinstance(child_aside, AsideTestType)
assert child_aside.data_field == 'new value'
break
assert found, 'new_chapter not found'
# check that aside for the new problem was exported/imported properly
sequence_children = courses2[0].get_children()[0].get_children()[0].get_children()
assert 2 == len(sequence_children)
assert new_problem_display_name in [item.display_name for item in sequence_children]
found = False
for child in sequence_children:
if new_problem.display_name == child.display_name:
found = True
asides = child.runtime.get_asides(child)
assert len(asides) == 1
child_aside = asides[0]
assert isinstance(child_aside, AsideTestType)
assert child_aside.data_field == 'new problem value'
assert child_aside.content == 'new content value'
break
assert found, 'new_problem not found'
@ddt.ddt
@attr('mongo')
class TestAsidesWithMixedModuleStore(CommonMixedModuleStoreSetup):
"""
Tests of the MixedModulestore interface methods with XBlock asides.
"""
def setUp(self):
"""
Setup environment for testing
"""
super(TestAsidesWithMixedModuleStore, self).setUp() # lint-amnesty, pylint: disable=super-with-arguments
key_store = DictKeyValueStore()
field_data = KvsFieldData(key_store)
self.runtime = TestRuntime(services={'field-data': field_data})
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
@XBlockAside.register_temp_plugin(AsideFoo, 'test_aside1')
@XBlockAside.register_temp_plugin(AsideBar, 'test_aside2')
@patch('xmodule.modulestore.split_mongo.caching_descriptor_system.CachingDescriptorSystem.applicable_aside_types',
lambda self, block: ['test_aside1', 'test_aside2'])
def test_get_and_update_asides(self, default_store):
"""
Tests that connected asides can be stored, retrieved and updated along with the connected course items
"""
if default_store == ModuleStoreEnum.Type.mongo:
pytest.skip("asides not supported in old mongo")
self.initdb(default_store)
block_type1 = 'test_aside1'
def_id = self.runtime.id_generator.create_definition(block_type1)
usage_id = self.runtime.id_generator.create_usage(def_id)
# the first aside item
aside1 = AsideFoo(scope_ids=ScopeIds('user', block_type1, def_id, usage_id), runtime=self.runtime)
aside1.field11 = 'new_value11'
aside1.field12 = 'new_value12'
block_type2 = 'test_aside2'
def_id = self.runtime.id_generator.create_definition(block_type1)
usage_id = self.runtime.id_generator.create_usage(def_id)
# the second aside item
aside2 = AsideBar(scope_ids=ScopeIds('user', block_type2, def_id, usage_id), runtime=self.runtime)
aside2.field21 = 'new_value21'
# create new item with two asides
published_xblock = self.store.create_item(
self.user_id,
self.course.id,
'vertical',
block_id='test_vertical',
asides=[aside1, aside2]
)
def _check_asides(asides, field11, field12, field21, field22):
""" Helper function to check asides """
assert len(asides) == 2
assert {type(asides[0]), type(asides[1])} == {AsideFoo, AsideBar}
assert asides[0].field11 == field11
assert asides[0].field12 == field12
assert asides[1].field21 == field21
assert asides[1].field22 == field22
# get saved item and check asides
component = self.store.get_item(published_xblock.location)
asides = component.runtime.get_asides(component)
_check_asides(asides, 'new_value11', 'new_value12', 'new_value21', 'aside2_default_value2')
asides[0].field11 = 'other_value11'
# update the first aside item and check that it was stored correctly
self.store.update_item(component, self.user_id, asides=[asides[0]])
cached_asides = component.runtime.get_asides(component)
_check_asides(cached_asides, 'other_value11', 'new_value12', 'new_value21', 'aside2_default_value2')
new_component = self.store.get_item(published_xblock.location)
new_asides = new_component.runtime.get_asides(new_component)
_check_asides(new_asides, 'other_value11', 'new_value12', 'new_value21', 'aside2_default_value2')
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
@XBlockAside.register_temp_plugin(AsideFoo, 'test_aside1')
@patch('xmodule.modulestore.split_mongo.caching_descriptor_system.CachingDescriptorSystem.applicable_aside_types',
lambda self, block: ['test_aside1'])
def test_clone_course_with_asides(self, default_store):
"""
Tests that connected asides will be cloned together with the parent courses
"""
if default_store == ModuleStoreEnum.Type.mongo:
pytest.skip("asides not supported in old mongo")
with MongoContentstoreBuilder().build() as contentstore:
# initialize the mixed modulestore
self._initialize_mixed(contentstore=contentstore, mappings={})
with self.store.default_store(default_store):
block_type1 = 'test_aside1'
def_id = self.runtime.id_generator.create_definition(block_type1)
usage_id = self.runtime.id_generator.create_usage(def_id)
aside1 = AsideFoo(scope_ids=ScopeIds('user', block_type1, def_id, usage_id), runtime=self.runtime)
aside1.field11 = 'test1'
source_course_key = self.store.make_course_key("org.source", "course.source", "run.source")
self._create_course(source_course_key, asides=[aside1])
dest_course_id = self.store.make_course_key("org.other", "course.other", "run.other")
self.store.clone_course(source_course_key, dest_course_id, self.user_id)
source_store = self.store._get_modulestore_by_type(default_store) # pylint: disable=protected-access
self.assertCoursesEqual(source_store, source_course_key, source_store, dest_course_id)
# after clone get connected aside and check that it was cloned correctly
actual_items = source_store.get_items(dest_course_id,
revision=ModuleStoreEnum.RevisionOption.published_only)
chapter_is_found = False
for block in actual_items:
if block.scope_ids.block_type == 'chapter':
asides = block.runtime.get_asides(block)
assert len(asides) == 1
assert asides[0].field11 == 'test1'
assert asides[0].field12 == 'aside1_default_value2'
chapter_is_found = True
break
assert chapter_is_found
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
@XBlockAside.register_temp_plugin(AsideFoo, 'test_aside1')
@patch('xmodule.modulestore.split_mongo.caching_descriptor_system.CachingDescriptorSystem.applicable_aside_types',
lambda self, block: ['test_aside1'])
def test_delete_item_with_asides(self, default_store):
"""
Tests that connected asides will be removed together with the connected items
"""
if default_store == ModuleStoreEnum.Type.mongo:
pytest.skip("asides not supported in old mongo")
self.initdb(default_store)
block_type1 = 'test_aside1'
def_id = self.runtime.id_generator.create_definition(block_type1)
usage_id = self.runtime.id_generator.create_usage(def_id)
aside1 = AsideFoo(scope_ids=ScopeIds('user', block_type1, def_id, usage_id), runtime=self.runtime)
aside1.field11 = 'new_value11'
aside1.field12 = 'new_value12'
published_xblock = self.store.create_item(
self.user_id,
self.course.id,
'vertical',
block_id='test_vertical',
asides=[aside1]
)
asides = published_xblock.runtime.get_asides(published_xblock)
assert asides[0].field11 == 'new_value11'
assert asides[0].field12 == 'new_value12'
# remove item
self.store.delete_item(published_xblock.location, self.user_id)
# create item again
published_xblock2 = self.store.create_item(
self.user_id,
self.course.id,
'vertical',
block_id='test_vertical'
)
# check that aside has default values
asides2 = published_xblock2.runtime.get_asides(published_xblock2)
assert asides2[0].field11 == 'aside1_default_value1'
assert asides2[0].field12 == 'aside1_default_value2'
@ddt.data((ModuleStoreEnum.Type.mongo, 1, 0), (ModuleStoreEnum.Type.split, 2, 0))
@XBlockAside.register_temp_plugin(AsideFoo, 'test_aside1')
@patch('xmodule.modulestore.split_mongo.caching_descriptor_system.CachingDescriptorSystem.applicable_aside_types',
lambda self, block: ['test_aside1'])
@ddt.unpack
def test_published_and_unpublish_item_with_asides(self, default_store, max_find, max_send):
"""
        Tests that publish/unpublish doesn't affect connected stored asides
"""
if default_store == ModuleStoreEnum.Type.mongo:
pytest.skip("asides not supported in old mongo")
self.initdb(default_store)
block_type1 = 'test_aside1'
def_id = self.runtime.id_generator.create_definition(block_type1)
usage_id = self.runtime.id_generator.create_usage(def_id)
aside1 = AsideFoo(scope_ids=ScopeIds('user', block_type1, def_id, usage_id), runtime=self.runtime)
aside1.field11 = 'new_value11'
aside1.field12 = 'new_value12'
def _check_asides(item):
""" Helper function to check asides """
asides = item.runtime.get_asides(item)
assert asides[0].field11 == 'new_value11'
assert asides[0].field12 == 'new_value12'
# start off as Private
item = self.store.create_child(self.user_id, self.writable_chapter_location, 'problem',
'test_compute_publish_state', asides=[aside1])
item_location = item.location
with check_mongo_calls(max_find, max_send):
assert not self.store.has_published_version(item)
_check_asides(item)
# Private -> Public
published_block = self.store.publish(item_location, self.user_id)
_check_asides(published_block)
item = self.store.get_item(item_location)
assert self.store.has_published_version(item)
_check_asides(item)
# Public -> Private
unpublished_block = self.store.unpublish(item_location, self.user_id)
_check_asides(unpublished_block)
item = self.store.get_item(item_location)
assert not self.store.has_published_version(item)
_check_asides(item)<|fim▁end|>
|
course_key.make_usage_key('video', 'WelcomeX'),
|
<|file_name|>PersonPaperResourceConverter.java<|end_file_name|><|fim▁begin|>package org.lnu.is.converter.person.paper;
import org.lnu.is.annotations.Converter;
import org.lnu.is.converter.AbstractConverter;
import org.lnu.is.domain.honors.type.HonorType;
import org.lnu.is.domain.paper.type.PaperType;
import org.lnu.is.domain.person.Person;
import org.lnu.is.domain.person.paper.PersonPaper;
import org.lnu.is.resource.person.paper.PersonPaperResource;
<|fim▁hole|>/**
* Person Paper Resource Converter.
* @author ivanursul
*
*/
@Converter("personPaperResourceConverter")
public class PersonPaperResourceConverter extends AbstractConverter<PersonPaperResource, PersonPaper> {
@Override
public PersonPaper convert(final PersonPaperResource source, final PersonPaper target) {
if (source.getPersonId() != null) {
Person person = new Person();
person.setId(source.getPersonId());
target.setPerson(person);
}
if (source.getPaperTypeId() != null) {
PaperType paperType = new PaperType();
paperType.setId(source.getPaperTypeId());
target.setPaperType(paperType);
}
if (source.getHonorsTypeId() != null) {
HonorType honorsType = new HonorType();
honorsType.setId(source.getHonorsTypeId());
target.setHonorsType(honorsType);
}
target.setDocSeries(source.getDocSeries());
target.setDocNum(source.getDocNum());
target.setDocDate(source.getDocDate());
target.setDocIssued(source.getDocIssued());
target.setDocPin(source.getDocPin());
target.setMark(source.getMark());
target.setIsChecked(source.getIsChecked());
target.setIsForeign(source.getIsForeign());
return target;
}
@Override
public PersonPaper convert(final PersonPaperResource source) {
return convert(source, new PersonPaper());
}
}<|fim▁end|>
| |
<|file_name|>material.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core';
import {
MdAutocompleteModule,
MdButtonModule,
MdButtonToggleModule,
MdCardModule,
MdCheckboxModule,
MdChipsModule,
MdCoreModule,
MdDatepickerModule,
MdDialogModule,
MdExpansionModule,
MdGridListModule,
MdIconModule,
MdInputModule,
MdListModule,
MdMenuModule,
MdNativeDateModule,
MdProgressBarModule,
MdProgressSpinnerModule,
MdRadioModule,
MdRippleModule,
MdSelectModule,
MdSidenavModule,
MdSliderModule,
MdSlideToggleModule,
MdSnackBarModule,
MdSortModule,
MdTableModule,
MdTabsModule,
MdToolbarModule,
MdTooltipModule,
} from '@angular/material';
@NgModule({
exports: [
MdAutocompleteModule,
MdButtonModule,
<|fim▁hole|> MdChipsModule,
MdCoreModule,
MdDatepickerModule,
MdDialogModule,
MdExpansionModule,
MdGridListModule,
MdIconModule,
MdInputModule,
MdListModule,
MdMenuModule,
MdNativeDateModule,
MdProgressBarModule,
MdProgressSpinnerModule,
MdRadioModule,
MdRippleModule,
MdSelectModule,
MdSidenavModule,
MdSliderModule,
MdSlideToggleModule,
MdSnackBarModule,
MdSortModule,
MdTableModule,
MdTabsModule,
MdToolbarModule,
MdTooltipModule,
]
})
export class MaterialModule {}<|fim▁end|>
|
MdButtonToggleModule,
MdCardModule,
MdCheckboxModule,
|
<|file_name|>global.js<|end_file_name|><|fim▁begin|>// Copyright (C) 2015 Leslie Zhai <[email protected]>
.pragma library
var v2 = false;
var isAway = false;
var isRobot = false;
var uin = "";
var sid = "";
var skey = "";
var ticket = "";
var deviceId = "";
var loginUserName = "";
var loginNickName = "";
var initSyncKey;
var syncKey;
var initContactList;
var wxView;
var chatView;
function monitorNewMsg() <|fim▁hole|>
if (typeof(chatView) != 'undefined' && typeof(chatView.doNewMsg) == 'function')
chatView.doNewMsg();
}<|fim▁end|>
|
{
if (typeof(wxView) != 'undefined')
wxView.doNewMsg();
|
<|file_name|>Radio.js<|end_file_name|><|fim▁begin|>'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _react = require('react');
<|fim▁hole|>
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
var Radio = function (_Component) {
_inherits(Radio, _Component);
function Radio(props) {
_classCallCheck(this, Radio);
var _this = _possibleConstructorReturn(this, (Radio.__proto__ || Object.getPrototypeOf(Radio)).call(this, props));
_this.state = {
checked: _this.getChecked(props)
};
return _this;
}
_createClass(Radio, [{
key: 'componentWillReceiveProps',
value: function componentWillReceiveProps(props) {
var checked = this.getChecked(props);
if (this.state.checked != checked) {
this.setState({ checked: checked });
}
}
}, {
key: 'onChange',
value: function onChange(e) {
var checked = e.target.checked;
if (checked) {
if (this.props.onChange) {
if (this.props.hasOwnProperty('model')) {
this.props.onChange(this.props.value);
} else {
this.props.onChange(e);
}
}
}
this.setState({ checked: checked });
}
}, {
key: 'onFocus',
value: function onFocus() {
this.setState({
focus: true
});
}
}, {
key: 'onBlur',
value: function onBlur() {
this.setState({
focus: false
});
}
}, {
key: 'getChecked',
value: function getChecked(props) {
return props.model == props.value || Boolean(props.checked);
}
}, {
key: 'render',
value: function render() {
var checked = this.state.checked;
var _props = this.props,
disabled = _props.disabled,
focus = _props.focus,
value = _props.value,
children = _props.children;
return _react2.default.createElement(
'label',
{ style: this.style(), className: this.className('el-radio') },
_react2.default.createElement(
'span',
{ className: this.classNames({
'el-radio__input': true,
'is-checked': checked,
'is-disabled': disabled,
'is-focus': focus
}) },
_react2.default.createElement('span', { className: 'el-radio__inner' }),
_react2.default.createElement('input', {
type: 'radio',
className: 'el-radio__original',
checked: checked,
disabled: disabled,
onChange: this.onChange.bind(this),
onFocus: this.onFocus.bind(this),
onBlur: this.onBlur.bind(this)
})
),
_react2.default.createElement(
'span',
{ className: 'el-radio__label' },
children || value
)
);
}
}]);
return Radio;
}(_libs.Component);
var _default = Radio;
exports.default = _default;
Radio.propTypes = {
value: _libs.PropTypes.oneOfType([_libs.PropTypes.string, _libs.PropTypes.number]).isRequired,
onChange: _libs.PropTypes.func,
disabled: _libs.PropTypes.bool,
focus: _libs.PropTypes.bool,
/* eslint-disable */
checked: _libs.PropTypes.bool
/* eslint-enable */
};
;
var _temp = function () {
if (typeof __REACT_HOT_LOADER__ === 'undefined') {
return;
}
__REACT_HOT_LOADER__.register(Radio, 'Radio', 'src/radio/Radio.jsx');
__REACT_HOT_LOADER__.register(_default, 'default', 'src/radio/Radio.jsx');
}();
;<|fim▁end|>
|
var _react2 = _interopRequireDefault(_react);
var _libs = require('../../libs');
|
<|file_name|>assert-eq-macro-unsized.rs<|end_file_name|><|fim▁begin|>// run-pass<|fim▁hole|><|fim▁end|>
|
pub fn main() {
assert_eq!([1, 2, 3][..], vec![1, 2, 3][..]);
}
|
<|file_name|>future.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
* A type representing values that may be computed concurrently and
* operations for working with them.
*
* # Example
*
* ```rust
* # fn fib(n: uint) -> uint {42};
* # fn make_a_sandwich() {};
* let mut delayed_fib = extra::future::spawn (|| fib(5000) );
* make_a_sandwich();
* println!("fib(5000) = {}", delayed_fib.get())
* ```
*/
#[allow(missing_doc)];
use std::cell::Cell;
use std::comm::{PortOne, oneshot};
use std::task;
use std::util::replace;
/// A type encapsulating the result of a computation which may not be complete
pub struct Future<A> {
priv state: FutureState<A>,
}
enum FutureState<A> {
Pending(~fn() -> A),
Evaluating,
Forced(A)
}
/// Methods on the `future` type
impl<A:Clone> Future<A> {
pub fn get(&mut self) -> A {
//! Get the value of the future.
(*(self.get_ref())).clone()
}
}
impl<A> Future<A> {
/// Gets the value from this future, forcing evaluation.
pub fn unwrap(self) -> A {
let mut this = self;
this.get_ref();
let state = replace(&mut this.state, Evaluating);
match state {
Forced(v) => v,
_ => fail2!( "Logic error." ),
}
}
pub fn get_ref<'a>(&'a mut self) -> &'a A {
/*!
* Executes the future's closure and then returns a borrowed
* pointer to the result. The borrowed pointer lasts as long as
* the future.
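         *
         * Illustrative use (mirrors `test_get_ref_method` below):
         *
         *     let mut f = Future::from_value(22);
         *     assert_eq!(*f.get_ref(), 22);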
*/
match self.state {
Forced(ref v) => return v,
Evaluating => fail2!("Recursive forcing of future!"),
Pending(_) => {
match replace(&mut self.state, Evaluating) {
Forced(_) | Evaluating => fail2!("Logic error."),
Pending(f) => {
self.state = Forced(f());
self.get_ref()
}
}
}
}
}
pub fn from_value(val: A) -> Future<A> {
/*!
* Create a future from a value.
*
* The value is immediately available and calling `get` later will
* not block.
*/
Future {state: Forced(val)}
}
pub fn from_fn(f: ~fn() -> A) -> Future<A> {
/*!
* Create a future from a function.
*
* The first time that the value is requested it will be retrieved by
* calling the function. Note that this function is a local
* function. It is not spawned into another task.
*/
Future {state: Pending(f)}
}
}
impl<A:Send> Future<A> {
pub fn from_port(port: PortOne<A>) -> Future<A> {
/*!
* Create a future from a port
*
* The first time that the value is requested the task will block
* waiting for the result to be received on the port.
*/
let port = Cell::new(port);
do Future::from_fn {
port.take().recv()
}
}
pub fn spawn(blk: ~fn() -> A) -> Future<A> {
/*!
* Create a future from a unique closure.
*
* The closure will be run in a new task and its result used as the
* value of the future.
*/
let (port, chan) = oneshot();
do task::spawn_with(chan) |chan| {
chan.send(blk());
}
Future::from_port(port)
}
pub fn spawn_with<B: Send>(v: B, blk: ~fn(B) -> A) -> Future<A> {
/*!
* Create a future from a unique closure taking one argument.
*
* The closure and its argument will be moved into a new task. The
* closure will be run and its result used as the value of the future.
*/
let (port, chan) = oneshot();
do task::spawn_with((v, chan)) |(v, chan)| {
chan.send(blk(v));
}
Future::from_port(port)
}
}
#[cfg(test)]
mod test {
use future::Future;
use std::cell::Cell;
use std::comm::oneshot;
use std::task;
<|fim▁hole|> assert_eq!(f.get(), ~"snail");
}
#[test]
fn test_from_port() {
let (po, ch) = oneshot();
ch.send(~"whale");
let mut f = Future::from_port(po);
assert_eq!(f.get(), ~"whale");
}
#[test]
fn test_from_fn() {
let mut f = Future::from_fn(|| ~"brail");
assert_eq!(f.get(), ~"brail");
}
#[test]
fn test_interface_get() {
let mut f = Future::from_value(~"fail");
assert_eq!(f.get(), ~"fail");
}
#[test]
fn test_interface_unwrap() {
let f = Future::from_value(~"fail");
assert_eq!(f.unwrap(), ~"fail");
}
#[test]
fn test_get_ref_method() {
let mut f = Future::from_value(22);
assert_eq!(*f.get_ref(), 22);
}
#[test]
fn test_spawn() {
let mut f = Future::spawn(|| ~"bale");
assert_eq!(f.get(), ~"bale");
}
#[test]
fn test_spawn_with() {
let mut f = Future::spawn_with(~"gale", |s| { s });
assert_eq!(f.get(), ~"gale");
}
#[test]
#[should_fail]
fn test_futurefail() {
let mut f = Future::spawn(|| fail2!());
let _x: ~str = f.get();
}
#[test]
fn test_sendable_future() {
let expected = "schlorf";
let f = Cell::new(do Future::spawn { expected });
do task::spawn {
let mut f = f.take();
let actual = f.get();
assert_eq!(actual, expected);
}
}
}<|fim▁end|>
|
#[test]
fn test_from_value() {
let mut f = Future::from_value(~"snail");
|
<|file_name|>DOMStylePropertiesAndMethodsExample04.js<|end_file_name|><|fim▁begin|>let prop, value, i, len;<|fim▁hole|> console.log(`prop: ${value.cssText} (${value.cssValueType})`);
}<|fim▁end|>
|
for (i = 0, len = myDiv.style.length; i < len; i++) {
prop = myDiv.style[i]; // alternately, myDiv.style.item(i)
value = myDiv.style.getPropertyCSSValue(prop);
|
<|file_name|>issue-14845.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
<|fim▁hole|>fn main() {
let x = X { a: [0] };
let _f = &x.a as *mut u8;
//~^ ERROR mismatched types
//~| expected `*mut u8`
//~| found `&[u8; 1]`
//~| expected u8
//~| found array of 1 elements
let local = [0u8];
let _v = &local as *mut u8;
//~^ ERROR mismatched types
//~| expected `*mut u8`
//~| found `&[u8; 1]`
//~| expected u8,
//~| found array of 1 elements
}<|fim▁end|>
|
struct X {
a: [u8; 1]
}
|
<|file_name|>PerformanceOverlay.js<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule PerformanceOverlay
* @flow
*/
'use strict';
var PerformanceLogger = require('PerformanceLogger');
var React = require('React');
var StyleSheet = require('StyleSheet');
var Text = require('Text');
var View = require('View');
var PerformanceOverlay = React.createClass({
render: function() {
var perfLogs = PerformanceLogger.getTimespans();
var items = [];
for (var key in perfLogs) {
if (perfLogs[key].totalTime) {
var unit = (key === 'BundleSize') ? 'b' : 'ms';
items.push(
<View style={styles.row} key={key}>
<Text style={[styles.text, styles.label]}>{key}</Text>
<Text style={[styles.text, styles.totalTime]}>
{perfLogs[key].totalTime + unit}
</Text>
</View>
);
}
}
return (
<View style={styles.container}>
{items}<|fim▁hole|>
var styles = StyleSheet.create({
container: {
height: 100,
paddingTop: 10,
},
label: {
flex: 1,
},
row: {
flexDirection: 'row',
paddingHorizontal: 10,
},
text: {
color: 'white',
fontSize: 12,
},
totalTime: {
paddingRight: 100,
},
});
module.exports = PerformanceOverlay;<|fim▁end|>
|
</View>
);
}
});
|
<|file_name|>isilon.py<|end_file_name|><|fim▁begin|># Copyright 2015 EMC Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Isilon specific NAS backend plugin.
"""
import os
from oslo_config import cfg
from oslo_log import log
from oslo_utils import units
from requests.exceptions import HTTPError
import six
from manila.common import constants as const
from manila import exception
from manila.i18n import _
from manila.share.drivers.dell_emc.plugins import base
from manila.share.drivers.dell_emc.plugins.isilon import isilon_api
CONF = cfg.CONF
VERSION = "0.1.0"
LOG = log.getLogger(__name__)
class IsilonStorageConnection(base.StorageConnection):
"""Implements Isilon specific functionality for EMC Manila driver."""
def __init__(self, *args, **kwargs):
super(IsilonStorageConnection, self).__init__(*args, **kwargs)
self._server = None
self._port = None
self._username = None
self._password = None
self._server_url = None
self._connect_resp = None
self._root_dir = None
self._verify_ssl_cert = None
self._containers = {}
self._shares = {}
self._snapshots = {}
self._isilon_api = None
self._isilon_api_class = isilon_api.IsilonApi
self.driver_handles_share_servers = False
def _get_container_path(self, share):
"""Return path to a container."""
return os.path.join(self._root_dir, share['name'])
def create_share(self, context, share, share_server):
"""Is called to create share."""
if share['share_proto'] == 'NFS':
location = self._create_nfs_share(share)
elif share['share_proto'] == 'CIFS':
location = self._create_cifs_share(share)
else:
message = (_('Unsupported share protocol: %(proto)s.') %
{'proto': share['share_proto']})
LOG.error(message)
raise exception.InvalidShare(reason=message)
# apply directory quota based on share size
max_share_size = share['size'] * units.Gi
self._isilon_api.quota_create(
self._get_container_path(share), 'directory', max_share_size)
return location
def create_share_from_snapshot(self, context, share, snapshot,
share_server):
"""Creates a share from the snapshot."""
# Create share at new location
location = self.create_share(context, share, share_server)
# Clone snapshot to new location
fq_target_dir = self._get_container_path(share)
self._isilon_api.clone_snapshot(snapshot['name'], fq_target_dir)
return location
def _create_nfs_share(self, share):
"""Is called to create nfs share."""
container_path = self._get_container_path(share)
self._isilon_api.create_directory(container_path)
share_created = self._isilon_api.create_nfs_export(container_path)
if not share_created:
message = (
_('The requested NFS share "%(share)s" was not created.') %
{'share': share['name']})
LOG.error(message)
raise exception.ShareBackendException(msg=message)
location = '{0}:{1}'.format(self._server, container_path)
return location
def _create_cifs_share(self, share):
"""Is called to create cifs share."""
# Create the directory
container_path = self._get_container_path(share)
self._isilon_api.create_directory(container_path)
self._isilon_api.create_smb_share(share['name'], container_path)
share_path = '\\\\{0}\\{1}'.format(self._server, share['name'])
return share_path
def create_snapshot(self, context, snapshot, share_server):
"""Is called to create snapshot."""
snapshot_path = os.path.join(self._root_dir, snapshot['share_name'])
self._isilon_api.create_snapshot(snapshot['name'], snapshot_path)
def delete_share(self, context, share, share_server):
"""Is called to remove share."""
if share['share_proto'] == 'NFS':
self._delete_nfs_share(share)
elif share['share_proto'] == 'CIFS':
self._delete_cifs_share(share)
else:
message = (_('Unsupported share type: %(type)s.') %
{'type': share['share_proto']})
LOG.error(message)
raise exception.InvalidShare(reason=message)
def _delete_nfs_share(self, share):
"""Is called to remove nfs share."""
share_id = self._isilon_api.lookup_nfs_export(
self._root_dir + '/' + share['name'])
if share_id is None:
lw = ('Attempted to delete NFS Share "%s", but the share does '
'not appear to exist.')
LOG.warning(lw, share['name'])
else:
# attempt to delete the share
export_deleted = self._isilon_api.delete_nfs_share(share_id)
if not export_deleted:
message = _('Error deleting NFS share: %s') % share['name']
LOG.error(message)
raise exception.ShareBackendException(msg=message)
def _delete_cifs_share(self, share):
"""Is called to remove CIFS share."""
smb_share = self._isilon_api.lookup_smb_share(share['name'])
if smb_share is None:
lw = ('Attempted to delete CIFS Share "%s", but the share does '
'not appear to exist.')
LOG.warning(lw, share['name'])
else:
share_deleted = self._isilon_api.delete_smb_share(share['name'])
if not share_deleted:
message = _('Error deleting CIFS share: %s') % share['name']
LOG.error(message)
raise exception.ShareBackendException(msg=message)
def delete_snapshot(self, context, snapshot, share_server):
"""Is called to remove snapshot."""
self._isilon_api.delete_snapshot(snapshot['name'])
def ensure_share(self, context, share, share_server):
"""Invoked to ensure that share is exported."""
def extend_share(self, share, new_size, share_server=None):
"""Extends a share."""
new_quota_size = new_size * units.Gi
self._isilon_api.quota_set(
self._get_container_path(share), 'directory', new_quota_size)
def allow_access(self, context, share, access, share_server):
"""Allow access to the share."""
if share['share_proto'] == 'NFS':
self._nfs_allow_access(share, access)
elif share['share_proto'] == 'CIFS':
self._cifs_allow_access(share, access)
else:
message = _(
'Unsupported share protocol: %s. Only "NFS" and '
'"CIFS" are currently supported share protocols.') % share[
'share_proto']
LOG.error(message)
raise exception.InvalidShare(reason=message)
def _nfs_allow_access(self, share, access):
"""Allow access to nfs share."""
access_type = access['access_type']
if access_type != 'ip':
            message = _('Only "ip" access type allowed for the NFS '
'protocol.')
LOG.error(message)
raise exception.InvalidShareAccess(reason=message)
export_path = self._get_container_path(share)
access_ip = access['access_to']
access_level = access['access_level']
share_id = self._isilon_api.lookup_nfs_export(export_path)
share_access_group = 'clients'
if access_level == const.ACCESS_LEVEL_RO:
share_access_group = 'read_only_clients'
# Get current allowed clients
export = self._get_existing_nfs_export(share_id)
current_clients = export[share_access_group]
# Format of ips could be '10.0.0.2', or '10.0.0.2, 10.0.0.0/24'
ips = list()
ips.append(access_ip)
ips.extend(current_clients)
export_params = {share_access_group: ips}
url = '{0}/platform/1/protocols/nfs/exports/{1}'.format(
self._server_url, share_id)
resp = self._isilon_api.request('PUT', url, data=export_params)
resp.raise_for_status()
def _cifs_allow_access(self, share, access):
access_type = access['access_type']
access_to = access['access_to']
access_level = access['access_level']
if access_type == 'ip':
access_ip = access['access_to']
self._cifs_allow_access_ip(access_ip, share, access_level)
elif access_type == 'user':
self._cifs_allow_access_user(access_to, share, access_level)
else:
message = _('Only "ip" and "user" access types allowed for '
'CIFS protocol.')
LOG.error(message)
raise exception.InvalidShareAccess(reason=message)
def _cifs_allow_access_ip(self, ip, share, access_level):
if access_level == const.ACCESS_LEVEL_RO:
message = _('Only RW Access allowed for CIFS Protocol when using '
'the "ip" access type.')
LOG.error(message)
raise exception.InvalidShareAccess(reason=message)
allowed_ip = 'allow:' + ip
smb_share = self._isilon_api.lookup_smb_share(share['name'])
host_acl = smb_share['host_acl']
if allowed_ip not in host_acl:
host_acl.append(allowed_ip)
data = {'host_acl': host_acl}
url = ('{0}/platform/1/protocols/smb/shares/{1}'
.format(self._server_url, share['name']))
r = self._isilon_api.request('PUT', url, data=data)
r.raise_for_status()
def _cifs_allow_access_user(self, user, share, access_level):
if access_level == const.ACCESS_LEVEL_RW:
smb_permission = isilon_api.SmbPermission.rw
elif access_level == const.ACCESS_LEVEL_RO:
smb_permission = isilon_api.SmbPermission.ro
else:
message = _('Only "RW" and "RO" access levels are supported.')
LOG.error(message)
raise exception.InvalidShareAccess(reason=message)
self._isilon_api.smb_permissions_add(share['name'], user,
smb_permission)
def deny_access(self, context, share, access, share_server):
"""Deny access to the share."""
if share['share_proto'] == 'NFS':
self._nfs_deny_access(share, access)
elif share['share_proto'] == 'CIFS':
self._cifs_deny_access(share, access)
def _nfs_deny_access(self, share, access):
"""Deny access to nfs share."""
if access['access_type'] != 'ip':
return
denied_ip = access['access_to']
access_level = access['access_level']
share_access_group = 'clients'
if access_level == const.ACCESS_LEVEL_RO:
share_access_group = 'read_only_clients'
# Get list of currently allowed client ips
export_id = self._isilon_api.lookup_nfs_export(
self._get_container_path(share))
if export_id is None:
message = _('Share %s should have been created, but was not '
'found.') % share['name']
LOG.error(message)
raise exception.ShareBackendException(msg=message)
export = self._get_existing_nfs_export(export_id)
try:
clients = export[share_access_group]
except KeyError:
message = (_('Export %(export_name)s should have contained the '
'JSON key %(json_key)s, but this key was not found.')
% {'export_name': share['name'],
'json_key': share_access_group})
LOG.error(message)
raise exception.ShareBackendException(msg=message)
allowed_ips = set(clients)
        if denied_ip in allowed_ips:
allowed_ips.remove(denied_ip)
data = {share_access_group: list(allowed_ips)}
url = ('{0}/platform/1/protocols/nfs/exports/{1}'
.format(self._server_url, six.text_type(export_id)))
r = self._isilon_api.request('PUT', url, data=data)
r.raise_for_status()
def _get_existing_nfs_export(self, export_id):
export = self._isilon_api.get_nfs_export(export_id)
if export is None:
message = _('NFS share with export id %d should have been '
'created, but was not found.') % export_id
LOG.error(message)
raise exception.ShareBackendException(msg=message)
return export
def _cifs_deny_access(self, share, access):
access_type = access['access_type']
if access_type == 'ip':
self._cifs_deny_access_ip(access['access_to'], share)
elif access_type == 'user':
self._cifs_deny_access_user(share, access)
else:
message = _('Access type for CIFS deny access request was '
'"%(access_type)s". Only "user" and "ip" access types '
'are supported for CIFS protocol access.') % {
'access_type': access_type}
LOG.warning(message)
def _cifs_deny_access_ip(self, denied_ip, share):
"""Deny access to cifs share."""
share_json = self._isilon_api.lookup_smb_share(share['name'])
host_acl_list = share_json['host_acl']
allow_ip = 'allow:' + denied_ip
if allow_ip in host_acl_list:
host_acl_list.remove(allow_ip)
share_params = {"host_acl": host_acl_list}
url = ('{0}/platform/1/protocols/smb/shares/{1}'
.format(self._server_url, share['name']))
resp = self._isilon_api.request('PUT', url, data=share_params)
resp.raise_for_status()
def _cifs_deny_access_user(self, share, access):
self._isilon_api.smb_permissions_remove(share['name'], access[
'access_to'])
def check_for_setup_error(self):
"""Check for setup error."""
def connect(self, emc_share_driver, context):
"""Connect to an Isilon cluster."""
self._server = emc_share_driver.configuration.safe_get(
"emc_nas_server")
self._port = (
int(emc_share_driver.configuration.safe_get("emc_nas_server_port"))
)
self._server_url = ('https://' + self._server + ':' +
six.text_type(self._port))
self._username = emc_share_driver.configuration.safe_get(
"emc_nas_login")
self._password = emc_share_driver.configuration.safe_get(
"emc_nas_password")
self._root_dir = emc_share_driver.configuration.safe_get(
"emc_nas_root_dir")
# TODO(Shaun Edwards): make verify ssl a config variable?
self._verify_ssl_cert = False
self._isilon_api = self._isilon_api_class(self._server_url, auth=(
self._username, self._password),
verify_ssl_cert=self._verify_ssl_cert)
if not self._isilon_api.is_path_existent(self._root_dir):
self._isilon_api.create_directory(self._root_dir, recursive=True)
def update_share_stats(self, stats_dict):
"""TODO."""
# TODO(Shaun Edwards): query capacity, set storage_protocol,
# QoS support?
stats_dict['driver_version'] = VERSION
def get_network_allocations_number(self):
"""Returns number of network allocations for creating VIFs."""
# TODO(Shaun Edwards)
return 0
def setup_server(self, network_info, metadata=None):
"""Set up and configures share server with given network parameters."""
# TODO(Shaun Edwards): Look into supporting share servers
def teardown_server(self, server_details, security_services=None):
"""Teardown share server."""
# TODO(Shaun Edwards): Look into supporting share servers
def update_access(self, context, share, access_rules, add_rules,
delete_rules, share_server=None):
"""Update share access."""
if share['share_proto'] == 'NFS':
state_map = self._update_access_nfs(share, access_rules)
if share['share_proto'] == 'CIFS':
state_map = self._update_access_cifs(share, access_rules)
return state_map
def _update_access_nfs(self, share, access_rules):
"""Updates access on a NFS share."""
nfs_rw_ips = set()
nfs_ro_ips = set()
rule_state_map = {}
for rule in access_rules:
rule_state_map[rule['access_id']] = {
'state': 'error'
}
for rule in access_rules:
if rule['access_level'] == const.ACCESS_LEVEL_RW:
nfs_rw_ips.add(rule['access_to'])
elif rule['access_level'] == const.ACCESS_LEVEL_RO:
nfs_ro_ips.add(rule['access_to'])
export_id = self._isilon_api.lookup_nfs_export(
self._get_container_path(share))
if export_id is None:
# share does not exist on backend (set all rules to error state)
return rule_state_map
data = {
'clients': list(nfs_rw_ips),
'read_only_clients': list(nfs_ro_ips)
}
url = ('{0}/platform/1/protocols/nfs/exports/{1}'
.format(self._server_url, six.text_type(export_id)))
r = self._isilon_api.request('PUT', url, data=data)
try:
r.raise_for_status()
except HTTPError:
return rule_state_map
# if we finish the bulk rule update with no error set rules to active
for rule in access_rules:
rule_state_map[rule['access_id']]['state'] = 'active'
return rule_state_map
def _update_access_cifs(self, share, access_rules):
"""Clear access on a CIFS share."""
cifs_ip_set = set()
users = set()
for rule in access_rules:
if rule['access_type'] == 'ip':
cifs_ip_set.add('allow:' + rule['access_to'])
elif rule['access_type'] == 'user':
users.add(rule['access_to'])
smb_share = self._isilon_api.lookup_smb_share(share['name'])
backend_smb_user_permissions = smb_share['permissions']
perms_to_remove = []
for perm in backend_smb_user_permissions:
if perm['trustee']['name'] not in users:
perms_to_remove.append(perm)
for perm in perms_to_remove:
backend_smb_user_permissions.remove(perm)
data = {
'host_acl': list(cifs_ip_set),
'permissions': backend_smb_user_permissions,
}
url = ('{0}/platform/1/protocols/smb/shares/{1}'
.format(self._server_url, share['name']))
r = self._isilon_api.request('PUT', url, data=data)
try:
r.raise_for_status()
except HTTPError:
# clear access rules failed so set all access rules to error state
rule_state_map = {}
for rule in access_rules:
rule_state_map[rule['access_id']] = {
'state': 'error'
}
return rule_state_map
# add access rules that don't exist on backend
rule_state_map = {}
for rule in access_rules:
rule_state_map[rule['access_id']] = {
'state': 'error'
}
try:
if rule['access_type'] == 'ip':<|fim▁hole|> rule_state_map[rule['access_id']]['state'] = 'active'
elif rule['access_type'] == 'user':
backend_users = set()
for perm in backend_smb_user_permissions:
backend_users.add(perm['trustee']['name'])
if rule['access_to'] not in backend_users:
self._cifs_allow_access_user(
rule['access_to'], share, rule['access_level'])
rule_state_map[rule['access_id']]['state'] = 'active'
else:
continue
except exception.ManilaException:
pass
return rule_state_map<|fim▁end|>
|
self._cifs_allow_access_ip(rule['access_to'], share,
rule['access_level'])
|
<|file_name|>test_authentication.py<|end_file_name|><|fim▁begin|>import os
import os.path
import binascii
import tempfile
import shutil
import getpass
import time
from twisted.trial import unittest
from twisted.internet.protocol import Factory
from twisted.internet import reactor, defer, protocol, error
from txdbus import bus, endpoints, authentication
from txdbus.authentication import DBusAuthenticationFailed
class GetPass(object):
def getuser(self):
return 'testuser'
tohex = binascii.hexlify
unhex = binascii.unhexlify
class ClientAuthenticatorTester(unittest.TestCase):
def setUp(self):
authentication.getpass = GetPass() # override 'getpass' module
self.ca = authentication.ClientAuthenticator()
self.reply = None
self.ca.beginAuthentication(self)
def tearDown(self):
authentication.getpass = getpass
def sendAuthMessage(self, m):
self.reply = m
def send(self, msg):
self.ca.handleAuthMessage(msg)
def ae(self, x,y):
self.assertEquals(x,y)
def are(self, x):
self.assertEquals(self.reply, x)
def test_bad_auth_message(self):
self.assertRaises(DBusAuthenticationFailed, self.send, 'BAD_LINE')
def test_rejection(self):
self.ae(self.ca.authMech, 'EXTERNAL')
self.are( 'AUTH EXTERNAL')
self.send('REJECTED')
self.ae(self.ca.authMech, 'DBUS_COOKIE_SHA1')
self.are( 'AUTH DBUS_COOKIE_SHA1 ' + tohex('testuser'))
self.send('REJECTED')
self.ae(self.ca.authMech, 'ANONYMOUS')
self.are( 'AUTH ANONYMOUS 747864627573')
self.assertRaises(DBusAuthenticationFailed, self.send, 'REJECTED')
def test_error(self):
self.ae(self.ca.authMech, 'EXTERNAL')
self.are( 'AUTH EXTERNAL')
self.send('ERROR')
self.ae(self.ca.authMech, 'DBUS_COOKIE_SHA1')
self.are( 'AUTH DBUS_COOKIE_SHA1 ' + tohex('testuser'))
self.send('ERROR')
self.ae(self.ca.authMech, 'ANONYMOUS')
self.are( 'AUTH ANONYMOUS 747864627573')
def test_ok(self):
self.assertRaises(DBusAuthenticationFailed, self.send, 'OK')
self.assertRaises(DBusAuthenticationFailed, self.send, 'OK foo')
self.send('OK ' + tohex('foo'))
self.ae(self.ca.getGUID(), 'foo')
self.are( 'BEGIN')
self.assertTrue(self.ca.authenticationSucceeded())
def test_agree_unix_fd(self):
self.are('AUTH EXTERNAL')
self.send('AGREE_UNIX_FD')
self.are('AUTH EXTERNAL')
def test_data_external(self):
self.ca.authMech = 'EXTERNAL'
self.send('DATA')
self.are('DATA')
def test_get_cookie(self):
t = tempfile.mkdtemp()
k = os.path.join(t,'keyring')
ctx = 'foo'
cid = 'bar'
fn = os.path.join(k, ctx)
try:
os.mkdir(k, 0o0777)
self.ca.cookie_dir = k
self.assertRaises(Exception, self.ca._authGetDBusCookie, None, None)
os.chmod(k, 0o0700)
self.ca.cookie_dir = '/etc'
self.assertRaises(Exception, self.ca._authGetDBusCookie, None, None)
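            # Keyring lines have the form "<cookie-id> <creation-time> <cookie>";
            # looking up cookie id 'bar' in context 'foo' should return '123456'.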
with open(fn, 'w') as f:
f.write('abcd 12345 234234234\n')
f.write('bar 12345 123456\n')
self.ca.cookie_dir = k
self.ae(self.ca._authGetDBusCookie(ctx,cid), '123456')
finally:
shutil.rmtree(t)
def test_data_dbus_cookie_sha1_err(self):
self.ca.authMech = 'DBUS_COOKIE_SHA1'
self.send('DATA ACK!')
self.are('ERROR Non-hexadecimal digit found')
class BusCookieAuthenticatorTester(unittest.TestCase):
def setUp(self):
self.ba = authentication.BusCookieAuthenticator()
def ae(self, x,y):
self.assertEquals(x,y)
def ar(self, x):
self.assertEquals(x, ('REJECTED', None))
def s(self, x):
return self.ba.step(x)
def s1(self, x, y=None):
return self.ba._step_one(x, y)
def s2(self, x):
return self.ba._step_two(x)
def test_mech_name(self):
self.ae(self.ba.getMechanismName(), 'DBUS_COOKIE_SHA1')
def test_step(self):
self.ar( self.s(None) )
self.ba.step_num = 2
self.ar( self.s('foo') )
def test_step1_invalid_username(self):
self.ar( self.s1('foobarbazwannabewilliamwallace') )
def test_step1_invalid_uid(self):
self.ar( self.s1(99999999999) )
def test_step1_bad_user_keyring_permissions(self):
t = tempfile.mkdtemp()
k = os.path.join(t,'keyring')
try:
os.mkdir(k, 0o0777)
self.ar(self.s1(0,k))
finally:
shutil.rmtree(t)
def test_step1_create_user_keyring_dir(self):
t = tempfile.mkdtemp()
k = os.path.join(t,'keyring')<|fim▁hole|>
self.assertTrue( not os.path.exists(k) )
self.ae(self.s1(0,k)[0], 'CONTINUE')
self.assertTrue( os.path.exists(k) )
finally:
shutil.rmtree(t)
def test_step2_fail(self):
t = tempfile.mkdtemp()
k = os.path.join(t,'keyring')
try:
self.assertTrue( not os.path.exists(k) )
self.ae(self.s1(0,k)[0], 'CONTINUE')
self.assertTrue( os.path.exists(k) )
self.ar(self.s2('INVALID RESPONSE'))
finally:
shutil.rmtree(t)
def test_lock(self):
t = tempfile.mkdtemp()
k = os.path.join(t,'keyring')
try:
self.assertTrue( not os.path.exists(k) )
self.ae(self.s1(0,k)[0], 'CONTINUE')
self.assertTrue( os.path.exists(k) )
lf = self.ba.cookie_file + '.lock'
with open(lf, 'w') as f:
f.write('\0')
self.ba._get_lock()
self.assertTrue(True)
finally:
shutil.rmtree(t)
class DBusCookieAuthenticationTester(unittest.TestCase):
def setUp(self):
authentication.getpass = GetPass() # override 'getpass' module
self.ca = authentication.ClientAuthenticator()
self.ba = authentication.BusCookieAuthenticator()
self.reply = None
self.ca.beginAuthentication(self)
def tearDown(self):
authentication.getpass = getpass
def sendAuthMessage(self, m):
self.reply = m
def send(self, msg):
self.ca.handleAuthMessage(msg)
def test_dbus_cookie_authentication(self):
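        # Drives both ends of the DBUS_COOKIE_SHA1 exchange in-process: the
        # client emits AUTH with the hex-encoded username, the bus side answers
        # with a DATA challenge, and the client's DATA response is accepted
        # with ('OK', None).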
self.assertEquals(self.ba.getMechanismName(), 'DBUS_COOKIE_SHA1')
while not self.ca.authMech == 'DBUS_COOKIE_SHA1':
self.ca.authTryNextMethod()
self.assertEquals(self.reply, 'AUTH DBUS_COOKIE_SHA1 ' + tohex('testuser'))
t = tempfile.mkdtemp()
k = os.path.join(t,'keyring')
try:
self.ca.cookie_dir = k
s1 = self.ba._step_one('0',k)
self.assertEquals(s1[0], 'CONTINUE')
self.send( 'DATA ' + tohex(s1[1]) )
self.assertTrue( self.reply.startswith('DATA') )
self.assertEquals(self.ba._step_two(unhex(self.reply.split()[1])), ('OK',None))
finally:
shutil.rmtree(t)
class DBusCookieCookieHandlingTester(unittest.TestCase):
def setUp(self):
self.ba = authentication.BusCookieAuthenticator()
self.t = tempfile.mkdtemp()
self.ba.cookie_file = os.path.join(self.t,'nomnomnom')
def tearDown(self):
shutil.rmtree(self.t)
def test_make_cookies(self):
def g(t):
def tf():
return time.time()-t
return tf
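        # The two cookies created with 31-second-old timestamps are treated as
        # expired, so only the last two (ids '3' and '4') survive.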
self.ba._create_cookie(g(31.0))
self.ba._create_cookie(g(31.0))
self.ba._create_cookie(g(20.0))
self.ba._create_cookie(g(21.2))
c = self.ba._get_cookies()
self.assertEquals(set(['3','4']), set( x[0] for x in c ))
def test_del_cookie_with_remaining(self):
self.ba._create_cookie()
self.ba._create_cookie()
self.ba._create_cookie()
self.ba.cookieId = 2
self.ba._delete_cookie()
c = self.ba._get_cookies()
self.assertEquals(set(['1','3']), set( x[0] for x in c ))
def test_del_cookie_last(self):
self.ba._create_cookie()
self.ba.cookieId = 1
self.assertTrue(os.path.exists(self.ba.cookie_file))
self.ba._delete_cookie()
self.assertTrue(not os.path.exists(self.ba.cookie_file))
class ExternalAuthMechanismTester(unittest.TestCase):
def test_external_auth_logic(self):
bea = authentication.BusExternalAuthenticator()
self.assertEquals(bea.getMechanismName(), 'EXTERNAL')
class T(object):
_unix_creds = None
bea.init(T())
self.assertEquals(bea.step(''), ('REJECT', 'Unix credentials not available'))
bea.creds = ('foo', 0)
self.assertEquals(bea.step(''), ('CONTINUE', ''))
self.assertEquals(bea.step(''), ('OK',None))
self.assertEquals(bea.getUserName(), 'root')
bea.cancel()
class AnonymousAuthMechanismTester(unittest.TestCase):
def test_anonymous_auth_logic(self):
baa = authentication.BusAnonymousAuthenticator()
self.assertEquals(baa.getMechanismName(), 'ANONYMOUS')
baa.init(None)
self.assertEquals(baa.step(''), ('OK',None))
self.assertEquals(baa.getUserName(), 'anonymous')
baa.cancel()
#----------------------------------------------------------------------
# Protocol Level Tests
#----------------------------------------------------------------------
# Always use the internal bus for tests if a system bus isn't available
# typically the session bus won't exist on Windows
#
INTERNAL_BUS = not 'DBUS_SESSION_BUS_ADDRESS' in os.environ
INTERNAL_BUS = True
def delay(arg):
d = defer.Deferred()
reactor.callLater(0.05, lambda : d.callback(arg) )
return d
def get_username():
uname = os.environ.get('USERNAME', None)
if uname is None:
uname = os.environ.get('LOGNAME', None)
return uname
class AuthTestProtocol(protocol.Protocol):
_buffer = ''
_sent_null = False
def connectionMade(self):
self.disconnect_d = None
self.disconnect_timeout = None
self.fail_exit_d = None
self.factory._ok(self)
def dataReceived(self, data):
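        # Accumulate raw bytes and split on CRLF; any trailing partial line is
        # kept in the buffer until the rest of it arrives.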
lines = (self._buffer+data).split('\r\n')
self._buffer = lines.pop(-1)
for line in lines:
self.gotMessage(line)
def disconnect(self):
self.transport.loseConnection()
def setTest(self, test):
self.test = test
self.assertTrue = self.test.assertTrue
self.assertEquals = self.test.assertEquals
self.fail = self.test.fail
def succeed(self):
self.assertTrue(True)
def connectionLost(self, reason):
if self.disconnect_d:
if self.disconnect_timeout:
self.disconnect_timeout.cancel()
self.disconnect_timeout = None
d = self.disconnect_d
self.disconnect_d = None
d.callback(None)
elif self.fail_exit_d:
d = self.fail_exit_d
self.fail_exit_d = None
d.errback(unittest.FailTest('Connection unexpectedly dropped'))
def failOnExit(self):
self.fail_exit_d = defer.Deferred()
def cleanup(_):
self.fail_exit_d = None
return _
self.fail_exit_d.addCallback(cleanup)
return self.fail_exit_d
def expectDisconnect(self):
self.disconnect_d = defer.Deferred()
def timeout():
self.fail()
d = self.disconnect_d
self.disconnect_d = None
d.errback(Exception('Disconnect timed out'))
self.disconnect_timeout = reactor.callLater(2, timeout)
self.disconnect_d.addCallback( lambda _: self.succeed() )
return self.disconnect_d
def send(self, msg):
if not self._sent_null:
self.transport.write('\0')
self._sent_null = True
self.transport.write(msg + '\r\n')
def test_no_null_byte_at_start(self):
d = self.expectDisconnect()
self.transport.write('blah')
return d
def test_bad_command(self):
d = self.failOnExit()
self.send('FISHY')
def recv( msg ):
self.assertEquals(msg, 'ERROR "Unknown command"')
d.callback(None)
self.gotMessage = recv
return d
def test_bad_mech(self):
d = self.failOnExit()
self.send('AUTH FOOBAR')
def recv( msg ):
self.assertTrue(msg.startswith('REJECTED'))
d.callback(None)
self.gotMessage = recv
return d
def test_bad_mech2(self):
d = self.failOnExit()
self.send('AUTH FOO BAR')
def recv( msg ):
self.assertTrue(msg.startswith('REJECTED'))
d.callback(None)
self.gotMessage = recv
return d
def test_too_long(self):
d = self.expectDisconnect()
self.send('AUTH ' + 'A'* 17000)
return d
def test_max_rejects(self):
d = self.expectDisconnect()
def retry(_=None):
dr = defer.Deferred()
self.send('AUTH FOOBAR')
def recv( msg ):
self.assertTrue(msg.startswith('REJECTED'))
dr.callback(None)
self.gotMessage = recv
return dr
x = retry()
x.addCallback( retry )
x.addCallback( retry )
x.addCallback( retry )
x.addCallback( retry )
x.addCallback( retry )
return d
def test_reject(self):
d = self.failOnExit()
self.send('AUTH DBUS_COOKIE_SHA1')
def recv(msg):
self.assertTrue(msg.startswith('REJECTED'))
d.callback(None)
self.gotMessage = recv
return d
def test_retry(self):
d = self.failOnExit()
self.send('AUTH DBUS_COOKIE_SHA1')
def recv2(msg):
self.assertTrue(msg.startswith('DATA'))
d.callback(None)
def recv1(msg):
self.send('AUTH DBUS_COOKIE_SHA1 ' + binascii.hexlify(get_username()))
self.assertTrue(msg.startswith('REJECTED'))
self.gotMessage = recv2
self.gotMessage = recv1
return d
def test_cancel(self):
d = self.failOnExit()
self.send('AUTH DBUS_COOKIE_SHA1 '+ binascii.hexlify(get_username()))
def recv2(msg):
self.assertTrue(msg.startswith('REJECTED'))
d.callback(None)
def recv1(msg):
self.send('CANCEL' )
self.assertTrue(msg.startswith('DATA'))
self.gotMessage = recv2
self.gotMessage = recv1
return d
class AuthFactory (Factory):
"""
Factory for DBusClientConnection instances
"""
protocol = AuthTestProtocol
def __init__(self):
self.d = defer.Deferred()
def _ok(self, proto):
self.d.callback( proto )
def _failed(self, err):
self.d.errback(err)
def getConnection(self):
"""
Returns the fully-connected DBusClientConnection instance. This method
should be used to obtain a reference to the DBusClientConnection as it
will be called back/error backed after authentication and DBus session
registration are complete.
"""
return self.d
class ServerObjectTester(unittest.TestCase):
def setUp(self):
if INTERNAL_BUS:
os.environ['DBUS_SESSION_BUS_ADDRESS']='unix:abstract=/tmp/txdbus-test,guid=5'
bus_obj = bus.Bus()
f = Factory()
f.protocol = bus.BusProtocol
f.bus = bus_obj
point = endpoints.getDBusEnvEndpoints(reactor, False)[0]
d = point.listen(f)
def got_port(port):
self.port = port
return self._client_connect()
d.addCallback( got_port )
return d
else:
return self._client_connect()
def _client_connect(self):
self.conn = None
f = AuthFactory()
point = endpoints.getDBusEnvEndpoints(reactor)[0]
point.connect(f)
d = f.getConnection()
d.addCallback(self._connected)
return d
def _connected(self, conn):
self.conn = conn
self.conn.setTest(self)
def tearDown(self):
if self.conn:
self.conn.disconnect()
if INTERNAL_BUS:
return self.port.stopListening()
def test_no_null_byte_at_start(self):
return self.conn.test_no_null_byte_at_start()
def test_bad_command(self):
return self.conn.test_bad_command()
def test_bad_mech(self):
return self.conn.test_bad_mech()
def test_bad_mech2(self):
return self.conn.test_bad_mech2()
def test_too_long(self):
return self.conn.test_too_long()
def test_reject(self):
return self.conn.test_reject()
def test_retry(self):
return self.conn.test_retry()
def test_cancel(self):
return self.conn.test_cancel()
def test_max_rejects(self):
return self.conn.test_max_rejects()<|fim▁end|>
|
try:
|
<|file_name|>uniquify_subscriptions.py<|end_file_name|><|fim▁begin|>from django.core.management import BaseCommand
from django.db.models import Count
from zds.notification.models import Subscription
class Command(BaseCommand):
help = "Delete all but last duplicate subscriptions"
def handle(self, *args, **options):
self.stdout.write("Starting uniquifying subscriptions")
count = 0<|fim▁hole|> .filter(id__count__gt=1)
)
for sub in duplicates:
del sub["id__count"]
# Find PKs of duplicates, exclude the most recent one
pks = Subscription.objects.filter(**sub).order_by("-pubdate").values_list("id", flat=True)[1:]
count = count + len(pks)
# Delete each of them
for pk in pks:
Subscription.objects.filter(pk=pk).delete()
self.stdout.write(f"Deleted {count} duplicates")<|fim▁end|>
|
# Find all duplicates
duplicates = (
Subscription.objects.values("user", "content_type", "object_id")
.annotate(Count("id"))
|
<|file_name|>Pokemon.js<|end_file_name|><|fim▁begin|>const { InventoryError,
NotFoundError } = require('../../errors')
const checkExists = (data) => {
return (entity) => {
if (!entity) throw new NotFoundError(`${data} not found`)
return entity
}
}
module.exports = (sequelize, DataTypes) => {
const Pokemon = sequelize.define('Pokemon', {
name: DataTypes.STRING,
price: DataTypes.FLOAT,<|fim▁hole|>
Pokemon.getByIdWithLock = function (pokemonId, transaction) {
return Pokemon.findOne({
where: {
id: pokemonId
},
lock: {
level: transaction.LOCK.UPDATE
}
})
}
Pokemon.getByName = function (name) {
return Pokemon.findOne({
where: {
name
}
}).then(checkExists(name))
}
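  // Decrease stock by `quantity`, rejecting with an InventoryError when the
  // available stock cannot cover the request.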
Pokemon.prototype.decreaseStock = function (quantity) {
if (this.stock < quantity) {
return Promise.reject(new InventoryError(this.name, this.stock))
}
this.stock -= quantity
return this.save()
}
Pokemon.prototype.increaseStock = function (quantity) {
this.stock += quantity
return this.save()
}
return Pokemon
}<|fim▁end|>
|
stock: DataTypes.INTEGER
}, { tableName: 'pokemons' })
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from django.contrib.auth.decorators import login_required
from django.contrib import messages
from .models import Profile
from .forms import ProfileForm
@login_required
def profile_edit(request):
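    # Create the profile lazily on first access, seeding it from the user's
    # current first and last name.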
next = request.GET.get("next")
profile, created = Profile.objects.get_or_create(
user=request.user,
defaults={
"first_name": request.user.first_name,
"last_name": request.user.last_name,
}
)
if request.method == "POST":
form = ProfileForm(request.POST, instance=profile)
if form.is_valid():
profile = form.save()
request.user.first_name = form.cleaned_data["first_name"]
request.user.last_name = form.cleaned_data["last_name"]
messages.add_message(request, messages.SUCCESS,
"Successfully updated profile."
)
if next:
return redirect(next)
else:
form = ProfileForm(instance=profile)
return render(request, "profiles/edit.html", {
"form": form,
"next": next,
})<|fim▁end|>
|
from django.shortcuts import render, redirect
|
<|file_name|>afterPack-template.js<|end_file_name|><|fim▁begin|>const fs = require('fs');
const fse = require('fs-extra');
const path = require('path');
module.exports = function (params) {
const unpackFile = path.join(
params.appOutDir,
'*NAMECASENOSPACE*.app/Contents/Resources/app.asar.unpacked'
);
fse.removeSync(unpackFile);<|fim▁hole|><|fim▁end|>
|
};
|
<|file_name|>WebImageCache.java<|end_file_name|><|fim▁begin|>package org.osmdroid.bonuspack.utils;
import java.util.LinkedHashMap;
import android.graphics.Bitmap;
import android.util.Log;
/**
<|fim▁hole|> */
public class WebImageCache {
LinkedHashMap<String, Bitmap> mCacheMap;
int mCapacity;
public WebImageCache(int maxItems) {
mCapacity = maxItems;
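        // accessOrder=true together with the removeEldestEntry override below
        // turns this LinkedHashMap into a small LRU cache capped at mCapacity.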
mCacheMap = new LinkedHashMap<String, Bitmap>(maxItems+1, 1.1f, true){
private static final long serialVersionUID = -4831331496601290979L;
protected boolean removeEldestEntry(Entry<String, Bitmap> eldest) {
return size() > mCapacity;
}
};
}
private void putInCache(String url, Bitmap image){
synchronized(mCacheMap){
mCacheMap.put(url, image);
}
//Log.d(BonusPackHelper.LOG_TAG, "WebImageCache:updateCache size="+mCacheMap.size());
}
/**
* get the image, either from the cache, or from the web if not in the cache.
* Can be called by multiple threads.
* If 2 threads ask for the same url simultaneously, this could put the image twice in the cache.
* => TODO, have a "queue" of current requests.
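     * <p>Illustrative usage (values are examples only):
     * <pre>
     *     WebImageCache cache = new WebImageCache(50);
     *     Bitmap icon = cache.get("http://example.com/icon.png");
     * </pre>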
* @param url of the image
* @return the image, or null if any failure.
*/
public Bitmap get(String url){
Bitmap image;
synchronized(mCacheMap) {
image = mCacheMap.get(url);
}
if (image == null){
Log.d(BonusPackHelper.LOG_TAG, "WebImageCache:load :"+url);
image = BonusPackHelper.loadBitmap(url);
if (image != null){
putInCache(url, image);
}
}
return image;
}
}<|fim▁end|>
|
* Simple memory cache for handling images loaded from the web.
* The url is the key.
* @author M.Kergall
|
<|file_name|>borrowck-lend-flow-loop.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Note: the borrowck analysis is currently flow-insensitive.<|fim▁hole|>// cases are noted.
fn borrow(_v: &int) {}
fn borrow_mut(_v: &mut int) {}
fn cond() -> bool { fail!() }
fn for_func(_f: &fn() -> bool) -> bool { fail!() }
fn produce<T>() -> T { fail!(); }
fn inc(v: &mut ~int) {
*v = ~(**v + 1);
}
fn loop_overarching_alias_mut() {
// In this instance, the borrow encompasses the entire loop.
let mut v = ~3;
let mut x = &mut v;
**x += 1;
loop {
borrow(v); //~ ERROR cannot borrow
}
}
fn block_overarching_alias_mut() {
// In this instance, the borrow encompasses the entire closure call.
let mut v = ~3;
let mut x = &mut v;
for 3.times {
borrow(v); //~ ERROR cannot borrow
}
*x = ~5;
}
fn loop_aliased_mut() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
loop {
borrow_mut(v); //~ ERROR cannot borrow
_x = &v;
}
}
fn while_aliased_mut() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
while cond() {
borrow_mut(v); //~ ERROR cannot borrow
_x = &v;
}
}
fn for_loop_aliased_mut() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
for for_func {
borrow_mut(v); //~ ERROR cannot borrow
_x = &v;
}
}
fn loop_aliased_mut_break() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
loop {
borrow_mut(v);
_x = &v;
break;
}
borrow_mut(v); //~ ERROR cannot borrow
}
fn while_aliased_mut_break() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
while cond() {
borrow_mut(v);
_x = &v;
break;
}
borrow_mut(v); //~ ERROR cannot borrow
}
fn for_aliased_mut_break() {
// In this instance, the borrow is carried through the loop.
let mut v = ~3;
let mut w = ~4;
let mut _x = &w;
for for_func {
// here we cannot be sure that `for_func` respects the break below
borrow_mut(v); //~ ERROR cannot borrow
_x = &v;
break;
}
borrow_mut(v); //~ ERROR cannot borrow
}
fn while_aliased_mut_cond(cond: bool, cond2: bool) {
let mut v = ~3;
let mut w = ~4;
let mut x = &mut w;
while cond {
**x += 1;
borrow(v); //~ ERROR cannot borrow
if cond2 {
x = &mut v; //~ ERROR cannot borrow
}
}
}
fn loop_break_pops_scopes<'r>(_v: &'r mut [uint], f: &fn(&'r mut uint) -> bool) {
// Here we check that when you break out of an inner loop, the
// borrows that go out of scope as you exit the inner loop are
// removed from the bitset.
while cond() {
while cond() {
// this borrow is limited to the scope of `r`...
let r: &'r mut uint = produce();
if !f(&mut *r) {
break; // ...so it is not live as exit the `while` loop here
}
}
}
}
fn loop_loop_pops_scopes<'r>(_v: &'r mut [uint], f: &fn(&'r mut uint) -> bool) {
// Similar to `loop_break_pops_scopes` but for the `loop` keyword
while cond() {
while cond() {
// this borrow is limited to the scope of `r`...
let r: &'r mut uint = produce();
if !f(&mut *r) {
loop; // ...so it is not live as exit (and re-enter) the `while` loop here
}
}
}
}
fn main() {}<|fim▁end|>
|
// Therefore, some of these errors are marked as spurious and could be
// corrected by a simple change to the analysis. The others are
// either genuine or would require more advanced changes. The latter
|
<|file_name|>command_smudge.go<|end_file_name|><|fim▁begin|>package commands
import (
"bytes"
"io"
"os"
"path/filepath"
"github.com/github/git-lfs/lfs"
"github.com/github/git-lfs/vendor/_nuts/github.com/spf13/cobra"
)
var (
smudgeInfo = false
smudgeCmd = &cobra.Command{
Use: "smudge",
Short: "Implements the Git smudge filter",
Run: smudgeCommand,
}
)
func smudgeCommand(cmd *cobra.Command, args []string) {
requireStdin("This command should be run by the Git 'smudge' filter")
lfs.InstallHooks(false)<|fim▁hole|>
ptr, err := lfs.DecodePointer(r)
if err != nil {
mr := io.MultiReader(b, os.Stdin)
_, err := io.Copy(os.Stdout, mr)
if err != nil {
Panic(err, "Error writing data to stdout:")
}
return
}
if smudgeInfo {
localPath, err := lfs.LocalMediaPath(ptr.Oid)
if err != nil {
Exit(err.Error())
}
stat, err := os.Stat(localPath)
if err != nil {
Print("%d --", ptr.Size)
} else {
Print("%d %s", stat.Size(), localPath)
}
return
}
filename := smudgeFilename(args, err)
cb, file, err := lfs.CopyCallbackFile("smudge", filename, 1, 1)
if err != nil {
Error(err.Error())
}
err = ptr.Smudge(os.Stdout, filename, cb)
if file != nil {
file.Close()
}
if err != nil {
ptr.Encode(os.Stdout)
LoggedError(err, "Error accessing media: %s (%s)", filename, ptr.Oid)
}
}
func smudgeFilename(args []string, err error) string {
if len(args) > 0 {
return args[0]
}
if smudgeErr, ok := err.(*lfs.SmudgeError); ok {
return filepath.Base(smudgeErr.Filename)
}
return "<unknown file>"
}
func init() {
smudgeCmd.Flags().BoolVarP(&smudgeInfo, "info", "i", false, "Display the local path and size of the smudged file.")
RootCmd.AddCommand(smudgeCmd)
}<|fim▁end|>
|
b := &bytes.Buffer{}
r := io.TeeReader(os.Stdin, b)
|
<|file_name|>loc.py<|end_file_name|><|fim▁begin|>"""Local settings and globals."""
import sys
from os.path import normpath, join
from .base import *
# Import secrets -- not needed<|fim▁hole|>#from secrets import *
# Set static URL
STATIC_URL = '/static'<|fim▁end|>
|
#sys.path.append(
# abspath(join(PROJECT_ROOT, '../secrets/TimelineJS/stg'))
#)
|
<|file_name|>facebookConnect.js<|end_file_name|><|fim▁begin|>/*
* Module : FacebookConnect.js
*
* Setups up the basic code to connect to the facebook JS api.
*
* Requires Config:
* - app.config.facebook.appId
*/
(function(app)
{
var module = app.module("facebookConnect", {
requires : [
"jquery-1.9.1.min"<|fim▁hole|>
init : function()
{
this.is_loaded = false;
//loading facebook all.js after we've added the fb-root div to avoid fb warning
$('body').prepend('<div id="fb-root"></div>');
app.getScript("//connect.facebook.net/en_US/all.js");
},
scrollTop : function()
{
if( this.is_loaded )
FB.Canvas.scrollTo(0, 0);
}
});
//add listener to window object
window.fbAsyncInit = function ()
{
module.is_loaded = true;
// init the FB JS SDK
FB.init({
appId : app.config.facebook.appId,
status : true, // check the login status upon init?
cookie : true, // set sessions cookies to allow your server to access the session?
xfbml : false // parse XFBML tags on this page?
});
// Grow the canvas to the correct size
FB.Canvas.scrollTo(0, 0);
FB.Canvas.setSize({ height: $('body').height()-100});
setTimeout("FB.Canvas.setAutoGrow()", 500);
//dispact event
$(document).trigger('facebookConnected');
//fix scroll bars
if (self !== top)
{
$("body").css("overflow", "hidden");
}
};
}(app));<|fim▁end|>
|
],
|
<|file_name|>RTCPeerConnection-tests.ts<|end_file_name|><|fim▁begin|>/// <reference path="MediaStream.d.ts" />
/// <reference path="RTCPeerConnection.d.ts" />
var config: RTCConfiguration =
    { iceServers: [{ urls: "stun:stun.l.google.com:19302" }] };
var constraints: RTCMediaConstraints =
{ mandatory: { offerToReceiveAudio: true, offerToReceiveVideo: true } };
var peerConnection: RTCPeerConnection =
new RTCPeerConnection(config, constraints);
navigator.getUserMedia({ audio: true, video: true },
stream => {
peerConnection.addStream(stream);
},
error => {
console.log('Error message: ' + error.message);
console.log('Error name: ' + error.name);
});
peerConnection.onaddstream = ev => console.log(ev.type);
peerConnection.ondatachannel = ev => console.log(ev.type);
peerConnection.oniceconnectionstatechange = ev => console.log(ev.type);
peerConnection.onnegotiationneeded = ev => console.log(ev.type);
peerConnection.onopen = ev => console.log(ev.type);
peerConnection.onicecandidate = ev => console.log(ev.type);
peerConnection.onremovestream = ev => console.log(ev.type);
peerConnection.onstatechange = ev => console.log(ev.type);
peerConnection.createOffer(
offer => {
peerConnection.setLocalDescription(offer,
() => console.log("set local description"),
error => console.log("Error setting local description: " + error));
},
error => console.log("Error creating offer: " + error));
var type: string = RTCSdpType[RTCSdpType.offer];
var offer: RTCSessionDescriptionInit = { type: type, sdp: "some sdp" };
var sessionDescription = new RTCSessionDescription(offer);
peerConnection.setRemoteDescription(sessionDescription, () => {
peerConnection.createAnswer(
answer => {
peerConnection.setLocalDescription(answer,
() => console.log('Set local description'),
error => console.log(
"Error setting local description from created answer: " + error +
"; answer.sdp=" + answer.sdp));
},
error => console.log("Error creating answer: " + error));
},
error => console.log('Error setting remote description: ' + error +
"; offer.sdp=" + offer.sdp));
<|fim▁hole|>peerConnection.setRemoteDescription(webkitSessionDescription, () => {
peerConnection.createAnswer(
answer => {
peerConnection.setLocalDescription(answer,
() => console.log('Set local description'),
error => console.log(
"Error setting local description from created answer: " + error +
"; answer.sdp=" + answer.sdp));
},
error => console.log("Error creating answer: " + error));
},
error => console.log('Error setting remote description: ' + error +
"; offer.sdp=" + offer.sdp));
var mozSessionDescription = new mozRTCSessionDescription(offer);
peerConnection.setRemoteDescription(mozSessionDescription, () => {
peerConnection.createAnswer(
answer => {
peerConnection.setLocalDescription(answer,
() => console.log('Set local description'),
error => console.log(
"Error setting local description from created answer: " + error +
"; answer.sdp=" + answer.sdp));
},
error => console.log("Error creating answer: " + error));
},
error => console.log('Error setting remote description: ' + error +
"; offer.sdp=" + offer.sdp));
var wkPeerConnection: webkitRTCPeerConnection =
new webkitRTCPeerConnection(config, constraints);<|fim▁end|>
|
var webkitSessionDescription = new webkitRTCSessionDescription(offer);
|
<|file_name|>run.rs<|end_file_name|><|fim▁begin|>// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::{Arc, Mutex, Condvar};
use ctrlc::CtrlC;
use fdlimit::raise_fd_limit;
use ethcore_logger::{Config as LogConfig, setup_log};
use ethcore_rpc::{NetworkSettings, is_major_importing};
use ethsync::NetworkConfiguration;
use util::{Colour, version, U256};
use io::{MayPanic, ForwardPanic, PanicHandler};
use ethcore::client::{Mode, DatabaseCompactionProfile, VMType, ChainNotify, BlockChainClient};
use ethcore::service::ClientService;
use ethcore::account_provider::AccountProvider;
use ethcore::miner::{Miner, MinerService, ExternalMiner, MinerOptions};
use ethcore::snapshot;
use ethsync::SyncConfig;
use informant::Informant;
use rpc::{HttpServer, IpcServer, HttpConfiguration, IpcConfiguration};
use signer::SignerServer;
use dapps::WebappServer;
use io_handler::ClientIoHandler;
use params::{
SpecType, Pruning, AccountsConfig, GasPricerConfig, MinerExtras, Switch,
tracing_switch_to_bool, fatdb_switch_to_bool,
};
use helpers::{to_client_config, execute_upgrades, passwords_from_files};
use dir::Directories;
use cache::CacheConfig;
use user_defaults::UserDefaults;
use dapps;
use signer;
use modules;
use rpc_apis;
use rpc;
use url;
// how often to take periodic snapshots.
const SNAPSHOT_PERIOD: u64 = 10000;
// how many blocks to wait before starting a periodic snapshot.
const SNAPSHOT_HISTORY: u64 = 500;
#[derive(Debug, PartialEq)]
pub struct RunCmd {
pub cache_config: CacheConfig,
pub dirs: Directories,
pub spec: SpecType,
pub pruning: Pruning,
pub pruning_history: u64,
/// Some if execution should be daemonized. Contains pid_file path.
pub daemon: Option<String>,
pub logger_config: LogConfig,
pub miner_options: MinerOptions,
pub http_conf: HttpConfiguration,
pub ipc_conf: IpcConfiguration,
pub net_conf: NetworkConfiguration,
pub network_id: Option<U256>,
pub warp_sync: bool,
pub acc_conf: AccountsConfig,
pub gas_pricer: GasPricerConfig,
pub miner_extras: MinerExtras,
pub mode: Mode,
pub tracing: Switch,
pub fat_db: Switch,
pub compaction: DatabaseCompactionProfile,
pub wal: bool,
pub vm_type: VMType,
pub enable_network: bool,
pub geth_compatibility: bool,
pub signer_port: Option<u16>,
pub net_settings: NetworkSettings,
pub dapps_conf: dapps::Configuration,
pub signer_conf: signer::Configuration,
pub ui: bool,
pub name: String,
pub custom_bootnodes: bool,
pub no_periodic_snapshot: bool,
pub check_seal: bool,
}
pub fn execute(cmd: RunCmd) -> Result<(), String> {<|fim▁hole|> let logger = try!(setup_log(&cmd.logger_config));
// increase max number of open files
raise_fd_limit();
// create dirs used by parity
try!(cmd.dirs.create_dirs());
// load spec
let spec = try!(cmd.spec.spec());
// load genesis hash
let genesis_hash = spec.genesis_header().hash();
// database paths
let db_dirs = cmd.dirs.database(genesis_hash, spec.fork_name.clone());
// user defaults path
let user_defaults_path = db_dirs.user_defaults_path();
// load user defaults
let mut user_defaults = try!(UserDefaults::load(&user_defaults_path));
// select pruning algorithm
let algorithm = cmd.pruning.to_algorithm(&user_defaults);
// check if tracing is on
let tracing = try!(tracing_switch_to_bool(cmd.tracing, &user_defaults));
// check if fatdb is on
let fat_db = try!(fatdb_switch_to_bool(cmd.fat_db, &user_defaults, algorithm));
// prepare client and snapshot paths.
let client_path = db_dirs.client_path(algorithm);
let snapshot_path = db_dirs.snapshot_path();
// execute upgrades
try!(execute_upgrades(&db_dirs, algorithm, cmd.compaction.compaction_profile(db_dirs.fork_path().as_path())));
// run in daemon mode
if let Some(pid_file) = cmd.daemon {
try!(daemonize(pid_file));
}
// display info about used pruning algorithm
info!("Starting {}", Colour::White.bold().paint(version()));
info!("State DB configuation: {}{}{}",
Colour::White.bold().paint(algorithm.as_str()),
match fat_db {
true => Colour::White.bold().paint(" +Fat").to_string(),
false => "".to_owned(),
},
match tracing {
true => Colour::White.bold().paint(" +Trace").to_string(),
false => "".to_owned(),
}
);
	// display warning about using experimental journaldb algorithm
if !algorithm.is_stable() {
warn!("Your chosen strategy is {}! You can re-run with --pruning to change.", Colour::Red.bold().paint("unstable"));
}
// create sync config
let mut sync_config = SyncConfig::default();
sync_config.network_id = match cmd.network_id {
Some(id) => id,
None => spec.network_id(),
};
if spec.subprotocol_name().len() != 3 {
warn!("Your chain specification's subprotocol length is not 3. Ignoring.");
} else {
sync_config.subprotocol_name.clone_from_slice(spec.subprotocol_name().as_bytes());
}
sync_config.fork_block = spec.fork_block();
sync_config.warp_sync = cmd.warp_sync;
// prepare account provider
let account_provider = Arc::new(try!(prepare_account_provider(&cmd.dirs, cmd.acc_conf)));
// create miner
let miner = Miner::new(cmd.miner_options, cmd.gas_pricer.into(), &spec, Some(account_provider.clone()));
miner.set_author(cmd.miner_extras.author);
miner.set_gas_floor_target(cmd.miner_extras.gas_floor_target);
miner.set_gas_ceil_target(cmd.miner_extras.gas_ceil_target);
miner.set_extra_data(cmd.miner_extras.extra_data);
miner.set_transactions_limit(cmd.miner_extras.transactions_limit);
// create client config
let client_config = to_client_config(
&cmd.cache_config,
cmd.mode,
tracing,
fat_db,
cmd.compaction,
cmd.wal,
cmd.vm_type,
cmd.name,
algorithm,
cmd.pruning_history,
cmd.check_seal,
);
// set up bootnodes
let mut net_conf = cmd.net_conf;
if !cmd.custom_bootnodes {
net_conf.boot_nodes = spec.nodes.clone();
}
// set network path.
net_conf.net_config_path = Some(db_dirs.network_path().to_string_lossy().into_owned());
// create supervisor
let mut hypervisor = modules::hypervisor(&cmd.dirs.ipc_path());
// create client service.
let service = try!(ClientService::start(
client_config,
&spec,
&client_path,
&snapshot_path,
&cmd.dirs.ipc_path(),
miner.clone(),
).map_err(|e| format!("Client service error: {:?}", e)));
// forward panics from service
panic_handler.forward_from(&service);
// take handle to client
let client = service.client();
let snapshot_service = service.snapshot_service();
// create external miner
let external_miner = Arc::new(ExternalMiner::default());
// create sync object
let (sync_provider, manage_network, chain_notify) = try!(modules::sync(
&mut hypervisor, sync_config, net_conf.into(), client.clone(), snapshot_service.clone(), &cmd.logger_config,
).map_err(|e| format!("Sync error: {}", e)));
service.add_notify(chain_notify.clone());
// start network
if cmd.enable_network {
chain_notify.start();
}
// set up dependencies for rpc servers
let signer_path = cmd.signer_conf.signer_path.clone();
let deps_for_rpc_apis = Arc::new(rpc_apis::Dependencies {
signer_service: Arc::new(rpc_apis::SignerService::new(move || {
signer::generate_new_token(signer_path.clone()).map_err(|e| format!("{:?}", e))
}, cmd.signer_port)),
client: client.clone(),
sync: sync_provider.clone(),
net: manage_network.clone(),
secret_store: account_provider.clone(),
miner: miner.clone(),
external_miner: external_miner.clone(),
logger: logger.clone(),
settings: Arc::new(cmd.net_settings.clone()),
net_service: manage_network.clone(),
geth_compatibility: cmd.geth_compatibility,
dapps_port: match cmd.dapps_conf.enabled {
true => Some(cmd.dapps_conf.port),
false => None,
},
});
let dependencies = rpc::Dependencies {
panic_handler: panic_handler.clone(),
apis: deps_for_rpc_apis.clone(),
};
// start rpc servers
let http_server = try!(rpc::new_http(cmd.http_conf, &dependencies));
let ipc_server = try!(rpc::new_ipc(cmd.ipc_conf, &dependencies));
let dapps_deps = dapps::Dependencies {
panic_handler: panic_handler.clone(),
apis: deps_for_rpc_apis.clone(),
client: client.clone(),
sync: sync_provider.clone(),
};
// start dapps server
let dapps_server = try!(dapps::new(cmd.dapps_conf.clone(), dapps_deps));
let signer_deps = signer::Dependencies {
panic_handler: panic_handler.clone(),
apis: deps_for_rpc_apis.clone(),
};
// start signer server
let signer_server = try!(signer::start(cmd.signer_conf, signer_deps));
let informant = Arc::new(Informant::new(
service.client(),
Some(sync_provider.clone()),
Some(manage_network.clone()),
Some(snapshot_service.clone()),
cmd.logger_config.color
));
let info_notify: Arc<ChainNotify> = informant.clone();
service.add_notify(info_notify);
let io_handler = Arc::new(ClientIoHandler {
client: service.client(),
info: informant,
sync: sync_provider.clone(),
net: manage_network.clone(),
accounts: account_provider.clone(),
shutdown: Default::default(),
});
service.register_io_handler(io_handler.clone()).expect("Error registering IO handler");
// the watcher must be kept alive.
let _watcher = match cmd.no_periodic_snapshot {
true => None,
false => {
let sync = sync_provider.clone();
let watcher = Arc::new(snapshot::Watcher::new(
service.client(),
move || is_major_importing(Some(sync.status().state), client.queue_info()),
service.io().channel(),
SNAPSHOT_PERIOD,
SNAPSHOT_HISTORY,
));
service.add_notify(watcher.clone());
Some(watcher)
},
};
// start ui
if cmd.ui {
if !cmd.dapps_conf.enabled {
return Err("Cannot use UI command with Dapps turned off.".into())
}
url::open(&format!("http://{}:{}/", cmd.dapps_conf.interface, cmd.dapps_conf.port));
}
// save user defaults
user_defaults.pruning = algorithm;
user_defaults.tracing = tracing;
try!(user_defaults.save(&user_defaults_path));
// Handle exit
wait_for_exit(panic_handler, http_server, ipc_server, dapps_server, signer_server);
// to make sure timer does not spawn requests while shutdown is in progress
io_handler.shutdown.store(true, ::std::sync::atomic::Ordering::SeqCst);
// just Arc is dropping here, to allow other reference release in its default time
drop(io_handler);
// hypervisor should be shutdown first while everything still works and can be
// terminated gracefully
drop(hypervisor);
Ok(())
}
#[cfg(not(windows))]
fn daemonize(pid_file: String) -> Result<(), String> {
extern crate daemonize;
daemonize::Daemonize::new()
.pid_file(pid_file)
.chown_pid_file(true)
.start()
.map(|_| ())
.map_err(|e| format!("Couldn't daemonize; {}", e))
}
#[cfg(windows)]
fn daemonize(_pid_file: String) -> Result<(), String> {
Err("daemon is no supported on windows".into())
}
fn prepare_account_provider(dirs: &Directories, cfg: AccountsConfig) -> Result<AccountProvider, String> {
use ethcore::ethstore::EthStore;
use ethcore::ethstore::dir::DiskDirectory;
let passwords = try!(passwords_from_files(cfg.password_files));
let dir = Box::new(try!(DiskDirectory::create(dirs.keys.clone()).map_err(|e| format!("Could not open keys directory: {}", e))));
let account_service = AccountProvider::new(Box::new(
try!(EthStore::open_with_iterations(dir, cfg.iterations).map_err(|e| format!("Could not open keys directory: {}", e)))
));
for a in cfg.unlocked_accounts {
if passwords.iter().find(|p| account_service.unlock_account_permanently(a, (*p).clone()).is_ok()).is_none() {
return Err(format!("No password found to unlock account {}. Make sure valid password is present in files passed using `--password`.", a));
}
}
Ok(account_service)
}
fn wait_for_exit(
panic_handler: Arc<PanicHandler>,
_http_server: Option<HttpServer>,
_ipc_server: Option<IpcServer>,
_dapps_server: Option<WebappServer>,
_signer_server: Option<SignerServer>
) {
let exit = Arc::new(Condvar::new());
// Handle possible exits
let e = exit.clone();
CtrlC::set_handler(move || { e.notify_all(); });
// Handle panics
let e = exit.clone();
panic_handler.on_panic(move |_reason| { e.notify_all(); });
// Wait for signal
let mutex = Mutex::new(());
let _ = exit.wait(mutex.lock().unwrap());
info!("Finishing work, please wait...");
}<|fim▁end|>
|
// set up panic handler
let panic_handler = PanicHandler::new_in_arc();
// set up logger
|
<|file_name|>RecordAudio.py<|end_file_name|><|fim▁begin|>import sys
import time
from naoqi import ALProxy
IP = "nao.local"
PORT = 9559
if (len(sys.argv) < 2):
print "Usage: 'python RecordAudio.py nume'"
sys.exit(1)
fileName = "/home/nao/" + sys.argv[1] + ".wav"
aur = ALProxy("ALAudioRecorder", IP, PORT)
channels = [0,0,1,0]
aur.startMicrophonesRecording(fileName, "wav", 160000, channels)
c=raw_input("Sfarsit?")
aur.stopMicrophonesRecording()
c=raw_input("play?")
aup = ALProxy("ALAudioPlayer", IP, PORT)
#Launches the playing of a file
#Launches the playing of a file on the left speaker at a volume of 50%
#aup.playFile("/usr/share/naoqi/wav/random.wav",0.5,-1.0)<|fim▁end|>
|
aup.playFile(fileName,0.5,-1.0)
c=raw_input("gata?")
#Launchs the playing of a file
#aup.playFile("/usr/share/naoqi/wav/random.wav")
|
<|file_name|>events.go<|end_file_name|><|fim▁begin|>/*
Copyright 2014 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package validation
import (
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/util/validation"
)
// ValidateEvent makes sure that the event makes sense.
func ValidateEvent(event *api.Event) validation.ErrorList {<|fim▁hole|> event.Namespace != event.InvolvedObject.Namespace {
allErrs = append(allErrs, validation.NewFieldInvalid("involvedObject.namespace", event.InvolvedObject.Namespace, "namespace does not match involvedObject"))
}
if !validation.IsDNS1123Subdomain(event.Namespace) {
allErrs = append(allErrs, validation.NewFieldInvalid("namespace", event.Namespace, ""))
}
return allErrs
}<|fim▁end|>
|
allErrs := validation.ErrorList{}
// TODO: There is no namespace required for node.
if event.InvolvedObject.Kind != "Node" &&
|
<|file_name|>ChainInstance.js<|end_file_name|><|fim▁begin|>module.exports = ChainInstance;
function ChainInstance(chain, cb) {
var instances = null;
var loading = false;
var queue = [];
var load = function () {
loading = true;
chain.run(function (err, items) {
instances = items;
return next();
});<|fim▁hole|> return function () {
if (!loading) {
load();
}
queue.push({ hwd: hwd, args: arguments });
return calls;
};
};
var next = function () {
if (queue.length === 0) return;
var item = queue.shift();
item.hwd.apply(calls, item.args);
};
var calls = {
filter: promise(function (cb) {
instances = instances.filter(cb);
return next();
}),
forEach: promise(function (cb) {
instances.forEach(cb);
return next();
}),
sort: promise(function (cb) {
instances.sort(cb);
return next();
}),
count: promise(function (cb) {
cb(instances.length);
return next();
}),
get: promise(function (cb) {
cb(instances);
return next();
}),
save: promise(function (cb) {
var saveNext = function (i) {
if (i >= instances.length) {
if (typeof cb === "function") {
cb();
}
return next();
}
return instances[i].save(function (err) {
if (err) {
if (typeof cb === "function") {
cb(err);
}
return next();
}
return saveNext(i + 1);
});
};
return saveNext(0);
})
};
if (typeof cb === "function") {
return calls.forEach(cb);
}
return calls;
}<|fim▁end|>
|
};
var promise = function(hwd, next) {
|
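ChainInstance above buffers every chained call while chain.run() is still fetching rows and replays the queue once the data arrives. In synchronous Python the same interface collapses to lazy loading plus chainable methods; a rough sketch, illustrative only:

# Illustrative synchronous analogue of the chained-instance interface above.
class LazyChain:
    def __init__(self, loader):
        self._loader = loader          # plays the role of chain.run()
        self._items = None

    def _load(self):
        if self._items is None:
            self._items = list(self._loader())
        return self._items

    def filter(self, pred):
        self._items = [x for x in self._load() if pred(x)]
        return self

    def for_each(self, fn):
        for item in self._load():
            fn(item)
        return self

    def count(self):
        return len(self._load())

# LazyChain(lambda: range(10)).filter(lambda n: n % 2 == 0).count()  # -> 5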
<|file_name|>partfrac.py<|end_file_name|><|fim▁begin|>"""Algorithms for partial fraction decomposition of rational functions. """
from __future__ import print_function, division
from sympy.polys import Poly, RootSum, cancel, factor
from sympy.polys.polytools import parallel_poly_from_expr
from sympy.polys.polyoptions import allowed_flags, set_defaults
from sympy.polys.polyerrors import PolynomialError
from sympy.core import S, Add, sympify, Function, Lambda, Dummy
from sympy.core.basic import preorder_traversal
from sympy.utilities import numbered_symbols, take, xthreaded, public
from sympy.core.compatibility import xrange
@xthreaded
@public
def apart(f, x=None, full=False, **options):
"""
Compute partial fraction decomposition of a rational function.
Given a rational function ``f``, computes the partial fraction
decomposition of ``f``. Two algorithms are available: One is based on the
    undetermined coefficients method, the other is Bronstein's full partial
fraction decomposition algorithm.
The undetermined coefficients method (selected by ``full=False``) uses
polynomial factorization (and therefore accepts the same options as
    factor) for the denominator. By default it works over the rational
numbers, therefore decomposition of denominators with non-rational roots
(e.g. irrational, complex roots) is not supported by default (see options
of factor).
Bronstein's algorithm can be selected by using ``full=True`` and allows a
decomposition of denominators with non-rational roots. A human-readable
result can be obtained via ``doit()`` (see examples below).
Examples
========
>>> from sympy.polys.partfrac import apart
>>> from sympy.abc import x, y
By default, using the undetermined coefficients method:
>>> apart(y/(x + 2)/(x + 1), x)
-y/(x + 2) + y/(x + 1)
The undetermined coefficients method does not provide a result when the
    denominator's roots are not rational:
>>> apart(y/(x**2 + x + 1), x)
y/(x**2 + x + 1)
You can choose Bronstein's algorithm by setting ``full=True``:
>>> apart(y/(x**2 + x + 1), x, full=True)
RootSum(_w**2 + _w + 1, Lambda(_a, (-2*_a*y/3 - y/3)/(-_a + x)))
Calling ``doit()`` yields a human-readable result:
>>> apart(y/(x**2 + x + 1), x, full=True).doit()
(-y/3 - 2*y*(-1/2 - sqrt(3)*I/2)/3)/(x + 1/2 + sqrt(3)*I/2) + (-y/3 -
2*y*(-1/2 + sqrt(3)*I/2)/3)/(x + 1/2 - sqrt(3)*I/2)
See Also
========
apart_list, assemble_partfrac_list
"""
allowed_flags(options, [])
f = sympify(f)
if f.is_Atom:
return f
else:
P, Q = f.as_numer_denom()
_options = options.copy()
options = set_defaults(options, extension=True)
try:
(P, Q), opt = parallel_poly_from_expr((P, Q), x, **options)
except PolynomialError as msg:
if f.is_commutative:
raise PolynomialError(msg)
# non-commutative
if f.is_Mul:
c, nc = f.args_cnc(split_1=False)
nc = f.func(*[apart(i, x=x, full=full, **_options) for i in nc])
if c:
c = apart(f.func._from_args(c), x=x, full=full, **_options)
return c*nc
else:
return nc
elif f.is_Add:
c = []
nc = []
for i in f.args:
if i.is_commutative:
c.append(i)
else:
try:
nc.append(apart(i, x=x, full=full, **_options))
except NotImplementedError:
nc.append(i)
return apart(f.func(*c), x=x, full=full, **_options) + f.func(*nc)
else:
reps = []
pot = preorder_traversal(f)
next(pot)
for e in pot:
try:
reps.append((e, apart(e, x=x, full=full, **_options)))
pot.skip() # this was handled successfully
except NotImplementedError:
pass
return f.xreplace(dict(reps))
if P.is_multivariate:
fc = f.cancel()
if fc != f:
return apart(fc, x=x, full=full, **_options)
raise NotImplementedError(
"multivariate partial fraction decomposition")
common, P, Q = P.cancel(Q)
poly, P = P.div(Q, auto=True)
P, Q = P.rat_clear_denoms(Q)
if Q.degree() <= 1:
partial = P/Q
else:
if not full:
partial = apart_undetermined_coeffs(P, Q)
else:
partial = apart_full_decomposition(P, Q)
terms = S.Zero
for term in Add.make_args(partial):
if term.has(RootSum):
terms += term
else:
terms += factor(term)
return common*(poly.as_expr() + terms)
def apart_undetermined_coeffs(P, Q):
"""Partial fractions via method of undetermined coefficients. """
X = numbered_symbols(cls=Dummy)
partial, symbols = [], []
_, factors = Q.factor_list()
for f, k in factors:
n, q = f.degree(), Q
for i in xrange(1, k + 1):
coeffs, q = take(X, n), q.quo(f)
partial.append((coeffs, q, f, i))
symbols.extend(coeffs)
dom = Q.get_domain().inject(*symbols)
F = Poly(0, Q.gen, domain=dom)
for i, (coeffs, q, f, k) in enumerate(partial):
h = Poly(coeffs, Q.gen, domain=dom)
partial[i] = (h, f, k)
q = q.set_domain(dom)
F += h*q
system, result = [], S(0)
for (k,), coeff in F.terms():
system.append(coeff - P.nth(k))
from sympy.solvers import solve
solution = solve(system, symbols)
for h, f, k in partial:
h = h.as_expr().subs(solution)
result += h/f.as_expr()**k
return result
def apart_full_decomposition(P, Q):
"""
Bronstein's full partial fraction decomposition algorithm.
Given a univariate rational function ``f``, performing only GCD
operations over the algebraic closure of the initial ground domain
of definition, compute full partial fraction decomposition with
fractions having linear denominators.
Note that no factorization of the initial denominator of ``f`` is
performed. The final decomposition is formed in terms of a sum of
:class:`RootSum` instances.
References
==========
1. [Bronstein93]_
"""
return assemble_partfrac_list(apart_list(P/Q, P.gens[0]))
@public
def apart_list(f, x=None, dummies=None, **options):
"""
Compute partial fraction decomposition of a rational function
and return the result in structured form.
Given a rational function ``f`` compute the partial fraction decomposition
of ``f``. Only Bronstein's full partial fraction decomposition algorithm
is supported by this method. The return value is highly structured and
perfectly suited for further algorithmic treatment rather than being
human-readable. The function returns a tuple holding three elements:
* The first item is the common coefficient, free of the variable `x` used
for decomposition. (It is an element of the base field `K`.)
* The second item is the polynomial part of the decomposition. This can be
the zero polynomial. (It is an element of `K[x]`.)
* The third part itself is a list of quadruples. Each quadruple
has the following elements in this order:
- The (not necessarily irreducible) polynomial `D` whose roots `w_i` appear
in the linear denominator of a bunch of related fraction terms. (This item
can also be a list of explicit roots. However, at the moment ``apart_list``
never returns a result this way, but the related ``assemble_partfrac_list``
function accepts this format as input.)
- The numerator of the fraction, written as a function of the root `w`
- The linear denominator of the fraction *excluding its power exponent*,
written as a function of the root `w`.
- The power to which the denominator has to be raised.
    One can always rebuild a plain expression by using the function ``assemble_partfrac_list``.
Examples
========
A first example:
>>> from sympy.polys.partfrac import apart_list, assemble_partfrac_list
>>> from sympy.abc import x, t
>>> f = (2*x**3 - 2*x) / (x**2 - 2*x + 1)
>>> pfd = apart_list(f)
>>> pfd
(1,
Poly(2*x + 4, x, domain='ZZ'),
[(Poly(_w - 1, _w, domain='ZZ'), Lambda(_a, 4), Lambda(_a, -_a + x), 1)])
>>> assemble_partfrac_list(pfd)
2*x + 4 + 4/(x - 1)
Second example:
>>> f = (-2*x - 2*x**2) / (3*x**2 - 6*x)
>>> pfd = apart_list(f)
>>> pfd
(-1,
Poly(2/3, x, domain='QQ'),
[(Poly(_w - 2, _w, domain='ZZ'), Lambda(_a, 2), Lambda(_a, -_a + x), 1)])
>>> assemble_partfrac_list(pfd)
-2/3 - 2/(x - 2)
Another example, showing symbolic parameters:
>>> pfd = apart_list(t/(x**2 + x + t), x)
>>> pfd
(1,
Poly(0, x, domain='ZZ[t]'),
[(Poly(_w**2 + _w + t, _w, domain='ZZ[t]'),
Lambda(_a, -2*_a*t/(4*t - 1) - t/(4*t - 1)),
Lambda(_a, -_a + x),
1)])
>>> assemble_partfrac_list(pfd)
RootSum(_w**2 + _w + t, Lambda(_a, (-2*_a*t/(4*t - 1) - t/(4*t - 1))/(-_a + x)))
This example is taken from Bronstein's original paper:
>>> f = 36 / (x**5 - 2*x**4 - 2*x**3 + 4*x**2 + x - 2)
>>> pfd = apart_list(f)
>>> pfd
(1,
Poly(0, x, domain='ZZ'),
[(Poly(_w - 2, _w, domain='ZZ'), Lambda(_a, 4), Lambda(_a, -_a + x), 1),
(Poly(_w**2 - 1, _w, domain='ZZ'), Lambda(_a, -3*_a - 6), Lambda(_a, -_a + x), 2),
(Poly(_w + 1, _w, domain='ZZ'), Lambda(_a, -4), Lambda(_a, -_a + x), 1)])
>>> assemble_partfrac_list(pfd)
-4/(x + 1) - 3/(x + 1)**2 - 9/(x - 1)**2 + 4/(x - 2)
See also
========
<|fim▁hole|>
1. [Bronstein93]_
"""
allowed_flags(options, [])
f = sympify(f)
if f.is_Atom:
return f
else:
P, Q = f.as_numer_denom()
options = set_defaults(options, extension=True)
(P, Q), opt = parallel_poly_from_expr((P, Q), x, **options)
if P.is_multivariate:
raise NotImplementedError(
"multivariate partial fraction decomposition")
common, P, Q = P.cancel(Q)
poly, P = P.div(Q, auto=True)
P, Q = P.rat_clear_denoms(Q)
polypart = poly
if dummies is None:
def dummies(name):
d = Dummy(name)
while True:
yield d
dummies = dummies("w")
rationalpart = apart_list_full_decomposition(P, Q, dummies)
return (common, polypart, rationalpart)
def apart_list_full_decomposition(P, Q, dummygen):
"""
Bronstein's full partial fraction decomposition algorithm.
Given a univariate rational function ``f``, performing only GCD
operations over the algebraic closure of the initial ground domain
of definition, compute full partial fraction decomposition with
fractions having linear denominators.
Note that no factorization of the initial denominator of ``f`` is
performed. The final decomposition is formed in terms of a sum of
:class:`RootSum` instances.
References
==========
1. [Bronstein93]_
"""
f, x, U = P/Q, P.gen, []
u = Function('u')(x)
a = Dummy('a')
partial = []
for d, n in Q.sqf_list_include(all=True):
b = d.as_expr()
U += [ u.diff(x, n - 1) ]
h = cancel(f*b**n) / u**n
H, subs = [h], []
for j in range(1, n):
H += [ H[-1].diff(x) / j ]
for j in range(1, n + 1):
subs += [ (U[j - 1], b.diff(x, j) / j) ]
for j in range(0, n):
P, Q = cancel(H[j]).as_numer_denom()
for i in range(0, j + 1):
P = P.subs(*subs[j - i])
Q = Q.subs(*subs[0])
P = Poly(P, x)
Q = Poly(Q, x)
G = P.gcd(d)
D = d.quo(G)
B, g = Q.half_gcdex(D)
b = (P * B.quo(g)).rem(D)
Dw = D.subs(x, next(dummygen))
numer = Lambda(a, b.as_expr().subs(x, a))
denom = Lambda(a, (x - a))
exponent = n-j
partial.append((Dw, numer, denom, exponent))
return partial
@public
def assemble_partfrac_list(partial_list):
r"""Reassemble a full partial fraction decomposition
from a structured result obtained by the function ``apart_list``.
Examples
========
This example is taken from Bronstein's original paper:
>>> from sympy.polys.partfrac import apart_list, assemble_partfrac_list
>>> from sympy.abc import x, y
>>> f = 36 / (x**5 - 2*x**4 - 2*x**3 + 4*x**2 + x - 2)
>>> pfd = apart_list(f)
>>> pfd
(1,
Poly(0, x, domain='ZZ'),
[(Poly(_w - 2, _w, domain='ZZ'), Lambda(_a, 4), Lambda(_a, -_a + x), 1),
(Poly(_w**2 - 1, _w, domain='ZZ'), Lambda(_a, -3*_a - 6), Lambda(_a, -_a + x), 2),
(Poly(_w + 1, _w, domain='ZZ'), Lambda(_a, -4), Lambda(_a, -_a + x), 1)])
>>> assemble_partfrac_list(pfd)
-4/(x + 1) - 3/(x + 1)**2 - 9/(x - 1)**2 + 4/(x - 2)
If we happen to know some roots we can provide them easily inside the structure:
>>> pfd = apart_list(2/(x**2-2))
>>> pfd
(1,
Poly(0, x, domain='ZZ'),
[(Poly(_w**2 - 2, _w, domain='ZZ'),
Lambda(_a, _a/2),
Lambda(_a, -_a + x),
1)])
>>> pfda = assemble_partfrac_list(pfd)
>>> pfda
RootSum(_w**2 - 2, Lambda(_a, _a/(-_a + x)))/2
>>> pfda.doit()
-sqrt(2)/(2*(x + sqrt(2))) + sqrt(2)/(2*(x - sqrt(2)))
>>> from sympy import Dummy, Poly, Lambda, sqrt
>>> a = Dummy("a")
>>> pfd = (1, Poly(0, x, domain='ZZ'), [([sqrt(2),-sqrt(2)], Lambda(a, a/2), Lambda(a, -a + x), 1)])
>>> assemble_partfrac_list(pfd)
-sqrt(2)/(2*(x + sqrt(2))) + sqrt(2)/(2*(x - sqrt(2)))
See also
========
apart, apart_list
"""
# Common factor
common = partial_list[0]
# Polynomial part
polypart = partial_list[1]
pfd = polypart.as_expr()
# Rational parts
for r, nf, df, ex in partial_list[2]:
if isinstance(r, Poly):
            # Assemble in case the roots are given implicitly by a polynomial
an, nu = nf.variables, nf.expr
ad, de = df.variables, df.expr
# Hack to make dummies equal because Lambda created new Dummies
de = de.subs(ad[0], an[0])
func = Lambda(an, nu/de**ex)
pfd += RootSum(r, func, auto=False, quadratic=False)
else:
            # Assemble in case the roots are given explicitly by a list of algebraic numbers
for root in r:
pfd += nf(root)/df(root)**ex
return common*pfd<|fim▁end|>
|
apart, assemble_partfrac_list
References
==========
|
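A quick sanity check of the apart()/apart_list() API documented in the docstrings above, on a denominator with rational roots (assumes a standard sympy installation):

# Usage sketch for apart(), apart_list() and assemble_partfrac_list().
from sympy import symbols, simplify
from sympy.polys.partfrac import apart, apart_list, assemble_partfrac_list

x = symbols('x')
f = (3*x + 5) / (x**2 + 3*x + 2)             # x**2 + 3*x + 2 = (x + 1)*(x + 2)

decomposed = apart(f, x)                      # expected: 2/(x + 1) + 1/(x + 2)
assert simplify(decomposed - f) == 0          # recombining gives back f

structured = apart_list(f, x)                 # (common, poly part, rational terms)
rebuilt = assemble_partfrac_list(structured)
assert simplify(rebuilt - f) == 0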
<|file_name|>linear_solver_natural_api.py<|end_file_name|><|fim▁begin|># Copyright 2010-2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Patch to the python wrapper of ../linear_solver.h providing an algebraic API.
This is directly imported, and use exclusively in ./linear_solver.swig. See that
file.
For examples leveraging the code defined here, see ./pywraplp_test.py and
../../../python/linear_programming.py.
"""
import collections
import numbers
# The classes below allow linear expressions to be expressed naturally with the
# usual arithmetic operators +-*/ and with constant numbers, which makes the
# python API very intuitive. See the top-level comment for examples.
inf = float('inf')
class _FakeMPVariableRepresentingTheConstantOffset(object):
"""A dummy class for a singleton instance used to represent the constant.
To represent linear expressions, we store a dictionary
MPVariable->coefficient. To represent the constant offset of the expression,
we use this class as a substitute: its coefficient will be the offset. To
properly be evaluated, its solution_value() needs to be 1.
"""
def solution_value(self): # pylint: disable=invalid-name
return 1
def __repr__(self):
return 'OFFSET_KEY'
OFFSET_KEY = _FakeMPVariableRepresentingTheConstantOffset()
def CastToLinExp(v):
if isinstance(v, numbers.Number):
return Constant(v)
else:
return v
class LinearExpr(object):
"""Holds linear expressions.
A linear expression is essentially an offset (floating-point value), and a
dictionary mapping MPVariable objects to their coefficient (which is also a
floating-point value).
"""
OVERRIDDEN_OPERATOR_METHODS = [
'__%s__' % opname
for opname in ['add', 'radd', 'sub', 'rsub', 'mul', 'rmul', 'div',
'truediv', 'neg', 'eq', 'ge', 'le', 'gt', 'lt', 'ne']
]
def solution_value(self): # pylint: disable=invalid-name
"""Value of this linear expr, using the solution_value of its vars."""
coeffs = self.GetCoeffs()
return sum(var.solution_value() * coeff for var, coeff in coeffs.items())
def AddSelfToCoeffMapOrStack(self, coeffs, multiplier, stack):
"""Private function used by GetCoeffs() to delegate processing.
Implementation must either update coeffs or push to the stack a
sub-expression and the accumulated multiplier that applies to it.
Args:
coeffs: A dictionary of variables' coefficients. It is a defaultdict that
initializes the new values to 0 by default.
multiplier: The current accumulated multiplier to apply to this
expression.
stack: A list to append to if the current expression is composed of
sub-expressions. The elements of the stack are pair tuples
(multiplier, linear_expression).
"""
raise NotImplementedError
def GetCoeffs(self):
coeffs = collections.defaultdict(float)
stack = [(1.0, self)]
while stack:
current_multiplier, current_expression = stack.pop()
current_expression.AddSelfToCoeffMapOrStack(coeffs, current_multiplier,
stack)
return coeffs
def __add__(self, expr):
return Sum(self, expr)
def __radd__(self, cst):
return Sum(self, cst)
def __sub__(self, expr):
return Sum(self, -expr)
def __rsub__(self, cst):
return Sum(-self, cst)
def __mul__(self, cst):
return ProductCst(self, cst)
def __rmul__(self, cst):
return ProductCst(self, cst)
def __div__(self, cst):
return ProductCst(self, 1.0 / cst)
def __truediv__(self, cst):
return ProductCst(self, 1.0 / cst)
def __neg__(self):
return ProductCst(self, -1)
def __eq__(self, arg):
if isinstance(arg, numbers.Number):
return LinearConstraint(self, arg, arg)
else:
return LinearConstraint(self - arg, 0.0, 0.0)
def __ge__(self, arg):
if isinstance(arg, numbers.Number):
return LinearConstraint(self, arg, inf)
else:
return LinearConstraint(self - arg, 0.0, inf)
def __le__(self, arg):
if isinstance(arg, numbers.Number):
return LinearConstraint(self, -inf, arg)
else:
return LinearConstraint(self - arg, -inf, 0.0)
def __lt__(self, arg):
raise ValueError(
'Operators "<" and ">" not supported with the linear solver')
def __gt__(self, arg):
raise ValueError(
'Operators "<" and ">" not supported with the linear solver')
def __ne__(self, arg):
raise ValueError('Operator "!=" not supported with the linear solver')
class VariableExpr(LinearExpr):
"""Represents a LinearExpr containing only a single variable."""
def __init__(self, mpvar):
self.__var = mpvar
def AddSelfToCoeffMapOrStack(self, coeffs, multiplier, stack):
coeffs[self.__var] += multiplier
class ProductCst(LinearExpr):
"""Represents the product of a LinearExpr by a constant."""
def __init__(self, expr, coef):
self.__expr = CastToLinExp(expr)
if isinstance(coef, numbers.Number):
self.__coef = coef
else:
raise TypeError
def __str__(self):
if self.__coef == -1:
return '-' + str(self.__expr)<|fim▁hole|> else:
return '(' + str(self.__coef) + ' * ' + str(self.__expr) + ')'
def AddSelfToCoeffMapOrStack(self, coeffs, multiplier, stack):
current_multiplier = multiplier * self.__coef
if current_multiplier:
stack.append((current_multiplier, self.__expr))
class Constant(LinearExpr):
def __init__(self, val):
self.__val = val
def __str__(self):
return str(self.__val)
def AddSelfToCoeffMapOrStack(self, coeffs, multiplier, stack):
coeffs[OFFSET_KEY] += self.__val * multiplier
class SumArray(LinearExpr):
"""Represents the sum of a list of LinearExpr."""
def __init__(self, array):
self.__array = [CastToLinExp(elem) for elem in array]
def __str__(self):
return '({})'.format(' + '.join(map(str, self.__array)))
def AddSelfToCoeffMapOrStack(self, coeffs, multiplier, stack):
# Append elements in reversed order so that the first popped from the stack
# in the next iteration of the evaluation loop will be the first item of the
# array. This keeps the end result of the floating point computation
# predictable from user perspective.
for arg in reversed(self.__array):
stack.append((multiplier, arg))
def Sum(*args):
return SumArray(args)
SumCst = Sum # pylint: disable=invalid-name
class LinearConstraint(object):
"""Represents a linear constraint: LowerBound <= LinearExpr <= UpperBound."""
def __init__(self, expr, lb, ub):
self.__expr = expr
self.__lb = lb
self.__ub = ub
def __str__(self):
if self.__lb > -inf and self.__ub < inf:
if self.__lb == self.__ub:
return str(self.__expr) + ' == ' + str(self.__lb)
else:
return (str(self.__lb) + ' <= ' + str(self.__expr) +
' <= ' + str(self.__ub))
elif self.__lb > -inf:
return str(self.__expr) + ' >= ' + str(self.__lb)
elif self.__ub < inf:
return str(self.__expr) + ' <= ' + str(self.__ub)
else:
return 'Trivial inequality (always true)'
def Extract(self, solver, name=''):
"""Performs the actual creation of the constraint object."""
coeffs = self.__expr.GetCoeffs()
constant = coeffs.pop(OFFSET_KEY, 0.0)
lb = -solver.infinity()
ub = solver.infinity()
if self.__lb > -inf:
lb = self.__lb - constant
if self.__ub < inf:
ub = self.__ub - constant
constraint = solver.RowConstraint(lb, ub, name)
for v, c, in coeffs.items():
constraint.SetCoefficient(v, float(c))
return constraint<|fim▁end|>
| |
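LinearExpr.GetCoeffs above flattens an arbitrary expression tree into a single {variable: coefficient} map, carrying the accumulated multiplier on an explicit stack instead of recursing. A stripped-down sketch of the same traversal on plain tuples, independent of the or-tools classes:

# Illustrative stack-based flattening, mirroring LinearExpr.GetCoeffs above.
# Nodes are plain tuples: ('var', name), ('const', c), ('mul', coef, expr),
# ('sum', [exprs]) -- not the or-tools wrapper classes.
from collections import defaultdict

def flatten(expr):
    coeffs = defaultdict(float)
    stack = [(1.0, expr)]
    while stack:
        mult, node = stack.pop()
        kind = node[0]
        if kind == 'var':
            coeffs[node[1]] += mult
        elif kind == 'const':
            coeffs['OFFSET'] += mult * node[1]
        elif kind == 'mul':
            stack.append((mult * node[1], node[2]))
        elif kind == 'sum':
            for child in reversed(node[1]):       # preserve left-to-right order
                stack.append((mult, child))
    return dict(coeffs)

# 2*(x + 3) - y  ->  {'x': 2.0, 'OFFSET': 6.0, 'y': -1.0}
example = ('sum', [('mul', 2, ('sum', [('var', 'x'), ('const', 3)])),
                   ('mul', -1, ('var', 'y'))])
assert flatten(example) == {'x': 2.0, 'OFFSET': 6.0, 'y': -1.0}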
<|file_name|>quick_stage.py<|end_file_name|><|fim▁begin|>from collections import namedtuple
import sublime
from sublime_plugin import WindowCommand
from ..git_command import GitCommand
MenuOption = namedtuple("MenuOption", ["requires_action", "menu_text", "filename", "is_untracked"])<|fim▁hole|>
CLEAN_WORKING_DIR = "Nothing to commit, working directory clean."
ADD_ALL_UNSTAGED_FILES = " ? All unstaged files"
ADD_ALL_FILES = " + All files"
class GsQuickStageCommand(WindowCommand, GitCommand):
"""
Display a quick panel of unstaged files in the current git repository,
allowing the user to select one or more files for staging.
Display filenames with one of the following indicators:
* [M] modified
* [A] added
* [D] deleted
* [R] renamed/moved
* [C] copied
* [U] updated but unmerged
* [?] untracked
"""
def run(self):
sublime.set_timeout_async(self.run_async)
def run_async(self):
menu_options = self.get_menu_options()
menu_entries = [f.menu_text for f in menu_options]
def on_selection(id):
if id == -1:
return
selection = menu_options[id]
if not selection.requires_action:
return
elif selection.menu_text == ADD_ALL_UNSTAGED_FILES:
self.git("add", "--update", ".")
scope_of_action = "all unstaged files"
elif selection.menu_text == ADD_ALL_FILES:
self.git("add", "--all")
scope_of_action = "all files"
elif selection.is_untracked:
self.git("add", "--", selection.filename)
scope_of_action = "`{}`".format(selection.filename)
else:
self.git("add", "--update", "--", selection.filename)
scope_of_action = "`{}`".format(selection.filename)
            sublime.status_message("Successfully added {}.".format(
                scope_of_action))
sublime.set_timeout_async(self.run_async, 0)
self.window.show_quick_panel(
menu_entries,
on_selection,
flags=sublime.MONOSPACE_FONT
)
def get_menu_options(self):
"""
Determine the git status of the current working directory, and return
a list of menu options for each file that is shown.
"""
status_entries = self.get_status()
menu_options = []
for entry in status_entries:
if entry.working_status in ("M", "D", "?"):
filename = (entry.path if not entry.index_status == "R"
else entry.path + " <- " + entry.path_alt)
menu_text = "[{0}] {1}".format(entry.working_status, filename)
menu_options.append(MenuOption(True, menu_text, filename, entry.index_status == "?"))
if not menu_options:
return [MenuOption(False, CLEAN_WORKING_DIR, None, None)]
menu_options.append(MenuOption(True, ADD_ALL_UNSTAGED_FILES, None, None))
menu_options.append(MenuOption(True, ADD_ALL_FILES, None, None))
return menu_options<|fim▁end|>
| |
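get_menu_options above keys off the working_status/index_status fields yielded by GitCommand.get_status(), which is not shown in this sample. A self-contained sketch of deriving the same kind of menu entries from `git status --porcelain`-style lines (hypothetical parsing, not GitSavvy's actual helper):

# Hypothetical porcelain parsing; GitSavvy's real get_status() differs.
def parse_porcelain(lines):
    """Yield (index_status, working_status, path) from porcelain-format lines."""
    for line in lines:
        if len(line) >= 4:
            yield line[0], line[1], line[3:]

def menu_entries(lines):
    entries = []
    for index_status, working_status, path in parse_porcelain(lines):
        status = "?" if index_status == "?" else working_status
        if status in ("M", "D", "?"):
            entries.append("[{0}] {1}".format(status, path))
    return entries

sample = [" M README.md", "?? notes.txt", "D  gone.py"]
# menu_entries(sample) -> ['[M] README.md', '[?] notes.txt']
# (the staged-only deletion has a clean working tree, so it is skipped)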
<|file_name|>Simulation.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python
from TOSSIM import *
from sets import Set
import sys
from optparse import OptionParser
parser = OptionParser(usage="usage: %prog [options] filename",
version="%prog 1.0")
parser.add_option("-g", "--gainfile",
action="store",
dest="gainfile",
default="topology.txt",
help="file containing gains between simulation nodes")
parser.add_option("-n", "--noisefile",
action="store",
dest="noise",
default="Noise/meyer-heavy-short.txt",
help="file containing gains between simulation nodes")
(options, args) = parser.parse_args()
options_dict = vars(options)
print options_dict['gainfile']
print "Simulation start"
from tinyos.tossim.TossimApp import *
n = NescApp()
vars = n.variables.variables()
t = Tossim(vars)
r = t.radio()
mac = t.mac()
# Topology configuration
gainfile = open(options_dict['gainfile'], "r")
nodes = Set([])
print "Simulation Topology:"
lines = gainfile.readlines()
for line in lines:
splitlines = line.split()
if (len(splitlines) > 0):
if (splitlines[0] == "gain"):
r.add(int(splitlines[1]), int(splitlines[2]), float(splitlines[3].replace(",",".")))
print "Source:", splitlines[1], "Destination:", splitlines[2], "Gain:", splitlines[3], "dBm";
nodes.add(int(splitlines[1]))
nodes.add(int(splitlines[2]))
print "Number of nodes: " + str(len(nodes)) + ", nodes' ids:", nodes
# Allocating debug outputs
energy_output = open("Simulation/Energy.txt", "w")
packet_output = open("Simulation/Packet.txt", "w")
t.addChannel("PacketState", packet_output)
t.addChannel("ENERGY_HANDLER", energy_output)
# Opening simulation result file
resultfile = open("Simulation/Result.txt", "w")
# Default noise trace
noise = open(options_dict['noise'], "r")
lines = noise.readlines()
for line in lines:
stripline = line.strip() <|fim▁hole|> t.getNode(node).addNoiseTraceReading(val)
for node in nodes:
print "Creating noise model for node " + str(node) + "."
t.getNode(node).createNoiseModel()
# Boot time spread
boot_time = 0
for node in nodes:
t.getNode(node).bootAtTime(0 + boot_time);
boot_time += 50000000 # equal to 5 ms
# This runs the network for 50 seconds:
time = t.time()
while (time + t.ticksPerSecond() * 50 > t.time()):
t.runNextEvent()
resultfile.write("%d\n" % (t.time()))
for node in nodes:
m = t.getNode(node)
v = m.getVariable("MacPerformanceC.received_packets")
received_packets = v.getData()
c = m.getVariable("MacPerformanceC.counter")
sent_packets = c.getData()
print "The node id", node, "has sent", sent_packets, "and received", received_packets, "in total.";
resultfile.write("%d,%d,%d\n" % (node, sent_packets, received_packets))
print "End of simulation."<|fim▁end|>
|
if (stripline != ""):
val = int(stripline)
for node in nodes:
|
<|file_name|>prospective_search_pb.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'):
_extension_runtime = True
_ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage
else:
_extension_runtime = False
_ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
from google.appengine.datastore.entity_pb import EntityProto
class SchemaEntry(ProtocolBuffer.ProtocolMessage):
STRING = 1
INT32 = 2
BOOLEAN = 3
DOUBLE = 4
POINT = 5
USER = 6
REFERENCE = 7
_Type_NAMES = {
1: "STRING",
2: "INT32",
3: "BOOLEAN",
4: "DOUBLE",
5: "POINT",
6: "USER",
7: "REFERENCE",
}
def Type_Name(cls, x): return cls._Type_NAMES.get(x, "")
Type_Name = classmethod(Type_Name)
has_name_ = 0
name_ = ""
has_type_ = 0
type_ = 0
has_meaning_ = 0
meaning_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def name(self): return self.name_
def set_name(self, x):
self.has_name_ = 1
self.name_ = x
def clear_name(self):
if self.has_name_:
self.has_name_ = 0
self.name_ = ""
def has_name(self): return self.has_name_
def type(self): return self.type_
def set_type(self, x):
self.has_type_ = 1
self.type_ = x
def clear_type(self):
if self.has_type_:
self.has_type_ = 0
self.type_ = 0
def has_type(self): return self.has_type_
def meaning(self): return self.meaning_
def set_meaning(self, x):
self.has_meaning_ = 1
self.meaning_ = x
def clear_meaning(self):
if self.has_meaning_:
self.has_meaning_ = 0
self.meaning_ = 0
def has_meaning(self): return self.has_meaning_
def MergeFrom(self, x):
assert x is not self
if (x.has_name()): self.set_name(x.name())
if (x.has_type()): self.set_type(x.type())
if (x.has_meaning()): self.set_meaning(x.meaning())
def Equals(self, x):
if x is self: return 1
if self.has_name_ != x.has_name_: return 0
if self.has_name_ and self.name_ != x.name_: return 0
if self.has_type_ != x.has_type_: return 0
if self.has_type_ and self.type_ != x.type_: return 0
if self.has_meaning_ != x.has_meaning_: return 0
if self.has_meaning_ and self.meaning_ != x.meaning_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: name not set.')
if (not self.has_type_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: type not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.name_))
n += self.lengthVarInt64(self.type_)
if (self.has_meaning_): n += 1 + self.lengthVarInt64(self.meaning_)
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_name_):
n += 1
n += self.lengthString(len(self.name_))
if (self.has_type_):
n += 1
n += self.lengthVarInt64(self.type_)
if (self.has_meaning_): n += 1 + self.lengthVarInt64(self.meaning_)
return n<|fim▁hole|> self.clear_name()
self.clear_type()
self.clear_meaning()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.name_)
out.putVarInt32(16)
out.putVarInt32(self.type_)
if (self.has_meaning_):
out.putVarInt32(24)
out.putVarInt32(self.meaning_)
def OutputPartial(self, out):
if (self.has_name_):
out.putVarInt32(10)
out.putPrefixedString(self.name_)
if (self.has_type_):
out.putVarInt32(16)
out.putVarInt32(self.type_)
if (self.has_meaning_):
out.putVarInt32(24)
out.putVarInt32(self.meaning_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_name(d.getPrefixedString())
continue
if tt == 16:
self.set_type(d.getVarInt32())
continue
if tt == 24:
self.set_meaning(d.getVarInt32())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
if self.has_type_: res+=prefix+("type: %s\n" % self.DebugFormatInt32(self.type_))
if self.has_meaning_: res+=prefix+("meaning: %s\n" % self.DebugFormatInt32(self.meaning_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kname = 1
ktype = 2
kmeaning = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "name",
2: "type",
3: "meaning",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.SchemaEntry'
class SubscribeRequest(ProtocolBuffer.ProtocolMessage):
has_topic_ = 0
topic_ = ""
has_sub_id_ = 0
sub_id_ = ""
has_lease_duration_sec_ = 0
lease_duration_sec_ = 0.0
has_vanilla_query_ = 0
vanilla_query_ = ""
def __init__(self, contents=None):
self.schema_entry_ = []
if contents is not None: self.MergeFromString(contents)
def topic(self): return self.topic_
def set_topic(self, x):
self.has_topic_ = 1
self.topic_ = x
def clear_topic(self):
if self.has_topic_:
self.has_topic_ = 0
self.topic_ = ""
def has_topic(self): return self.has_topic_
def sub_id(self): return self.sub_id_
def set_sub_id(self, x):
self.has_sub_id_ = 1
self.sub_id_ = x
def clear_sub_id(self):
if self.has_sub_id_:
self.has_sub_id_ = 0
self.sub_id_ = ""
def has_sub_id(self): return self.has_sub_id_
def lease_duration_sec(self): return self.lease_duration_sec_
def set_lease_duration_sec(self, x):
self.has_lease_duration_sec_ = 1
self.lease_duration_sec_ = x
def clear_lease_duration_sec(self):
if self.has_lease_duration_sec_:
self.has_lease_duration_sec_ = 0
self.lease_duration_sec_ = 0.0
def has_lease_duration_sec(self): return self.has_lease_duration_sec_
def vanilla_query(self): return self.vanilla_query_
def set_vanilla_query(self, x):
self.has_vanilla_query_ = 1
self.vanilla_query_ = x
def clear_vanilla_query(self):
if self.has_vanilla_query_:
self.has_vanilla_query_ = 0
self.vanilla_query_ = ""
def has_vanilla_query(self): return self.has_vanilla_query_
def schema_entry_size(self): return len(self.schema_entry_)
def schema_entry_list(self): return self.schema_entry_
def schema_entry(self, i):
return self.schema_entry_[i]
def mutable_schema_entry(self, i):
return self.schema_entry_[i]
def add_schema_entry(self):
x = SchemaEntry()
self.schema_entry_.append(x)
return x
def clear_schema_entry(self):
self.schema_entry_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_topic()): self.set_topic(x.topic())
if (x.has_sub_id()): self.set_sub_id(x.sub_id())
if (x.has_lease_duration_sec()): self.set_lease_duration_sec(x.lease_duration_sec())
if (x.has_vanilla_query()): self.set_vanilla_query(x.vanilla_query())
for i in xrange(x.schema_entry_size()): self.add_schema_entry().CopyFrom(x.schema_entry(i))
def Equals(self, x):
if x is self: return 1
if self.has_topic_ != x.has_topic_: return 0
if self.has_topic_ and self.topic_ != x.topic_: return 0
if self.has_sub_id_ != x.has_sub_id_: return 0
if self.has_sub_id_ and self.sub_id_ != x.sub_id_: return 0
if self.has_lease_duration_sec_ != x.has_lease_duration_sec_: return 0
if self.has_lease_duration_sec_ and self.lease_duration_sec_ != x.lease_duration_sec_: return 0
if self.has_vanilla_query_ != x.has_vanilla_query_: return 0
if self.has_vanilla_query_ and self.vanilla_query_ != x.vanilla_query_: return 0
if len(self.schema_entry_) != len(x.schema_entry_): return 0
for e1, e2 in zip(self.schema_entry_, x.schema_entry_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_topic_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: topic not set.')
if (not self.has_sub_id_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: sub_id not set.')
if (not self.has_lease_duration_sec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: lease_duration_sec not set.')
if (not self.has_vanilla_query_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: vanilla_query not set.')
for p in self.schema_entry_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.topic_))
n += self.lengthString(len(self.sub_id_))
n += self.lengthString(len(self.vanilla_query_))
n += 1 * len(self.schema_entry_)
for i in xrange(len(self.schema_entry_)): n += self.lengthString(self.schema_entry_[i].ByteSize())
return n + 12
def ByteSizePartial(self):
n = 0
if (self.has_topic_):
n += 1
n += self.lengthString(len(self.topic_))
if (self.has_sub_id_):
n += 1
n += self.lengthString(len(self.sub_id_))
if (self.has_lease_duration_sec_):
n += 9
if (self.has_vanilla_query_):
n += 1
n += self.lengthString(len(self.vanilla_query_))
n += 1 * len(self.schema_entry_)
for i in xrange(len(self.schema_entry_)): n += self.lengthString(self.schema_entry_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_topic()
self.clear_sub_id()
self.clear_lease_duration_sec()
self.clear_vanilla_query()
self.clear_schema_entry()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.topic_)
out.putVarInt32(18)
out.putPrefixedString(self.sub_id_)
out.putVarInt32(25)
out.putDouble(self.lease_duration_sec_)
out.putVarInt32(34)
out.putPrefixedString(self.vanilla_query_)
for i in xrange(len(self.schema_entry_)):
out.putVarInt32(42)
out.putVarInt32(self.schema_entry_[i].ByteSize())
self.schema_entry_[i].OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_topic_):
out.putVarInt32(10)
out.putPrefixedString(self.topic_)
if (self.has_sub_id_):
out.putVarInt32(18)
out.putPrefixedString(self.sub_id_)
if (self.has_lease_duration_sec_):
out.putVarInt32(25)
out.putDouble(self.lease_duration_sec_)
if (self.has_vanilla_query_):
out.putVarInt32(34)
out.putPrefixedString(self.vanilla_query_)
for i in xrange(len(self.schema_entry_)):
out.putVarInt32(42)
out.putVarInt32(self.schema_entry_[i].ByteSizePartial())
self.schema_entry_[i].OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_topic(d.getPrefixedString())
continue
if tt == 18:
self.set_sub_id(d.getPrefixedString())
continue
if tt == 25:
self.set_lease_duration_sec(d.getDouble())
continue
if tt == 34:
self.set_vanilla_query(d.getPrefixedString())
continue
if tt == 42:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_schema_entry().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_topic_: res+=prefix+("topic: %s\n" % self.DebugFormatString(self.topic_))
if self.has_sub_id_: res+=prefix+("sub_id: %s\n" % self.DebugFormatString(self.sub_id_))
if self.has_lease_duration_sec_: res+=prefix+("lease_duration_sec: %s\n" % self.DebugFormat(self.lease_duration_sec_))
if self.has_vanilla_query_: res+=prefix+("vanilla_query: %s\n" % self.DebugFormatString(self.vanilla_query_))
cnt=0
for e in self.schema_entry_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("schema_entry%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ktopic = 1
ksub_id = 2
klease_duration_sec = 3
kvanilla_query = 4
kschema_entry = 5
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "topic",
2: "sub_id",
3: "lease_duration_sec",
4: "vanilla_query",
5: "schema_entry",
}, 5)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.DOUBLE,
4: ProtocolBuffer.Encoder.STRING,
5: ProtocolBuffer.Encoder.STRING,
}, 5, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.SubscribeRequest'
class SubscribeResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
pass
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.SubscribeResponse'
class UnsubscribeRequest(ProtocolBuffer.ProtocolMessage):
has_topic_ = 0
topic_ = ""
has_sub_id_ = 0
sub_id_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def topic(self): return self.topic_
def set_topic(self, x):
self.has_topic_ = 1
self.topic_ = x
def clear_topic(self):
if self.has_topic_:
self.has_topic_ = 0
self.topic_ = ""
def has_topic(self): return self.has_topic_
def sub_id(self): return self.sub_id_
def set_sub_id(self, x):
self.has_sub_id_ = 1
self.sub_id_ = x
def clear_sub_id(self):
if self.has_sub_id_:
self.has_sub_id_ = 0
self.sub_id_ = ""
def has_sub_id(self): return self.has_sub_id_
def MergeFrom(self, x):
assert x is not self
if (x.has_topic()): self.set_topic(x.topic())
if (x.has_sub_id()): self.set_sub_id(x.sub_id())
def Equals(self, x):
if x is self: return 1
if self.has_topic_ != x.has_topic_: return 0
if self.has_topic_ and self.topic_ != x.topic_: return 0
if self.has_sub_id_ != x.has_sub_id_: return 0
if self.has_sub_id_ and self.sub_id_ != x.sub_id_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_topic_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: topic not set.')
if (not self.has_sub_id_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: sub_id not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.topic_))
n += self.lengthString(len(self.sub_id_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_topic_):
n += 1
n += self.lengthString(len(self.topic_))
if (self.has_sub_id_):
n += 1
n += self.lengthString(len(self.sub_id_))
return n
def Clear(self):
self.clear_topic()
self.clear_sub_id()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.topic_)
out.putVarInt32(18)
out.putPrefixedString(self.sub_id_)
def OutputPartial(self, out):
if (self.has_topic_):
out.putVarInt32(10)
out.putPrefixedString(self.topic_)
if (self.has_sub_id_):
out.putVarInt32(18)
out.putPrefixedString(self.sub_id_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_topic(d.getPrefixedString())
continue
if tt == 18:
self.set_sub_id(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_topic_: res+=prefix+("topic: %s\n" % self.DebugFormatString(self.topic_))
if self.has_sub_id_: res+=prefix+("sub_id: %s\n" % self.DebugFormatString(self.sub_id_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ktopic = 1
ksub_id = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "topic",
2: "sub_id",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.UnsubscribeRequest'
class UnsubscribeResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
pass
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.UnsubscribeResponse'
class SubscriptionRecord(ProtocolBuffer.ProtocolMessage):
OK = 0
PENDING = 1
ERROR = 2
_State_NAMES = {
0: "OK",
1: "PENDING",
2: "ERROR",
}
def State_Name(cls, x): return cls._State_NAMES.get(x, "")
State_Name = classmethod(State_Name)
has_id_ = 0
id_ = ""
has_vanilla_query_ = 0
vanilla_query_ = ""
has_expiration_time_sec_ = 0
expiration_time_sec_ = 0.0
has_state_ = 0
state_ = 0
has_error_message_ = 0
error_message_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def id(self): return self.id_
def set_id(self, x):
self.has_id_ = 1
self.id_ = x
def clear_id(self):
if self.has_id_:
self.has_id_ = 0
self.id_ = ""
def has_id(self): return self.has_id_
def vanilla_query(self): return self.vanilla_query_
def set_vanilla_query(self, x):
self.has_vanilla_query_ = 1
self.vanilla_query_ = x
def clear_vanilla_query(self):
if self.has_vanilla_query_:
self.has_vanilla_query_ = 0
self.vanilla_query_ = ""
def has_vanilla_query(self): return self.has_vanilla_query_
def expiration_time_sec(self): return self.expiration_time_sec_
def set_expiration_time_sec(self, x):
self.has_expiration_time_sec_ = 1
self.expiration_time_sec_ = x
def clear_expiration_time_sec(self):
if self.has_expiration_time_sec_:
self.has_expiration_time_sec_ = 0
self.expiration_time_sec_ = 0.0
def has_expiration_time_sec(self): return self.has_expiration_time_sec_
def state(self): return self.state_
def set_state(self, x):
self.has_state_ = 1
self.state_ = x
def clear_state(self):
if self.has_state_:
self.has_state_ = 0
self.state_ = 0
def has_state(self): return self.has_state_
def error_message(self): return self.error_message_
def set_error_message(self, x):
self.has_error_message_ = 1
self.error_message_ = x
def clear_error_message(self):
if self.has_error_message_:
self.has_error_message_ = 0
self.error_message_ = ""
def has_error_message(self): return self.has_error_message_
def MergeFrom(self, x):
assert x is not self
if (x.has_id()): self.set_id(x.id())
if (x.has_vanilla_query()): self.set_vanilla_query(x.vanilla_query())
if (x.has_expiration_time_sec()): self.set_expiration_time_sec(x.expiration_time_sec())
if (x.has_state()): self.set_state(x.state())
if (x.has_error_message()): self.set_error_message(x.error_message())
def Equals(self, x):
if x is self: return 1
if self.has_id_ != x.has_id_: return 0
if self.has_id_ and self.id_ != x.id_: return 0
if self.has_vanilla_query_ != x.has_vanilla_query_: return 0
if self.has_vanilla_query_ and self.vanilla_query_ != x.vanilla_query_: return 0
if self.has_expiration_time_sec_ != x.has_expiration_time_sec_: return 0
if self.has_expiration_time_sec_ and self.expiration_time_sec_ != x.expiration_time_sec_: return 0
if self.has_state_ != x.has_state_: return 0
if self.has_state_ and self.state_ != x.state_: return 0
if self.has_error_message_ != x.has_error_message_: return 0
if self.has_error_message_ and self.error_message_ != x.error_message_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_id_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: id not set.')
if (not self.has_vanilla_query_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: vanilla_query not set.')
if (not self.has_expiration_time_sec_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: expiration_time_sec not set.')
if (not self.has_state_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: state not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.id_))
n += self.lengthString(len(self.vanilla_query_))
n += self.lengthVarInt64(self.state_)
if (self.has_error_message_): n += 1 + self.lengthString(len(self.error_message_))
return n + 12
def ByteSizePartial(self):
n = 0
if (self.has_id_):
n += 1
n += self.lengthString(len(self.id_))
if (self.has_vanilla_query_):
n += 1
n += self.lengthString(len(self.vanilla_query_))
if (self.has_expiration_time_sec_):
n += 9
if (self.has_state_):
n += 1
n += self.lengthVarInt64(self.state_)
if (self.has_error_message_): n += 1 + self.lengthString(len(self.error_message_))
return n
def Clear(self):
self.clear_id()
self.clear_vanilla_query()
self.clear_expiration_time_sec()
self.clear_state()
self.clear_error_message()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.id_)
out.putVarInt32(18)
out.putPrefixedString(self.vanilla_query_)
out.putVarInt32(25)
out.putDouble(self.expiration_time_sec_)
out.putVarInt32(32)
out.putVarInt32(self.state_)
if (self.has_error_message_):
out.putVarInt32(42)
out.putPrefixedString(self.error_message_)
def OutputPartial(self, out):
if (self.has_id_):
out.putVarInt32(10)
out.putPrefixedString(self.id_)
if (self.has_vanilla_query_):
out.putVarInt32(18)
out.putPrefixedString(self.vanilla_query_)
if (self.has_expiration_time_sec_):
out.putVarInt32(25)
out.putDouble(self.expiration_time_sec_)
if (self.has_state_):
out.putVarInt32(32)
out.putVarInt32(self.state_)
if (self.has_error_message_):
out.putVarInt32(42)
out.putPrefixedString(self.error_message_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_id(d.getPrefixedString())
continue
if tt == 18:
self.set_vanilla_query(d.getPrefixedString())
continue
if tt == 25:
self.set_expiration_time_sec(d.getDouble())
continue
if tt == 32:
self.set_state(d.getVarInt32())
continue
if tt == 42:
self.set_error_message(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_id_: res+=prefix+("id: %s\n" % self.DebugFormatString(self.id_))
if self.has_vanilla_query_: res+=prefix+("vanilla_query: %s\n" % self.DebugFormatString(self.vanilla_query_))
if self.has_expiration_time_sec_: res+=prefix+("expiration_time_sec: %s\n" % self.DebugFormat(self.expiration_time_sec_))
if self.has_state_: res+=prefix+("state: %s\n" % self.DebugFormatInt32(self.state_))
if self.has_error_message_: res+=prefix+("error_message: %s\n" % self.DebugFormatString(self.error_message_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kid = 1
kvanilla_query = 2
kexpiration_time_sec = 3
kstate = 4
kerror_message = 5
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "id",
2: "vanilla_query",
3: "expiration_time_sec",
4: "state",
5: "error_message",
}, 5)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.DOUBLE,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
}, 5, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.SubscriptionRecord'
class ListSubscriptionsRequest(ProtocolBuffer.ProtocolMessage):
has_topic_ = 0
topic_ = ""
has_max_results_ = 0
max_results_ = 1000
has_expires_before_ = 0
expires_before_ = 0
has_subscription_id_start_ = 0
subscription_id_start_ = ""
has_app_id_ = 0
app_id_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def topic(self): return self.topic_
def set_topic(self, x):
self.has_topic_ = 1
self.topic_ = x
def clear_topic(self):
if self.has_topic_:
self.has_topic_ = 0
self.topic_ = ""
def has_topic(self): return self.has_topic_
def max_results(self): return self.max_results_
def set_max_results(self, x):
self.has_max_results_ = 1
self.max_results_ = x
def clear_max_results(self):
if self.has_max_results_:
self.has_max_results_ = 0
self.max_results_ = 1000
def has_max_results(self): return self.has_max_results_
def expires_before(self): return self.expires_before_
def set_expires_before(self, x):
self.has_expires_before_ = 1
self.expires_before_ = x
def clear_expires_before(self):
if self.has_expires_before_:
self.has_expires_before_ = 0
self.expires_before_ = 0
def has_expires_before(self): return self.has_expires_before_
def subscription_id_start(self): return self.subscription_id_start_
def set_subscription_id_start(self, x):
self.has_subscription_id_start_ = 1
self.subscription_id_start_ = x
def clear_subscription_id_start(self):
if self.has_subscription_id_start_:
self.has_subscription_id_start_ = 0
self.subscription_id_start_ = ""
def has_subscription_id_start(self): return self.has_subscription_id_start_
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def MergeFrom(self, x):
assert x is not self
if (x.has_topic()): self.set_topic(x.topic())
if (x.has_max_results()): self.set_max_results(x.max_results())
if (x.has_expires_before()): self.set_expires_before(x.expires_before())
if (x.has_subscription_id_start()): self.set_subscription_id_start(x.subscription_id_start())
if (x.has_app_id()): self.set_app_id(x.app_id())
def Equals(self, x):
if x is self: return 1
if self.has_topic_ != x.has_topic_: return 0
if self.has_topic_ and self.topic_ != x.topic_: return 0
if self.has_max_results_ != x.has_max_results_: return 0
if self.has_max_results_ and self.max_results_ != x.max_results_: return 0
if self.has_expires_before_ != x.has_expires_before_: return 0
if self.has_expires_before_ and self.expires_before_ != x.expires_before_: return 0
if self.has_subscription_id_start_ != x.has_subscription_id_start_: return 0
if self.has_subscription_id_start_ and self.subscription_id_start_ != x.subscription_id_start_: return 0
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_topic_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: topic not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.topic_))
if (self.has_max_results_): n += 1 + self.lengthVarInt64(self.max_results_)
if (self.has_expires_before_): n += 1 + self.lengthVarInt64(self.expires_before_)
if (self.has_subscription_id_start_): n += 1 + self.lengthString(len(self.subscription_id_start_))
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_topic_):
n += 1
n += self.lengthString(len(self.topic_))
if (self.has_max_results_): n += 1 + self.lengthVarInt64(self.max_results_)
if (self.has_expires_before_): n += 1 + self.lengthVarInt64(self.expires_before_)
if (self.has_subscription_id_start_): n += 1 + self.lengthString(len(self.subscription_id_start_))
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
return n
def Clear(self):
self.clear_topic()
self.clear_max_results()
self.clear_expires_before()
self.clear_subscription_id_start()
self.clear_app_id()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.topic_)
if (self.has_max_results_):
out.putVarInt32(16)
out.putVarInt64(self.max_results_)
if (self.has_expires_before_):
out.putVarInt32(24)
out.putVarInt64(self.expires_before_)
if (self.has_subscription_id_start_):
out.putVarInt32(34)
out.putPrefixedString(self.subscription_id_start_)
if (self.has_app_id_):
out.putVarInt32(42)
out.putPrefixedString(self.app_id_)
def OutputPartial(self, out):
if (self.has_topic_):
out.putVarInt32(10)
out.putPrefixedString(self.topic_)
if (self.has_max_results_):
out.putVarInt32(16)
out.putVarInt64(self.max_results_)
if (self.has_expires_before_):
out.putVarInt32(24)
out.putVarInt64(self.expires_before_)
if (self.has_subscription_id_start_):
out.putVarInt32(34)
out.putPrefixedString(self.subscription_id_start_)
if (self.has_app_id_):
out.putVarInt32(42)
out.putPrefixedString(self.app_id_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_topic(d.getPrefixedString())
continue
if tt == 16:
self.set_max_results(d.getVarInt64())
continue
if tt == 24:
self.set_expires_before(d.getVarInt64())
continue
if tt == 34:
self.set_subscription_id_start(d.getPrefixedString())
continue
if tt == 42:
self.set_app_id(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_topic_: res+=prefix+("topic: %s\n" % self.DebugFormatString(self.topic_))
if self.has_max_results_: res+=prefix+("max_results: %s\n" % self.DebugFormatInt64(self.max_results_))
if self.has_expires_before_: res+=prefix+("expires_before: %s\n" % self.DebugFormatInt64(self.expires_before_))
if self.has_subscription_id_start_: res+=prefix+("subscription_id_start: %s\n" % self.DebugFormatString(self.subscription_id_start_))
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ktopic = 1
kmax_results = 2
kexpires_before = 3
ksubscription_id_start = 4
kapp_id = 5
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "topic",
2: "max_results",
3: "expires_before",
4: "subscription_id_start",
5: "app_id",
}, 5)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.STRING,
5: ProtocolBuffer.Encoder.STRING,
}, 5, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.ListSubscriptionsRequest'
class ListSubscriptionsResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.subscription_ = []
if contents is not None: self.MergeFromString(contents)
def subscription_size(self): return len(self.subscription_)
def subscription_list(self): return self.subscription_
def subscription(self, i):
return self.subscription_[i]
def mutable_subscription(self, i):
return self.subscription_[i]
def add_subscription(self):
x = SubscriptionRecord()
self.subscription_.append(x)
return x
def clear_subscription(self):
self.subscription_ = []
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.subscription_size()): self.add_subscription().CopyFrom(x.subscription(i))
def Equals(self, x):
if x is self: return 1
if len(self.subscription_) != len(x.subscription_): return 0
for e1, e2 in zip(self.subscription_, x.subscription_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.subscription_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.subscription_)
for i in xrange(len(self.subscription_)): n += self.lengthString(self.subscription_[i].ByteSize())
return n
def ByteSizePartial(self):
n = 0
n += 1 * len(self.subscription_)
for i in xrange(len(self.subscription_)): n += self.lengthString(self.subscription_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_subscription()
def OutputUnchecked(self, out):
for i in xrange(len(self.subscription_)):
out.putVarInt32(10)
out.putVarInt32(self.subscription_[i].ByteSize())
self.subscription_[i].OutputUnchecked(out)
def OutputPartial(self, out):
for i in xrange(len(self.subscription_)):
out.putVarInt32(10)
out.putVarInt32(self.subscription_[i].ByteSizePartial())
self.subscription_[i].OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_subscription().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.subscription_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("subscription%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ksubscription = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "subscription",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.ListSubscriptionsResponse'
class ListTopicsRequest(ProtocolBuffer.ProtocolMessage):
has_topic_start_ = 0
topic_start_ = ""
has_max_results_ = 0
max_results_ = 1000
has_app_id_ = 0
app_id_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def topic_start(self): return self.topic_start_
def set_topic_start(self, x):
self.has_topic_start_ = 1
self.topic_start_ = x
def clear_topic_start(self):
if self.has_topic_start_:
self.has_topic_start_ = 0
self.topic_start_ = ""
def has_topic_start(self): return self.has_topic_start_
def max_results(self): return self.max_results_
def set_max_results(self, x):
self.has_max_results_ = 1
self.max_results_ = x
def clear_max_results(self):
if self.has_max_results_:
self.has_max_results_ = 0
self.max_results_ = 1000
def has_max_results(self): return self.has_max_results_
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def MergeFrom(self, x):
assert x is not self
if (x.has_topic_start()): self.set_topic_start(x.topic_start())
if (x.has_max_results()): self.set_max_results(x.max_results())
if (x.has_app_id()): self.set_app_id(x.app_id())
def Equals(self, x):
if x is self: return 1
if self.has_topic_start_ != x.has_topic_start_: return 0
if self.has_topic_start_ and self.topic_start_ != x.topic_start_: return 0
if self.has_max_results_ != x.has_max_results_: return 0
if self.has_max_results_ and self.max_results_ != x.max_results_: return 0
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_topic_start_): n += 1 + self.lengthString(len(self.topic_start_))
if (self.has_max_results_): n += 1 + self.lengthVarInt64(self.max_results_)
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
return n
def ByteSizePartial(self):
n = 0
if (self.has_topic_start_): n += 1 + self.lengthString(len(self.topic_start_))
if (self.has_max_results_): n += 1 + self.lengthVarInt64(self.max_results_)
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
return n
def Clear(self):
self.clear_topic_start()
self.clear_max_results()
self.clear_app_id()
def OutputUnchecked(self, out):
if (self.has_topic_start_):
out.putVarInt32(10)
out.putPrefixedString(self.topic_start_)
if (self.has_max_results_):
out.putVarInt32(16)
out.putVarInt64(self.max_results_)
if (self.has_app_id_):
out.putVarInt32(26)
out.putPrefixedString(self.app_id_)
def OutputPartial(self, out):
if (self.has_topic_start_):
out.putVarInt32(10)
out.putPrefixedString(self.topic_start_)
if (self.has_max_results_):
out.putVarInt32(16)
out.putVarInt64(self.max_results_)
if (self.has_app_id_):
out.putVarInt32(26)
out.putPrefixedString(self.app_id_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_topic_start(d.getPrefixedString())
continue
if tt == 16:
self.set_max_results(d.getVarInt64())
continue
if tt == 26:
self.set_app_id(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_topic_start_: res+=prefix+("topic_start: %s\n" % self.DebugFormatString(self.topic_start_))
if self.has_max_results_: res+=prefix+("max_results: %s\n" % self.DebugFormatInt64(self.max_results_))
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ktopic_start = 1
kmax_results = 2
kapp_id = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "topic_start",
2: "max_results",
3: "app_id",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.ListTopicsRequest'
class ListTopicsResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.topic_ = []
if contents is not None: self.MergeFromString(contents)
def topic_size(self): return len(self.topic_)
def topic_list(self): return self.topic_
def topic(self, i):
return self.topic_[i]
def set_topic(self, i, x):
self.topic_[i] = x
def add_topic(self, x):
self.topic_.append(x)
def clear_topic(self):
self.topic_ = []
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.topic_size()): self.add_topic(x.topic(i))
def Equals(self, x):
if x is self: return 1
if len(self.topic_) != len(x.topic_): return 0
for e1, e2 in zip(self.topic_, x.topic_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.topic_)
for i in xrange(len(self.topic_)): n += self.lengthString(len(self.topic_[i]))
return n
def ByteSizePartial(self):
n = 0
n += 1 * len(self.topic_)
for i in xrange(len(self.topic_)): n += self.lengthString(len(self.topic_[i]))
return n
def Clear(self):
self.clear_topic()
def OutputUnchecked(self, out):
for i in xrange(len(self.topic_)):
out.putVarInt32(10)
out.putPrefixedString(self.topic_[i])
def OutputPartial(self, out):
for i in xrange(len(self.topic_)):
out.putVarInt32(10)
out.putPrefixedString(self.topic_[i])
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.add_topic(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.topic_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("topic%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ktopic = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "topic",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.ListTopicsResponse'
class MatchRequest(ProtocolBuffer.ProtocolMessage):
ENTITY = 1
MODEL = 2
_PythonDocumentClass_NAMES = {
1: "ENTITY",
2: "MODEL",
}
def PythonDocumentClass_Name(cls, x): return cls._PythonDocumentClass_NAMES.get(x, "")
PythonDocumentClass_Name = classmethod(PythonDocumentClass_Name)
has_topic_ = 0
topic_ = ""
has_document_ = 0
has_result_batch_size_ = 0
result_batch_size_ = 0
has_result_task_queue_ = 0
result_task_queue_ = ""
has_result_relative_url_ = 0
result_relative_url_ = ""
has_result_key_ = 0
result_key_ = ""
has_result_python_document_class_ = 0
result_python_document_class_ = 0
def __init__(self, contents=None):
self.document_ = EntityProto()
if contents is not None: self.MergeFromString(contents)
def topic(self): return self.topic_
def set_topic(self, x):
self.has_topic_ = 1
self.topic_ = x
def clear_topic(self):
if self.has_topic_:
self.has_topic_ = 0
self.topic_ = ""
def has_topic(self): return self.has_topic_
def document(self): return self.document_
def mutable_document(self): self.has_document_ = 1; return self.document_
def clear_document(self):self.has_document_ = 0; self.document_.Clear()
def has_document(self): return self.has_document_
def result_batch_size(self): return self.result_batch_size_
def set_result_batch_size(self, x):
self.has_result_batch_size_ = 1
self.result_batch_size_ = x
def clear_result_batch_size(self):
if self.has_result_batch_size_:
self.has_result_batch_size_ = 0
self.result_batch_size_ = 0
def has_result_batch_size(self): return self.has_result_batch_size_
def result_task_queue(self): return self.result_task_queue_
def set_result_task_queue(self, x):
self.has_result_task_queue_ = 1
self.result_task_queue_ = x
def clear_result_task_queue(self):
if self.has_result_task_queue_:
self.has_result_task_queue_ = 0
self.result_task_queue_ = ""
def has_result_task_queue(self): return self.has_result_task_queue_
def result_relative_url(self): return self.result_relative_url_
def set_result_relative_url(self, x):
self.has_result_relative_url_ = 1
self.result_relative_url_ = x
def clear_result_relative_url(self):
if self.has_result_relative_url_:
self.has_result_relative_url_ = 0
self.result_relative_url_ = ""
def has_result_relative_url(self): return self.has_result_relative_url_
def result_key(self): return self.result_key_
def set_result_key(self, x):
self.has_result_key_ = 1
self.result_key_ = x
def clear_result_key(self):
if self.has_result_key_:
self.has_result_key_ = 0
self.result_key_ = ""
def has_result_key(self): return self.has_result_key_
def result_python_document_class(self): return self.result_python_document_class_
def set_result_python_document_class(self, x):
self.has_result_python_document_class_ = 1
self.result_python_document_class_ = x
def clear_result_python_document_class(self):
if self.has_result_python_document_class_:
self.has_result_python_document_class_ = 0
self.result_python_document_class_ = 0
def has_result_python_document_class(self): return self.has_result_python_document_class_
def MergeFrom(self, x):
assert x is not self
if (x.has_topic()): self.set_topic(x.topic())
if (x.has_document()): self.mutable_document().MergeFrom(x.document())
if (x.has_result_batch_size()): self.set_result_batch_size(x.result_batch_size())
if (x.has_result_task_queue()): self.set_result_task_queue(x.result_task_queue())
if (x.has_result_relative_url()): self.set_result_relative_url(x.result_relative_url())
if (x.has_result_key()): self.set_result_key(x.result_key())
if (x.has_result_python_document_class()): self.set_result_python_document_class(x.result_python_document_class())
def Equals(self, x):
if x is self: return 1
if self.has_topic_ != x.has_topic_: return 0
if self.has_topic_ and self.topic_ != x.topic_: return 0
if self.has_document_ != x.has_document_: return 0
if self.has_document_ and self.document_ != x.document_: return 0
if self.has_result_batch_size_ != x.has_result_batch_size_: return 0
if self.has_result_batch_size_ and self.result_batch_size_ != x.result_batch_size_: return 0
if self.has_result_task_queue_ != x.has_result_task_queue_: return 0
if self.has_result_task_queue_ and self.result_task_queue_ != x.result_task_queue_: return 0
if self.has_result_relative_url_ != x.has_result_relative_url_: return 0
if self.has_result_relative_url_ and self.result_relative_url_ != x.result_relative_url_: return 0
if self.has_result_key_ != x.has_result_key_: return 0
if self.has_result_key_ and self.result_key_ != x.result_key_: return 0
if self.has_result_python_document_class_ != x.has_result_python_document_class_: return 0
if self.has_result_python_document_class_ and self.result_python_document_class_ != x.result_python_document_class_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_topic_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: topic not set.')
if (not self.has_document_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: document not set.')
elif not self.document_.IsInitialized(debug_strs): initialized = 0
if (not self.has_result_batch_size_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: result_batch_size not set.')
if (not self.has_result_task_queue_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: result_task_queue not set.')
if (not self.has_result_relative_url_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: result_relative_url not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.topic_))
n += self.lengthString(self.document_.ByteSize())
n += self.lengthVarInt64(self.result_batch_size_)
n += self.lengthString(len(self.result_task_queue_))
n += self.lengthString(len(self.result_relative_url_))
if (self.has_result_key_): n += 1 + self.lengthString(len(self.result_key_))
if (self.has_result_python_document_class_): n += 1 + self.lengthVarInt64(self.result_python_document_class_)
return n + 5
def ByteSizePartial(self):
n = 0
if (self.has_topic_):
n += 1
n += self.lengthString(len(self.topic_))
if (self.has_document_):
n += 1
n += self.lengthString(self.document_.ByteSizePartial())
if (self.has_result_batch_size_):
n += 1
n += self.lengthVarInt64(self.result_batch_size_)
if (self.has_result_task_queue_):
n += 1
n += self.lengthString(len(self.result_task_queue_))
if (self.has_result_relative_url_):
n += 1
n += self.lengthString(len(self.result_relative_url_))
if (self.has_result_key_): n += 1 + self.lengthString(len(self.result_key_))
if (self.has_result_python_document_class_): n += 1 + self.lengthVarInt64(self.result_python_document_class_)
return n
def Clear(self):
self.clear_topic()
self.clear_document()
self.clear_result_batch_size()
self.clear_result_task_queue()
self.clear_result_relative_url()
self.clear_result_key()
self.clear_result_python_document_class()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.topic_)
out.putVarInt32(18)
out.putVarInt32(self.document_.ByteSize())
self.document_.OutputUnchecked(out)
out.putVarInt32(24)
out.putVarInt32(self.result_batch_size_)
out.putVarInt32(34)
out.putPrefixedString(self.result_task_queue_)
out.putVarInt32(42)
out.putPrefixedString(self.result_relative_url_)
if (self.has_result_key_):
out.putVarInt32(50)
out.putPrefixedString(self.result_key_)
if (self.has_result_python_document_class_):
out.putVarInt32(56)
out.putVarInt32(self.result_python_document_class_)
def OutputPartial(self, out):
if (self.has_topic_):
out.putVarInt32(10)
out.putPrefixedString(self.topic_)
if (self.has_document_):
out.putVarInt32(18)
out.putVarInt32(self.document_.ByteSizePartial())
self.document_.OutputPartial(out)
if (self.has_result_batch_size_):
out.putVarInt32(24)
out.putVarInt32(self.result_batch_size_)
if (self.has_result_task_queue_):
out.putVarInt32(34)
out.putPrefixedString(self.result_task_queue_)
if (self.has_result_relative_url_):
out.putVarInt32(42)
out.putPrefixedString(self.result_relative_url_)
if (self.has_result_key_):
out.putVarInt32(50)
out.putPrefixedString(self.result_key_)
if (self.has_result_python_document_class_):
out.putVarInt32(56)
out.putVarInt32(self.result_python_document_class_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_topic(d.getPrefixedString())
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_document().TryMerge(tmp)
continue
if tt == 24:
self.set_result_batch_size(d.getVarInt32())
continue
if tt == 34:
self.set_result_task_queue(d.getPrefixedString())
continue
if tt == 42:
self.set_result_relative_url(d.getPrefixedString())
continue
if tt == 50:
self.set_result_key(d.getPrefixedString())
continue
if tt == 56:
self.set_result_python_document_class(d.getVarInt32())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_topic_: res+=prefix+("topic: %s\n" % self.DebugFormatString(self.topic_))
if self.has_document_:
res+=prefix+"document <\n"
res+=self.document_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_result_batch_size_: res+=prefix+("result_batch_size: %s\n" % self.DebugFormatInt32(self.result_batch_size_))
if self.has_result_task_queue_: res+=prefix+("result_task_queue: %s\n" % self.DebugFormatString(self.result_task_queue_))
if self.has_result_relative_url_: res+=prefix+("result_relative_url: %s\n" % self.DebugFormatString(self.result_relative_url_))
if self.has_result_key_: res+=prefix+("result_key: %s\n" % self.DebugFormatString(self.result_key_))
if self.has_result_python_document_class_: res+=prefix+("result_python_document_class: %s\n" % self.DebugFormatInt32(self.result_python_document_class_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ktopic = 1
kdocument = 2
kresult_batch_size = 3
kresult_task_queue = 4
kresult_relative_url = 5
kresult_key = 6
kresult_python_document_class = 7
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "topic",
2: "document",
3: "result_batch_size",
4: "result_task_queue",
5: "result_relative_url",
6: "result_key",
7: "result_python_document_class",
}, 7)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.STRING,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.STRING,
7: ProtocolBuffer.Encoder.NUMERIC,
}, 7, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.MatchRequest'
class MatchResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
pass
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.prospective_search.MatchResponse'
if _extension_runtime:
pass
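# Illustrative usage sketch (not part of the generated code): builds a
# SubscribeRequest, checks required fields, and round-trips it through
# serialization. The Encode() call is assumed to come from the
# ProtocolBuffer.ProtocolMessage base class, and the numeric schema type (1)
# stands in for the real SchemaEntry type constants defined earlier in this
# module.
def _example_subscribe_roundtrip():
  req = SubscribeRequest()
  req.set_topic('news')
  req.set_sub_id('sub-42')
  req.set_lease_duration_sec(3600.0)
  req.set_vanilla_query('title:python')
  entry = req.add_schema_entry()
  entry.set_name('title')
  entry.set_type(1)
  assert req.IsInitialized()
  serialized = req.Encode()
  parsed = SubscribeRequest(serialized)
  assert parsed.Equals(req)
  return parsed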
__all__ = ['SchemaEntry','SubscribeRequest','SubscribeResponse','UnsubscribeRequest','UnsubscribeResponse','SubscriptionRecord','ListSubscriptionsRequest','ListSubscriptionsResponse','ListTopicsRequest','ListTopicsResponse','MatchRequest','MatchResponse']
<|file_name|>node-tests.ts<|end_file_name|><|fim▁begin|>import * as assert from "assert";
import * as fs from "fs";
import * as events from "events";
import * as zlib from "zlib";
import * as url from "url";
import * as util from "util";
import * as crypto from "crypto";
import * as tls from "tls";
import * as http from "http";
import * as https from "https";
import * as net from "net";
import * as tty from "tty";
import * as dgram from "dgram";
import * as querystring from "querystring";
import * as path from "path";
import * as readline from "readline";
import * as childProcess from "child_process";
import * as cluster from "cluster";
import * as os from "os";
import * as vm from "vm";
import * as string_decoder from "string_decoder";
import * as dns from "dns";
// Specifically test buffer module regression.
import { Buffer as ImportedBuffer, SlowBuffer as ImportedSlowBuffer } from "buffer";
//////////////////////////////////////////////////////////
/// Global Tests : https://nodejs.org/api/global.html ///
//////////////////////////////////////////////////////////
namespace global_tests {
{
let x: NodeModule;
let y: NodeModule;
x.children.push(y);
x.parent = require.main;
require.main = y;
}
}
//////////////////////////////////////////////////////////
/// Assert Tests : https://nodejs.org/api/assert.html ///
//////////////////////////////////////////////////////////
namespace assert_tests {
{
assert(1 + 1 - 2 === 0, "The universe isn't how it should.");
assert.deepEqual({ x: { y: 3 } }, { x: { y: 3 } }, "DEEP WENT DERP");
assert.deepStrictEqual({ a: 1 }, { a: 1 }, "uses === comparator");
assert.doesNotThrow(() => {
const b = false;
if (b) { throw "a hammer at your face"; }
}, undefined, "What the...*crunch*");
assert.equal(3, "3", "uses == comparator");
assert.fail(1, 2, undefined, '>');
assert.ifError(0);
assert.notDeepStrictEqual({ x: { y: "3" } }, { x: { y: 3 } }, "uses !== comparator");
assert.notEqual(1, 2, "uses != comparator");
assert.notStrictEqual(2, "2", "uses === comparator");
assert.ok(true);
assert.ok(1);
assert.strictEqual(1, 1, "uses === comparator");
assert.throws(() => { throw "a hammer at your face"; }, undefined, "DODGED IT");
}
}
////////////////////////////////////////////////////
/// Events tests : http://nodejs.org/api/events.html
////////////////////////////////////////////////////
namespace events_tests {
let emitter: events.EventEmitter;
let event: string;
let listener: Function;
let any: any;
{
let result: events.EventEmitter;
result = emitter.addListener(event, listener);
result = emitter.on(event, listener);
result = emitter.once(event, listener);
result = emitter.removeListener(event, listener);
result = emitter.removeAllListeners();
result = emitter.removeAllListeners(event);
result = emitter.setMaxListeners(42);
}
{
let result: number;
result = events.EventEmitter.defaultMaxListeners;
result = events.EventEmitter.listenerCount(emitter, event); // deprecated
result = emitter.getMaxListeners();
result = emitter.listenerCount(event);
}
{
let result: Function[];
result = emitter.listeners(event);
}
{
let result: boolean;
result = emitter.emit(event);
result = emitter.emit(event, any);
result = emitter.emit(event, any, any);
result = emitter.emit(event, any, any, any);
}
{
class Networker extends events.EventEmitter {
constructor() {
super();
this.emit("mingling");
}
}
}
}
////////////////////////////////////////////////////
/// File system tests : http://nodejs.org/api/fs.html
////////////////////////////////////////////////////
namespace fs_tests {
{
fs.writeFile("thebible.txt",
"Do unto others as you would have them do unto you.",
assert.ifError);
fs.write(1234, "test");
fs.writeFile("Harry Potter",
"\"You be wizzing, Harry,\" jived Dumbledore.",
{
encoding: "ascii"
},
assert.ifError);
fs.writeFile("testfile", "content", "utf8", assert.ifError);
fs.writeFileSync("testfile", "content", "utf8");
fs.writeFileSync("testfile", "content", { encoding: "utf8" });
}
{
fs.appendFile("testfile", "foobar", "utf8", assert.ifError);
fs.appendFile("testfile", "foobar", { encoding: "utf8" }, assert.ifError);
fs.appendFileSync("testfile", "foobar", "utf8");
fs.appendFileSync("testfile", "foobar", { encoding: "utf8" });
}
{
var content: string;
var buffer: Buffer;
var stringOrBuffer: string | Buffer;
var nullEncoding: string | null = null;
var stringEncoding: string | null = 'utf8';
content = fs.readFileSync('testfile', 'utf8');
content = fs.readFileSync('testfile', { encoding: 'utf8' });
stringOrBuffer = fs.readFileSync('testfile', stringEncoding);
stringOrBuffer = fs.readFileSync('testfile', { encoding: stringEncoding });
buffer = fs.readFileSync('testfile');
buffer = fs.readFileSync('testfile', null);
buffer = fs.readFileSync('testfile', { encoding: null });
stringOrBuffer = fs.readFileSync('testfile', nullEncoding);
stringOrBuffer = fs.readFileSync('testfile', { encoding: nullEncoding });
buffer = fs.readFileSync('testfile', { flag: 'r' });
fs.readFile('testfile', 'utf8', (err, data) => content = data);
fs.readFile('testfile', { encoding: 'utf8' }, (err, data) => content = data);
fs.readFile('testfile', stringEncoding, (err, data) => stringOrBuffer = data);
fs.readFile('testfile', { encoding: stringEncoding }, (err, data) => stringOrBuffer = data);
fs.readFile('testfile', (err, data) => buffer = data);
fs.readFile('testfile', null, (err, data) => buffer = data);
fs.readFile('testfile', { encoding: null }, (err, data) => buffer = data);
fs.readFile('testfile', nullEncoding, (err, data) => stringOrBuffer = data);
fs.readFile('testfile', { encoding: nullEncoding }, (err, data) => stringOrBuffer = data);
fs.readFile('testfile', { flag: 'r' }, (err, data) => buffer = data);
}
{
var errno: number;
fs.readFile('testfile', (err, data) => {
if (err && err.errno) {
errno = err.errno;
}
});
}
{
fs.mkdtemp('/tmp/foo-', (err, folder) => {
console.log(folder);
// Prints: /tmp/foo-itXde2
});
}
{
var tempDir: string;
tempDir = fs.mkdtempSync('/tmp/foo-');
}
}
///////////////////////////////////////////////////////
/// Buffer tests : https://nodejs.org/api/buffer.html
///////////////////////////////////////////////////////
function bufferTests() {
var utf8Buffer = new Buffer('test');
var base64Buffer = new Buffer('', 'base64');
var octets: Uint8Array = null;
var octetBuffer = new Buffer(octets);
var sharedBuffer = new Buffer(octets.buffer);
var copiedBuffer = new Buffer(utf8Buffer);
console.log(Buffer.isBuffer(octetBuffer));
console.log(Buffer.isEncoding('utf8'));
console.log(Buffer.byteLength('xyz123'));
console.log(Buffer.byteLength('xyz123', 'ascii'));
var result1 = Buffer.concat([utf8Buffer, base64Buffer]);
var result2 = Buffer.concat([utf8Buffer, base64Buffer], 9999999);
// Class Method: Buffer.from(array)
{
const buf: Buffer = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]);
}
// Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]])
{
const arr: Uint16Array = new Uint16Array(2);
arr[0] = 5000;
arr[1] = 4000;
let buf: Buffer;
buf = Buffer.from(arr.buffer);
buf = Buffer.from(arr.buffer, 1);
buf = Buffer.from(arr.buffer, 0, 1);
}
// Class Method: Buffer.from(buffer)
{
const buf1: Buffer = Buffer.from('buffer');
const buf2: Buffer = Buffer.from(buf1);
}
// Class Method: Buffer.from(str[, encoding])
{
const buf1: Buffer = Buffer.from('this is a tést');
const buf2: Buffer = Buffer.from('7468697320697320612074c3a97374', 'hex');
}
// Test that TS 1.6 works with the 'as Buffer' annotation
// on isBuffer.
var a: Buffer | number;
a = new Buffer(10);
if (Buffer.isBuffer(a)) {
a.writeUInt8(3, 4);
}
// write* methods return offsets.
var b = new Buffer(16);
var result: number = b.writeUInt32LE(0, 0);
result = b.writeUInt16LE(0, 4);
result = b.writeUInt8(0, 6);
result = b.writeInt8(0, 7);
result = b.writeDoubleLE(0, 8);
// fill returns the input buffer.
b.fill('a').fill('b');
{
let buffer = new Buffer('123');
let index: number;
index = buffer.indexOf("23");
index = buffer.indexOf("23", 1);
index = buffer.indexOf(23);
index = buffer.indexOf(buffer);
}
// Imported Buffer from buffer module works properly
{
let b = new ImportedBuffer('123');
b.writeUInt8(0, 6);
let sb = new ImportedSlowBuffer(43);
        sb.writeUInt8(0, 6);
}
// Buffer has Uint8Array's buffer field (an ArrayBuffer).
{
let buffer = new Buffer('123');
let octets = new Uint8Array(buffer.buffer);
}
}
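// Illustrative sketch (added for clarity, not part of the original tests):
// the allocation helpers that supersede the deprecated `new Buffer(...)`
// constructor, assuming the installed typings cover Node 4.5+.
function bufferAllocationSketch() {
    const zeroed: Buffer = Buffer.alloc(16);              // zero-filled
    const patterned: Buffer = Buffer.alloc(16, 0x2a);     // filled with 0x2a
    const uninitialized: Buffer = Buffer.allocUnsafe(16); // fast, not zeroed
    const fromText: Buffer = Buffer.from('hello', 'utf8');
    console.log(zeroed.length, patterned[0], uninitialized.length, fromText.toString());
}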
////////////////////////////////////////////////////
/// Url tests : http://nodejs.org/api/url.html
////////////////////////////////////////////////////
namespace url_tests {
{
url.format(url.parse('http://www.example.com/xyz'));
// https://google.com/search?q=you're%20a%20lizard%2C%20gary
url.format({
protocol: 'https',
host: "google.com",
pathname: 'search',
query: { q: "you're a lizard, gary" }
});
}
{
        var helloUrl = url.parse('http://example.com/?hello=world', true);
assert.equal(helloUrl.query.hello, 'world');
}
}
/////////////////////////////////////////////////////
/// util tests : https://nodejs.org/api/util.html ///
/////////////////////////////////////////////////////
namespace util_tests {
{
// Old and new util.inspect APIs
util.inspect(["This is nice"], false, 5);
util.inspect(["This is nice"], false, null);
util.inspect(["This is nice"], { colors: true, depth: 5, customInspect: false });
util.inspect(["This is nice"], { colors: true, depth: null, customInspect: false });
// util.deprecate
const foo = () => {};
// $ExpectType () => void
util.deprecate(foo, 'foo() is deprecated, use bar() instead');
// $ExpectType <T extends Function>(fn: T, message: string) => T
util.deprecate(util.deprecate, 'deprecate() is deprecated, use bar() instead');
}
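    {
        // Hedged additional sketch: util.format as documented; the format string and
        // arguments are arbitrary illustrative values.
        const formatted: string = util.format('%s:%s', 'foo', 'bar');
    }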
}
////////////////////////////////////////////////////
/// Stream tests : http://nodejs.org/api/stream.html
////////////////////////////////////////////////////
// http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
function stream_readable_pipe_test() {
var r = fs.createReadStream('file.txt');
var z = zlib.createGzip();
var w = fs.createWriteStream('file.txt.gz');
r.pipe(z).pipe(w);
r.close();
}
////////////////////////////////////////////////////
/// zlib tests : http://nodejs.org/api/zlib.html ///
////////////////////////////////////////////////////
namespace zlib_tests {
{
const gzipped = zlib.gzipSync('test');
const unzipped = zlib.gunzipSync(gzipped.toString());
}
{
const deflate = zlib.deflateSync('test');
const inflate = zlib.inflateSync(deflate.toString());
}
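    {
        // Hedged additional sketch: the asynchronous zlib.gzip callback form as
        // documented; the payload is an arbitrary illustrative value.
        zlib.gzip(new Buffer('test'), (err, result) => {
            // result should be the gzipped payload
        });
    }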
}
////////////////////////////////////////////////////////
/// Crypto tests : http://nodejs.org/api/crypto.html ///
////////////////////////////////////////////////////////
namespace crypto_tests {
{
var hmacResult: string = crypto.createHmac('md5', 'hello').update('world').digest('hex');
}
{
let hmac: crypto.Hmac;
(hmac = crypto.createHmac('md5', 'hello')).end('world', 'utf8', () => {
let hash: Buffer | string = hmac.read();
});
}
{
//crypto_cipher_decipher_string_test
let key: Buffer = new Buffer([1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7]);
let clearText: string = "This is the clear text.";
let cipher: crypto.Cipher = crypto.createCipher("aes-128-ecb", key);
let cipherText: string = cipher.update(clearText, "utf8", "hex");
cipherText += cipher.final("hex");
let decipher: crypto.Decipher = crypto.createDecipher("aes-128-ecb", key);
let clearText2: string = decipher.update(cipherText, "hex", "utf8");
clearText2 += decipher.final("utf8");
assert.equal(clearText2, clearText);
}
{
//crypto_cipher_decipher_buffer_test
let key: Buffer = new Buffer([1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7]);
let clearText: Buffer = new Buffer([1, 2, 3, 4, 5, 6, 7, 8, 9, 8, 7, 6, 5, 4]);
let cipher: crypto.Cipher = crypto.createCipher("aes-128-ecb", key);
let cipherBuffers: Buffer[] = [];
cipherBuffers.push(cipher.update(clearText));
cipherBuffers.push(cipher.final());
let cipherText: Buffer = Buffer.concat(cipherBuffers);
let decipher: crypto.Decipher = crypto.createDecipher("aes-128-ecb", key);
let decipherBuffers: Buffer[] = [];
decipherBuffers.push(decipher.update(cipherText));
decipherBuffers.push(decipher.final());
let clearText2: Buffer = Buffer.concat(decipherBuffers);
assert.deepEqual(clearText2, clearText);
}
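    {
        // Hedged additional sketch: crypto.createHash as documented; 'sha256' and the
        // input are arbitrary illustrative values.
        let hashResult: string = crypto.createHash('sha256').update('some data').digest('hex');
    }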
}
//////////////////////////////////////////////////
/// TLS tests : http://nodejs.org/api/tls.html ///
//////////////////////////////////////////////////
namespace tls_tests {
var ctx: tls.SecureContext = tls.createSecureContext({
key: "NOT REALLY A KEY",
cert: "SOME CERTIFICATE",
});
var blah = ctx.context;
var connOpts: tls.ConnectionOptions = {
host: "127.0.0.1",
port: 55
};
var tlsSocket = tls.connect(connOpts);
}
////////////////////////////////////////////////////
/// Http tests : http://nodejs.org/api/http.html ///
////////////////////////////////////////////////////
namespace http_tests {
{
// Status codes
var codeMessage = http.STATUS_CODES['400'];
var codeMessage = http.STATUS_CODES[400];
}
{
var agent: http.Agent = new http.Agent({
keepAlive: true,
keepAliveMsecs: 10000,
maxSockets: Infinity,
maxFreeSockets: 256
});
var agent: http.Agent = http.globalAgent;
http.request({ agent: false });
http.request({ agent: agent });
http.request({ agent: undefined });
}
{
http.request('http://www.example.com/xyz');
}
{
// Make sure .listen() and .close() return a Server instance
http.createServer().listen(0).close().address();
net.createServer().listen(0).close().address();
}
{
var request = http.request({ path: 'http://0.0.0.0' });
request.once('error', function() { });
request.setNoDelay(true);
request.abort();
}
// http request options
{
const requestOpts: http.RequestOptions = {
timeout: 30000
};
const clientArgs: http.ClientRequestArgs = {
timeout: 30000
};
}
// http headers
{
const headers: http.IncomingHttpHeaders = {
'content-type': 'application/json',
'set-cookie': [ 'type=ninja', 'language=javascript' ]
};
}
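    // http.get convenience helper
    {
        // Hedged additional sketch: http.get mirrors http.request but calls end()
        // automatically; the URL is an arbitrary illustrative value.
        http.get('http://www.example.com/xyz', (res: http.IncomingMessage) => {
            res.resume();
        });
    }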
}
//////////////////////////////////////////////////////
/// Https tests : http://nodejs.org/api/https.html ///
//////////////////////////////////////////////////////
namespace https_tests {
var agent: https.Agent = new https.Agent({
keepAlive: true,
keepAliveMsecs: 10000,
maxSockets: Infinity,
maxFreeSockets: 256,
maxCachedSessions: 100
});
var agent: https.Agent = https.globalAgent;
https.request({
agent: false
});
https.request({
agent: agent
});
https.request({
agent: undefined
});
https.request('http://www.example.com/xyz');
}
////////////////////////////////////////////////////
/// TTY tests : http://nodejs.org/api/tty.html
////////////////////////////////////////////////////
namespace tty_tests {
let rs: tty.ReadStream;
let ws: tty.WriteStream;
let rsIsRaw: boolean = rs.isRaw;
rs.setRawMode(true);
let wsColumns: number = ws.columns;
let wsRows: number = ws.rows;
let isTTY: boolean = tty.isatty(1);
}
////////////////////////////////////////////////////
/// Dgram tests : http://nodejs.org/api/dgram.html
////////////////////////////////////////////////////
namespace dgram_tests {
var ds: dgram.Socket = dgram.createSocket("udp4", (msg: Buffer, rinfo: dgram.RemoteInfo): void => {
});
var ai: dgram.AddressInfo = ds.address();
ds.send(new Buffer("hello"), 0, 5, 5000, "127.0.0.1", (error: Error, bytes: number): void => {
});
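    // Hedged additional sketch: Socket#bind and Socket#close as documented; the port
    // is an arbitrary illustrative value.
    ds.bind(41234);
    ds.close();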
}
////////////////////////////////////////////////////
///Querystring tests : https://nodejs.org/api/querystring.html
////////////////////////////////////////////////////
namespace querystring_tests {
type SampleObject = { a: string; b: number; }
{
let obj: SampleObject;
let sep: string;
let eq: string;
let options: querystring.StringifyOptions;
let result: string;
result = querystring.stringify<SampleObject>(obj);
result = querystring.stringify<SampleObject>(obj, sep);
result = querystring.stringify<SampleObject>(obj, sep, eq);
result = querystring.stringify<SampleObject>(obj, sep, eq, options);
}
{
let str: string;
let sep: string;
let eq: string;
let options: querystring.ParseOptions;
let result: SampleObject;
result = querystring.parse<SampleObject>(str);
result = querystring.parse<SampleObject>(str, sep);
result = querystring.parse<SampleObject>(str, sep, eq);
result = querystring.parse<SampleObject>(str, sep, eq, options);
}
{
let str: string;
let result: string;
result = querystring.escape(str);
result = querystring.unescape(str);
}
}
////////////////////////////////////////////////////
/// path tests : http://nodejs.org/api/path.html
////////////////////////////////////////////////////
namespace path_tests {
path.normalize('/foo/bar//baz/asdf/quux/..');
path.join('/foo', 'bar', 'baz/asdf', 'quux', '..');
// returns
//'/foo/bar/baz/asdf'
try {
path.join('foo', {}, 'bar');
}
catch (error) {
}
path.resolve('foo/bar', '/tmp/file/', '..', 'a/../subfile');
//Is similar to:
//
//cd foo/bar
//cd /tmp/file/
//cd ..
// cd a/../subfile
//pwd
path.resolve('/foo/bar', './baz')
// returns
// '/foo/bar/baz'
path.resolve('/foo/bar', '/tmp/file/')
// returns
// '/tmp/file'
path.resolve('wwwroot', 'static_files/png/', '../gif/image.gif')
// if currently in /home/myself/node, it returns
// '/home/myself/node/wwwroot/static_files/gif/image.gif'
path.isAbsolute('/foo/bar') // true
path.isAbsolute('/baz/..') // true
path.isAbsolute('qux/') // false
path.isAbsolute('.') // false
path.isAbsolute('//server') // true
path.isAbsolute('C:/foo/..') // true
path.isAbsolute('bar\\baz') // false
path.isAbsolute('.') // false
path.relative('C:\\orandea\\test\\aaa', 'C:\\orandea\\impl\\bbb')
// returns
// '..\\..\\impl\\bbb'
path.relative('/data/orandea/test/aaa', '/data/orandea/impl/bbb')
// returns
// '../../impl/bbb'
path.dirname('/foo/bar/baz/asdf/quux')
// returns
// '/foo/bar/baz/asdf'
path.basename('/foo/bar/baz/asdf/quux.html')
// returns
// 'quux.html'
path.basename('/foo/bar/baz/asdf/quux.html', '.html')
// returns
// 'quux'
path.extname('index.html')
// returns
// '.html'
path.extname('index.coffee.md')
// returns
// '.md'
path.extname('index.')
// returns
// '.'
path.extname('index')
// returns
// ''
'foo/bar/baz'.split(path.sep)
// returns
// ['foo', 'bar', 'baz']
'foo\\bar\\baz'.split(path.sep)
// returns
// ['foo', 'bar', 'baz']
console.log(process.env.PATH)
// '/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin'
process.env.PATH.split(path.delimiter)
// returns
// ['/usr/bin', '/bin', '/usr/sbin', '/sbin', '/usr/local/bin']
console.log(process.env.PATH)
// 'C:\Windows\system32;C:\Windows;C:\Program Files\nodejs\'
process.env.PATH.split(path.delimiter)
// returns
// ['C:\Windows\system32', 'C:\Windows', 'C:\Program Files\nodejs\']
path.parse('/home/user/dir/file.txt')
// returns
// {
// root : "/",
// dir : "/home/user/dir",
// base : "file.txt",
// ext : ".txt",
// name : "file"
// }
path.parse('C:\\path\\dir\\index.html')
// returns
// {
// root : "C:\",
// dir : "C:\path\dir",
// base : "index.html",
// ext : ".html",
// name : "index"
// }
path.format({
root: "/",
dir: "/home/user/dir",
base: "file.txt",
ext: ".txt",
name: "file"
});
// returns
// '/home/user/dir/file.txt'
}
////////////////////////////////////////////////////
/// readline tests : https://nodejs.org/api/readline.html
////////////////////////////////////////////////////
namespace readline_tests {
let rl: readline.ReadLine;
{
let options: readline.ReadLineOptions;
let input: NodeJS.ReadableStream;
let output: NodeJS.WritableStream;
let completer: readline.Completer;
let terminal: boolean;
let result: readline.ReadLine;
result = readline.createInterface(options);
result = readline.createInterface(input);
result = readline.createInterface(input, output);
result = readline.createInterface(input, output, completer);
result = readline.createInterface(input, output, completer, terminal);
result = readline.createInterface({
input: input,
completer: function(str: string): readline.CompleterResult {
return [['test'], 'test'];
}
});
}
{
let prompt: string;
rl.setPrompt(prompt);
}
{
let preserveCursor: boolean;
rl.prompt();
rl.prompt(preserveCursor);
}
{
let query: string;
let callback: (answer: string) => void;
rl.question(query, callback);
}
{
let result: readline.ReadLine;
result = rl.pause();
}
{
let result: readline.ReadLine;
result = rl.resume();
}
{
rl.close();
}
{
let data: string | Buffer;
let key: readline.Key;
rl.write(data);
rl.write(null, key);
}
{
let stream: NodeJS.WritableStream;
let x: number;
let y: number;
readline.cursorTo(stream, x, y);
}
{
let stream: NodeJS.WritableStream;
let dx: number | string;
let dy: number | string;
readline.moveCursor(stream, dx, dy);
}
{
let stream: NodeJS.WritableStream;
let dir: number;
readline.clearLine(stream, dir);
}
{
let stream: NodeJS.WritableStream;
readline.clearScreenDown(stream);
}
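    {
        // Hedged additional sketch: assumes ReadLine extends EventEmitter in these
        // typings, so the documented 'line' event can be subscribed to.
        rl.on('line', (line: string) => {
            console.log(line);
        });
    }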
}
////////////////////////////////////////////////////
/// string_decoder tests : https://nodejs.org/api/string_decoder.html
////////////////////////////////////////////////////
namespace string_decoder_tests {
const StringDecoder = string_decoder.StringDecoder;
const buffer = new Buffer('test');
const decoder = new StringDecoder('utf8');
const part: string = decoder.write(new Buffer('test'));
const end: string = decoder.end();
}
//////////////////////////////////////////////////////////////////////
/// Child Process tests: https://nodejs.org/api/child_process.html ///
//////////////////////////////////////////////////////////////////////
namespace child_process_tests {
{
childProcess.exec("echo test");
childProcess.spawnSync("echo test");
}
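    {
        // Hedged additional sketch: spawn and execFile as documented; the commands and
        // arguments are arbitrary illustrative values.
        childProcess.spawn('echo', ['test']);
        childProcess.execFile('node', ['--version'], (error, stdout, stderr) => {
        });
    }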
}
//////////////////////////////////////////////////////////////////////
/// cluster tests: https://nodejs.org/api/cluster.html ///
//////////////////////////////////////////////////////////////////////
namespace cluster_tests {
{
cluster.fork();
Object.keys(cluster.workers).forEach(key => {
const worker = cluster.workers[key];
if (worker.isDead()) {
console.log('worker %d is dead', worker.process.pid);
}
});
}
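    {
        // Hedged additional sketch: isMaster/isWorker and the current worker handle,
        // as documented for the cluster module.
        if (cluster.isMaster) {
            console.log('running in the master process');
        } else if (cluster.isWorker) {
            console.log('running in worker %s', cluster.worker.id);
        }
    }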
}
////////////////////////////////////////////////////
/// os tests : https://nodejs.org/api/os.html
////////////////////////////////////////////////////
namespace os_tests {
{
let result: string;
result = os.tmpdir();
result = os.homedir();
result = os.endianness();
result = os.hostname();
result = os.type();
result = os.platform();
result = os.arch();
result = os.release();
result = os.EOL;
}
{
let result: number;
result = os.uptime();
result = os.totalmem();
result = os.freemem();
}
{
let result: number[];
result = os.loadavg();
}
{
let result: os.CpuInfo[];
result = os.cpus();
}
{
let result: { [index: string]: os.NetworkInterfaceInfo[] };
result = os.networkInterfaces();
}
}
////////////////////////////////////////////////////
/// vm tests : https://nodejs.org/api/vm.html
////////////////////////////////////////////////////
namespace vm_tests {
{
const sandbox = {
animal: 'cat',
count: 2
};
const context = vm.createContext(sandbox);
console.log(vm.isContext(context));
const script = new vm.Script('count += 1; name = "kitty"');
for (let i = 0; i < 10; ++i) {
script.runInContext(context);
}
console.log(util.inspect(sandbox));
vm.runInNewContext('count += 1; name = "kitty"', sandbox);
console.log(util.inspect(sandbox));
}
{
const sandboxes = [{}, {}, {}];
const script = new vm.Script('globalVar = "set"');
sandboxes.forEach((sandbox) => {
script.runInNewContext(sandbox);
script.runInThisContext();
});
console.log(util.inspect(sandboxes));
var localVar = 'initial value';
vm.runInThisContext('localVar = "vm";');
console.log(localVar);
}
{
const Debug = vm.runInDebugContext('Debug');
Debug.scripts().forEach(function(script: any) { console.log(script.name); });
}
}
///////////////////////////////////////////////////////////////////////////////
/// Errors Tests : https://nodejs.org/dist/latest-v4.x/docs/api/errors.html ///
///////////////////////////////////////////////////////////////////////////////
namespace errors_tests {
{
Error.stackTraceLimit = Infinity;
}
{
const myObject = {};
Error.captureStackTrace(myObject);
}
}
///////////////////////////////////////////////////////////
/// Process Tests : https://nodejs.org/api/process.html ///
///////////////////////////////////////////////////////////
import * as p from "process";
namespace process_tests {
{
var eventEmitter: events.EventEmitter;
eventEmitter = process; // Test that process implements EventEmitter...
var _p: NodeJS.Process = process;
        _p = p;
    }
    {
        var module: NodeModule | undefined;
        module = process.mainModule;
    }
}
///////////////////////////////////////////////////////////
/// Console Tests : https://nodejs.org/api/console.html ///
///////////////////////////////////////////////////////////
import * as c from "console";
namespace console_tests {
{
var _c: Console = console;
_c = c;
}
}
///////////////////////////////////////////////////
/// Net Tests : https://nodejs.org/api/net.html ///
///////////////////////////////////////////////////
namespace net_tests {
{
// Make sure .listen() and .close() return a Server instance
net.createServer().listen(0).close().address();
}
{
/**
* net.Socket - events.EventEmitter
*/
let _socket: net.Socket = new net.Socket({
fd: 1,
allowHalfOpen: false,
readable: false,
writable: false
});
}
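    {
        // Hedged additional sketch: net.connect with an options object, as documented;
        // the host and port are arbitrary illustrative values.
        let socket: net.Socket = net.connect({ port: 80, host: 'localhost' });
        socket.end();
    }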
}
///////////////////////////////////////////////////
/// DNS Tests : https://nodejs.org/api/dns.html ///
///////////////////////////////////////////////////
namespace dns_tests {
dns.lookup("nodejs.org", (err, address, family) => {
const _err: NodeJS.ErrnoException = err;
const _address: string = address;
const _family: number = family;
});
dns.lookup("nodejs.org", 4, (err, address, family) => {
const _err: NodeJS.ErrnoException = err;
const _address: string = address;
const _family: number = family;
});
dns.lookup("nodejs.org", 6, (err, address, family) => {
const _err: NodeJS.ErrnoException = err;
const _address: string = address;
const _family: number = family;
});
dns.lookup("nodejs.org", {}, (err, address, family) => {
const _err: NodeJS.ErrnoException = err;
const _address: string = address;
const _family: number = family;
});
dns.lookup(
"nodejs.org",
{
family: 4,
hints: dns.ADDRCONFIG | dns.V4MAPPED,
all: false
},
(err, address, family) => {
const _err: NodeJS.ErrnoException = err;
const _address: string = address;
const _family: number = family;
}
);
dns.lookup("nodejs.org", {all: true}, (err, addresses) => {
const _err: NodeJS.ErrnoException = err;
const _address: dns.LookupAddress[] = addresses;
});
function trueOrFalse(): boolean {
return Math.random() > 0.5 ? true : false;
}
dns.lookup("nodejs.org", {all: trueOrFalse()}, (err, addresses, family) => {
const _err: NodeJS.ErrnoException = err;
const _addresses: string | dns.LookupAddress[] = addresses;
const _family: number | undefined = family;
});
dns.resolve("nodejs.org", (err, addresses) => {
const _addresses: string[] = addresses;
});
dns.resolve("nodejs.org", "A", (err, addresses) => {
const _addresses: string[] = addresses;
});
dns.resolve("nodejs.org", "AAAA", (err, addresses) => {
const _addresses: string[] = addresses;
});
dns.resolve("nodejs.org", "MX", (err, addresses) => {
const _addresses: dns.MxRecord[] = addresses;
});
dns.resolve4("nodejs.org", (err, addresses) => {
const _addresses: string[] = addresses;
});
dns.resolve6("nodejs.org", (err, addresses) => {
const _addresses: string[] = addresses;
});
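    // Hedged additional sketch: dns.reverse as documented; the address is an
    // arbitrary illustrative value.
    dns.reverse("8.8.8.8", (err, hostnames) => {
        const _hostnames: string[] = hostnames;
    });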
}
|