prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
---|---|
<|file_name|>DialupNetwork.py<|end_file_name|><|fim▁begin|># coding=utf-8
from blueman.services.meta import SerialService<|fim▁hole|>
class DialupNetwork(SerialService):
__group__ = 'serial'
__svclass_id__ = DIALUP_NET_SVCLASS_ID
__icon__ = "modem"
__priority__ = 50<|fim▁end|>
|
from blueman.Sdp import DIALUP_NET_SVCLASS_ID
|
<|file_name|>32.d.ts<|end_file_name|><|fim▁begin|>import { WatsonHealthEdgeEnhancement_0132 } from "../../../";
<|fim▁hole|><|fim▁end|>
|
export = WatsonHealthEdgeEnhancement_0132;
|
<|file_name|>plot_johnson_lindenstrauss_bound.py<|end_file_name|><|fim▁begin|>r"""
=====================================================================
The Johnson-Lindenstrauss bound for embedding with random projections
=====================================================================
The `Johnson-Lindenstrauss lemma`_ states that any high dimensional
dataset can be randomly projected into a lower dimensional Euclidean
space while controlling the distortion in the pairwise distances.
.. _`Johnson-Lindenstrauss lemma`: https://en.wikipedia.org/wiki/\
Johnson%E2%80%93Lindenstrauss_lemma
"""
print(__doc__)
import sys
from time import time
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from sklearn.random_projection import johnson_lindenstrauss_min_dim
from sklearn.random_projection import SparseRandomProjection
from sklearn.datasets import fetch_20newsgroups_vectorized
from sklearn.datasets import load_digits
from sklearn.metrics.pairwise import euclidean_distances
from sklearn.utils.fixes import parse_version
# `normed` is being deprecated in favor of `density` in histograms
if parse_version(matplotlib.__version__) >= parse_version('2.1'):
density_param = {'density': True}
else:
density_param = {'normed': True}
# %%
# Theoretical bounds
# ==================
# The distortion introduced by a random projection `p` is asserted by
# the fact that `p` is defining an eps-embedding with good probability
# as defined by:
#
# .. math::
# (1 - eps) \|u - v\|^2 < \|p(u) - p(v)\|^2 < (1 + eps) \|u - v\|^2
#
# Where u and v are any rows taken from a dataset of shape [n_samples,
# n_features] and p is a projection by a random Gaussian N(0, 1) matrix
# with shape [n_components, n_features] (or a sparse Achlioptas matrix).
#
# The minimum number of components to guarantees the eps-embedding is
# given by:
#
# .. math::
# n\_components >= 4 log(n\_samples) / (eps^2 / 2 - eps^3 / 3)
#
#
# The first plot shows that with an increasing number of samples ``n_samples``,
# the minimal number of dimensions ``n_components`` increased logarithmically
# in order to guarantee an ``eps``-embedding.
# range of admissible distortions
eps_range = np.linspace(0.1, 0.99, 5)
colors = plt.cm.Blues(np.linspace(0.3, 1.0, len(eps_range)))
# range of number of samples (observation) to embed
n_samples_range = np.logspace(1, 9, 9)
plt.figure()
for eps, color in zip(eps_range, colors):
min_n_components = johnson_lindenstrauss_min_dim(n_samples_range, eps=eps)
plt.loglog(n_samples_range, min_n_components, color=color)
plt.legend(["eps = %0.1f" % eps for eps in eps_range], loc="lower right")
plt.xlabel("Number of observations to eps-embed")
plt.ylabel("Minimum number of dimensions")
plt.title("Johnson-Lindenstrauss bounds:\nn_samples vs n_components")
plt.show()
# %%
# The second plot shows that an increase of the admissible
# distortion ``eps`` allows to reduce drastically the minimal number of
# dimensions ``n_components`` for a given number of samples ``n_samples``
# range of admissible distortions
eps_range = np.linspace(0.01, 0.99, 100)
# range of number of samples (observation) to embed
n_samples_range = np.logspace(2, 6, 5)
colors = plt.cm.Blues(np.linspace(0.3, 1.0, len(n_samples_range)))
plt.figure()
for n_samples, color in zip(n_samples_range, colors):
min_n_components = johnson_lindenstrauss_min_dim(n_samples, eps=eps_range)
plt.semilogy(eps_range, min_n_components, color=color)
<|fim▁hole|>plt.xlabel("Distortion eps")
plt.ylabel("Minimum number of dimensions")
plt.title("Johnson-Lindenstrauss bounds:\nn_components vs eps")
plt.show()
# %%
# Empirical validation
# ====================
#
# We validate the above bounds on the 20 newsgroups text document
# (TF-IDF word frequencies) dataset or on the digits dataset:
#
# - for the 20 newsgroups dataset some 500 documents with 100k
# features in total are projected using a sparse random matrix to smaller
# euclidean spaces with various values for the target number of dimensions
# ``n_components``.
#
# - for the digits dataset, some 8x8 gray level pixels data for 500
# handwritten digits pictures are randomly projected to spaces for various
# larger number of dimensions ``n_components``.
#
# The default dataset is the 20 newsgroups dataset. To run the example on the
# digits dataset, pass the ``--use-digits-dataset`` command line argument to
# this script.
if '--use-digits-dataset' in sys.argv:
data = load_digits().data[:500]
else:
data = fetch_20newsgroups_vectorized().data[:500]
# %%
# For each value of ``n_components``, we plot:
#
# - 2D distribution of sample pairs with pairwise distances in original
# and projected spaces as x and y axis respectively.
#
# - 1D histogram of the ratio of those distances (projected / original).
n_samples, n_features = data.shape
print("Embedding %d samples with dim %d using various random projections"
% (n_samples, n_features))
n_components_range = np.array([300, 1000, 10000])
dists = euclidean_distances(data, squared=True).ravel()
# select only non-identical samples pairs
nonzero = dists != 0
dists = dists[nonzero]
for n_components in n_components_range:
t0 = time()
rp = SparseRandomProjection(n_components=n_components)
projected_data = rp.fit_transform(data)
print("Projected %d samples from %d to %d in %0.3fs"
% (n_samples, n_features, n_components, time() - t0))
if hasattr(rp, 'components_'):
n_bytes = rp.components_.data.nbytes
n_bytes += rp.components_.indices.nbytes
print("Random matrix with size: %0.3fMB" % (n_bytes / 1e6))
projected_dists = euclidean_distances(
projected_data, squared=True).ravel()[nonzero]
plt.figure()
min_dist = min(projected_dists.min(), dists.min())
max_dist = max(projected_dists.max(), dists.max())
plt.hexbin(dists, projected_dists, gridsize=100, cmap=plt.cm.PuBu,
extent=[min_dist, max_dist, min_dist, max_dist])
plt.xlabel("Pairwise squared distances in original space")
plt.ylabel("Pairwise squared distances in projected space")
plt.title("Pairwise distances distribution for n_components=%d" %
n_components)
cb = plt.colorbar()
cb.set_label('Sample pairs counts')
rates = projected_dists / dists
print("Mean distances rate: %0.2f (%0.2f)"
% (np.mean(rates), np.std(rates)))
plt.figure()
plt.hist(rates, bins=50, range=(0., 2.), edgecolor='k', **density_param)
plt.xlabel("Squared distances rate: projected / original")
plt.ylabel("Distribution of samples pairs")
plt.title("Histogram of pairwise distance rates for n_components=%d" %
n_components)
# TODO: compute the expected value of eps and add them to the previous plot
# as vertical lines / region
plt.show()
# %%
# We can see that for low values of ``n_components`` the distribution is wide
# with many distorted pairs and a skewed distribution (due to the hard
# limit of zero ratio on the left as distances are always positives)
# while for larger values of n_components the distortion is controlled
# and the distances are well preserved by the random projection.
# %%
# Remarks
# =======
#
# According to the JL lemma, projecting 500 samples without too much distortion
# will require at least several thousands dimensions, irrespective of the
# number of features of the original dataset.
#
# Hence using random projections on the digits dataset which only has 64
# features in the input space does not make sense: it does not allow
# for dimensionality reduction in this case.
#
# On the twenty newsgroups on the other hand the dimensionality can be
# decreased from 56436 down to 10000 while reasonably preserving
# pairwise distances.<|fim▁end|>
|
plt.legend(["n_samples = %d" % n for n in n_samples_range], loc="upper right")
|
<|file_name|>generator-desc.rs<|end_file_name|><|fim▁begin|>// edition:2018
#![feature(async_closure)]
use std::future::Future;
async fn one() {}
async fn two() {}
fn fun<F: Future<Output = ()>>(f1: F, f2: F) {}
fn main() {
fun(async {}, async {});
//~^ ERROR mismatched types<|fim▁hole|> //~^ ERROR mismatched types
fun((async || {})(), (async || {})());
//~^ ERROR mismatched types
}<|fim▁end|>
|
fun(one(), two());
|
<|file_name|>entity-extract-store.js<|end_file_name|><|fim▁begin|>import {observable, runInAction, computed, action, reaction, autorun} from "mobx";
import LynlpApi from "../common/lynlp-api"
import _ from "lodash";
class EntityExtractStore {
@observable isFetching = false;
@observable currentItem = '图形展示';
@observable entity = {};
@action
fetchData(content) {
this.isFetching = true;
LynlpApi.entity(content).then((result)=>{
this.entity = result;
this.isFetching = false;
})
}
}<|fim▁hole|>
const entityExtractStore = new EntityExtractStore();
export default entityExtractStore<|fim▁end|>
| |
<|file_name|>API.py<|end_file_name|><|fim▁begin|># kate: replace-tabs on; indent-width 4; remove-trailing-spaces all; show-tabs on; newline-at-eof on;
# -*- coding:utf-8 -*-
'''
Copyright (C) 2014 Peter Urbanec
All Right Reserved
License: Proprietary / Commercial - contact enigma.licensing (at) urbanec.net
'''
import requests
import json
from fcntl import ioctl
from struct import pack
from socket import socket, create_connection, AF_INET, SOCK_DGRAM, SHUT_RDWR, error as sockerror
from . import config, saveConfigFile
from boxbranding import getMachineBrand, getMachineName
_version_string = "20141027"
_protocol = "http://"
_server = "api.icetv.com.au"
_device_type_id = 22
_debug_level = 0 # 1 = request/reply, 2 = 1+headers, 3 = 2+partial body, 4 = 2+full body
def isServerReachable():
try:
sock = create_connection((_server, 80), 3)
sock.shutdown(SHUT_RDWR)
sock.close()<|fim▁hole|>
def getMacAddress(ifname):
result = "00:00:00:00:00:00"
sock = socket(AF_INET, SOCK_DGRAM)
# noinspection PyBroadException
try:
iface = pack('256s', ifname[:15])
info = ioctl(sock.fileno(), 0x8927, iface)
result = ''.join(['%02x:' % ord(char) for char in info[18:24]])[:-1].upper()
except:
pass
sock.close()
return result
def haveCredentials():
return bool(config.plugins.icetv.member.token.value)
def getCredentials():
return {
"email_address": config.plugins.icetv.member.email_address.value,
"token": config.plugins.icetv.member.token.value,
}
def clearCredentials():
config.plugins.icetv.member.token.value = ""
config.plugins.icetv.member.token.save()
saveConfigFile()
def showIdToEventId(show_id):
# Fit within 16 bits, but avoid 0 and 0xFFF8 - 0xFFFF
return (int(show_id) % 0xFFF7) + 1
class Request(object):
def __init__(self, resource):
super(Request, self).__init__()
self.params = {
"api_key": "9019fa88-bd0c-4b1b-94ac-6761aa6a664f",
"application_version": _version_string,
}
self.headers = {
"Content-Type": "application/json",
"Accept": "application/json",
"User-Agent": "SystemPlugins.IceTV/%s (%s; %s)" % (_version_string, getMachineBrand(), getMachineName()),
}
self.url = _protocol + _server + resource
self.data = {}
self.response = None
def _shorten(self, text):
if len(text) < 4000:
return text
return text[:2000] + "\n...\n" + text[-2000:]
def send(self, method):
data = json.dumps(self.data)
r = requests.request(method, self.url, params=self.params, headers=self.headers, data=data, verify=False)
err = not r.ok
if err or _debug_level > 0:
print "[IceTV]", r.request.method, r.request.url
if err or _debug_level > 1:
print "[IceTV] headers", r.request.headers
if err or _debug_level == 3:
print "[IceTV]", self._shorten(r.request.body)
elif err or _debug_level > 3:
print "[IceTV]", r.request.body
if err or _debug_level > 0:
print "[IceTV]", r.status_code, r.reason
if err or _debug_level > 1:
print "[IceTV] headers", r.headers
if err or _debug_level == 3:
print "[IceTV]", self._shorten(r.text)
elif err or _debug_level > 3:
print "[IceTV]", r.text
self.response = r
if r.status_code == 401:
clearCredentials()
r.raise_for_status()
return r
class AuthRequest(Request):
def __init__(self, resource):
super(AuthRequest, self).__init__(resource)
self.params.update(getCredentials())
class Regions(Request):
def __init__(self):
super(Regions, self).__init__("/regions")
def get(self):
return self.send("get")
class Region(Request):
def __init__(self, region):
super(Region, self).__init__("/regions/" + str(int(region)))
def get(self):
return self.send("get")
class Channels(Request):
def __init__(self, region=None):
if region is None:
super(Channels, self).__init__("/regions/channels")
else:
super(Channels, self).__init__("/regions/" + str(int(region)) + "/channels")
def get(self):
return self.send("get")
class Login(Request):
def __init__(self, email, password, region=None):
super(Login, self).__init__("/login")
self.data["device"] = {
"uid": getMacAddress('eth0'),
"label": config.plugins.icetv.device.label.value,
"type_id": config.plugins.icetv.device.type_id.value,
}
self.data["member"] = {
"email_address": email,
"password": password,
}
if region:
self.data["member"]["region_id"] = region
def post(self):
return self.send("post")
def put(self):
return self.send("put")
def send(self, method):
r = super(Login, self).send(method)
result = r.json()
config.plugins.icetv.member.email_address.value = result["member"]["email_address"]
config.plugins.icetv.member.token.value = result["member"]["token"]
config.plugins.icetv.member.id.value = result["member"]["id"]
config.plugins.icetv.member.region_id.value = result["member"]["region_id"]
config.plugins.icetv.device.id.value = result["device"]["id"]
config.plugins.icetv.device.label.value = result["device"]["label"]
config.plugins.icetv.device.type_id.value = result["device"]["type_id"]
config.plugins.icetv.save()
saveConfigFile()
return r
class Logout(AuthRequest):
def __init__(self):
super(Logout, self).__init__("/logout")
def delete(self):
return self.send("delete")
def send(self, method):
r = super(Logout, self).send(method)
clearCredentials()
return r
class Devices(AuthRequest):
def __init__(self):
super(Devices, self).__init__("/devices")
def get(self):
return self.send("get")
def post(self):
return self.send("post")
class Device(AuthRequest):
def __init__(self, deviceid):
super(Device, self).__init__("/devices/" + str(int(deviceid)))
def get(self):
return self.send("get")
def put(self):
return self.send("put")
def delete(self):
return self.send("delete")
class DeviceTypes(AuthRequest):
def __init__(self):
super(DeviceTypes, self).__init__("/devices/types")
def get(self):
return self.send("get")
class DeviceType(AuthRequest):
def __init__(self, deviceid):
super(DeviceType, self).__init__("/devices/types/" + str(int(deviceid)))
def get(self):
return self.send("get")
class DeviceManufacturers(AuthRequest):
def __init__(self):
super(DeviceManufacturers, self).__init__("/devices/manufacturers")
def get(self):
return self.send("get")
class DeviceManufacturer(AuthRequest):
def __init__(self, deviceid):
super(DeviceManufacturer, self).__init__("/devices/manufacturers/" + str(int(deviceid)))
def get(self):
return self.send("get")
class Shows(AuthRequest):
def __init__(self):
super(Shows, self).__init__("/shows")
def get(self):
return self.send("get")
class Timers(AuthRequest):
def __init__(self):
super(Timers, self).__init__("/shows/timers")
def get(self):
return self.send("get")
def post(self):
return self.send("post")
def put(self):
return self.send("put")
class Timer(AuthRequest):
def __init__(self, timerid):
super(Timer, self).__init__("/shows/timers/" + str(timerid))
def get(self):
return self.send("get")
def put(self):
return self.send("put")
def delete(self):
return self.send("delete")<|fim▁end|>
|
return True
except sockerror as ex:
print "[IceTV] Can not connect to IceTV server:", str(ex)
return False
|
<|file_name|>server_exp2.py<|end_file_name|><|fim▁begin|>import os
import random
import time
import json
from locust import HttpLocust, TaskSet, task
from lib.baseTaskSet import baseTaskSet
# TODO - make these config-driven
from lib.openstack.keystone import get_auth_token
from lib.openstack.nova import list_servers
from lib.openstack.nova import list_servers_detail
from lib.openstack.nova import list_server_detail
from lib.openstack.nova import create_server
from lib.openstack.nova import delete_server
from lib.openstack.nova import reboot_server
from lib.openstack.nova import resize_server
from lib.openstack.nova import confirm_resize_server
from lib.openstack.nova import revert_resize_server
from lib.openstack.nova import list_limits
from lib.openstack.nova import nova_get_server_id
class UserBehavior(baseTaskSet):
def on_start(self):
super(UserBehavior, self).on_start()
self.server_count = 0
self.min_server_count = 7
self.max_server_count = 10
self.auth_token, self.tenant_id, self.service_catalog = get_auth_token(self)
@task(2)
def nova_create_server(self):
flavor_id = random.choice([42,84])
response = create_server(self,
flavor_id=flavor_id,
name="server-%s-%s" % (self.id, self.server_count))
server_id = json.loads(response.content)['server']['id']
self.server_count += 1
time.sleep(random.choice([1,1,3,3,3,5,5,5,5,5,5,10,10,10,10,25]))
self.nova_resize_server()
self.output("server id: %s" % server_id)
@task(5)
def nova_resize_server(self):
server_id = nova_get_server_id(self)
flavor_id = random.choice([42,84,
9999, 9999, 9999, 9999,
9998, 9998, 9998, 9998,
451, 451, 451])
self.output("Resize server | %s | %s " % (server_id, flavor_id))
if server_id:
resize_server(self, server_id, flavor_id)
time.sleep(random.choice([5,9,9,9,9,10,10,10,10,10,10,10,10,15,15,15,25,25,25,25]))
choices = [1,1,1,1,1,2,2]
#if random.choice(choices) %2 != 0:
if choices:
self.output("RESIZE YUSSSS!")
confirm_resize_server(self, server_id)
else:
revert_resize_server(self,server_id)
else:
pass
@task(1)
def nova_confirm_resize_server(self):
server_id = nova_get_server_id(self)
confirm_resize_server(self, server_id)
@task(1)
def nova_revert_resize_server(self):
server_id = nova_get_server_id(self)
revert_resize_server(self, server_id)
@task(2)
def nova_reboot_server(self):
server_id = nova_get_server_id(self)
reboot_server(self, server_id)
time.sleep(random.choice([1,1,1,1,3,3,3,5,10,25]))
#@task(1)
def nova_delete_server(self):
server_id = nova_get_server_id(self)
delete_server(self, server_id)
@task(3)
def nova_list_servers(self):
self.output("LIST_SERVERS")
response = list_servers(self)
@task(3)
def check_server_pool(self):
response = list_servers(self)
servers = json.loads(response.content)['servers']
if len(servers) < self.min_server_count:
self.nova_create_server()
elif len(servers) == self.max_server_count:
self.nova_delete_server()
@task(4)
def nova_list_servers_detail(self):
self.output("LIST_SERVERS_DETAIL")
list_servers_detail(self)
@task(4)
def nova_list_limits(self):
list_limits(self)
@task(3)
def keystone_auth_token(self):<|fim▁hole|>
class WebsiteUser(HttpLocust):
task_set = UserBehavior
min_wait=500
max_wait=5000<|fim▁end|>
|
self.auth_token, self.tenant_id, self.service_catalog = get_auth_token(self)
|
<|file_name|>PROC_A_SUBJECT_D002015.py<|end_file_name|><|fim▁begin|>#coding=UTF-8
from pyspark import SparkContext, SparkConf, SQLContext, Row, HiveContext
from pyspark.sql.types import *
from datetime import date, datetime, timedelta
import sys, re, os
st = datetime.now()
conf = SparkConf().setAppName('PROC_A_SUBJECT_D002015').setMaster(sys.argv[2])
sc = SparkContext(conf = conf)
sc.setLogLevel('WARN')
if len(sys.argv) > 5:
if sys.argv[5] == "hive":
sqlContext = HiveContext(sc)
else:
sqlContext = SQLContext(sc)
hdfs = sys.argv[3]
dbname = sys.argv[4]
#处理需要使用的日期
etl_date = sys.argv[1]
#etl日期
V_DT = etl_date
#上一日日期
V_DT_LD = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8])) + timedelta(-1)).strftime("%Y%m%d")
#月初日期
V_DT_FMD = date(int(etl_date[0:4]), int(etl_date[4:6]), 1).strftime("%Y%m%d")
#上月末日期
V_DT_LMD = (date(int(etl_date[0:4]), int(etl_date[4:6]), 1) + timedelta(-1)).strftime("%Y%m%d")
#10位日期
V_DT10 = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8]))).strftime("%Y-%m-%d")
V_STEP = 0
ACRM_F_CI_ASSET_BUSI_PROTO = sqlContext.read.parquet(hdfs+'/ACRM_F_CI_ASSET_BUSI_PROTO/*')
ACRM_F_CI_ASSET_BUSI_PROTO.registerTempTable("ACRM_F_CI_ASSET_BUSI_PROTO")
#任务[21] 001-01::
V_STEP = V_STEP + 1
sql = """
SELECT CAST(A.CUST_ID AS VARCHAR(32)) AS CUST_ID
,CAST('' AS VARCHAR(20)) AS ORG_ID --插入的空值,包顺龙2017/05/13
,CAST('D002015' AS VARCHAR(20)) AS INDEX_CODE
,CAST(SUM(TAKE_CGT_LINE) AS DECIMAL(22,2)) AS INDEX_VALUE
,CAST(SUBSTR(V_DT, 1, 7) AS VARCHAR(7)) AS YEAR_MONTH
,CAST(V_DT AS DATE) AS ETL_DATE
,CAST(A.CUST_TYP AS VARCHAR(5)) AS CUST_TYPE
,CAST(A.FR_ID AS VARCHAR(5)) AS FR_ID
FROM ACRM_F_CI_ASSET_BUSI_PROTO A
WHERE A.BAL > 0
AND A.LN_APCL_FLG = 'N'
AND(A.PRODUCT_ID LIKE '1010%'
OR A.PRODUCT_ID LIKE '1030%'
OR A.PRODUCT_ID LIKE '1040%'
OR A.PRODUCT_ID LIKE '1050%'
OR A.PRODUCT_ID LIKE '1060%'
OR A.PRODUCT_ID LIKE '1070%'
OR A.PRODUCT_ID LIKE '2010%'
OR A.PRODUCT_ID LIKE '2020%'
OR A.PRODUCT_ID LIKE '2030%'
OR A.PRODUCT_ID LIKE '2040%'
OR A.PRODUCT_ID LIKE '2050%')
GROUP BY A.CUST_ID
,A.CUST_TYP
,A.FR_ID """
sql = re.sub(r"\bV_DT\b", "'"+V_DT10+"'", sql)
ACRM_A_TARGET_D002015 = sqlContext.sql(sql)
ACRM_A_TARGET_D002015.registerTempTable("ACRM_A_TARGET_D002015")
dfn="ACRM_A_TARGET_D002015/"+V_DT+".parquet"<|fim▁hole|>ACRM_F_CI_ASSET_BUSI_PROTO.unpersist()
ret = os.system("hdfs dfs -rm -r /"+dbname+"/ACRM_A_TARGET_D002015/"+V_DT_LD+".parquet")
et = datetime.now()
print("Step %d start[%s] end[%s] use %d seconds, insert ACRM_A_TARGET_D002015 lines %d") % (V_STEP, st.strftime("%H:%M:%S"), et.strftime("%H:%M:%S"), (et-st).seconds, nrows)<|fim▁end|>
|
ACRM_A_TARGET_D002015.cache()
nrows = ACRM_A_TARGET_D002015.count()
ACRM_A_TARGET_D002015.write.save(path=hdfs + '/' + dfn, mode='overwrite')
ACRM_A_TARGET_D002015.unpersist()
|
<|file_name|>test_timeseries.py<|end_file_name|><|fim▁begin|>import os
from pvfactors.geometry.timeseries import TsPointCoords, TsLineCoords
from pvfactors.geometry.pvrow import TsPVRow
from pvfactors.geometry.pvground import TsGround, TsGroundElement
import pandas as pd
import numpy as np
from pvfactors.geometry.pvrow import PVRow
from pvfactors.geometry.base import \
BaseSide, PVSegment, PVSurface, ShadeCollection
from pvfactors.config import MIN_X_GROUND, MAX_X_GROUND
def test_ts_pvrow():
"""Test timeseries pv row creation and shading cases.
Note that shading must always be zero when pv rows are flat"""
xy_center = (0, 2)
width = 2.
df_inputs = pd.DataFrame({
'rotation_vec': [20., -30., 0.],
'shaded_length_front': [1.3, 0., 1.9],
'shaded_length_back': [0, 0.3, 0.6]})
cut = {'front': 3, 'back': 4}
ts_pvrow = TsPVRow.from_raw_inputs(
xy_center, width, df_inputs.rotation_vec,
cut, df_inputs.shaded_length_front,
df_inputs.shaded_length_back)
# check segment index
assert len(ts_pvrow.front.list_segments) == 3
assert [s.index for s in ts_pvrow.front.list_segments] == [0, 1, 2]
# Check timeseries length of front and back segments
for seg in ts_pvrow.front.list_segments:
np.testing.assert_allclose(width / cut['front'], seg.length)
for seg in ts_pvrow.back.list_segments:
np.testing.assert_allclose(width / cut['back'], seg.length)
# Check shaded length on either sides of pv rows
expected_front_shading = np.where(df_inputs.rotation_vec,
df_inputs.shaded_length_front, 0.)
expected_back_shading = np.where(df_inputs.rotation_vec,
df_inputs.shaded_length_back, 0.)
np.testing.assert_allclose(expected_front_shading,
ts_pvrow.front.shaded_length)
np.testing.assert_allclose(expected_back_shading,
ts_pvrow.back.shaded_length)
def test_plot_ts_pvrow():
is_ci = os.environ.get('CI', False)
if not is_ci:
import matplotlib.pyplot as plt
# Create a PV row
xy_center = (0, 2)
width = 2.
df_inputs = pd.DataFrame({
'rotation_vec': [20., -30., 0.],
'shaded_length_front': [1.3, 0., 1.9],
'shaded_length_back': [0, 0.3, 0.6]})
cut = {'front': 3, 'back': 4}
ts_pvrow = TsPVRow.from_raw_inputs(
xy_center, width, df_inputs.rotation_vec,
cut, df_inputs.shaded_length_front,
df_inputs.shaded_length_back)
# Plot it at ts 0
f, ax = plt.subplots()
ts_pvrow.plot_at_idx(0, ax)
plt.show()
# Plot it at ts 1
f, ax = plt.subplots()
ts_pvrow.plot_at_idx(1, ax)
plt.show()
# Plot it at ts 2: flat case
f, ax = plt.subplots()
ts_pvrow.plot_at_idx(2, ax)
plt.show()
def test_ts_pvrow_to_geometry():
"""Check that the geometries are created correctly"""
xy_center = (0, 2)
width = 2.
df_inputs = pd.DataFrame({
'rotation_vec': [20., -30., 0.],
'shaded_length_front': [1.3, 0., 1.9],
'shaded_length_back': [0, 0.3, 0.6]})
cut = {'front': 3, 'back': 4}
param_names = ['test1', 'test2']
ts_pvrow = TsPVRow.from_raw_inputs(
xy_center, width, df_inputs.rotation_vec,
cut, df_inputs.shaded_length_front,
df_inputs.shaded_length_back, param_names=param_names)
pvrow = ts_pvrow.at(0)
# Check classes of geometries
assert isinstance(pvrow, PVRow)
assert isinstance(pvrow.front, BaseSide)
assert isinstance(pvrow.back, BaseSide)
assert isinstance(pvrow.front.list_segments[0], PVSegment)
assert isinstance(pvrow.back.list_segments[0].illum_collection,
ShadeCollection)
assert isinstance(pvrow.front.list_segments[1].illum_collection
.list_surfaces[0], PVSurface)
# Check some values
np.testing.assert_allclose(pvrow.front.shaded_length, 1.3)
front_surface = (pvrow.front.list_segments[1].illum_collection
.list_surfaces[0])
back_surface = (pvrow.back.list_segments[1].illum_collection
.list_surfaces[0])
n_vector_front = front_surface.n_vector
n_vector_back = back_surface.n_vector
expected_n_vec_front = np.array([-0.68404029, 1.87938524])
np.testing.assert_allclose(n_vector_front, expected_n_vec_front)
np.testing.assert_allclose(n_vector_back, - expected_n_vec_front)
assert front_surface.param_names == param_names
assert back_surface.param_names == param_names
def test_ts_ground_from_ts_pvrow():
"""Check that ground geometries are created correctly from ts pvrow"""
# Create a ts pv row
xy_center = (0, 2)
width = 2.
df_inputs = pd.DataFrame({
'rotation_vec': [20., -90., 0.],
'shaded_length_front': [1.3, 0., 1.9],
'shaded_length_back': [0, 0.3, 0.6]})
cut = {'front': 3, 'back': 4}
param_names = ['test1', 'test2']
ts_pvrow = TsPVRow.from_raw_inputs(
xy_center, width, df_inputs.rotation_vec,
cut, df_inputs.shaded_length_front,
df_inputs.shaded_length_back, param_names=param_names)
# Create ground from it
alpha_vec = np.deg2rad([80., 90., 70.])
ts_ground = TsGround.from_ts_pvrows_and_angles(
[ts_pvrow], alpha_vec, df_inputs.rotation_vec, param_names=param_names)
assert len(ts_ground.shadow_elements) == 1
# Check at specific times
ground_0 = ts_ground.at(0)
assert ground_0.n_surfaces == 4
assert ground_0.list_segments[0].shaded_collection.n_surfaces == 1
ground_1 = ts_ground.at(1) # vertical, sun above
assert ground_1.n_surfaces == 2 # only 2 illuminated surfaces
assert ground_1.list_segments[0].shaded_collection.n_surfaces == 0
assert ground_1.shaded_length == 0 # no shadow (since shadow length 0ish)
np.testing.assert_allclose(ground_0.shaded_length, 1.7587704831436)
np.testing.assert_allclose(ts_ground.at(2).shaded_length, width) # flat
# Check that all have surface params
for surf in ground_0.all_surfaces:
assert surf.param_names == param_names
def test_ts_ground_overlap():
shadow_coords = np.array([
[[[0, 0], [0, 0]], [[2, 1], [0, 0]]],
[[[1, 2], [0, 0]], [[5, 5], [0, 0]]]
])
overlap = [True, False]
# Test without overlap
ts_ground = TsGround.from_ordered_shadows_coords(shadow_coords)
np.testing.assert_allclose(ts_ground.shadow_elements[0].b2.x, [2, 1])
# Test with overlap
ts_ground = TsGround.from_ordered_shadows_coords(shadow_coords,
flag_overlap=overlap)
np.testing.assert_allclose(ts_ground.shadow_elements[0].b2.x, [1, 1])
def test_ts_ground_to_geometry():
# There should be an overlap
shadow_coords = np.array([
[[[0, 0], [0, 0]], [[2, 1], [0, 0]]],
[[[1, 2], [0, 0]], [[5, 5], [0, 0]]]
])
overlap = [True, False]
cut_point_coords = [TsPointCoords.from_array(np.array([[2, 2], [0, 0]]))]
# Test with overlap
ts_ground = TsGround.from_ordered_shadows_coords(
shadow_coords, flag_overlap=overlap, cut_point_coords=cut_point_coords)
# Run some checks for index 0
pvground = ts_ground.at(0, merge_if_flag_overlap=False,
with_cut_points=False)
assert pvground.n_surfaces == 4
assert pvground.list_segments[0].illum_collection.n_surfaces == 2
assert pvground.list_segments[0].shaded_collection.n_surfaces == 2
assert pvground.list_segments[0].shaded_collection.length == 5
np.testing.assert_allclose(pvground.shaded_length, 5)
# Run some checks for index 1
pvground = ts_ground.at(1, with_cut_points=False)
assert pvground.n_surfaces == 5
assert pvground.list_segments[0].illum_collection.n_surfaces == 3
assert pvground.list_segments[0].shaded_collection.n_surfaces == 2
assert pvground.list_segments[0].shaded_collection.length == 4
np.testing.assert_allclose(pvground.shaded_length, 4)
# Run some checks for index 0, when merging
pvground = ts_ground.at(0, merge_if_flag_overlap=True,
with_cut_points=False)
assert pvground.n_surfaces == 3
assert pvground.list_segments[0].illum_collection.n_surfaces == 2
assert pvground.list_segments[0].shaded_collection.n_surfaces == 1
assert pvground.list_segments[0].shaded_collection.length == 5
np.testing.assert_allclose(pvground.shaded_length, 5)
# Run some checks for index 0, when merging and with cut points
pvground = ts_ground.at(0, merge_if_flag_overlap=True,
with_cut_points=True)
assert pvground.n_surfaces == 4
assert pvground.list_segments[0].illum_collection.n_surfaces == 2
assert pvground.list_segments[0].shaded_collection.n_surfaces == 2
assert pvground.list_segments[0].shaded_collection.length == 5
np.testing.assert_allclose(pvground.shaded_length, 5)
def test_shadows_coords_left_right_of_cut_point():
"""Test that coords left and right of cut point are created correctly"""
# Ground inputs
shadow_coords = np.array([<|fim▁hole|> [[[3], [0]], [[5], [0]]]
], dtype=float)
overlap = [False]
# --- Create timeseries ground
cut_point = TsPointCoords([2.5], [0])
ts_ground = TsGround.from_ordered_shadows_coords(
shadow_coords, flag_overlap=overlap,
cut_point_coords=[cut_point])
# Get left and right shadows
shadows_left = ts_ground.shadow_coords_left_of_cut_point(0)
shadows_right = ts_ground.shadow_coords_right_of_cut_point(0)
# Reformat for testing
shadows_left = [shadow.as_array for shadow in shadows_left]
shadows_right = [shadow.as_array for shadow in shadows_right]
expected_shadows_left = [shadow_coords[0],
[cut_point.as_array, cut_point.as_array]]
expected_shadows_right = [[cut_point.as_array, cut_point.as_array],
shadow_coords[1]]
# Test that correct
np.testing.assert_allclose(shadows_left, expected_shadows_left)
np.testing.assert_allclose(shadows_right, expected_shadows_right)
# --- Case where pv rows are flat, cut point are inf
cut_point = TsPointCoords([np.inf], [0])
ts_ground = TsGround.from_ordered_shadows_coords(
shadow_coords, flag_overlap=overlap,
cut_point_coords=[cut_point])
# Get right shadows
shadows_right = ts_ground.shadow_coords_right_of_cut_point(0)
# Test that correct
maxi = MAX_X_GROUND
expected_shadows_right = np.array([[[[maxi], [0.]], [[maxi], [0.]]],
[[[maxi], [0.]], [[maxi], [0.]]]])
shadows_right = [shadow.as_array for shadow in shadows_right]
np.testing.assert_allclose(shadows_right, expected_shadows_right)
# --- Case where pv rows are flat, cut point are - inf
cut_point = TsPointCoords([- np.inf], [0])
ts_ground = TsGround.from_ordered_shadows_coords(
shadow_coords, flag_overlap=overlap,
cut_point_coords=[cut_point])
# Get left shadows
shadows_left = ts_ground.shadow_coords_left_of_cut_point(0)
# Test that correct
mini = MIN_X_GROUND
expected_shadows_left = np.array([[[[mini], [0.]], [[mini], [0.]]],
[[[mini], [0.]], [[mini], [0.]]]])
shadows_left = [shadow.as_array for shadow in shadows_left]
np.testing.assert_allclose(shadows_left, expected_shadows_left)
def test_ts_ground_elements_surfaces():
"""Check timeseries ground elements are created correctly"""
# Create timeseries coords
gnd_element_coords = TsLineCoords.from_array(
np.array([[[-1, -1], [0, 0]], [[1, 1], [0, 0]]]))
pt_coords_1 = TsPointCoords.from_array(np.array([[-0.5, -1], [0, 0]]))
pt_coords_2 = TsPointCoords.from_array(np.array([[0.5, 0], [0, 0]]))
# Create gnd element
gnd_element = TsGroundElement(
gnd_element_coords,
list_ordered_cut_pts_coords=[pt_coords_1, pt_coords_2])
# Check that structures contain the correct number of ts surfaces
assert len(gnd_element.surface_list) == 3
assert len(gnd_element.surface_dict[0]['left']) == 1
assert len(gnd_element.surface_dict[1]['left']) == 2
assert len(gnd_element.surface_dict[0]['right']) == 2
assert len(gnd_element.surface_dict[1]['right']) == 1
# Check that the objects are the same
assert (gnd_element.surface_list[0]
== gnd_element.surface_dict[0]['left'][0])
assert (gnd_element.surface_list[0]
== gnd_element.surface_dict[1]['left'][0])
assert (gnd_element.surface_list[1]
== gnd_element.surface_dict[0]['right'][0])
assert (gnd_element.surface_list[1]
== gnd_element.surface_dict[1]['left'][1])
assert (gnd_element.surface_list[2]
== gnd_element.surface_dict[0]['right'][1])
assert (gnd_element.surface_list[2]
== gnd_element.surface_dict[1]['right'][0])
# Now check surfaces lengths
np.testing.assert_allclose(gnd_element.surface_list[0].length, [0.5, 0])
np.testing.assert_allclose(gnd_element.surface_list[1].length, [1, 1])
np.testing.assert_allclose(gnd_element.surface_list[2].length, [0.5, 1])
# Check coords of surfaces
np.testing.assert_allclose(gnd_element.surface_list[0].b1.x, [-1, -1])
np.testing.assert_allclose(gnd_element.surface_list[0].b2.x, [-0.5, -1])<|fim▁end|>
|
[[[0], [0]], [[2], [0]]],
|
<|file_name|>response_container_paged_recent_traces_search.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Wavefront REST API Documentation
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six<|fim▁hole|>
from wavefront_api_client.configuration import Configuration
class ResponseContainerPagedRecentTracesSearch(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'response': 'PagedRecentTracesSearch',
'status': 'ResponseStatus'
}
attribute_map = {
'response': 'response',
'status': 'status'
}
def __init__(self, response=None, status=None, _configuration=None): # noqa: E501
"""ResponseContainerPagedRecentTracesSearch - a model defined in Swagger""" # noqa: E501
if _configuration is None:
_configuration = Configuration()
self._configuration = _configuration
self._response = None
self._status = None
self.discriminator = None
if response is not None:
self.response = response
self.status = status
@property
def response(self):
"""Gets the response of this ResponseContainerPagedRecentTracesSearch. # noqa: E501
:return: The response of this ResponseContainerPagedRecentTracesSearch. # noqa: E501
:rtype: PagedRecentTracesSearch
"""
return self._response
@response.setter
def response(self, response):
"""Sets the response of this ResponseContainerPagedRecentTracesSearch.
:param response: The response of this ResponseContainerPagedRecentTracesSearch. # noqa: E501
:type: PagedRecentTracesSearch
"""
self._response = response
@property
def status(self):
"""Gets the status of this ResponseContainerPagedRecentTracesSearch. # noqa: E501
:return: The status of this ResponseContainerPagedRecentTracesSearch. # noqa: E501
:rtype: ResponseStatus
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this ResponseContainerPagedRecentTracesSearch.
:param status: The status of this ResponseContainerPagedRecentTracesSearch. # noqa: E501
:type: ResponseStatus
"""
if self._configuration.client_side_validation and status is None:
raise ValueError("Invalid value for `status`, must not be `None`") # noqa: E501
self._status = status
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ResponseContainerPagedRecentTracesSearch, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ResponseContainerPagedRecentTracesSearch):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, ResponseContainerPagedRecentTracesSearch):
return True
return self.to_dict() != other.to_dict()<|fim▁end|>
| |
<|file_name|>test_primitive_base.py<|end_file_name|><|fim▁begin|>from datetime import datetime
import numpy as np
import pandas as pd
from pytest import raises
from featuretools.primitives import Haversine, IsIn, IsNull, Max, TimeSinceLast
from featuretools.primitives.base import TransformPrimitive
def test_call_agg():
primitive = Max()
# the assert is run twice on purpose
for _ in range(2):
assert 5 == primitive(range(6))
def test_call_trans():
primitive = IsNull()
for _ in range(2):
assert pd.Series([False] * 6).equals(primitive(range(6)))
def test_uses_calc_time():
primitive = TimeSinceLast()
primitive_h = TimeSinceLast(unit="hours")
datetimes = pd.Series([datetime(2015, 6, 6), datetime(2015, 6, 7)])
answer = 86400.0
answer_h = 24.0
assert answer == primitive(datetimes, time=datetime(2015, 6, 8))
assert answer_h == primitive_h(datetimes, time=datetime(2015, 6, 8))
def test_call_multiple_args():
primitive = Haversine()
data1 = [(42.4, -71.1), (40.0, -122.4)]
data2 = [(40.0, -122.4), (41.2, -96.75)]
answer = [2631.231, 1343.289]
for _ in range(2):
assert np.round(primitive(data1, data2), 3).tolist() == answer
def test_get_function_called_once():
class TestPrimitive(TransformPrimitive):
def __init__(self):
self.get_function_call_count = 0
def get_function(self):
self.get_function_call_count += 1
def test(x):
return x
return test
primitive = TestPrimitive()
for _ in range(2):
primitive(range(6))<|fim▁hole|> assert primitive.get_function_call_count == 1
def test_multiple_arg_string():
class Primitive(TransformPrimitive):
def __init__(self, bool=True, int=0, float=None):
self.bool = bool
self.int = int
self.float = float
primitive = Primitive(bool=True, int=4, float=.1)
string = primitive.get_args_string()
assert string == ', int=4, float=0.1'
def test_single_args_string():
assert IsIn([1, 2, 3]).get_args_string() == ', list_of_outputs=[1, 2, 3]'
def test_args_string_default():
assert IsIn().get_args_string() == ''
def test_args_string_mixed():
class Primitive(TransformPrimitive):
def __init__(self, bool=True, int=0, float=None):
self.bool = bool
self.int = int
self.float = float
primitive = Primitive(bool=False, int=0)
string = primitive.get_args_string()
assert string == ', bool=False'
def test_args_string_undefined():
string = Max().get_args_string()
assert string == ''
def test_args_string_error():
class Primitive(TransformPrimitive):
def __init__(self, bool=True, int=0, float=None):
pass
with raises(AssertionError, match='must be attribute'):
Primitive(bool=True, int=4, float=.1).get_args_string()<|fim▁end|>
| |
<|file_name|>issue-33174-restricted-type-in-public-interface.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_camel_case_types)] // genus is always capitalized
pub(crate) struct Snail;
//~^ NOTE `Snail` declared as crate-visible
mod sea {
pub(super) struct Turtle;
//~^ NOTE `sea::Turtle` declared as restricted
}
struct Tortoise;
//~^ NOTE `Tortoise` declared as private<|fim▁hole|>
pub type Helix_pomatia = Shell<Snail>;
//~^ ERROR crate-visible type `Snail` in public interface
//~| NOTE can't leak crate-visible type
pub type Dermochelys_coriacea = Shell<sea::Turtle>;
//~^ ERROR restricted type `sea::Turtle` in public interface
//~| NOTE can't leak restricted type
pub type Testudo_graeca = Shell<Tortoise>;
//~^ ERROR private type `Tortoise` in public interface
//~| NOTE can't leak private type
fn main() {}<|fim▁end|>
|
pub struct Shell<T> {
pub(crate) creature: T,
}
|
<|file_name|>ja.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2003-2013, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/<|fim▁hole|> btnUpload: 'サーバーに送信',
button2Img: '選択したボタンを画像に置き換えますか?',
hSpace: '横間隔',
img2Button: '選択した画像をボタンに置き換えますか?',
infoTab: 'イメージ 情報',
linkTab: 'リンク',
lockRatio: 'ロック比率',
menu: 'イメージ プロパティ',
resetSize: 'サイズリセット',
title: 'イメージ プロパティ',
titleButton: '画像ボタン プロパティ',
upload: 'アップロード',
urlMissing: 'イメージのURLを入力してください。',
vSpace: '縦間隔',
validateBorder: 'ボーダーは数値で入力してください。',
validateHSpace: '横間隔は数値で入力してください。',
validateVSpace: '縦間隔は数値で入力してください。'
});<|fim▁end|>
|
CKEDITOR.plugins.setLang( 'image', 'ja', {
alertUrl: 'イメージのURLを入力してください。',
alt: '代替テキスト',
border: 'ボーダー',
|
<|file_name|>class5.py<|end_file_name|><|fim▁begin|><|fim▁hole|>class A(object):
pass
class A(x.y()):
pass
class A(B, C):
pass<|fim▁end|>
|
class A(B):
pass
|
<|file_name|>version.go<|end_file_name|><|fim▁begin|>package features
import "github.com/Azure/azure-sdk-for-go/version"
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
// UserAgent returns the UserAgent string to use when sending http.Requests.
func UserAgent() string {
return "Azure-SDK-For-Go/" + Version() + " features/2021-07-01"<|fim▁hole|>// Version returns the semantic version (see http://semver.org) of the client.
func Version() string {
return version.Number
}<|fim▁end|>
|
}
|
<|file_name|>test_spark.py<|end_file_name|><|fim▁begin|># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
import json
import os
from string import Template
from uuid import uuid4
import pytest
from moztelemetry.store import InMemoryStore
from moztelemetry.dataset import Dataset
from moztelemetry.spark import get_pings
@pytest.fixture()
def test_store(monkeypatch):
data_dir = os.path.join(os.path.dirname(__file__), 'data')
with open(os.path.join(data_dir, 'schema.json')) as s:
schema = json.loads(s.read())
dimensions = [f['field_name'] for f in schema['dimensions']]
dataset = Dataset('test-bucket', dimensions, InMemoryStore('test-bucket'))
@staticmethod
def from_source(source_name):
return dataset
monkeypatch.setattr(Dataset, 'from_source', from_source)
return dataset.store
def upload_ping(store, value, **kwargs):
"""Upload value to a given store"""
ping_key_template = Template('$submission_date/$source_name/'
'$source_version/$doc_type/$app/$channel/'
'$version/$build_id/$filename')
dimensions = {
'submission_date': '20160805',
'source_name': 'telemetry',
'source_version': '4',
'doc_type': 'saved_session',
'app': 'Firefox',
'channel': 'nightly',
'version': '51.0a1',<|fim▁hole|> key = ping_key_template.substitute(**dimensions)
store.store[key] = value
@pytest.fixture
def mock_message_parser(monkeypatch):
# monkeypatch the default `decoder` argument of `records`
monkeypatch.setattr('moztelemetry.heka_message_parser.parse_heka_message',
lambda message: (message.getvalue(),))
test_data_for_exact_match = [
('doc_type', 'saved_session', 'main'),
('app', 'Firefox', 'Thunderbird'),
('version', '48.0', '46.0'),
('source_name', 'telemetry', 'other source'),
('source_version', '4', '2'),
]
@pytest.mark.slow
@pytest.mark.parametrize('filter_name,exact,wrong', test_data_for_exact_match)
def test_get_pings_by_exact_match(test_store, mock_message_parser, spark_context,
filter_name, exact, wrong):
upload_ping(test_store, 'value1', **{filter_name: exact})
upload_ping(test_store, 'value2', **{filter_name: wrong})
pings = get_pings(spark_context, **{filter_name: exact})
assert pings.collect() == ['value1']
test_data_for_range_match = [
('submission_date', '20160110', '20150101', '20160101', '20160120'),
('build_id', '20160801074050', '20160801074055', '20160801074049', '20160801074052'),
]
@pytest.mark.slow
@pytest.mark.parametrize('filter_name,exact,wrong,start,end', test_data_for_range_match)
def test_get_pings_by_range(test_store, mock_message_parser, spark_context,
filter_name, exact, wrong, start, end):
upload_ping(test_store, 'value1', **{filter_name: exact})
upload_ping(test_store, 'value2', **{filter_name: wrong})
pings = get_pings(spark_context, **{filter_name: exact})
assert pings.collect() == ['value1']
pings = get_pings(spark_context, **{filter_name: (start, end)})
assert pings.collect() == ['value1']
@pytest.mark.slow
def test_get_pings_multiple_by_range(test_store, mock_message_parser, spark_context):
upload_ping(test_store, 'value1', **{f[0]: f[1] for f in test_data_for_range_match})
upload_ping(test_store, 'value2', **{f[0]: f[2] for f in test_data_for_range_match})
pings = get_pings(spark_context, **{f[0]: f[1] for f in test_data_for_range_match})
assert pings.collect() == ['value1']
pings = get_pings(spark_context, **{f[0]: (f[3], f[4]) for f in test_data_for_range_match})
assert pings.collect() == ['value1']
def test_get_pings_fraction(test_store, mock_message_parser, spark_context):
for i in range(1, 10+1):
upload_ping(test_store, 'value', build_id=str(i))
pings = get_pings(spark_context)
assert pings.count() == 10
pings = get_pings(spark_context, fraction=0.1)
assert pings.count() == 1
def test_get_pings_wrong_schema(test_store, mock_message_parser, spark_context):
with pytest.raises(ValueError):
pings = get_pings(spark_context, schema=1)
def test_get_pings_multiple_filters(test_store, mock_message_parser, spark_context):
filters = dict(submission_date='20160101', channel='beta')
upload_ping(test_store, 'value1', **filters)
filters['app'] = 'Thunderbird'
upload_ping(test_store, 'value2', **filters)
pings = get_pings(spark_context, **filters)
assert pings.collect() == ['value2']
def test_get_pings_none_filter(test_store, mock_message_parser, spark_context):
upload_ping(test_store, 'value1', app='Firefox')
upload_ping(test_store, 'value2', app='Thuderbird')
pings = get_pings(spark_context, app=None)
assert sorted(pings.collect()) == ['value1', 'value2']
pings = get_pings(spark_context, app='*')
assert sorted(pings.collect()) == ['value1', 'value2']<|fim▁end|>
|
'build_id': '20160801074053',
'filename': uuid4()
}
dimensions.update(kwargs)
|
<|file_name|>pand.py<|end_file_name|><|fim▁begin|>#http://pandas.pydata.org/pandas-docs/stable/tutorials.html
#file='pand.py'
#exec(compile(open(file).read(), file, 'exec'))
from pandas import DataFrame, read_csv
import matplotlib.pyplot as plt
import pandas as pd
#import sys
#import matplotlib
names = ['Bob','Jessica','Mary','John','Mel']
births = [968, 155, 77, 578, 973]
BabyDataSet = list(zip(names,births)) # zip pairs entries together and list combines the entries to a list
print(BabyDataSet)
#The DataFrame attribute of pandas reorganizes the list into a tabular panda object
#similar to an sql table or an excel spreadsheet.
df = pd.DataFrame(data = BabyDataSet, columns=['Names', 'Births'])
print(df)
wait = input("PRESS ENTER TO CONTINUE.")
#We can now save the content as a standard tabular data format (csv)<|fim▁hole|>#We can also read back from the same file into a panda object
df = pd.read_csv(r'births1880.csv')
print(df)
print('Wrong header. read_cvs treated the first record as the header')
print('set the header to null')
wait = input("PRESS ENTER TO CONTINUE.")
df = pd.read_csv(r'births1880.csv',header=None)
print(df)
print('Now we have the right data but no header')
print('Label the headers')
wait = input("PRESS ENTER TO CONTINUE.")
df = pd.read_csv(r'births1880.csv', names=['Names','Births'])
print(df)
print('This looks like the table we need')
print('Numbers of 0,1,2,3,4 are row numbers similar to an Excel spreadsheet')
wait = input("PRESS ENTER TO CONTINUE.")
print('Lets do something with this tabulated data')
print('Sort the dataframe and select the top row')
Sorted1=df.sort_values(['Births'], ascending=False)
#Sorted2=df.sort_values(by='Births', ascending=False)
#Sorted.head(1)
print(Sorted1)
wait = input("PRESS ENTER TO CONTINUE.")
print('Use the max() attribute to find the maximum value')
MaxValue=df['Births'].max()
print('MaxValue is ',MaxValue)
wait = input("PRESS ENTER TO CONTINUE.")
print('Convert a column to an array')
print(df['Names'].values)
print('Reference the second entry')
print(df['Names'][1:2].values)
print('Apply a booleen mask on the Births column when compared to the MaxValue')
mask = df['Births']==MaxValue
print(mask)
print('Find the name associated with the maximum value')
MaxName = df['Names'][mask].values
print('Name at Max Value is ',MaxName)
wait = input("PRESS ENTER TO CONTINUE.")
#Create a graph object
print('Create a graph object')
df['Births'].plot()
# Text to display on graph
print('Construct a string to display on the graph')
Text = str(MaxValue) + " - " + MaxName
print(Text)
# Add text to graph
print('Annonate the graph')
plt.annotate(Text, xy=(1, MaxValue), xytext=(8, 0),
xycoords=('axes fraction', 'data'), textcoords='offset points')
print('Show the graph')
plt.show()
#Uncomment the following to save it as a png file
#plt.savefig('mygraph.png')<|fim▁end|>
|
df.to_csv('births1880.csv',index=False,header=False)
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The main parser interface
use ast;
use codemap::{Span, CodeMap, FileMap};
use diagnostic::{SpanHandler, mk_span_handler, default_handler};
use parse::attr::ParserAttr;
use parse::parser::Parser;
use std::cell::RefCell;
use std::io::File;
use std::rc::Rc;
use std::str;
pub mod lexer;
pub mod parser;
pub mod token;
pub mod comments;
pub mod attr;
pub mod common;
pub mod classify;
pub mod obsolete;
// info about a parsing session.
pub struct ParseSess {
pub span_diagnostic: SpanHandler, // better be the same as the one in the reader!
/// Used to determine and report recursive mod inclusions
included_mod_stack: RefCell<Vec<Path>>,
}
pub fn new_parse_sess() -> ParseSess {
ParseSess {
span_diagnostic: mk_span_handler(default_handler(), CodeMap::new()),
included_mod_stack: RefCell::new(Vec::new()),
}
}
pub fn new_parse_sess_special_handler(sh: SpanHandler) -> ParseSess {
ParseSess {
span_diagnostic: sh,
included_mod_stack: RefCell::new(Vec::new()),
}
}
// a bunch of utility functions of the form parse_<thing>_from_<source>
// where <thing> includes crate, expr, item, stmt, tts, and one that
// uses a HOF to parse anything, and <source> includes file and
// source_str.
pub fn parse_crate_from_file(
input: &Path,
cfg: ast::CrateConfig,
sess: &ParseSess
) -> ast::Crate {
new_parser_from_file(sess, cfg, input).parse_crate_mod()
// why is there no p.abort_if_errors here?
}
pub fn parse_crate_attrs_from_file(
input: &Path,
cfg: ast::CrateConfig,
sess: &ParseSess
) -> Vec<ast::Attribute> {
let mut parser = new_parser_from_file(sess, cfg, input);
let (inner, _) = parser.parse_inner_attrs_and_next();
inner
}
pub fn parse_crate_from_source_str(name: StrBuf,
source: StrBuf,
cfg: ast::CrateConfig,
sess: &ParseSess)
-> ast::Crate {
let mut p = new_parser_from_source_str(sess,
cfg,
name,
source);
maybe_aborted(p.parse_crate_mod(),p)
}
pub fn parse_crate_attrs_from_source_str(name: StrBuf,
source: StrBuf,
cfg: ast::CrateConfig,
sess: &ParseSess)
-> Vec<ast::Attribute> {
let mut p = new_parser_from_source_str(sess,
cfg,
name,
source);
let (inner, _) = maybe_aborted(p.parse_inner_attrs_and_next(),p);
inner
}
pub fn parse_expr_from_source_str(name: StrBuf,
source: StrBuf,
cfg: ast::CrateConfig,
sess: &ParseSess)
-> @ast::Expr {
let mut p = new_parser_from_source_str(sess, cfg, name, source);
maybe_aborted(p.parse_expr(), p)
}
pub fn parse_item_from_source_str(name: StrBuf,
source: StrBuf,
cfg: ast::CrateConfig,
sess: &ParseSess)
-> Option<@ast::Item> {
let mut p = new_parser_from_source_str(sess, cfg, name, source);
let attrs = p.parse_outer_attributes();
maybe_aborted(p.parse_item(attrs),p)
}
pub fn parse_meta_from_source_str(name: StrBuf,
source: StrBuf,
cfg: ast::CrateConfig,
sess: &ParseSess)
-> @ast::MetaItem {
let mut p = new_parser_from_source_str(sess, cfg, name, source);
maybe_aborted(p.parse_meta_item(),p)
}
pub fn parse_stmt_from_source_str(name: StrBuf,
source: StrBuf,
cfg: ast::CrateConfig,
attrs: Vec<ast::Attribute> ,
sess: &ParseSess)
-> @ast::Stmt {
let mut p = new_parser_from_source_str(
sess,
cfg,
name,
source
);
maybe_aborted(p.parse_stmt(attrs),p)
}
pub fn parse_tts_from_source_str(name: StrBuf,
source: StrBuf,
cfg: ast::CrateConfig,
sess: &ParseSess)
-> Vec<ast::TokenTree> {
let mut p = new_parser_from_source_str(
sess,
cfg,
name,
source
);
p.quote_depth += 1u;
// right now this is re-creating the token trees from ... token trees.
maybe_aborted(p.parse_all_token_trees(),p)
}
// Create a new parser from a source string
pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess,
cfg: ast::CrateConfig,
name: StrBuf,
source: StrBuf)
-> Parser<'a> {
filemap_to_parser(sess, string_to_filemap(sess, source, name), cfg)
}
/// Create a new parser, handling errors as appropriate
/// if the file doesn't exist
pub fn new_parser_from_file<'a>(sess: &'a ParseSess,
cfg: ast::CrateConfig,
path: &Path) -> Parser<'a> {
filemap_to_parser(sess, file_to_filemap(sess, path, None), cfg)
}
/// Given a session, a crate config, a path, and a span, add
/// the file at the given path to the codemap, and return a parser.
/// On an error, use the given span as the source of the problem.
pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
cfg: ast::CrateConfig,
path: &Path,
sp: Span) -> Parser<'a> {
filemap_to_parser(sess, file_to_filemap(sess, path, Some(sp)), cfg)
}
/// Given a filemap and config, return a parser
pub fn filemap_to_parser<'a>(sess: &'a ParseSess,
filemap: Rc<FileMap>,
cfg: ast::CrateConfig) -> Parser<'a> {
tts_to_parser(sess, filemap_to_tts(sess, filemap), cfg)
}
// must preserve old name for now, because quote! from the *existing*
// compiler expands into it
pub fn new_parser_from_tts<'a>(sess: &'a ParseSess,
cfg: ast::CrateConfig,
tts: Vec<ast::TokenTree>) -> Parser<'a> {
tts_to_parser(sess, tts, cfg)
}
// base abstractions
/// Given a session and a path and an optional span (for error reporting),
/// add the path to the session's codemap and return the new filemap.
pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
-> Rc<FileMap> {
let err = |msg: &str| {
match spanopt {
Some(sp) => sess.span_diagnostic.span_fatal(sp, msg),
None => sess.span_diagnostic.handler().fatal(msg),
}
};
let bytes = match File::open(path).read_to_end() {
Ok(bytes) => bytes,
Err(e) => {
err(format!("couldn't read {}: {}", path.display(), e));
unreachable!()
}
};
match str::from_utf8(bytes.as_slice()) {
Some(s) => {
return string_to_filemap(sess, s.to_strbuf(),
path.as_str().unwrap().to_strbuf())
}
None => err(format!("{} is not UTF-8 encoded", path.display())),
}
unreachable!()
}
// given a session and a string, add the string to
// the session's codemap and return the new filemap
pub fn string_to_filemap(sess: &ParseSess, source: StrBuf, path: StrBuf)
-> Rc<FileMap> {
sess.span_diagnostic.cm.new_filemap(path, source)
}
// given a filemap, produce a sequence of token-trees
pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
-> Vec<ast::TokenTree> {
// it appears to me that the cfg doesn't matter here... indeed,
// parsing tt's probably shouldn't require a parser at all.
let cfg = Vec::new();
let srdr = lexer::new_string_reader(&sess.span_diagnostic, filemap);
let mut p1 = Parser(sess, cfg, box srdr);
p1.parse_all_token_trees()
}
// given tts and cfg, produce a parser
pub fn tts_to_parser<'a>(sess: &'a ParseSess,
tts: Vec<ast::TokenTree>,
cfg: ast::CrateConfig) -> Parser<'a> {
let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, tts);
Parser(sess, cfg, box trdr)
}
// abort if necessary
pub fn maybe_aborted<T>(result: T, mut p: Parser) -> T {
p.abort_if_errors();
result
}
#[cfg(test)]
mod test {
use super::*;
use serialize::{json, Encodable};
use std::io;
use std::io::MemWriter;
use std::str;
use codemap::{Span, BytePos, Spanned};
use owned_slice::OwnedSlice;
use ast;
use abi;
use parse::parser::Parser;
use parse::token::{str_to_ident};
use util::parser_testing::{string_to_tts, string_to_parser};
use util::parser_testing::{string_to_expr, string_to_item};
use util::parser_testing::string_to_stmt;
fn to_json_str<'a, E: Encodable<json::Encoder<'a>, io::IoError>>(val: &E) -> StrBuf {
let mut writer = MemWriter::new();
let mut encoder = json::Encoder::new(&mut writer as &mut io::Writer);
let _ = val.encode(&mut encoder);
str::from_utf8(writer.unwrap().as_slice()).unwrap().to_strbuf()
}
// produce a codemap::span
fn sp(a: u32, b: u32) -> Span {
Span{lo:BytePos(a),hi:BytePos(b),expn_info:None}
}
#[test] fn path_exprs_1() {
assert!(string_to_expr("a".to_strbuf()) ==
@ast::Expr{
id: ast::DUMMY_NODE_ID,
node: ast::ExprPath(ast::Path {
span: sp(0, 1),
global: false,
segments: vec!(
ast::PathSegment {
identifier: str_to_ident("a"),
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
}
),
}),
span: sp(0, 1)
})
}
#[test] fn path_exprs_2 () {
assert!(string_to_expr("::a::b".to_strbuf()) ==
@ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprPath(ast::Path {
span: sp(0, 6),
global: true,
segments: vec!(
ast::PathSegment {
identifier: str_to_ident("a"),
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
},
ast::PathSegment {
identifier: str_to_ident("b"),
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
}
)
}),
span: sp(0, 6)
})
}
#[should_fail]
#[test] fn bad_path_expr_1() {
string_to_expr("::abc::def::return".to_strbuf());
}
// check the token-tree-ization of macros
#[test] fn string_to_tts_macro () {
let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_strbuf());
let tts: &[ast::TokenTree] = tts.as_slice();
match tts {
[ast::TTTok(_,_),
ast::TTTok(_,token::NOT),
ast::TTTok(_,_),
ast::TTDelim(ref delim_elts)] => {
let delim_elts: &[ast::TokenTree] = delim_elts.as_slice();
match delim_elts {
[ast::TTTok(_,token::LPAREN),
ast::TTDelim(ref first_set),
ast::TTTok(_,token::FAT_ARROW),
ast::TTDelim(ref second_set),
ast::TTTok(_,token::RPAREN)] => {
let first_set: &[ast::TokenTree] =
first_set.as_slice();
match first_set {
[ast::TTTok(_,token::LPAREN),
ast::TTTok(_,token::DOLLAR),
ast::TTTok(_,_),
ast::TTTok(_,token::RPAREN)] => {
let second_set: &[ast::TokenTree] =
second_set.as_slice();
match second_set {
[ast::TTTok(_,token::LPAREN),
ast::TTTok(_,token::DOLLAR),
ast::TTTok(_,_),
ast::TTTok(_,token::RPAREN)] => {
assert_eq!("correct","correct")
}
_ => assert_eq!("wrong 4","correct")
}
},
_ => {
error!("failing value 3: {:?}",first_set);
assert_eq!("wrong 3","correct")
}
}
},
_ => {
error!("failing value 2: {:?}",delim_elts);
assert_eq!("wrong","correct");
}
}
},
_ => {
error!("failing value: {:?}",tts);
assert_eq!("wrong 1","correct");
}
}
}
#[test] fn string_to_tts_1 () {
let tts = string_to_tts("fn a (b : int) { b; }".to_strbuf());
assert_eq!(to_json_str(&tts),
"[\
{\
\"variant\":\"TTTok\",\
\"fields\":[\
null,\
{\
\"variant\":\"IDENT\",\
\"fields\":[\
\"fn\",\
false\
]\
}\
]\
},\
{\
\"variant\":\"TTTok\",\
\"fields\":[\
null,\
{\
\"variant\":\"IDENT\",\
\"fields\":[\
\"a\",\
false\
]\
}\
]\
},\
{\
\"variant\":\"TTDelim\",\
\"fields\":[\
[\
{\
\"variant\":\"TTTok\",\
\"fields\":[\
null,\
\"LPAREN\"\
]\
},\
{\
\"variant\":\"TTTok\",\
\"fields\":[\
null,\
{\
\"variant\":\"IDENT\",\
\"fields\":[\
\"b\",\
false\
]\
}\
]\
},\
{\
\"variant\":\"TTTok\",\
\"fields\":[\
null,\
\"COLON\"\
]\
},\
{\
\"variant\":\"TTTok\",\
\"fields\":[\
null,\
{\
\"variant\":\"IDENT\",\
\"fields\":[\
\"int\",\
false\
]\
}\
]\
},\
{\
\"variant\":\"TTTok\",\
\"fields\":[\
null,\
\"RPAREN\"\
]\
}\
]\
]\
},\
{\
\"variant\":\"TTDelim\",\
\"fields\":[\
[\
{\
\"variant\":\"TTTok\",\
\"fields\":[\
null,\
\"LBRACE\"\
]\
},\
{\
\"variant\":\"TTTok\",\
\"fields\":[\
null,\
{\
\"variant\":\"IDENT\",\
\"fields\":[\
\"b\",\
false\
]\
}\
]\
},\
{\
\"variant\":\"TTTok\",\
\"fields\":[\
null,\
\"SEMI\"\
]\
},\
{\
\"variant\":\"TTTok\",\
\"fields\":[\
null,\
\"RBRACE\"\
]\
}\
]\
]\
}\
]".to_strbuf()
);
}
#[test] fn ret_expr() {
assert!(string_to_expr("return d".to_strbuf()) ==
@ast::Expr{
id: ast::DUMMY_NODE_ID,
node:ast::ExprRet(Some(@ast::Expr{
id: ast::DUMMY_NODE_ID,
node:ast::ExprPath(ast::Path{
span: sp(7, 8),
global: false,
segments: vec!(
ast::PathSegment {
identifier: str_to_ident("d"),
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
}
),
}),
span:sp(7,8)
})),
span:sp(0,8)
})
}
#[test] fn parse_stmt_1 () {
assert!(string_to_stmt("b;".to_strbuf()) ==
@Spanned{
node: ast::StmtExpr(@ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprPath(ast::Path {
span:sp(0,1),
global:false,
segments: vec!(
ast::PathSegment {
identifier: str_to_ident("b"),
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
}
),
}),
span: sp(0,1)},
ast::DUMMY_NODE_ID),
span: sp(0,1)})
}
fn parser_done(p: Parser){
assert_eq!(p.token.clone(), token::EOF);
}
#[test] fn parse_ident_pat () {
let sess = new_parse_sess();
let mut parser = string_to_parser(&sess, "b".to_strbuf());
assert!(parser.parse_pat() ==
@ast::Pat{id: ast::DUMMY_NODE_ID,
node: ast::PatIdent(
ast::BindByValue(ast::MutImmutable),
ast::Path {
span:sp(0,1),
global:false,
segments: vec!(
ast::PathSegment {
identifier: str_to_ident("b"),
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
}
),
},
None /* no idea */),
span: sp(0,1)});
parser_done(parser);
}
// check the contents of the tt manually:
#[test] fn parse_fundecl () {
// this test depends on the intern order of "fn" and "int"
assert!(string_to_item("fn a (b : int) { b; }".to_strbuf()) ==
Some(
@ast::Item{ident:str_to_ident("a"),
attrs:Vec::new(),
id: ast::DUMMY_NODE_ID,
node: ast::ItemFn(ast::P(ast::FnDecl {
inputs: vec!(ast::Arg{
ty: ast::P(ast::Ty{id: ast::DUMMY_NODE_ID,
node: ast::TyPath(ast::Path{
span:sp(10,13),
global:false,
segments: vec!(
ast::PathSegment {
identifier:
str_to_ident("int"),
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
}
),
}, None, ast::DUMMY_NODE_ID),
span:sp(10,13)
}),
pat: @ast::Pat {
id: ast::DUMMY_NODE_ID,
node: ast::PatIdent(
ast::BindByValue(ast::MutImmutable),
ast::Path {
span:sp(6,7),
global:false,
segments: vec!(
ast::PathSegment {
identifier:
str_to_ident("b"),
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
}
),
},
None // no idea
),
span: sp(6,7)
},
id: ast::DUMMY_NODE_ID
}),
output: ast::P(ast::Ty{id: ast::DUMMY_NODE_ID,
node: ast::TyNil,
span:sp(15,15)}), // not sure
cf: ast::Return,
variadic: false
}),
ast::NormalFn,
abi::Rust,
ast::Generics{ // no idea on either of these:
lifetimes: Vec::new(),
ty_params: OwnedSlice::empty(),
},
ast::P(ast::Block {
view_items: Vec::new(),
stmts: vec!(@Spanned{
node: ast::StmtSemi(@ast::Expr{
id: ast::DUMMY_NODE_ID,
node: ast::ExprPath(
ast::Path{
span:sp(17,18),
global:false,
segments: vec!(
ast::PathSegment {
identifier:
str_to_ident(
"b"),
lifetimes:
Vec::new(),
types:
OwnedSlice::empty()
}
),
}),
span: sp(17,18)},
ast::DUMMY_NODE_ID),
span: sp(17,19)}),
expr: None,
id: ast::DUMMY_NODE_ID,
rules: ast::DefaultBlock, // no idea<|fim▁hole|> span: sp(15,21),
})),
vis: ast::Inherited,
span: sp(0,21)}));
}
#[test] fn parse_exprs () {
// just make sure that they parse....
string_to_expr("3 + 4".to_strbuf());
string_to_expr("a::z.froob(b,@(987+3))".to_strbuf());
}
#[test] fn attrs_fix_bug () {
string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
-> Result<@Writer, StrBuf> {
#[cfg(windows)]
fn wb() -> c_int {
(O_WRONLY | libc::consts::os::extra::O_BINARY) as c_int
}
#[cfg(unix)]
fn wb() -> c_int { O_WRONLY as c_int }
let mut fflags: c_int = wb();
}".to_strbuf());
}
}<|fim▁end|>
| |
<|file_name|>test_exports.py<|end_file_name|><|fim▁begin|>"""Test the csv/json export functionality."""
import binascii
import textwrap
import dnstwister.tools
import patches
from dnstwister.core.domain import Domain
def test_csv_export(webapp, monkeypatch):
"""Test CSV export"""
monkeypatch.setattr(
'dnstwister.tools.resolve', lambda domain: ('999.999.999.999', False)
)
domain = Domain('a.com')
hexdomain = domain.to_hex()
response = webapp.get('/search/{}/csv'.format(hexdomain))
assert response.headers['Content-Disposition'] == 'attachment; filename=dnstwister_report_a.com.csv'
assert '\n'.join(sorted(response.text.strip().split('\n'))) == textwrap.dedent("""
Domain,Type,Tweak,IP,Error
a.com,Addition,aa.com,999.999.999.999,False
a.com,Addition,ab.com,999.999.999.999,False
a.com,Addition,ac.com,999.999.999.999,False
a.com,Addition,ad.com,999.999.999.999,False
a.com,Addition,ae.com,999.999.999.999,False
a.com,Addition,af.com,999.999.999.999,False
a.com,Addition,ag.com,999.999.999.999,False
a.com,Addition,ah.com,999.999.999.999,False
a.com,Addition,ai.com,999.999.999.999,False
a.com,Addition,aj.com,999.999.999.999,False
a.com,Addition,ak.com,999.999.999.999,False
a.com,Addition,al.com,999.999.999.999,False
a.com,Addition,am.com,999.999.999.999,False
a.com,Addition,an.com,999.999.999.999,False
a.com,Addition,ao.com,999.999.999.999,False
a.com,Addition,ap.com,999.999.999.999,False
a.com,Addition,aq.com,999.999.999.999,False
a.com,Addition,ar.com,999.999.999.999,False
a.com,Addition,as.com,999.999.999.999,False
a.com,Addition,at.com,999.999.999.999,False
a.com,Addition,au.com,999.999.999.999,False
a.com,Addition,av.com,999.999.999.999,False
a.com,Addition,aw.com,999.999.999.999,False
a.com,Addition,ax.com,999.999.999.999,False
a.com,Addition,ay.com,999.999.999.999,False
a.com,Addition,az.com,999.999.999.999,False
a.com,Bitsquatting,c.com,999.999.999.999,False
a.com,Bitsquatting,e.com,999.999.999.999,False
a.com,Bitsquatting,i.com,999.999.999.999,False
a.com,Bitsquatting,q.com,999.999.999.999,False
a.com,Original*,a.com,999.999.999.999,False
a.com,Replacement,1.com,999.999.999.999,False
a.com,Replacement,2.com,999.999.999.999,False
a.com,Replacement,s.com,999.999.999.999,False
a.com,Replacement,w.com,999.999.999.999,False
a.com,Replacement,y.com,999.999.999.999,False
a.com,Replacement,z.com,999.999.999.999,False
a.com,Various,acom.com,999.999.999.999,False
a.com,Various,wwa.com,999.999.999.999,False
a.com,Various,www-a.com,999.999.999.999,False
a.com,Various,wwwa.com,999.999.999.999,False
a.com,Vowel swap,o.com,999.999.999.999,False
a.com,Vowel swap,u.com,999.999.999.999,False
""").strip()
def test_json_export(webapp, monkeypatch):
"""Test JSON export"""
monkeypatch.setattr(
'dnstwister.tools.dnstwist.DomainFuzzer', patches.SimpleFuzzer
)
monkeypatch.setattr(
'dnstwister.tools.resolve', lambda domain: ('999.999.999.999', False)
)
domain = Domain('a.com')
path = domain.to_hex()
response = webapp.get('/search/{}/json'.format(path))
assert response.headers['Content-Disposition'] == 'attachment; filename=dnstwister_report_a.com.json'
assert response.json == {
u'a.com': {
u'fuzzy_domains': [
{
u'domain-name': u'a.com',
u'fuzzer': u'Original*',
u'hex': u'612e636f6d',
u'resolution': {
u'error': False,
u'ip': u'999.999.999.999'
}
},
{
u'domain-name': u'a.co',
u'fuzzer': u'Pretend',
u'hex': u'612e636f',
u'resolution': {
u'error': False,
u'ip': u'999.999.999.999'
}
}
]
}
}
def test_json_export_one_domain(webapp, monkeypatch):
"""Test JSON export when no reports"""
monkeypatch.setattr(
'dnstwister.tools.dnstwist.DomainFuzzer', patches.SimpleFuzzer
)
monkeypatch.setattr(
'dnstwister.tools.resolve', lambda domain: ('999.999.999.999', False)
)
domains = ('a.com',)
path = ','.join([Domain(d).to_hex() for d in domains])
response = webapp.get('/search/{}/json'.format(path))
assert response.headers['Content-Disposition'] == 'attachment; filename=dnstwister_report_a.com.json'
assert response.json == {
u'a.com': {
u'fuzzy_domains': [
{
u'domain-name': u'a.com',
u'fuzzer': u'Original*',
u'hex': u'612e636f6d',
u'resolution': {
u'error': False,
u'ip': u'999.999.999.999'
}
},
{
u'domain-name': u'a.co',
u'fuzzer': u'Pretend',
u'hex': u'612e636f',
u'resolution': {
u'error': False,
u'ip': u'999.999.999.999'
}
}
]
}
}
def test_json_export_no_fuzzy(webapp, monkeypatch):
"""Test JSON export when no fuzzy domains."""
monkeypatch.setattr(
'dnstwister.tools.dnstwist.DomainFuzzer', patches.NoFuzzer
)
monkeypatch.setattr(
'dnstwister.tools.resolve', lambda domain: ('999.999.999.999', False)
)
domains = ('a.com',)
path = ','.join([Domain(d).to_hex() for d in domains])
response = webapp.get('/search/{}/json'.format(path))
assert response.headers['Content-Disposition'] == 'attachment; filename=dnstwister_report_a.com.json'
assert response.json == {
u'a.com': {
u'fuzzy_domains': [
{
u'domain-name': u'a.com',
u'fuzzer': u'Original*',
u'hex': u'612e636f6d',
u'resolution': {
u'error': False,
u'ip': u'999.999.999.999'
}
}
]
}
}
def test_json_export_formatting(webapp, monkeypatch):
"""Test JSON export looks nice :)"""
monkeypatch.setattr(
'dnstwister.tools.dnstwist.DomainFuzzer', patches.SimpleFuzzer
)
monkeypatch.setattr(
'dnstwister.tools.resolve', lambda domain: ('999.999.999.999', False)
)
domain = 'a.com'
path = Domain(domain).to_hex()
response = webapp.get('/search/{}/json'.format(path))
assert response.headers['Content-Disposition'] == 'attachment; filename=dnstwister_report_a.com.json'
assert response.text.strip() == textwrap.dedent("""
{
"a.com": {
"fuzzy_domains": [
{
"domain-name": "a.com",
"fuzzer": "Original*",
"hex": "612e636f6d",
"resolution": {
"error": false,
"ip": "999.999.999.999"
}
},
{
"domain-name": "a.co",
"fuzzer": "Pretend",
"hex": "612e636f",
"resolution": {
"error": false,
"ip": "999.999.999.999"
}
}
]
}
}
""").strip()
def test_failed_export(webapp):
"""Test unknown-format export"""
domain = 'a.com'
hexdomain = Domain(domain).to_hex()
response = webapp.get('/search/{}/xlsx'.format(hexdomain), expect_errors=True)
assert response.status_code == 400
def test_links_on_report(webapp):
"""Make sure the export links are working."""
domain = Domain('a.com')
hexdomain = domain.to_hex()
page_html = webapp.get('/search/{}'.format(hexdomain)).text
assert '/search/{}/csv'.format(hexdomain) in page_html
assert '/search/{}/json'.format(hexdomain) in page_html
def test_json_export_unicode_domain(webapp, monkeypatch):
"""Test JSON export when no reports"""
monkeypatch.setattr(
'dnstwister.tools.dnstwist.DomainFuzzer', patches.SimpleFuzzer
)
monkeypatch.setattr(
'dnstwister.tools.resolve', lambda domain: ('999.999.999.999', False)
)
domain = u'a\u00E0.com' # almost 'aa.com'
hexdomain = Domain(domain).to_hex()
response = webapp.get('/search/{}/json'.format(hexdomain))
assert response.headers['Content-Disposition'] == 'attachment; filename=dnstwister_report_xn--a-sfa.com.json'
assert response.json == {
u'xn--a-sfa.com': {
u'fuzzy_domains': [
{
u'domain-name': u'xn--a-sfa.com',
u'fuzzer': u'Original*',
u'hex': u'786e2d2d612d7366612e636f6d',
u'resolution': {
u'error': False,
u'ip': u'999.999.999.999'
}
},
{
u'domain-name': u'xn--a-sfa.co',
u'fuzzer': u'Pretend',
u'hex': u'786e2d2d612d7366612e636f',
u'resolution': {
u'error': False,
u'ip': u'999.999.999.999'
}
}
]
}
}
def test_unicode_csv_export(webapp, monkeypatch):
"""Test CSV export with Unicode"""
monkeypatch.setattr(
'dnstwister.tools.resolve', lambda domain: ('999.999.999.999', False)
)
domain = u'a\u00E0.com' # almost 'aa.com'
hexdomain = Domain(domain).to_hex()
response = webapp.get('/search/{}/csv'.format(hexdomain))
assert response.headers['Content-Disposition'] == 'attachment; filename=dnstwister_report_xn--a-sfa.com.csv'
assert '\n'.join(sorted(response.text.strip().split('\n'))) == textwrap.dedent("""
Domain,Type,Tweak,IP,Error
xn--a-sfa.com,Addition,xn--aa-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--ab-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--ac-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--ad-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--ae-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--af-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--ag-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--ah-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--ai-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--aj-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--ak-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--al-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--am-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--an-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--ao-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--ap-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--aq-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--ar-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--as-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--at-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--au-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--av-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--aw-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--ax-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--ay-jia.com,999.999.999.999,False
xn--a-sfa.com,Addition,xn--az-jia.com,999.999.999.999,False
xn--a-sfa.com,Bitsquatting,xn--c-sfa.com,999.999.999.999,False
xn--a-sfa.com,Bitsquatting,xn--e-sfa.com,999.999.999.999,False
xn--a-sfa.com,Bitsquatting,xn--i-sfa.com,999.999.999.999,False
xn--a-sfa.com,Bitsquatting,xn--q-sfa.com,999.999.999.999,False
xn--a-sfa.com,Homoglyph,xn--0ca15e.com,999.999.999.999,False
xn--a-sfa.com,Homoglyph,xn--0ca3e.com,999.999.999.999,False
xn--a-sfa.com,Homoglyph,xn--0ca743m.com,999.999.999.999,False
xn--a-sfa.com,Homoglyph,xn--0ca76d.com,999.999.999.999,False
xn--a-sfa.com,Homoglyph,xn--0ca7e.com,999.999.999.999,False
xn--a-sfa.com,Homoglyph,xn--0ca98b.com,999.999.999.999,False
xn--a-sfa.com,Homoglyph,xn--0caa.com,999.999.999.999,False
xn--a-sfa.com,Homoglyph,xn--0cab.com,999.999.999.999,False
xn--a-sfa.com,Homoglyph,xn--0cad.com,999.999.999.999,False
xn--a-sfa.com,Homoglyph,xn--0caf.com,999.999.999.999,False
xn--a-sfa.com,Homoglyph,xn--0cah.com,999.999.999.999,False
xn--a-sfa.com,Homoglyph,xn--0caj.com,999.999.999.999,False
xn--a-sfa.com,Hyphenation,xn--a--kia.com,999.999.999.999,False
xn--a-sfa.com,Omission,a.com,999.999.999.999,False
xn--a-sfa.com,Omission,xn--0ca.com,999.999.999.999,False
xn--a-sfa.com,Original*,xn--a-sfa.com,999.999.999.999,False
xn--a-sfa.com,Repetition,xn--a-sfaa.com,999.999.999.999,False
xn--a-sfa.com,Repetition,xn--aa-kia.com,999.999.999.999,False
xn--a-sfa.com,Replacement,xn--1-sfa.com,999.999.999.999,False
xn--a-sfa.com,Replacement,xn--2-sfa.com,999.999.999.999,False
xn--a-sfa.com,Replacement,xn--s-sfa.com,999.999.999.999,False
xn--a-sfa.com,Replacement,xn--w-sfa.com,999.999.999.999,False
xn--a-sfa.com,Replacement,xn--y-sfa.com,999.999.999.999,False
xn--a-sfa.com,Replacement,xn--z-sfa.com,999.999.999.999,False
xn--a-sfa.com,Subdomain,a.xn--0ca.com,999.999.999.999,False
xn--a-sfa.com,Transposition,xn--a-rfa.com,999.999.999.999,False
xn--a-sfa.com,Various,xn--acom-0na.com,999.999.999.999,False<|fim▁hole|> xn--a-sfa.com,Vowel swap,xn--o-sfa.com,999.999.999.999,False
xn--a-sfa.com,Vowel swap,xn--u-sfa.com,999.999.999.999,False
""").strip()<|fim▁end|>
|
xn--a-sfa.com,Various,xn--wwa-cla.com,999.999.999.999,False
xn--a-sfa.com,Various,xn--www-a-vqa.com,999.999.999.999,False
xn--a-sfa.com,Various,xn--wwwa-3na.com,999.999.999.999,False
|
<|file_name|>data_exportor.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# @date 161103 - Export excel with get_work_order_report function
"""
Data exporter (Excel, CSV...)
"""
import io
import math
from datetime import datetime
from xlsxwriter.workbook import Workbook
import tablib
from utils.tools import get_product_size
def get_customers(customer_list=None, file_format='csv'):
"""Generate customer data file for download."""
if customer_list is None:
customer_list = []
data = tablib.Dataset()
data.headers = ('客戶代碼', '客戶名稱')
for c in customer_list:
data.append((c.c_code, c.c_name))
if file_format == 'csv':
return data.csv
return data
def get_maintenance_log(log_list=None, file_format='csv'):
"""Generate maintenance log to csv file for download."""
if log_list is None:
log_list = []
data = tablib.Dataset()
data.headers = ('機台', '維修項目', '開始時間',
'員工', '結束時間', '員工',
'總計時間')
for log in log_list:
m_code = log['m_code'].replace('<br>', '\n')
data.append((log['machine_id'], m_code, log['start_time'],
log['who_start'], log['end_time'], log['who_end'],
log['total_time'][0])
)
if file_format == 'csv':
return data.csv
return data
def get_w_m_performance_report(file_format='xls'):
"""Generate excel file for download by worker and machine performance."""
row_number = 11
data = tablib.Dataset()
data.append(['個人效率期間表 ({})'.format(
datetime.now().strftime("%Y/%m/%d"))] + [''] * (row_number - 1))
data.append(['工號', '姓名', '日期', '標準量', '效率標準量',
'實質生產量', '總稼動時間', '總停機時間', '稼動 %', '數量效率 %',
'平均效率 %'])
if file_format == 'xls':
return data.xls
return data
def get_loss_rate_report(report_data, file_format='csv'):
"""Generate csv file for download by machine loss rate."""
data = tablib.Dataset()
data.headers = ('機台', '機型', '良品數', '不良品數', '損耗率(%)',
'損耗金額(RMB)', '損耗率排名')
rank = 0
old_loss_rate = None
for r in sorted(report_data, key=lambda k: k['loss_rate'], reverse=True):
if old_loss_rate != r['loss_rate']:
rank += 1
old_loss_rate = r['loss_rate']
record = [r['machine_id'], r['machine_type'], r['count_qty'],
r['event_qty'], r['loss_rate'], r['total_loss_money'],
rank]
data.append(record)
if file_format == 'csv':
return data.csv
return data
def get_loss_rate_detail_report(report_data, file_format='csv'):
"""Generate csv file for download by machine loss rate detail."""
data = tablib.Dataset()
data.headers = ('日期', '良品數', '不良品數', '損耗率(%)',
'損耗金額(RMB)')
for r in sorted(report_data, key=lambda k: k['record_date']):
record = [r['record_date'], r['count_qty'], r['event_qty'],
r['loss_rate'], r['total_loss_money']]
data.append(record)
if file_format == 'csv':
return data.csv
return data
def get_uptime_report(report_data='', file_format='xls'):
"""Generate excel file for download by uptime information."""
data = tablib.Dataset()
data.append_separator('製造部各工程稼動率一覽表')
data.append(['月份:10', '星期', '', '', '', '', '',
'目標', '', '', '', ''])
data.append(['', '', '加締卷取(%)', '組立(%)', '老化(%)',
'CUTTING(%)', 'TAPPING(%)', '加締卷取',
'組立', '老化', 'CUTTING', 'TAPPING'])
if file_format == 'xls':
return data.xls
return data
def get_work_order_report(report_data, file_format='csv'):
"""Generate csv file for download by work order."""
# data = tablib.Dataset()
# data.headers = ('製令編號', '料號', '客戶', '產品規格',
# '投入數', '應繳庫數',
# '加締捲取', '組立', '老化', '選別', '加工切角')
# for r in sorted(report_data, key=lambda k: k['order_no']):
# try:
# intput_count = int(r['input_count'])
# except (TypeError, ValueError):
# intput_count = -1
# record = [r['order_no'], r['part_no'], r['customer'], r['product'],
# intput_count, math.floor(intput_count / 1.03),
# r['step1_status'], r['step2_status'], r['step3_status'],
# r['step4_status'], r['step5_status']]
# data.append(record)
# if file_format == 'csv':
# return data.csv
# return data
output = io.BytesIO()
if file_format == 'xls':
workbook = Workbook(output, {'in_memory': True})
worksheet = workbook.add_worksheet()
# merge_format = workbook.add_format({
# 'bold': 1,
# 'border': 1,
# 'align': 'center',
# 'valign': 'vcenter'})
worksheet.merge_range('A1:A3', '製令編號')
worksheet.merge_range('B1:B3', '料號')
worksheet.merge_range('C1:C3', '客戶')
worksheet.merge_range('D1:D3', '產品規格')
worksheet.merge_range('E1:E3', '投入數')
worksheet.merge_range('F1:F3', '應繳庫數')
worksheet.write('G1', '加締捲取')
worksheet.write('H1', '組立')
worksheet.write('I1', '老化')
worksheet.write('J1', '選別')
worksheet.write('K1', '加工切角')
for col_name in ('G', 'H', 'I', 'J', 'K'):
worksheet.write(col_name + '2', '機器')
worksheet.write(col_name + '3', '良品數')<|fim▁hole|> row = 4
for r in sorted(report_data, key=lambda k: k['order_no']):
try:
intput_count = int(r['input_count'])
except (TypeError, ValueError):
intput_count = -1
worksheet.merge_range('A{}:A{}'.format(row, row + 2),
r['order_no'])
worksheet.merge_range('B{}:B{}'.format(row, row + 2), r['part_no'])
worksheet.merge_range('C{}:C{}'.format(row, row + 2),
r['customer'])
worksheet.merge_range('D{}:D{}'.format(row, row + 2), r['product'])
worksheet.merge_range('E{}:E{}'.format(row, row + 2), intput_count)
worksheet.merge_range('F{}:F{}'.format(row, row + 2),
math.floor(intput_count / 1.03))
for process in range(1, 6):
row_tag = chr(71 + process - 1)
worksheet.write_string('{}{}'.format(row_tag, row),
r['step{}_status'.format(process)])
machine = r['step{}_machine'.format(process)]
count = r['step{}_count'.format(process)]
worksheet.write_string('{}{}'.format(row_tag, row + 1),
machine if machine else '')
worksheet.write_string('{}{}'.format(row_tag, row + 2),
str(count) if count else '')
row += 3
workbook.close()
output.seek(0)
return output.read()
def get_order_report(report_data, file_format='csv'):
"""Generate csv file for download by machine loss rate detail."""
data = tablib.Dataset()
data.headers = ('製令編號', '客戶', '規格', '投入數', '需求數',
'加締捲曲', '組立', '老化', '選別', '加工切腳')
for r in sorted(report_data, key=lambda k: k['order_no']):
record = [r['order_no'], r['customer'], get_product_size(r['part_no']),
r['input_count'], r['require_count'],
r['step1_prod_qty'], r['step2_prod_qty'],
r['step3_prod_qty'], r['step4_prod_qty'],
r['step5_prod_qty']]
data.append(record)
if file_format == 'csv':
return data.csv
return data<|fim▁end|>
| |
<|file_name|>hir-res-hygiene.rs<|end_file_name|><|fim▁begin|>// check-pass
// edition:2018
// aux-build:not-libstd.rs
// Check that paths created in HIR are not affected by in scope names.
extern crate not_libstd as std;
<|fim▁hole|> async {}.await;
}
fn main() -> Result<(), ()> {
for i in 0..10 {}
for j in 0..=10 {}
Ok(())?;
Ok(())
}<|fim▁end|>
|
async fn the_future() {
|
<|file_name|>Final_P4_1and2.py<|end_file_name|><|fim▁begin|>#Final Exam Problem 4-2
import random, pylab
# You are given this function
def getMeanAndStd(X):
mean = sum(X)/float(len(X))
tot = 0.0
for x in X:
tot += (x - mean)**2
std = (tot/len(X))**0.5
return mean, std
# You are given this class
class Die(object):
def __init__(self, valList):
""" valList is not empty """
self.possibleVals = valList[:]
def roll(self):
return random.choice(self.possibleVals)
# Implement this -- Coding Part 1 of 2
def makeHistogram(values, numBins, xLabel, yLabel, title=None):
"""
- values, a sequence of numbers
- numBins, a positive int
- xLabel, yLabel, title, are strings
- Produces a histogram of values with numBins bins and the indicated labels
for the x and y axis
- If title is provided by caller, puts that title on the figure and otherwise
does not title the figure
"""
pylab.hist(values, numBins)
pylab.xlabel(xLabel)
pylab.ylabel(yLabel)
if title != None:
pylab.title(title)
pylab.show()
# Implement this -- Coding Part 2 of 2
def getAverage(die, numRolls, numTrials):
"""
- die, a Die
- numRolls, numTrials, are positive ints
- Calculates the expected mean value of the longest run of a number
over numTrials runs of numRolls rolls.
- Calls makeHistogram to produce a histogram of the longest runs for all<|fim▁hole|> - Choose appropriate labels for the x and y axes.
- Returns the mean calculated to 3 decimal places
"""
longest_runs = []
for x in range(numTrials):
rolls = [die.roll() for x in range(numRolls)]
run = 0
longest = 0
for i in range(len(rolls)):
if i == 0:
run += 1
longest += 1
else:
if rolls[i] == rolls[i-1]:
run += 1
if run > longest:
longest = run
else:
run = 1
longest_runs.append(longest)
makeHistogram(longest_runs, 10, 'Longest Run', 'Frequency', \
'Frequency of Longest Consecutive Dice Rolls')
return sum(longest_runs)/len(longest_runs)
# One test case
print(getAverage(Die([1,2,3,4,5,6,6,6,7]), 1, 1000))<|fim▁end|>
|
the trials. There should be 10 bins in the histogram
|
<|file_name|>multiclass_sklearn.py<|end_file_name|><|fim▁begin|>import deepchem as dc
import numpy as np
import sklearn
from sklearn.ensemble import RandomForestClassifier
N = 100
n_feat = 5
n_classes = 3
X = np.random.rand(N, n_feat)
y = np.random.randint(3, size=(N,))<|fim▁hole|>dataset = dc.data.NumpyDataset(X, y)
sklearn_model = RandomForestClassifier(class_weight="balanced", n_estimators=50)
model = dc.models.SklearnModel(sklearn_model)
# Fit trained model
print("About to fit model")
model.fit(dataset)
model.save()
print("About to evaluate model")
train_scores = model.evaluate(dataset, sklearn.metrics.roc_auc_score, [])
print("Train scores")
print(train_scores)<|fim▁end|>
| |
<|file_name|>static_symbol.d.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
/**
 * A token representing a reference to a static type.
*
* This token is unique for a filePath and name and can be used as a hash table key.
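 *
 * A hypothetical usage sketch (the file path and symbol name are assumptions,
 * not taken from this file):
 *
 *     const cache = new StaticSymbolCache();
 *     const sym = cache.get('/app/app.component.ts', 'AppComponent');
 *     const again = cache.get(sym.filePath, sym.name); // expected: same instance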
*/
export declare class StaticSymbol {
filePath: string;
name: string;
members: string[];
constructor(filePath: string, name: string, members?: string[]);
}
/**
 * A cache of static symbols used by the StaticReflector to return the same symbol for the
* same symbol values.
*/<|fim▁hole|>}<|fim▁end|>
|
export declare class StaticSymbolCache {
private cache;
get(declarationFile: string, name: string, members?: string[]): StaticSymbol;
|
<|file_name|>dialogflow_generated_dialogflow_v2beta1_participants_list_suggestions_async.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ListSuggestions
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-dialogflow
# [START dialogflow_generated_dialogflow_v2beta1_Participants_ListSuggestions_async]
from google.cloud import dialogflow_v2beta1
async def sample_list_suggestions():
# Create a client
client = dialogflow_v2beta1.ParticipantsAsyncClient()
# Initialize request argument(s)
request = dialogflow_v2beta1.ListSuggestionsRequest(
)
<|fim▁hole|> # Make the request
page_result = client.list_suggestions(request=request)
# Handle the response
async for response in page_result:
print(response)
# [END dialogflow_generated_dialogflow_v2beta1_Participants_ListSuggestions_async]<|fim▁end|>
| |
<|file_name|>nstimer.py<|end_file_name|><|fim▁begin|>#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class nstimer(base_resource) :
""" Configuration for Timer resource. """
def __init__(self) :
self._name = ""
self._interval = 0
self._unit = ""
self._comment = ""
self._newname = ""
self.___count = 0
@property
def name(self) :
ur"""Timer name.<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
ur"""Timer name.<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def interval(self) :
ur"""The frequency at which the policies bound to this timer are invoked. The minimum value is 20 msec. The maximum value is 20940 in seconds and 349 in minutes.<br/>Default value: 5<br/>Minimum length = 1<br/>Maximum length = 20940000.
"""
try :
return self._interval
except Exception as e:
raise e
@interval.setter
def interval(self, interval) :
ur"""The frequency at which the policies bound to this timer are invoked. The minimum value is 20 msec. The maximum value is 20940 in seconds and 349 in minutes.<br/>Default value: 5<br/>Minimum length = 1<br/>Maximum length = 20940000
"""
try :
self._interval = interval
except Exception as e:
raise e
@property
def unit(self) :
ur"""Timer interval unit.<br/>Default value: SEC<br/>Possible values = SEC, MIN.
"""
try :
return self._unit
except Exception as e:
raise e
@unit.setter
def unit(self, unit) :
ur"""Timer interval unit.<br/>Default value: SEC<br/>Possible values = SEC, MIN
"""
try :
self._unit = unit
except Exception as e:
raise e
@property
def comment(self) :
ur"""Comments associated with this timer.
"""
try :
return self._comment
except Exception as e:
raise e
@comment.setter
def comment(self, comment) :
ur"""Comments associated with this timer.
"""
try :
self._comment = comment
except Exception as e:
raise e
@property
def newname(self) :
ur"""The new name of the timer.<br/>Minimum length = 1.
"""
try :
return self._newname
except Exception as e:
raise e
@newname.setter
def newname(self, newname) :
ur"""The new name of the timer.<br/>Minimum length = 1
"""
try :
self._newname = newname
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
ur""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(nstimer_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.nstimer
except Exception as e :
raise e
def _get_object_name(self) :
ur""" Returns the value of object identifier argument
"""
try :
if self.name is not None :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
ur""" Use this API to add nstimer.
"""
try :
if type(resource) is not list :
addresource = nstimer()
addresource.name = resource.name
addresource.interval = resource.interval
addresource.unit = resource.unit
addresource.comment = resource.comment
return addresource.add_resource(client)
else :
if (resource and len(resource) > 0) :
addresources = [ nstimer() for _ in range(len(resource))]
for i in range(len(resource)) :
addresources[i].name = resource[i].name
addresources[i].interval = resource[i].interval
addresources[i].unit = resource[i].unit
addresources[i].comment = resource[i].comment
result = cls.add_bulk_request(client, addresources)
return result
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
ur""" Use this API to delete nstimer.
"""
try :
if type(resource) is not list :
deleteresource = nstimer()
if type(resource) != type(deleteresource):
deleteresource.name = resource
else :
deleteresource.name = resource.name
return deleteresource.delete_resource(client)
else :
if type(resource[0]) != cls :
if (resource and len(resource) > 0) :
deleteresources = [ nstimer() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].name = resource[i]
else :
if (resource and len(resource) > 0) :
deleteresources = [ nstimer() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].name = resource[i].name
result = cls.delete_bulk_request(client, deleteresources)
return result
except Exception as e :
raise e
@classmethod
def update(cls, client, resource) :
ur""" Use this API to update nstimer.
"""
try :
if type(resource) is not list :
updateresource = nstimer()
updateresource.name = resource.name
updateresource.interval = resource.interval
updateresource.unit = resource.unit
updateresource.comment = resource.comment
return updateresource.update_resource(client)
else :
if (resource and len(resource) > 0) :
updateresources = [ nstimer() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].name = resource[i].name
updateresources[i].interval = resource[i].interval
updateresources[i].unit = resource[i].unit
updateresources[i].comment = resource[i].comment
result = cls.update_bulk_request(client, updateresources)
return result
except Exception as e :
raise e
@classmethod
def unset(cls, client, resource, args) :
ur""" Use this API to unset the properties of nstimer resource.
Properties that need to be unset are specified in args array.
"""
try :
if type(resource) is not list :
unsetresource = nstimer()
if type(resource) != type(unsetresource):
unsetresource.name = resource
else :
unsetresource.name = resource.name
unsetresource.interval = resource.interval
unsetresource.unit = resource.unit
unsetresource.comment = resource.comment
return unsetresource.unset_resource(client, args)
else :
if type(resource[0]) != cls :
if (resource and len(resource) > 0) :
unsetresources = [ nstimer() for _ in range(len(resource))]
for i in range(len(resource)) :
unsetresources[i].name = resource[i]
else :
if (resource and len(resource) > 0) :
unsetresources = [ nstimer() for _ in range(len(resource))]
for i in range(len(resource)) :
unsetresources[i].name = resource[i].name
unsetresources[i].interval = resource[i].interval
unsetresources[i].unit = resource[i].unit
unsetresources[i].comment = resource[i].comment
result = cls.unset_bulk_request(client, unsetresources, args)
return result
except Exception as e :
raise e
@classmethod
def rename(cls, client, resource, new_name) :
ur""" Use this API to rename a nstimer resource.
"""
try :
renameresource = nstimer()
if type(resource) == cls :
renameresource.name = resource.name
else :
renameresource.name = resource
return renameresource.rename_resource(client,new_name)
except Exception as e :
raise e
@classmethod
def get(cls, client, name="", option_="") :
ur""" Use this API to fetch all the nstimer resources that are configured on netscaler.
"""
try :
if not name :
obj = nstimer()
response = obj.get_resources(client, option_)
else :
if type(name) != cls :
if type(name) is not list :
obj = nstimer()
obj.name = name
response = obj.get_resource(client, option_)
else :
if name and len(name) > 0 :
response = [nstimer() for _ in range(len(name))]
obj = [nstimer() for _ in range(len(name))]
for i in range(len(name)) :
obj[i] = nstimer()
obj[i].name = name[i]
response[i] = obj[i].get_resource(client, option_)
return response
except Exception as e :
raise e
@classmethod
def get_filtered(cls, client, filter_) :
ur""" Use this API to fetch filtered set of nstimer resources.
filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = nstimer()
option_ = options()
option_.filter = filter_
response = obj.getfiltered(client, option_)
return response
except Exception as e :
raise e
@classmethod
def count(cls, client) :
ur""" Use this API to count the nstimer resources configured on NetScaler.
"""
try :
obj = nstimer()
option_ = options()
option_.count = True
response = obj.get_resources(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
@classmethod
def count_filtered(cls, client, filter_) :
ur""" Use this API to count filtered the set of nstimer resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = nstimer()
option_ = options()<|fim▁hole|> return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
class Unit:
SEC = "SEC"
MIN = "MIN"
class nstimer_response(base_response) :
def __init__(self, length=1) :
self.nstimer = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.nstimer = [nstimer() for _ in range(length)]<|fim▁end|>
|
option_.count = True
option_.filter = filter_
response = obj.getfiltered(client, option_)
if response :
|
<|file_name|>fields.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from .settings import MARTOR_ENABLE_LABEL
from .widgets import MartorWidget
class MartorFormField(forms.CharField):
def __init__(self, *args, **kwargs):
# to setup the editor without label
if not MARTOR_ENABLE_LABEL:
kwargs['label'] = ''
<|fim▁hole|>
if not issubclass(self.widget.__class__, MartorWidget):
self.widget = MartorWidget()<|fim▁end|>
|
super(MartorFormField, self).__init__(*args, **kwargs)
|
<|file_name|>0005_auto_20160518_0031.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('reddit', '0004_auto_20160518_0017'),
]
operations = [
migrations.AlterField(
model_name='redditcredentials',<|fim▁hole|> field=models.ForeignKey(to=settings.AUTH_USER_MODEL, unique=True),
),
]<|fim▁end|>
|
name='user',
|
<|file_name|>collection.go<|end_file_name|><|fim▁begin|>package ebpf
import (
"errors"
"fmt"
"math"
"reflect"
"strings"
"github.com/cilium/ebpf/asm"
"github.com/cilium/ebpf/internal"
"github.com/cilium/ebpf/internal/btf"
)
// CollectionOptions control loading a collection into the kernel.
//
// Maps and Programs are passed to NewMapWithOptions and NewProgramsWithOptions.
type CollectionOptions struct {
Maps MapOptions
Programs ProgramOptions
}
// CollectionSpec describes a collection.
type CollectionSpec struct {
Maps map[string]*MapSpec
Programs map[string]*ProgramSpec
}
// Copy returns a recursive copy of the spec.
func (cs *CollectionSpec) Copy() *CollectionSpec {
if cs == nil {
return nil
}
cpy := CollectionSpec{
Maps: make(map[string]*MapSpec, len(cs.Maps)),
Programs: make(map[string]*ProgramSpec, len(cs.Programs)),
}
for name, spec := range cs.Maps {
cpy.Maps[name] = spec.Copy()
}
for name, spec := range cs.Programs {
cpy.Programs[name] = spec.Copy()
}
return &cpy
}
// RewriteMaps replaces all references to specific maps.
//
// Use this function to use pre-existing maps instead of creating new ones
// when calling NewCollection. Any named maps are removed from CollectionSpec.Maps.
//
// Returns an error if a named map isn't used in at least one program.
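//
// A hypothetical usage sketch (the "events" map name and existingMap are
// assumptions made for illustration, not taken from this file):
//
//	if err := spec.RewriteMaps(map[string]*Map{"events": existingMap}); err != nil {
//		// handle error
//	}
//	coll, err := NewCollection(spec) // "events" is no longer created here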
func (cs *CollectionSpec) RewriteMaps(maps map[string]*Map) error {
for symbol, m := range maps {
// have we seen a program that uses this symbol / map
seen := false
fd := m.FD()
for progName, progSpec := range cs.Programs {
err := progSpec.Instructions.RewriteMapPtr(symbol, fd)
switch {
case err == nil:
seen = true
case asm.IsUnreferencedSymbol(err):
// Not all programs need to use the map
default:
return fmt.Errorf("program %s: %w", progName, err)
}
}
if !seen {
return fmt.Errorf("map %s not referenced by any programs", symbol)
}
// Prevent NewCollection from creating rewritten maps
delete(cs.Maps, symbol)
}
return nil
}
// RewriteConstants replaces the value of multiple constants.
//
// The constant must be defined like so in the C program:
//
// static volatile const type foobar;
// static volatile const type foobar = default;
//
// Replacement values must be of the same length as the C sizeof(type).
// If necessary, they are marshalled according to the same rules as
// map values.
//
// From Linux 5.5 the verifier will use constants to eliminate dead code.
//
// Returns an error if a constant doesn't exist.
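//
// A hypothetical usage sketch (the constant name and value are assumptions
// made for illustration, not taken from this file):
//
//	err := spec.RewriteConstants(map[string]interface{}{
//		"debug_level": uint32(1),
//	})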
func (cs *CollectionSpec) RewriteConstants(consts map[string]interface{}) error {
rodata := cs.Maps[".rodata"]
if rodata == nil {
return errors.New("missing .rodata section")
}
if rodata.BTF == nil {
return errors.New(".rodata section has no BTF")
}
if n := len(rodata.Contents); n != 1 {
return fmt.Errorf("expected one key in .rodata, found %d", n)
}
kv := rodata.Contents[0]
value, ok := kv.Value.([]byte)
if !ok {
return fmt.Errorf("first value in .rodata is %T not []byte", kv.Value)
}
buf := make([]byte, len(value))
copy(buf, value)
err := patchValue(buf, btf.MapValue(rodata.BTF), consts)
if err != nil {
return err
}
rodata.Contents[0] = MapKV{kv.Key, buf}
return nil
}
// Assign the contents of a CollectionSpec to a struct.
//
// This function is a short-cut to manually checking the presence
// of maps and programs in a collection spec. Consider using bpf2go if this
// sounds useful.
//
// The argument to must be a pointer to a struct. A field of the
// struct is updated with values from Programs or Maps if it
// has an `ebpf` tag and its type is *ProgramSpec or *MapSpec.
// The tag gives the name of the program or map as found in
// the CollectionSpec.
//
// struct {
// Foo *ebpf.ProgramSpec `ebpf:"xdp_foo"`
// Bar *ebpf.MapSpec `ebpf:"bar_map"`
// Ignored int
// }
//
// Returns an error if any of the fields can't be found, or
// if the same map or program is assigned multiple times.
func (cs *CollectionSpec) Assign(to interface{}) error {
valueOf := func(typ reflect.Type, name string) (reflect.Value, error) {
switch typ {
case reflect.TypeOf((*ProgramSpec)(nil)):
p := cs.Programs[name]
if p == nil {
return reflect.Value{}, fmt.Errorf("missing program %q", name)
}
return reflect.ValueOf(p), nil
case reflect.TypeOf((*MapSpec)(nil)):
m := cs.Maps[name]
if m == nil {
return reflect.Value{}, fmt.Errorf("missing map %q", name)
}
return reflect.ValueOf(m), nil
default:
return reflect.Value{}, fmt.Errorf("unsupported type %s", typ)
}
}
return assignValues(to, valueOf)
}
// LoadAndAssign maps and programs into the kernel and assign them to a struct.
//
// This function is a short-cut to manually checking the presence
// of maps and programs in a collection spec. Consider using bpf2go if this
// sounds useful.
//
// The argument to must be a pointer to a struct. A field of the
// struct is updated with values from Programs or Maps if it
// has an `ebpf` tag and its type is *Program or *Map.
// The tag gives the name of the program or map as found in
// the CollectionSpec.
//
// struct {
// Foo *ebpf.Program `ebpf:"xdp_foo"`
// Bar *ebpf.Map `ebpf:"bar_map"`
// Ignored int
// }
//
// opts may be nil.
//
// Returns an error if any of the fields can't be found, or
// if the same map or program is assigned multiple times.
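//
// A hypothetical usage sketch (struct fields and tag names are assumptions
// made for illustration, not taken from this file):
//
//	var objs struct {
//		Foo *ebpf.Program `ebpf:"xdp_foo"`
//		Bar *ebpf.Map     `ebpf:"bar_map"`
//	}
//	if err := spec.LoadAndAssign(&objs, nil); err != nil {
//		// handle error
//	}
//	defer objs.Foo.Close()
//	defer objs.Bar.Close()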
func (cs *CollectionSpec) LoadAndAssign(to interface{}, opts *CollectionOptions) error {
if opts == nil {
opts = &CollectionOptions{}
}
loadMap, loadProgram, done, cleanup := lazyLoadCollection(cs, opts)
defer cleanup()
valueOf := func(typ reflect.Type, name string) (reflect.Value, error) {
switch typ {
case reflect.TypeOf((*Program)(nil)):
p, err := loadProgram(name)
if err != nil {
return reflect.Value{}, err
}
return reflect.ValueOf(p), nil
case reflect.TypeOf((*Map)(nil)):
m, err := loadMap(name)
if err != nil {
return reflect.Value{}, err
}
return reflect.ValueOf(m), nil
default:
return reflect.Value{}, fmt.Errorf("unsupported type %s", typ)
}
}
if err := assignValues(to, valueOf); err != nil {
return err
}
done()
return nil
}
// Collection is a collection of Programs and Maps associated
// with their symbols
type Collection struct {
Programs map[string]*Program
Maps map[string]*Map
}
// NewCollection creates a Collection from a specification.
func NewCollection(spec *CollectionSpec) (*Collection, error) {
return NewCollectionWithOptions(spec, CollectionOptions{})
}
// NewCollectionWithOptions creates a Collection from a specification.
func NewCollectionWithOptions(spec *CollectionSpec, opts CollectionOptions) (*Collection, error) {
loadMap, loadProgram, done, cleanup := lazyLoadCollection(spec, &opts)
defer cleanup()
for mapName := range spec.Maps {
_, err := loadMap(mapName)
if err != nil {
return nil, err
}
}
for progName := range spec.Programs {
_, err := loadProgram(progName)
if err != nil {
return nil, err
}
}
maps, progs := done()
return &Collection{
progs,
maps,
}, nil
}
type btfHandleCache map[*btf.Spec]*btf.Handle
func (btfs btfHandleCache) load(spec *btf.Spec) (*btf.Handle, error) {
if btfs[spec] != nil {
return btfs[spec], nil
}
handle, err := btf.NewHandle(spec)
if err != nil {
return nil, err
}
btfs[spec] = handle
return handle, nil
}
func (btfs btfHandleCache) close() {
for _, handle := range btfs {
handle.Close()
}
}
func lazyLoadCollection(coll *CollectionSpec, opts *CollectionOptions) (
loadMap func(string) (*Map, error),
loadProgram func(string) (*Program, error),
done func() (map[string]*Map, map[string]*Program),
cleanup func(),
) {
var (
maps = make(map[string]*Map)
progs = make(map[string]*Program)
btfs = make(btfHandleCache)
skipMapsAndProgs = false
)
cleanup = func() {
btfs.close()
if skipMapsAndProgs {
return
}
for _, m := range maps {
m.Close()
}
for _, p := range progs {
p.Close()
}
}
done = func() (map[string]*Map, map[string]*Program) {
skipMapsAndProgs = true
return maps, progs
}
loadMap = func(mapName string) (*Map, error) {
if m := maps[mapName]; m != nil {
return m, nil
}
mapSpec := coll.Maps[mapName]
if mapSpec == nil {
return nil, fmt.Errorf("missing map %s", mapName)
}
m, err := newMapWithOptions(mapSpec, opts.Maps, btfs)
if err != nil {
return nil, fmt.Errorf("map %s: %w", mapName, err)
}
maps[mapName] = m
return m, nil
}
loadProgram = func(progName string) (*Program, error) {
if prog := progs[progName]; prog != nil {
return prog, nil
}
progSpec := coll.Programs[progName]
if progSpec == nil {
return nil, fmt.Errorf("unknown program %s", progName)
}
progSpec = progSpec.Copy()
// Rewrite any reference to a valid map.
for i := range progSpec.Instructions {
ins := &progSpec.Instructions[i]
if ins.OpCode != asm.LoadImmOp(asm.DWord) || ins.Reference == "" {
continue
}
if uint32(ins.Constant) != math.MaxUint32 {
// Don't overwrite maps already rewritten, users can
// rewrite programs in the spec themselves
continue
}
m, err := loadMap(ins.Reference)
if err != nil {
return nil, fmt.Errorf("program %s: %s", progName, err)
}
fd := m.FD()
if fd < 0 {
return nil, fmt.Errorf("map %s: %w", ins.Reference, internal.ErrClosedFd)
}
if err := ins.RewriteMapPtr(m.FD()); err != nil {
return nil, fmt.Errorf("progam %s: map %s: %w", progName, ins.Reference, err)
}
}
prog, err := newProgramWithOptions(progSpec, opts.Programs, btfs)
if err != nil {
return nil, fmt.Errorf("program %s: %w", progName, err)
}
progs[progName] = prog
return prog, nil
}
return
}
// LoadCollection parses an object file and converts it to a collection.
func LoadCollection(file string) (*Collection, error) {
spec, err := LoadCollectionSpec(file)
if err != nil {
return nil, err
}
return NewCollection(spec)
}
// Close frees all maps and programs associated with the collection.
//
// The collection mustn't be used afterwards.
func (coll *Collection) Close() {
for _, prog := range coll.Programs {
prog.Close()
}
for _, m := range coll.Maps {
m.Close()
}
}
// DetachMap removes the named map from the Collection.
//
// This means that a later call to Close() will not affect this map.
//
// Returns nil if no map of that name exists.
func (coll *Collection) DetachMap(name string) *Map {
m := coll.Maps[name]
delete(coll.Maps, name)
return m
}
// DetachProgram removes the named program from the Collection.
//
// This means that a later call to Close() will not affect this program.
//
// Returns nil if no program of that name exists.
func (coll *Collection) DetachProgram(name string) *Program {
p := coll.Programs[name]
delete(coll.Programs, name)
return p
}
// Assign the contents of a collection to a struct.
//
// Deprecated: use CollectionSpec.Assign instead. It provides the same
// functionality but creates only the maps and programs requested.
func (coll *Collection) Assign(to interface{}) error {
assignedMaps := make(map[string]struct{})
assignedPrograms := make(map[string]struct{})
valueOf := func(typ reflect.Type, name string) (reflect.Value, error) {
switch typ {
case reflect.TypeOf((*Program)(nil)):
p := coll.Programs[name]
if p == nil {
return reflect.Value{}, fmt.Errorf("missing program %q", name)
}
assignedPrograms[name] = struct{}{}
return reflect.ValueOf(p), nil
case reflect.TypeOf((*Map)(nil)):
m := coll.Maps[name]
if m == nil {
return reflect.Value{}, fmt.Errorf("missing map %q", name)
}
assignedMaps[name] = struct{}{}
return reflect.ValueOf(m), nil
default:
return reflect.Value{}, fmt.Errorf("unsupported type %s", typ)
}
}
if err := assignValues(to, valueOf); err != nil {
return err
}
for name := range assignedPrograms {
coll.DetachProgram(name)
}
for name := range assignedMaps {
coll.DetachMap(name)
}
return nil<|fim▁hole|> reflect.StructField
value reflect.Value
}
var (
fields []structField
visitedTypes = make(map[reflect.Type]bool)
flattenStruct func(reflect.Value) error
)
flattenStruct = func(structVal reflect.Value) error {
structType := structVal.Type()
if structType.Kind() != reflect.Struct {
return fmt.Errorf("%s is not a struct", structType)
}
if visitedTypes[structType] {
return fmt.Errorf("recursion on type %s", structType)
}
for i := 0; i < structType.NumField(); i++ {
field := structField{structType.Field(i), structVal.Field(i)}
name := field.Tag.Get("ebpf")
if name != "" {
fields = append(fields, field)
continue
}
var err error
switch field.Type.Kind() {
case reflect.Ptr:
if field.Type.Elem().Kind() != reflect.Struct {
continue
}
if field.value.IsNil() {
return fmt.Errorf("nil pointer to %s", structType)
}
err = flattenStruct(field.value.Elem())
case reflect.Struct:
err = flattenStruct(field.value)
default:
continue
}
if err != nil {
return fmt.Errorf("field %s: %s", field.Name, err)
}
}
return nil
}
toValue := reflect.ValueOf(to)
if toValue.Type().Kind() != reflect.Ptr {
return fmt.Errorf("%T is not a pointer to struct", to)
}
if toValue.IsNil() {
return fmt.Errorf("nil pointer to %T", to)
}
if err := flattenStruct(toValue.Elem()); err != nil {
return err
}
type elem struct {
// Either *Map or *Program
typ reflect.Type
name string
}
assignedTo := make(map[elem]string)
for _, field := range fields {
name := field.Tag.Get("ebpf")
if strings.Contains(name, ",") {
return fmt.Errorf("field %s: ebpf tag contains a comma", field.Name)
}
e := elem{field.Type, name}
if assignedField := assignedTo[e]; assignedField != "" {
return fmt.Errorf("field %s: %q was already assigned to %s", field.Name, name, assignedField)
}
value, err := valueOf(field.Type, name)
if err != nil {
return fmt.Errorf("field %s: %w", field.Name, err)
}
if !field.value.CanSet() {
return fmt.Errorf("field %s: can't set value", field.Name)
}
field.value.Set(value)
assignedTo[e] = field.Name
}
return nil
}<|fim▁end|>
|
}
func assignValues(to interface{}, valueOf func(reflect.Type, string) (reflect.Value, error)) error {
type structField struct {
|
<|file_name|>pmx_datum_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3<|fim▁hole|>@author: Bruno Beloff ([email protected])
"""
from scs_core.data.datetime import LocalizedDatetime
from scs_core.data.json import JSONify
from scs_core.particulate.pmx_datum import PMxDatum
# --------------------------------------------------------------------------------------------------------------------
now = LocalizedDatetime.now().utc()
pmx = PMxDatum(now, 11, 22, None, 33)
print(pmx)
print("-")
jstr = JSONify.dumps(pmx)
print(jstr)
print("-")
<|file_name|>0063_auto_20210511_2343.py<|end_file_name|><|fim▁begin|># Generated by Django 3.2 on 2021-05-11 13:43
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('stock', '0062_auto_20210511_2151'),
]
operations = [
migrations.RemoveField(
model_name='stockitemtracking',
name='link',
),
migrations.RemoveField(
model_name='stockitemtracking',
name='quantity',
),
migrations.RemoveField(
model_name='stockitemtracking',
name='system',
),
migrations.RemoveField(
model_name='stockitemtracking',
name='title',
),
]
<|file_name|>landing-page-feature-list.component.ts<|end_file_name|>import { Component } from '@angular/core';
@Component({
selector: 'uxd-landing-page-feature-list',
template: '<ng-content></ng-content>',
styles: [':host { display: block; }'],
host: {
'class': 'row'
}
})
export class LandingPageFeatureListComponent {
}
<|file_name|>app.js<|end_file_name|><|fim▁begin|>/* -----------------------------------------------
/* How to use? : Check the GitHub README
/* ----------------------------------------------- */
/* To load a config file (particles.json) you need to host this demo (MAMP/WAMP/local)... */
/*
particlesJS.load('particles-js', 'particles.json', function() {
console.log('particles.js loaded - callback');
});
*/
/* Otherwise just put the config content (json): */
particlesJS('particles-js',
{
"particles": {
"number": {
"value": 80,
"density": {
"enable": true,
"value_area": 800
}
},
"color": {
"value": "#888"
},
"shape": {
"type": "circle",
"stroke": {
"width": 0,
"color": "#000000"
},
"polygon": {
"nb_sides": 5
},
"image": {
"src": "img/github.svg",
"width": 100,
"height": 100
}
},
"opacity": {
"value": 0.5,
"random": false,
"anim": {
"enable": false,
"speed": 1,
"opacity_min": 0.1,
"sync": false
}
},
"size": {
"value": 5,
"random": true,
"anim": {
"enable": false,
"speed": 40,
"size_min": 0.1,
"sync": false
}
},
"line_linked": {
"enable": true,
"distance": 150,
"color": "#777",
"opacity": 0.4,
"width": 1
},
"move": {
"enable": true,
"speed": 6,
"direction": "none",
"random": false,
"straight": false,
"out_mode": "out",
"attract": {
"enable": false,
"rotateX": 600,
"rotateY": 1200
}
}
},
"interactivity": {
"detect_on": "canvas",
"events": {
"onhover": {
"enable": true,
"mode": "repulse"
},
"onclick": {
"enable": true,
"mode": "push"
},
"resize": true
},
"modes": {
"grab": {
"distance": 400,
"line_linked": {
"opacity": 1
}
},
"bubble": {
"distance": 400,
"size": 40,
"duration": 2,
"opacity": 8,
"speed": 3
},
"repulse": {
"distance": 200
},
"push": {
"particles_nb": 4
},
"remove": {
"particles_nb": 2
}
}
},
"retina_detect": true,
"config_demo": {
"hide_card": false,
"background_color": "#b61924",
"background_image": "",
"background_position": "50% 50%",
"background_repeat": "no-repeat",
"background_size": "cover"
}
}
);
<|file_name|>wsgi.py<|end_file_name|>"""
WSGI config for SysuLesson project.

It exposes the WSGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "SysuLesson.settings")
application = get_wsgi_application()
<|file_name|>VXObject.ts<|end_file_name|>export class TObject {
private _tag: any;
/**
* Stores a value as a part of a component.
* Tag has no predefined meaning. The Tag property can store any additional value for the convenience of developers.
*
*/
public get Tag(): any {
return this._tag;
}
public set Tag(val: any) {
if (val != this._tag) {
this._tag = val;
}
}
/**
* Returns a string indicating the type of the object instance.
*/
public static getClassName(): string {
var funcNameRegex = /function ([^\(]{1,})\(/;
var results = (funcNameRegex).exec((<any> this).toString());
return (results && results.length > 1) ? results[1] : "";
}
public getClassName(): string {
var funcNameRegex = /function ([^\(]{1,})\(/;
var results = (funcNameRegex).exec(this["constructor"].toString());
return (results && results.length > 1) ? results[1] : "";
}
/**
Auto-generated unique identifier of the component.
When a rendering occurs this ID becomes the id of the HTML element.
**/
public ID: string = TObject.genGUID();
public static genGUID(): string {
return TObject.s4() + TObject.s4() + TObject.s4() + TObject.s4() + TObject.s4() + TObject.s4() + TObject.s4() + TObject.s4();
}
private static s4(): string { return Math.floor((1 + Math.random()) * 0x10000).toString(16).substring(1); }
toString() { return this.ID; }
private listeners: collections.Dictionary<String, eventObject>;
private registerEventListener(type, target, listener) {
if (this.listeners == null) this.listeners = new collections.Dictionary<String, eventObject>();
var obj = this.listeners.getValue(target.ID + type);
if (obj == null) {
obj = new eventObject(type, target, listener);
this.listeners.setValue(obj.toString(), obj);
} else obj.listener = listener;
}
private removeEventListener(type, target) {
if (this.listeners == null) this.listeners = new collections.Dictionary<String, eventObject>();
else this.listeners.remove(target.ID + type);
}
private triggerEvent(type, target?, ...a: any[]): void {
if (this.listeners == null) this.listeners = new collections.Dictionary<String, eventObject>();
this.listeners.forEach((key: string, value: eventObject) => {
var context = {};
if ((value.type == type) && (target == null || target == value.target)) {
value.listener.apply(context, a || []);
}
});
}
}
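// Usage sketch (editor's illustration, not part of the original source): TObject assigns a
// GUID-based ID at construction and exposes Tag for arbitrary user data.
//
//   var obj = new TObject();
//   obj.Tag = { createdBy: "demo" };  // Tag has no predefined meaning; it stores any value
//   console.log(obj.ID);              // auto-generated identifier, later used as the HTML element id
//   console.log(obj.getClassName());  // "TObject"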
class eventObject {
public type: string;
public listener: () => void;
public target: TObject;
constructor(type: string, target: TObject, listener: () => void ) {
this.type = type;
this.listener = listener;
this.target = target;
}
public toString() {
return this.target.ID + this.type;
}
}
export class TCollectionItem extends TObject {
public __ownerCollection: TCollection<TCollectionItem>;
public get OwnerCollection(): TCollection<TCollectionItem> {
return this.__ownerCollection;
}
public set OwnerCollection(val: TCollection<TCollectionItem>) {
this.__ownerCollection = val;
}
constructor() {
super();
}
}
export class TTimer {
private action: () => void;
private active: boolean = false;
private timeoutObject: number;
constructor(callback: () => void ) {
this.action = callback;
}
once(milliSeconds: number): void {
var self = this;
if (isNaN(milliSeconds)) { milliSeconds = 0; }
this.timeoutObject = window.setTimeout(function () { self.action(); }, milliSeconds);
}
play(milliSeconds: number): void {
if (isNaN(milliSeconds)) { milliSeconds = 0; }
var self = this;
this.timeoutObject = window.setTimeout(function () {
self.action();
self.play(milliSeconds);
}, milliSeconds);
}
reset() {
clearTimeout(this.timeoutObject);
}
}
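// Usage sketch (editor's illustration, not part of the original source): TTimer wraps
// window.setTimeout, so it is only meaningful in a browser environment.
//
//   var timer = new TTimer(() => console.log("tick"));
//   timer.play(1000);  // run the callback roughly every 1000 ms
//   timer.once(500);   // schedule one extra invocation after 500 ms
//   timer.reset();     // clears the most recently scheduled timeout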
// Copyright 2013 Basarat Ali Syed. All Rights Reserved.
//
// Licensed under MIT open source license http://opensource.org/licenses/MIT
//
// Original JavaScript code was by Mauricio Santos
/**
* @namespace Top level namespace for collections, a TypeScript data structure library.
*/
export module collections {
/**
* Function signature for comparing
* <0 means a is smaller
* = 0 means they are equal
* >0 means a is larger
*/
export interface ICompareFunction<T>{
(a: T, b: T): number;
}
/**
* Function signature for checking equality
*/
export interface IEqualsFunction<T>{
(a: T, b: T): boolean;
}
/**
* Function signature for Iterations. Return false to break from loop
*/
export interface ILoopFunction<T>{
(a: T): boolean;
}
/**
* Default function to compare element order.
* @function
*/
export function defaultCompare<T>(a: T, b: T): number {
if (a < b) {
return -1;
} else if (a === b) {
return 0;
} else {
return 1;
}
}
/**
* Default function to test equality.
* @function
*/
export function defaultEquals<T>(a: T, b: T): boolean {
return a === b;
}
/**
* Default function to convert an object to a string.
* @function
*/
export function defaultToString(item): string {
if (item === null) {
return 'COLLECTION_NULL';
} else if (collections.isUndefined(item)) {
return 'COLLECTION_UNDEFINED';
} else if (collections.isString(item)) {
return item;
} else {
return item.toString();
}
}
/**
* Joins all the properies of the object using the provided join string
*/
export function toString<T>(item: T, join: string = ","): string {
if (item === null) {
return 'COLLECTION_NULL';
} else if (collections.isUndefined(item)) {
return 'COLLECTION_UNDEFINED';
} else if (collections.isString(item)) {
return item.toString();
} else {
var toret = "{";
var first = true;
for (var prop in item) {
if (item.hasOwnProperty(prop)) {
if (first)
first = false;
else
toret = toret + join;
toret = toret + prop + ":" + item[prop];
}
}
return toret + "}";
}
}
/**
* Checks if the given argument is a function.
* @function
*/
export function isFunction(func): boolean {
return (typeof func) === 'function';
}
/**
* Checks if the given argument is undefined.
* @function
*/
export function isUndefined(obj): boolean {
return (typeof obj) === 'undefined';
}
/**
* Checks if the given argument is a string.
* @function
*/
export function isString(obj): boolean {
return Object.prototype.toString.call(obj) === '[object String]';
}
/**
* Reverses a compare function.
* @function
*/
export function reverseCompareFunction<T>(compareFunction: ICompareFunction<T>): ICompareFunction<T> {
if (!collections.isFunction(compareFunction)) {
return function (a, b) {
if (a < b) {
return 1;
} else if (a === b) {
return 0;
} else {
return -1;
}
};
} else {
return function (d: T, v: T) {
return compareFunction(d, v) * -1;
};
}
}
/**
* Returns an equal function given a compare function.
* @function
*/
export function compareToEquals<T>(compareFunction: ICompareFunction<T>): IEqualsFunction<T> {
return function (a: T, b: T) {
return compareFunction(a, b) === 0;
};
}
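// Usage sketch (editor's illustration, not part of the original source): composing the
// comparator helpers defined above.
//
//   var byLength: ICompareFunction<string> = (a, b) => a.length - b.length;
//   var longestFirst = reverseCompareFunction(byLength); // flips the ordering
//   var sameLength = compareToEquals(byLength);
//   sameLength("abc", "xyz"); // true - both strings compare as equal under byLength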
/**
* @namespace Contains various functions for manipulating arrays.
*/
export module arrays {
/**
* Returns the position of the first occurrence of the specified item
* within the specified array.
* @param {*} array the array in which to search the element.
* @param {Object} item the element to search.
* @param {function(Object,Object):boolean=} equalsFunction optional function used to
* check equality between 2 elements.
* @return {number} the position of the first occurrence of the specified element
* within the specified array, or -1 if not found.
*/
export function indexOf<T>(array: T[], item: T, equalsFunction?: collections.IEqualsFunction<T>): number {
var equals = equalsFunction || collections.defaultEquals;
var length = array.length;
for (var i = 0; i < length; i++) {
if (equals(array[i], item)) {
return i;
}
}
return -1;
}
/**
* Returns the position of the last occurrence of the specified element
* within the specified array.
* @param {*} array the array in which to search the element.
* @param {Object} item the element to search.
* @param {function(Object,Object):boolean=} equalsFunction optional function used to
* check equality between 2 elements.
* @return {number} the position of the last occurrence of the specified element
* within the specified array or -1 if not found.
*/
export function lastIndexOf<T>(array: T[], item: T, equalsFunction?: collections.IEqualsFunction<T>): number {
var equals = equalsFunction || collections.defaultEquals;
var length = array.length;
for (var i = length - 1; i >= 0; i--) {
if (equals(array[i], item)) {
return i;
}
}
return -1;
}
/**
* Returns true if the specified array contains the specified element.
* @param {*} array the array in which to search the element.
* @param {Object} item the element to search.
* @param {function(Object,Object):boolean=} equalsFunction optional function to
* check equality between 2 elements.
* @return {boolean} true if the specified array contains the specified element.
*/
export function contains<T>(array: T[], item: T, equalsFunction?: collections.IEqualsFunction<T>): boolean {
return arrays.indexOf(array, item, equalsFunction) >= 0;
}
/**
* Removes the first occurrence of the specified element from the specified array.
* @param {*} array the array in which to search element.
* @param {Object} item the element to search.
* @param {function(Object,Object):boolean=} equalsFunction optional function to
* check equality between 2 elements.
* @return {boolean} true if the array changed after this call.
*/
export function remove<T>(array: T[], item: T, equalsFunction?: collections.IEqualsFunction<T>): boolean {
var index = arrays.indexOf(array, item, equalsFunction);
if (index < 0) {
return false;
}
array.splice(index, 1);
return true;
}
/**
* Returns the number of elements in the specified array equal
* to the specified object.
* @param {Array} array the array in which to determine the frequency of the element.
* @param {Object} item the element whose frequency is to be determined.
* @param {function(Object,Object):boolean=} equalsFunction optional function used to
* check equality between 2 elements.
* @return {number} the number of elements in the specified array
* equal to the specified object.
*/
export function frequency<T>(array: T[], item: T, equalsFunction?: collections.IEqualsFunction<T>): number {
var equals = equalsFunction || collections.defaultEquals;
var length = array.length;
var freq = 0;
for (var i = 0; i < length; i++) {
if (equals(array[i], item)) {
freq++;
}
}
return freq;
}
/**
* Returns true if the two specified arrays are equal to one another.
* Two arrays are considered equal if both arrays contain the same number
* of elements, and all corresponding pairs of elements in the two
* arrays are equal and are in the same order.
* @param {Array} array1 one array to be tested for equality.
* @param {Array} array2 the other array to be tested for equality.
* @param {function(Object,Object):boolean=} equalsFunction optional function used to
* check equality between elements in the arrays.
* @return {boolean} true if the two arrays are equal
*/
export function equals<T>(array1: T[], array2: T[], equalsFunction?: collections.IEqualsFunction<T>): boolean {
var equals = equalsFunction || collections.defaultEquals;
if (array1.length !== array2.length) {
return false;
}
var length = array1.length;
for (var i = 0; i < length; i++) {
if (!equals(array1[i], array2[i])) {
return false;
}
}
return true;
}
/**
* Returns a shallow copy of the specified array.
* @param {*} array the array to copy.
* @return {Array} a copy of the specified array
*/
export function copy<T>(array: T[]): T[] {
return array.concat();
}
/**
* Swaps the elements at the specified positions in the specified array.
* @param {Array} array The array in which to swap elements.
* @param {number} i the index of one element to be swapped.
* @param {number} j the index of the other element to be swapped.
* @return {boolean} true if the array is defined and the indexes are valid.
*/
export function swap<T>(array: T[], i: number, j: number): boolean {
if (i < 0 || i >= array.length || j < 0 || j >= array.length) {
return false;
}
var temp = array[i];
array[i] = array[j];
array[j] = temp;
return true;
}
export function toString<T>(array: T[]): string {
return '[' + array.toString() + ']';
}
/**
* Executes the provided function once for each element present in this array
* starting from index 0 to length - 1.
* @param {Array} array The array in which to iterate.
* @param {function(Object):*} callback function to execute, it is
* invoked with one argument: the element value, to break the iteration you can
* optionally return false.
*/
export function forEach<T>(array: T[], callback: (item: T) => boolean): void {
var length = array.length;
for (var i = 0; i < length; i++) {
if (callback(array[i]) === false) {
return;
}
}
}
}
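// Usage sketch (editor's illustration, not part of the original source): the arrays helpers
// take an optional equals function and fall back to === via defaultEquals.
//
//   var data = [1, 2, 3, 2];
//   arrays.indexOf(data, 2);   // 1
//   arrays.frequency(data, 2); // 2
//   arrays.remove(data, 2);    // true - data is now [1, 3, 2]
//   arrays.contains(data, 4);  // false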
// A linked list node
export interface ILinkedListNode<T>{
element: T;
next: ILinkedListNode<T>;
}
export class LinkedList<T> {
/**
* First node in the list
* @type {Object}
* @private
*/
public firstNode: ILinkedListNode<T> = null;
/**
* Last node in the list
* @type {Object}
* @private
*/
private lastNode: ILinkedListNode<T> = null;
/**
* Number of elements in the list
* @type {number}
* @private
*/
private nElements = 0;
/**
* Creates an empty Linked List.
* @class A linked list is a data structure consisting of a group of nodes
* which together represent a sequence.
* @constructor
*/
constructor() {
}
/**
* Adds an element to this list.
* @param {Object} item element to be added.
* @param {number=} index optional index to add the element. If no index is specified
* the element is added to the end of this list.
* @return {boolean} true if the element was added or false if the index is invalid
* or if the element is undefined.
*/
add(item: T, index?: number): boolean {
if (collections.isUndefined(index)) {
index = this.nElements;
}
if (index < 0 || index > this.nElements || collections.isUndefined(item)) {
return false;
}
var newNode = this.createNode(item);
if (this.nElements === 0) {
// First node in the list.
this.firstNode = newNode;
this.lastNode = newNode;
} else if (index === this.nElements) {
// Insert at the end.
this.lastNode.next = newNode;
this.lastNode = newNode;
} else if (index === 0) {
// Change first node.
newNode.next = this.firstNode;
this.firstNode = newNode;
} else {
var prev = this.nodeAtIndex(index - 1);
newNode.next = prev.next;
prev.next = newNode;
}
this.nElements++;
return true;
}
/**
* Returns the first element in this list.
* @return {*} the first element of the list or undefined if the list is
* empty.
*/
first(): T {
if (this.firstNode !== null) {
return this.firstNode.element;
}
return undefined;
}
/**
* Returns the last element in this list.
* @return {*} the last element in the list or undefined if the list is
* empty.
*/
last(): T {
if (this.lastNode !== null) {
return this.lastNode.element;
}
return undefined;
}
/**
* Returns the element at the specified position in this list.
* @param {number} index desired index.
* @return {*} the element at the given index or undefined if the index is
* out of bounds.
*/
elementAtIndex(index: number): T {
var node = this.nodeAtIndex(index);
if (node === null) {
return undefined;
}
return node.element;
}
/**
* Returns the index in this list of the first occurrence of the
* specified element, or -1 if the List does not contain this element.
* <p>If the elements inside this list are
* not comparable with the === operator a custom equals function should be
* provided to perform searches, the function must receive two arguments and
* return true if they are equal, false otherwise. Example:</p>
*
* <pre>
* var petsAreEqualByName = function(pet1, pet2) {
* return pet1.name === pet2.name;
* }
* </pre>
* @param {Object} item element to search for.
* @param {function(Object,Object):boolean=} equalsFunction Optional
* function used to check if two elements are equal.
* @return {number} the index in this list of the first occurrence
* of the specified element, or -1 if this list does not contain the
* element.
*/
indexOf(item: T, equalsFunction?: IEqualsFunction<T>): number {
var equalsF = equalsFunction || collections.defaultEquals;
if (collections.isUndefined(item)) {
return -1;
}
var currentNode = this.firstNode;
var index = 0;
while (currentNode !== null) {
if (equalsF(currentNode.element, item)) {
return index;
}
index++;
currentNode = currentNode.next;
}
return -1;
}
/**
* Returns true if this list contains the specified element.
* <p>If the elements inside the list are
* not comparable with the === operator a custom equals function should be
* provided to perform searches, the function must receive two arguments and
* return true if they are equal, false otherwise. Example:</p>
*
* <pre>
* var petsAreEqualByName = function(pet1, pet2) {
* return pet1.name === pet2.name;
* }
* </pre>
* @param {Object} item element to search for.
* @param {function(Object,Object):boolean=} equalsFunction Optional
* function used to check if two elements are equal.
* @return {boolean} true if this list contains the specified element, false
* otherwise.
*/
contains(item: T, equalsFunction?: IEqualsFunction<T>): boolean {
return (this.indexOf(item, equalsFunction) >= 0);
}
/**
* Removes the first occurrence of the specified element in this list.
* <p>If the elements inside the list are
* not comparable with the === operator a custom equals function should be
* provided to perform searches, the function must receive two arguments and
* return true if they are equal, false otherwise. Example:</p>
*
* <pre>
* var petsAreEqualByName = function(pet1, pet2) {
* return pet1.name === pet2.name;
* }
* </pre>
* @param {Object} item element to be removed from this list, if present.
* @return {boolean} true if the list contained the specified element.
*/
remove(item: T, equalsFunction?: IEqualsFunction<T>): boolean {
var equalsF = equalsFunction || collections.defaultEquals;
if (this.nElements < 1 || collections.isUndefined(item)) {
return false;
}
var previous = null;
var currentNode = this.firstNode;
while (currentNode !== null) {
if (equalsF(currentNode.element, item)) {
if (currentNode === this.firstNode) {
this.firstNode = this.firstNode.next;
if (currentNode === this.lastNode) {
this.lastNode = null;
}
} else if (currentNode === this.lastNode) {
this.lastNode = previous;
previous.next = currentNode.next;
currentNode.next = null;
} else {
previous.next = currentNode.next;
currentNode.next = null;
}
this.nElements--;
return true;
}
previous = currentNode;
currentNode = currentNode.next;
}
return false;
}
/**
* Removes all of the elements from this list.
*/
clear(): void {
this.firstNode = null;
this.lastNode = null;
this.nElements = 0;
}
/**
* Returns true if this list is equal to the given list.
* Two lists are equal if they have the same elements in the same order.
* @param {LinkedList} other the other list.
* @param {function(Object,Object):boolean=} equalsFunction optional
* function used to check if two elements are equal. If the elements in the lists
* are custom objects you should provide a function, otherwise
* the === operator is used to check equality between elements.
* @return {boolean} true if this list is equal to the given list.
*/
equals(other: LinkedList<T>, equalsFunction?: IEqualsFunction<T>): boolean {
var eqF = equalsFunction || collections.defaultEquals;
if (!(other instanceof collections.LinkedList)) {
return false;
}
if (this.length() !== other.length()) {
return false;
}
return this.equalsAux(this.firstNode, other.firstNode, eqF);
}
/**
* @private
*/
private equalsAux(n1: ILinkedListNode<T>, n2: ILinkedListNode<T>, eqF: IEqualsFunction<T>): boolean {
while (n1 !== null) {
if (!eqF(n1.element, n2.element)) {
return false;
}
n1 = n1.next;
n2 = n2.next;
}
return true;
}
/**
* Removes the element at the specified position in this list.
* @param {number} index given index.
* @return {*} removed element or undefined if the index is out of bounds.
*/
removeElementAtIndex(index: number): T {
if (index < 0 || index >= this.nElements) {
return undefined;
}
var element;
if (this.nElements === 1) {
//First node in the list.
element = this.firstNode.element;
this.firstNode = null;
this.lastNode = null;
} else {
var previous = this.nodeAtIndex(index - 1);
if (previous === null) {
element = this.firstNode.element;
this.firstNode = this.firstNode.next;
} else if (previous.next === this.lastNode) {
element = this.lastNode.element;
this.lastNode = previous;
}
if (previous !== null) {
element = previous.next.element;
previous.next = previous.next.next;
}
}
this.nElements--;
return element;
}
/**
* Executes the provided function once for each element present in this list in order.
* @param {function(Object):*} callback function to execute, it is
* invoked with one argument: the element value, to break the iteration you can
* optionally return false.
*/
forEach(callback: (item: T) => boolean): void {
var currentNode = this.firstNode;
while (currentNode !== null) {
if (callback(currentNode.element) === false) {
break;
}
currentNode = currentNode.next;
}
}
/**
* Reverses the order of the elements in this linked list (makes the last
* element first, and the first element last).
*/
reverse(): void {
var previous = null;
var current = this.firstNode;
var temp = null;
while (current !== null) {
temp = current.next;
current.next = previous;
previous = current;
current = temp;
}
temp = this.firstNode;
this.firstNode = this.lastNode;
this.lastNode = temp;
}
/**
* Returns an array containing all of the elements in this list in proper
* sequence.
* @return {Array.<*>} an array containing all of the elements in this list,
* in proper sequence.
*/
toArray(): T[] {
var array: T[] = [];
var currentNode: ILinkedListNode<T> = this.firstNode;
while (currentNode !== null) {
array.push(currentNode.element);
currentNode = currentNode.next;
}
return array;
}
/**
* Returns the number of elements in this list.
* @return {number} the number of elements in this list.
*/
length(): number {
return this.nElements;
}
/**
* Returns true if this list contains no elements.
* @return {boolean} true if this list contains no elements.
*/
isEmpty(): boolean {
return this.nElements <= 0;
}
toString(): string {
return collections.arrays.toString(this.toArray());
}
/**
* @private
*/
private nodeAtIndex(index): ILinkedListNode<T> {
if (index < 0 || index >= this.nElements) {
return null;
}
if (index === (this.nElements - 1)) {
return this.lastNode;
}
var node = this.firstNode;
for (var i = 0; i < index; i++) {
node = node.next;
}
return node;
}
/**
* @private
*/
private createNode(item: T): ILinkedListNode<T> {
return {
element: item,
next: null
};
}
} // End of linked list
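// Usage sketch (editor's illustration, not part of the original source): basic LinkedList calls.
//
//   var list = new LinkedList<string>();
//   list.add("a");          // append to the end
//   list.add("c");
//   list.add("b", 1);       // insert at index 1
//   list.elementAtIndex(1); // "b"
//   list.remove("c");       // true
//   list.toArray();         // ["a", "b"]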
// Used internally by dictionary
interface IDicitonaryPair<K, V>{
key: K;
value: V;
}
export class Dictionary<K, V>{
/**
* Object holding the key-value pairs.
* @type {Object}
* @private
*/
private table: { [key: string]: IDicitonaryPair<K, V> };
//: [key: K] will not work since indices can only be strings in JavaScript and TypeScript enforces this.
/**
* Number of elements in the list.
* @type {number}
* @private
*/
private nElements: number;
/**
* Function used to convert keys to strings.
* @type {function(Object):string}
* @private
*/
private toStr: (key: K) => string;
/**
* Creates an empty dictionary.
* @class <p>Dictionaries map keys to values; each key can map to at most one value.
* This implementation accepts any kind of objects as keys.</p>
*
* <p>If the keys are custom objects a function which converts keys to unique
* strings must be provided. Example:</p>
* <pre>
* function petToString(pet) {
* return pet.name;
* }
* </pre>
* @constructor
* @param {function(Object):string=} toStrFunction optional function used
* to convert keys to strings. If the keys aren't strings or if toString()
* is not appropriate, a custom function which receives a key and returns a
* unique string must be provided.
*/
constructor(toStrFunction?: (key: K) => string) {
this.table = {};
this.nElements = 0;
this.toStr = toStrFunction || collections.defaultToString;
}
/**
* Returns the value to which this dictionary maps the specified key.
* Returns undefined if this dictionary contains no mapping for this key.
* @param {Object} key key whose associated value is to be returned.
* @return {*} the value to which this dictionary maps the specified key or
* undefined if the map contains no mapping for this key.
*/
getValue(key: K): V {
var pair: IDicitonaryPair<K, V> = this.table[this.toStr(key)];
if (collections.isUndefined(pair)) {
return undefined;
}
return pair.value;
}
/**
* Associates the specified value with the specified key in this dictionary.
* If the dictionary previously contained a mapping for this key, the old
* value is replaced by the specified value.
* @param {Object} key key with which the specified value is to be
* associated.
* @param {Object} value value to be associated with the specified key.
* @return {*} previous value associated with the specified key, or undefined if
* there was no mapping for the key or if the key/value are undefined.
*/
setValue(key: K, value: V): V {
if (collections.isUndefined(key) || collections.isUndefined(value)) {
return undefined;
}
var ret;
var k = this.toStr(key);
var previousElement: IDicitonaryPair<K, V> = this.table[k];
if (collections.isUndefined(previousElement)) {
this.nElements++;
ret = undefined;
} else {
ret = previousElement.value;
}
this.table[k] = {
key: key,
value: value
};
return ret;
}
/**
* Removes the mapping for this key from this dictionary if it is present.
* @param {Object} key key whose mapping is to be removed from the
* dictionary.
* @return {*} previous value associated with specified key, or undefined if
* there was no mapping for key.
*/
remove(key: K): V {
var k = this.toStr(key);
var previousElement: IDicitonaryPair<K, V> = this.table[k];
if (!collections.isUndefined(previousElement)) {
delete this.table[k];
this.nElements--;
return previousElement.value;
}
return undefined;
}
/**
* Returns an array containing all of the keys in this dictionary.
* @return {Array} an array containing all of the keys in this dictionary.
*/
keys(): K[] {
var array: K[] = [];
for (var name in this.table) {
if (this.table.hasOwnProperty(name)) {
var pair: IDicitonaryPair<K, V> = this.table[name];
array.push(pair.key);
}
}
return array;
}
/**
* Returns an array containing all of the values in this dictionary.
* @return {Array} an array containing all of the values in this dictionary.
*/
values(): V[] {
var array: V[] = [];
for (var name in this.table) {
if (this.table.hasOwnProperty(name)) {
var pair: IDicitonaryPair<K, V> = this.table[name];
array.push(pair.value);
}
}
return array;
}
/**
* Executes the provided function once for each key-value pair
* present in this dictionary.
* @param {function(Object,Object):*} callback function to execute, it is
* invoked with two arguments: key and value. To break the iteration you can
* optionally return false.
*/
forEach(callback: (key: K, value: V) => any): void {
for (var name in this.table) {
if (this.table.hasOwnProperty(name)) {
var pair: IDicitonaryPair<K, V> = this.table[name];
var ret = callback(pair.key, pair.value);
if (ret === false) {
return;
}
}
}
}
/**
* Returns true if this dictionary contains a mapping for the specified key.
* @param {Object} key key whose presence in this dictionary is to be
* tested.
* @return {boolean} true if this dictionary contains a mapping for the
* specified key.
*/
containsKey(key: K): boolean {
return !collections.isUndefined(this.getValue(key));
}
/**
* Removes all mappings from this dictionary.
* @this {collections.Dictionary}
*/
clear() {
this.table = {};
this.nElements = 0;
}
/**
* Returns the number of keys in this dictionary.
* @return {number} the number of key-value mappings in this dictionary.
*/
length(): number {
return this.nElements;
}
/**
* Returns true if this dictionary contains no mappings.
* @return {boolean} true if this dictionary contains no mappings.
*/
isEmpty(): boolean {
return this.nElements <= 0;
}
toString(): string {
var toret = "{";
this.forEach((k, v) => {
toret = toret + "\n\t" + k.toString() + " : " + v.toString();
});
return toret + "\n}";
}
} // End of dictionary
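// Usage sketch (editor's illustration, not part of the original source): when keys are custom
// objects, supply a toString function so each key maps to a unique string, as the class doc
// describes.
//
//   var petToKey = (pet: { name: string }) => pet.name;
//   var ages = new Dictionary<{ name: string }, number>(petToKey);
//   ages.setValue({ name: "rex" }, 4);
//   ages.getValue({ name: "rex" });     // 4 - same name, same key string
//   ages.containsKey({ name: "fido" }); // false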
// /**
// * Returns true if this dictionary is equal to the given dictionary.
// * Two dictionaries are equal if they contain the same mappings.
// * @param {collections.Dictionary} other the other dictionary.
// * @param {function(Object,Object):boolean=} valuesEqualFunction optional
// * function used to check if two values are equal.
// * @return {boolean} true if this dictionary is equal to the given dictionary.
// */
// collections.Dictionary.prototype.equals = function(other,valuesEqualFunction) {
// var eqF = valuesEqualFunction || collections.defaultEquals;
// if(!(other instanceof collections.Dictionary)){
// return false;
// }
// if(this.length() !== other.length()){
// return false;
// }
// return this.equalsAux(this.firstNode,other.firstNode,eqF);
// }
export class MultiDictionary<K, V> {
// Cannot do:
// class MultiDictionary<K,V> extends Dictionary<K,Array<V>> {
// Since we want to reuse the function name setValue and types in signature become incompatible
// Therefore we are using composition instead of inheritance
private dict: Dictionary<K, Array<V>>;
private equalsF: IEqualsFunction<V>;
private allowDuplicate: boolean;
/**
* Creates an empty multi dictionary.
* @class <p>A multi dictionary is a special kind of dictionary that holds
* multiple values against each key. Setting a value into the dictionary will
* add the value to an array at that key. Getting a key will return an array,
* holding all the values set to that key.
* You can configure it to allow duplicate values.
* This implementation accepts any kind of objects as keys.</p>
*
* <p>If the keys are custom objects a function which converts keys to strings must be
* provided. Example:</p>
*
* <pre>
* function petToString(pet) {
* return pet.name;
* }
* </pre>
* <p>If the values are custom objects a function to check equality between values
* must be provided. Example:</p>
*
* <pre>
* function petsAreEqualByAge(pet1,pet2) {
* return pet1.age===pet2.age;
* }
* </pre>
* @constructor
* @param {function(Object):string=} toStrFunction optional function
* to convert keys to strings. If the keys aren't strings or if toString()
* is not appropriate, a custom function which receives a key and returns a
* unique string must be provided.
* @param {function(Object,Object):boolean=} valuesEqualsFunction optional
* function to check if two values are equal.
*
*/
constructor(toStrFunction?: (key: K) => string, valuesEqualsFunction?: IEqualsFunction<V>, allowDuplicateValues = false) {
this.dict = new Dictionary<K, Array<V>>(toStrFunction);
this.equalsF = valuesEqualsFunction || collections.defaultEquals;
this.allowDuplicate = allowDuplicateValues;
}
/**
* Returns an array holding the values to which this dictionary maps
* the specified key.
* Returns an empty array if this dictionary contains no mappings for this key.
* @param {Object} key key whose associated values are to be returned.
* @return {Array} an array holding the values to which this dictionary maps
* the specified key.
*/
getValue(key: K): V[] {
var values = this.dict.getValue(key);
if (collections.isUndefined(values)) {
return [];
}
return collections.arrays.copy(values);
}
/**
* Adds the value to the array associated with the specified key, if
* it is not already present.
* @param {Object} key key with which the specified value is to be
* associated.
* @param {Object} value the value to add to the array at the key
* @return {boolean} true if the value was not already associated with that key.
*/
setValue(key: K, value: V): boolean {
if (collections.isUndefined(key) || collections.isUndefined(value)) {
return false;
}
if (!this.containsKey(key)) {
this.dict.setValue(key, [value]);
return true;
}
var array = this.dict.getValue(key);
if (!this.allowDuplicate) {
if (collections.arrays.contains(array, value, this.equalsF)) {
return false;
}
}
array.push(value);
return true;
}
/**
* Removes the specified values from the array of values associated with the
* specified key. If a value isn't given, all values associated with the specified
* key are removed.
* @param {Object} key key whose mapping is to be removed from the
* dictionary.
* @param {Object=} value optional argument to specify the value to remove
* from the array associated with the specified key.
* @return {*} true if the dictionary changed, false if the key doesn't exist or
* if the specified value isn't associated with the specified key.
*/
remove(key: K, value?: V): boolean {
if (collections.isUndefined(value)) {
var v = this.dict.remove(key);
if (collections.isUndefined(v)) {
return false;
}
return true;
}
var array = this.dict.getValue(key);
if (collections.arrays.remove(array, value, this.equalsF)) {
if (array.length === 0) {
this.dict.remove(key);
}
return true;
}
return false;
}
/**
* Returns an array containing all of the keys in this dictionary.
* @return {Array} an array containing all of the keys in this dictionary.
*/
keys(): K[] {
return this.dict.keys();
}
/**
* Returns an array containing all of the values in this dictionary.
* @return {Array} an array containing all of the values in this dictionary.
*/
values(): V[] {
var values = this.dict.values();
var array = [];
for (var i = 0; i < values.length; i++) {
var v = values[i];
for (var j = 0; j < v.length; j++) {
array.push(v[j]);
}
}
return array;
}
/**
* Returns true if this dictionary has at least one value associated with the specified key.
* @param {Object} key key whose presence in this dictionary is to be
* tested.
* @return {boolean} true if this dictionary has at least one value associated with
* the specified key.
*/
containsKey(key: K): boolean {
return this.dict.containsKey(key);
}
/**
* Removes all mappings from this dictionary.
*/
clear(): void {
return this.dict.clear();
}
/**
* Returns the number of keys in this dictionary.
* @return {number} the number of key-value mappings in this dictionary.
*/
length(): number {
return this.dict.length();
}
/**
* Returns true if this dictionary contains no mappings.
* @return {boolean} true if this dictionary contains no mappings.
*/
isEmpty(): boolean {
return this.dict.isEmpty();
}
}// end of multi dictionary
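// Usage sketch (editor's illustration, not part of the original source): a MultiDictionary keeps
// an array of values per key; duplicates are rejected unless allowDuplicateValues is true.
//
//   var tags = new MultiDictionary<string, string>();
//   tags.setValue("fruit", "apple"); // true
//   tags.setValue("fruit", "pear");  // true
//   tags.setValue("fruit", "apple"); // false - duplicate value for this key
//   tags.getValue("fruit");          // ["apple", "pear"] (a copy of the stored array)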
export class Heap<T> {
/**
* Array used to store the elements of the heap.
* @type {Array.<Object>}
* @private
*/
private data: T[] = [];
/**
* Function used to compare elements.
* @type {function(Object,Object):number}
* @private
*/
private compare: ICompareFunction<T>;
/**
* Creates an empty Heap.
* @class
* <p>A heap is a binary tree, where the nodes maintain the heap property:
* each node is smaller than each of its children, making this a min-heap.
* This implementation uses an array to store elements.</p>
* <p>If the inserted elements are custom objects a compare function must be provided,
* at construction time, otherwise the <=, === and >= operators are
* used to compare elements. Example:</p>
*
* <pre>
* function compare(a, b) {
* if (a is less than b by some ordering criterion) {
* return -1;
* } if (a is greater than b by the ordering criterion) {
* return 1;
* }
* // a must be equal to b
* return 0;
* }
* </pre>
*
* <p>If a Max-Heap is wanted (greater elements on top) you can provide a
* reverse compare function to accomplish that behavior. Example:</p>
*
* <pre>
* function reverseCompare(a, b) {
* if (a is less than b by some ordering criterion) {
* return 1;
* } if (a is greater than b by the ordering criterion) {
* return -1;
* }
* // a must be equal to b
* return 0;
* }
* </pre>
*
* @constructor
* @param {function(Object,Object):number=} compareFunction optional
* function used to compare two elements. Must return a negative integer,
* zero, or a positive integer as the first argument is less than, equal to,
* or greater than the second.
*/
constructor(compareFunction?: ICompareFunction<T>) {
this.compare = compareFunction || collections.defaultCompare;
}
/**
* Returns the index of the left child of the node at the given index.
* @param {number} nodeIndex The index of the node to get the left child
* for.
* @return {number} The index of the left child.
* @private
*/
private leftChildIndex(nodeIndex: number): number {
return (2 * nodeIndex) + 1;
}
/**
* Returns the index of the right child of the node at the given index.
* @param {number} nodeIndex The index of the node to get the right child
* for.
* @return {number} The index of the right child.
* @private
*/
private rightChildIndex(nodeIndex: number): number {
return (2 * nodeIndex) + 2;
}
/**
* Returns the index of the parent of the node at the given index.
* @param {number} nodeIndex The index of the node to get the parent for.
* @return {number} The index of the parent.
* @private
*/
private parentIndex(nodeIndex: number): number {
return Math.floor((nodeIndex - 1) / 2);
}
/**
* Returns the index of the smaller child node (if it exists).
* @param {number} leftChild left child index.
* @param {number} rightChild right child index.
* @return {number} the index with the minimum value or -1 if it doesn't
* exist.
* @private
*/
private minIndex(leftChild: number, rightChild: number): number {
if (rightChild >= this.data.length) {
if (leftChild >= this.data.length) {
return -1;
} else {
return leftChild;
}
} else {
if (this.compare(this.data[leftChild], this.data[rightChild]) <= 0) {
return leftChild;
} else {
return rightChild;
}
}
}
/**
* Moves the node at the given index up to its proper place in the heap.
* @param {number} index The index of the node to move up.
* @private
*/
private siftUp(index: number): void {
var parent = this.parentIndex(index);
while (index > 0 && this.compare(this.data[parent], this.data[index]) > 0) {
collections.arrays.swap(this.data, parent, index);
index = parent;
parent = this.parentIndex(index);
}
}
/**
* Moves the node at the given index down to its proper place in the heap.
* @param {number} nodeIndex The index of the node to move down.
* @private
*/
private siftDown(nodeIndex: number): void {
//smaller child index
var min = this.minIndex(this.leftChildIndex(nodeIndex),
this.rightChildIndex(nodeIndex));
while (min >= 0 && this.compare(this.data[nodeIndex],
this.data[min]) > 0) {
collections.arrays.swap(this.data, min, nodeIndex);
nodeIndex = min;
min = this.minIndex(this.leftChildIndex(nodeIndex),
this.rightChildIndex(nodeIndex));
}
}
/**
* Retrieves but does not remove the root element of this heap.
* @return {*} The value at the root of the heap. Returns undefined if the
* heap is empty.
*/
peek(): T {
if (this.data.length > 0) {
return this.data[0];
} else {
return undefined;
}
}
/**
* Adds the given element into the heap.
* @param {*} element the element.
* @return true if the element was added or false if it is undefined.
*/
add(element: T): boolean {
if (collections.isUndefined(element)) {
return undefined;
}
this.data.push(element);
this.siftUp(this.data.length - 1);
return true;
}
/**
* Retrieves and removes the root element of this heap.
* @return {*} The value removed from the root of the heap. Returns
* undefined if the heap is empty.
*/
removeRoot(): T {
if (this.data.length > 0) {
var obj = this.data[0];
this.data[0] = this.data[this.data.length - 1];
this.data.splice(this.data.length - 1, 1);
if (this.data.length > 0) {
this.siftDown(0);
}
return obj;
}
return undefined;
}
/**
* Returns true if this heap contains the specified element.
* @param {Object} element element to search for.
* @return {boolean} true if this Heap contains the specified element, false
* otherwise.
*/
contains(element: T): boolean {
var equF = collections.compareToEquals(this.compare);
return collections.arrays.contains(this.data, element, equF);
}
/**
* Returns the number of elements in this heap.
* @return {number} the number of elements in this heap.
*/
length(): number {
return this.data.length;
}
/**
* Checks if this heap is empty.
* @return {boolean} true if and only if this heap contains no items; false
* otherwise.
*/
isEmpty(): boolean {
return this.data.length <= 0;
}
/**
* Removes all of the elements from this heap.
*/
clear(): void {
this.data.length = 0;
}
/**
* Executes the provided function once for each element present in this heap in
* no particular order.
* @param {function(Object):*} callback function to execute, it is
* invoked with one argument: the element value, to break the iteration you can
* optionally return false.
*/
forEach(callback: (item: T) => boolean) {
collections.arrays.forEach(this.data, callback);
}
}
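// Usage sketch (editor's illustration, not part of the original source): the default comparator
// yields a min-heap; pass a reversed comparator, as the class doc suggests, for max-heap behaviour.
//
//   var minHeap = new Heap<number>();
//   minHeap.add(5); minHeap.add(1); minHeap.add(3);
//   minHeap.peek();       // 1
//   minHeap.removeRoot(); // 1, after which peek() === 3
//
//   var maxHeap = new Heap<number>((a, b) => b - a);
//   maxHeap.add(5); maxHeap.add(1);
//   maxHeap.peek();       // 5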
export class Stack<T> {
/**
* List containing the elements.
* @type collections.LinkedList
* @private
*/
private list: LinkedList<T>;
/**
* Creates an empty Stack.
* @class A Stack is a Last-In-First-Out (LIFO) data structure, the last
* element added to the stack will be the first one to be removed. This
* implementation uses a linked list as a container.
* @constructor
*/
constructor() {
this.list = new LinkedList<T>();
}
/**
* Pushes an item onto the top of this stack.
* @param {Object} elem the element to be pushed onto this stack.
* @return {boolean} true if the element was pushed or false if it is undefined.
*/
push(elem: T) {
return this.list.add(elem, 0);
}
/**
* Pushes an item onto the top of this stack.
* @param {Object} elem the element to be pushed onto this stack.
* @return {boolean} true if the element was pushed or false if it is undefined.
*/
add(elem: T) {
return this.list.add(elem, 0);
}
/**
* Removes the object at the top of this stack and returns that object.
* @return {*} the object at the top of this stack or undefined if the
* stack is empty.
*/
pop(): T {
return this.list.removeElementAtIndex(0);
}
/**
* Looks at the object at the top of this stack without removing it from the
* stack.
* @return {*} the object at the top of this stack or undefined if the
* stack is empty.
*/
peek(): T {
return this.list.first();
}
/**
* Returns the number of elements in this stack.
* @return {number} the number of elements in this stack.
*/
length(): number {
return this.list.length();
}
/**
* Returns true if this stack contains the specified element.
* <p>If the elements inside this stack are
* not comparable with the === operator, a custom equals function should be
* provided to perform searches, the function must receive two arguments and
* return true if they are equal, false otherwise. Example:</p>
*
* <pre>
* var petsAreEqualByName = function(pet1, pet2) {
* return pet1.name === pet2.name;
* }
* </pre>
* @param {Object} elem element to search for.
* @param {function(Object,Object):boolean=} equalsFunction optional
* function to check if two elements are equal.
* @return {boolean} true if this stack contains the specified element,
* false otherwise.
*/
contains(elem: T, equalsFunction?: IEqualsFunction<T>) {
return this.list.contains(elem, equalsFunction);
}
/**
* Checks if this stack is empty.
* @return {boolean} true if and only if this stack contains no items; false
* otherwise.
*/
isEmpty(): boolean {
return this.list.isEmpty();
}
/**
* Removes all of the elements from this stack.
*/
clear(): void {
this.list.clear();
}
/**
* Executes the provided function once for each element present in this stack in
* LIFO order.
* @param {function(Object):*} callback function to execute, it is
* invoked with one argument: the element value, to break the iteration you can
* optionally return false.
*/
forEach(callback: ILoopFunction<T>) {
this.list.forEach(callback);
}
} // End of stack
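// Usage sketch (editor's illustration, not part of the original source): LIFO behaviour backed by
// the LinkedList above (elements are added and removed at index 0).
//
//   var stack = new Stack<number>();
//   stack.push(1);
//   stack.push(2);
//   stack.peek(); // 2
//   stack.pop();  // 2 - the last element pushed is the first removed
//   stack.pop();  // 1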
export class Queue<T>{
/**
* List containing the elements.
* @type collections.LinkedList
* @private
*/
private list: LinkedList<T>;
/**
* Creates an empty queue.
* @class A queue is a First-In-First-Out (FIFO) data structure, the first
* element added to the queue will be the first one to be removed. This
* implementation uses a linked list as a container.
* @constructor
*/
constructor() {
this.list = new LinkedList<T>();
}
/**
* Inserts the specified element into the end of this queue.
* @param {Object} elem the element to insert.
* @return {boolean} true if the element was inserted, or false if it is undefined.
*/
enqueue(elem: T): boolean {
return this.list.add(elem);
}
/**
* Inserts the specified element into the end of this queue.
* @param {Object} elem the element to insert.
* @return {boolean} true if the element was inserted, or false if it is undefined.
*/
add(elem: T): boolean {
return this.list.add(elem);
}
/**
* Retrieves and removes the head of this queue.
* @return {*} the head of this queue, or undefined if this queue is empty.
*/
dequeue(): T {
if (this.list.length() !== 0) {
var el = this.list.first();
this.list.removeElementAtIndex(0);
return el;
}
return undefined;
}
/**
* Retrieves, but does not remove, the head of this queue.
* @return {*} the head of this queue, or undefined if this queue is empty.
*/
peek(): T {
if (this.list.length() !== 0) {
return this.list.first();
}
return undefined;
}
/**
* Returns the number of elements in this queue.
* @return {number} the number of elements in this queue.
*/
length(): number {
return this.list.length();
}
/**
* Returns true if this queue contains the specified element.
* <p>If the elements inside this stack are
* not comparable with the === operator, a custom equals function should be
* provided to perform searches, the function must receive two arguments and
* return true if they are equal, false otherwise. Example:</p>
*
* <pre>
* var petsAreEqualByName = function(pet1, pet2) {
* return pet1.name === pet2.name;
* }
* </pre>
* @param {Object} elem element to search for.
* @param {function(Object,Object):boolean=} equalsFunction optional
* function to check if two elements are equal.
* @return {boolean} true if this queue contains the specified element,
* false otherwise.
*/
contains(elem: T, equalsFunction?: IEqualsFunction<T>): boolean {
return this.list.contains(elem, equalsFunction);
}
/**
* Checks if this queue is empty.
* @return {boolean} true if and only if this queue contains no items; false
* otherwise.
*/
isEmpty(): boolean {
return this.list.length() <= 0;
}
/**
* Removes all of the elements from this queue.
*/
clear(): void {
this.list.clear();
}
/**
* Executes the provided function once for each element present in this queue in
* FIFO order.
* @param {function(Object):*} callback function to execute, it is
* invoked with one argument: the element value, to break the iteration you can
* optionally return false.
*/
forEach(callback: ILoopFunction<T>) {
this.list.forEach(callback);
}
} // End of queue
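// Usage sketch (editor's illustration, not part of the original source): FIFO behaviour - enqueue
// appends to the tail of the underlying LinkedList, dequeue removes from the head.
//
//   var queue = new Queue<string>();
//   queue.enqueue("first");
//   queue.enqueue("second");
//   queue.peek();    // "first"
//   queue.dequeue(); // "first" - the earliest enqueued element leaves first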
export class PriorityQueue<T> {
private heap: Heap<T>;
/**
* Creates an empty priority queue.
* @class <p>In a priority queue each element is associated with a "priority",
* elements are dequeued in highest-priority-first order (the elements with the
* highest priority are dequeued first). Priority Queues are implemented as heaps.
* If the inserted elements are custom objects a compare function must be provided,
* otherwise the <=, === and >= operators are used to compare object priority.</p>
* <pre>
* function compare(a, b) {
* if (a is less than b by some ordering criterion) {
* return -1;
* } if (a is greater than b by the ordering criterion) {
* return 1;
* }
* // a must be equal to b
* return 0;
* }
* </pre>
* @constructor
* @param {function(Object,Object):number=} compareFunction optional
* function used to compare two element priorities. Must return a negative integer,
* zero, or a positive integer as the first argument is less than, equal to,
* or greater than the second.
*/
constructor(compareFunction?: ICompareFunction<T>) {
this.heap = new Heap<T>(collections.reverseCompareFunction(compareFunction));
}
/**
* Inserts the specified element into this priority queue.
* @param {Object} element the element to insert.
* @return {boolean} true if the element was inserted, or false if it is undefined.
*/
enqueue(element: T): boolean {
return this.heap.add(element);
}
/**
* Inserts the specified element into this priority queue.
* @param {Object} element the element to insert.
* @return {boolean} true if the element was inserted, or false if it is undefined.
*/
add(element: T): boolean {
return this.heap.add(element);
}
/**
* Retrieves and removes the highest priority element of this queue.
* @return {*} the highest priority element of this queue,
* or undefined if this queue is empty.
*/
dequeue(): T {
if (this.heap.length() !== 0) {
var el = this.heap.peek();
this.heap.removeRoot();
return el;
}
return undefined;
}
/**
* Retrieves, but does not remove, the highest priority element of this queue.
* @return {*} the highest priority element of this queue, or undefined if this queue is empty.
*/
peek(): T {
return this.heap.peek();
}
/**
* Returns true if this priority queue contains the specified element.
* @param {Object} element element to search for.
* @return {boolean} true if this priority queue contains the specified element,
* false otherwise.
*/
contains(element: T): boolean {
return this.heap.contains(element);
}
/**
* Checks if this priority queue is empty.
* @return {boolean} true if and only if this priority queue contains no items; false
* otherwise.
*/
isEmpty(): boolean {
return this.heap.isEmpty();
}
/**
* Returns the number of elements in this priority queue.
* @return {number} the number of elements in this priority queue.
*/
length(): number {
return this.heap.length();
}
/**
* Removes all of the elements from this priority queue.
*/
clear(): void {
this.heap.clear();
}
/**
* Executes the provided function once for each element present in this queue in
* no particular order.
* @param {function(Object):*} callback function to execute, it is
* invoked with one argument: the element value, to break the iteration you can
* optionally return false.
*/
forEach(callback: ILoopFunction<T>) {
this.heap.forEach(callback);
}
} // end of priority queue
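// Usage sketch (editor's illustration, not part of the original source): with the default
// comparator the largest element is dequeued first, because the constructor reverses the compare
// function before handing it to the internal Heap.
//
//   var pq = new PriorityQueue<number>();
//   pq.enqueue(3); pq.enqueue(10); pq.enqueue(7);
//   pq.dequeue(); // 10
//   pq.dequeue(); // 7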
export class Set<T>{
private dictionary: Dictionary<T, any>;
public onChanged: () => void;
/**
* Creates an empty set.
* @class <p>A set is a data structure that contains no duplicate items.</p>
* <p>If the inserted elements are custom objects a function
* which converts elements to strings must be provided. Example:</p>
*
* <pre>
* function petToString(pet) {
* return pet.name;
* }
* </pre>
*
* @constructor
* @param {function(Object):string=} toStringFunction optional function used
* to convert elements to strings. If the elements aren't strings or if toString()
* is not appropriate, a custom function which receives an object and returns a
* unique string must be provided.
*/
constructor(toStringFunction?: (item: T) => string) {
this.dictionary = new Dictionary<T, any>(toStringFunction);
}
/**
* Returns true if this set contains the specified element.
* @param {Object} element element to search for.
* @return {boolean} true if this set contains the specified element,
* false otherwise.
*/
contains(element: T): boolean {
return this.dictionary.containsKey(element);
}
/**
* Adds the specified element to this set if it is not already present.
* @param {Object} element the element to insert.
* @return {boolean} true if this set did not already contain the specified element.
*/
add(element: T): boolean {
if (element instanceof TCollectionItem) (<any>element).__ownerCollection = this;
if (this.contains(element) || collections.isUndefined(element)) {
return false;
} else {
this.dictionary.setValue(element, element);
if (this.onChanged) this.onChanged();
return true;
}
}
/**
* Performs an intersection between this and another set.
* Removes all values that are not present in both this set and the given set.
* @param {collections.Set} otherSet other set.
*/
intersection(otherSet: Set<T>): void {
var set = this;
this.forEach(function (element: T): boolean {
if (!otherSet.contains(element)) {
set.remove(element);
}
return;
});
}
/**
* Performs a union between this and another set.
* Adds all values from the given set to this set.
* @param {collections.Set} otherSet other set.
*/
union(otherSet: Set<T>): void {
var set = this;
otherSet.forEach(function (element: T): boolean {
set.add(element);
return;
});
}
/**
* Performs a difference between this and another set.
* Removes from this set all the values that are present in the given set.
* @param {collections.Set} otherSet other set.
*/
difference(otherSet: Set<T>): void {
var set = this;
otherSet.forEach(function (element: T): boolean {
set.remove(element);
return;
});
}
/**
* Checks whether the given set contains all the elements in this set.
* @param {collections.Set} otherSet other set.
* @return {boolean} true if this set is a subset of the given set.
*/
isSubsetOf(otherSet: Set<T>): boolean {
if (this.length() > otherSet.length()) {
return false;
}
var isSub = true;
this.forEach(function (element) {
if (!otherSet.contains(element)) {
isSub = false;
return false;
}
});
return isSub;
}
/**
* Removes the specified element from this set if it is present.
* @return {boolean} true if this set contained the specified element.
*/
remove(element: T): boolean {
if (!this.contains(element)) {
return false;
} else {
this.dictionary.remove(element);
if (this.onChanged) this.onChanged();
return true;
}
}
/**
* Executes the provided function once for each element
* present in this set.
* @param {function(Object):*} callback function to execute, it is
* invoked with one arguments: the element. To break the iteration you can
* optionally return false.
*/
forEach(callback: (item: T) => any): void {
this.dictionary.forEach(function (k, v) {
if (callback(v) == false) return false;
return true;
});
}
private removeOwnerCollections(data) {
for (var key in data) {
var item = data[key];
// see if this item is an array
if (key == "__ownerCollection") {
delete data[key];
} else if (typeof item == "object") {
this.removeOwnerCollections(item);
}
}
}
/**
     * Returns the element at the specified index, following the set's internal value order.
*/
get(index : number): T {
var rc = this.dictionary.values()[index];
return rc;
}
/**
* Returns an array containing all of the elements in this set in arbitrary order.
* @return {Array} an array containing all of the elements in this set.
*/
toArray(): T[] {
var rc = this.dictionary.values();
//this.removeOwnerCollections(rc);
return rc;
}
/**
* Returns true if this set contains no elements.
* @return {boolean} true if this set contains no elements.
*/
isEmpty(): boolean {
return this.dictionary.isEmpty();
}
/**
* Returns the number of elements in this set.
* @return {number} the number of elements in this set.
*/
length(): number {
return this.dictionary.length();
}
/**
* Removes all of the elements from this set.
*/
clear(): void {
this.dictionary.clear();
if (this.onChanged) this.onChanged();
}
/*
* Provides a string representation for display
*/
toString(): string {
return collections.arrays.toString(this.toArray());
}
}// end of Set
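    // --- Illustrative usage sketch (not part of the original library) ---
    // Shows how Set<T> is constructed with a custom toString function and how
    // union/intersection/difference/isSubsetOf combine. The pet shape, the
    // petToString helper and the sample values are hypothetical.
    function exampleSetUsage(): void {
        var petToString = function (pet: { name: string }): string { return pet.name; };
        var a = new Set<{ name: string }>(petToString);
        var b = new Set<{ name: string }>(petToString);
        a.add({ name: "rex" });
        a.add({ name: "mimi" });
        b.add({ name: "mimi" });
        b.union(a);                    // adds every element of a to b
        b.intersection(a);             // keeps in b only the elements also present in a
        b.difference(a);               // removes from b every element present in a
        var isSub = a.isSubsetOf(b);   // true only if every element of a is in b
    }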
export class Bag<T>{
private toStrF: (item: T) => string;
private dictionary: Dictionary<T, any>;
private nElements: number;
/**
* Creates an empty bag.
* @class <p>A bag is a special kind of set in which members are
* allowed to appear more than once.</p>
* <p>If the inserted elements are custom objects a function
* which converts elements to unique strings must be provided. Example:</p>
*
* <pre>
* function petToString(pet) {
* return pet.name;
* }
* </pre>
*
* @constructor
* @param {function(Object):string=} toStrFunction optional function used
* to convert elements to strings. If the elements aren't strings or if toString()
* is not appropriate, a custom function which receives an object and returns a
* unique string must be provided.
*/
constructor(toStrFunction?: (item: T) => string) {
this.toStrF = toStrFunction || collections.defaultToString;
this.dictionary = new Dictionary<T, any>(this.toStrF);
this.nElements = 0;
}
/**
* Adds nCopies of the specified object to this bag.
* @param {Object} element element to add.
* @param {number=} nCopies the number of copies to add, if this argument is
* undefined 1 copy is added.
* @return {boolean} true unless element is undefined.
*/
add(element: T, nCopies: number= 1): boolean {
if (collections.isUndefined(element) || nCopies <= 0) {
return false;
}
if (element instanceof TCollectionItem) (<any>element).__ownerCollection = this;
if (!this.contains(element)) {
var node = {
value: element,
copies: nCopies
};
this.dictionary.setValue(element, node);
} else {
this.dictionary.getValue(element).copies += nCopies;
}
this.nElements += nCopies;
return true;
}
/**
* Returns true if this bag contains the specified element.
* @param {Object} element element to search for.
* @return {boolean} true if this bag contains the specified element,
* false otherwise.
*/
contains(element: T): boolean {
return this.dictionary.containsKey(element);
}
/**
     * Removes nCopies of the specified object from this bag.
* If the number of copies to remove is greater than the actual number
* of copies in the Bag, all copies are removed.
* @param {Object} element element to remove.
* @param {number=} nCopies the number of copies to remove, if this argument is
* undefined 1 copy is removed.
* @return {boolean} true if at least 1 element was removed.
*/
remove(element: T, nCopies: number = 1) {
if (collections.isUndefined(element) || nCopies <= 0) {
return false;
}
if (!this.contains(element)) {
return false;
} else {
var node = this.dictionary.getValue(element);
if (nCopies > node.copies) {
this.nElements -= node.copies;
} else {
this.nElements -= nCopies;
}
node.copies -= nCopies;
if (node.copies <= 0) {
this.dictionary.remove(element);
}
return true;
}
}
/**
     * Returns an array containing all of the elements in this bag in arbitrary order,
* including multiple copies.
* @return {Array} an array containing all of the elements in this bag.
*/
toArray(): T[] {
var a = [];
var values = this.dictionary.values();
var vl = values.length;
for (var i = 0; i < vl; i++) {
var node = values[i];
var element: any = clone(node.value);
            if (element.hasOwnProperty("__ownerCollection") && element.__ownerCollection === this) {
                delete element.__ownerCollection;
}
var copies = node.copies;
for (var j = 0; j < copies; j++) {
a.push(element);
}
}
return a;
}
/**
* Returns a set of unique elements in this bag.
* @return {collections.Set<T>} a set of unique elements in this bag.
*/
toSet(): Set<T> {
var toret = new Set<T>(this.toStrF);
var elements = this.dictionary.values();
var l = elements.length;
for (var i = 0; i < l; i++) {
var value = elements[i].value;
toret.add(value);
}
return toret;
}
/**
* Executes the provided function once for each element
* present in this bag, including multiple copies.
* @param {function(Object):*} callback function to execute, it is
* invoked with one argument: the element. To break the iteration you can
* optionally return false.
*/
forEach(callback: (item: T) => any) {
this.dictionary.forEach(function (k, v) {
var value = v.value;
var copies = v.copies;
for (var i = 0; i < copies; i++) {
if (callback(value) === false) {
return false;
}
}
return true;
});
}
/**
* Returns the number of elements in this bag.
* @return {number} the number of elements in this bag.
*/
length(): number {
return this.nElements;
}
/**
* Returns true if this bag contains no elements.
* @return {boolean} true if this bag contains no elements.
*/
isEmpty(): boolean {
return this.nElements === 0;
}
/**
* Removes all of the elements from this bag.
*/
clear(): void {
this.nElements = 0;
this.dictionary.clear();
}
}// End of bag
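    // --- Illustrative usage sketch (not part of the original library) ---
    // Shows how Bag<T> counts copies: add/remove accept an optional nCopies,
    // length() counts every copy, and toSet() collapses duplicates. The string
    // values are hypothetical.
    function exampleBagUsage(): void {
        var bag = new Bag<string>();
        bag.add("apple", 3);           // three copies of "apple"
        bag.add("pear");               // one copy of "pear"
        var total = bag.length();      // 4 -- every copy is counted
        bag.remove("apple", 2);        // one copy of "apple" is left
        var unique = bag.toSet();      // Set containing "apple" and "pear" once each
        var copies = bag.toArray();    // every remaining copy, in arbitrary order
    }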
}// End of module
export class TCollection<T> extends collections.Set<T>{
public locked: boolean = false;
/**
     * Signals the start of an update operation.
     * Call BeginUpdate before starting an operation that performs changes to the collection. After completing all the changes, call EndUpdate to signal the end of the operation.
*/
public BeginUpdate() {
this.locked = true;
}
/**
     * Signals the end of an update operation.
     * Call EndUpdate after completing an operation that was preceded by a call to the BeginUpdate method.
*/
public EndUpdate() {
this.locked = false;
}
public refresh() {
}
public FindItemByID(ID: string): T {
var rc: T = null;
this.forEach(function (item: T) {
if ((<any>item).ID == ID) {
rc = item;
return false;
}
return true;
});
return rc;
}
}
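// --- Illustrative usage sketch (not part of the original file) ---
// Shows the intended BeginUpdate/EndUpdate bracketing around a batch of changes
// and a FindItemByID lookup. The element shape and the ID values are
// hypothetical; FindItemByID assumes elements expose an ID property.
function exampleTCollectionUsage(): void {
    var col = new TCollection<{ ID: string; name: string }>(function (x) { return x.ID; });
    col.BeginUpdate();                  // mark the start of a batched update
    col.add({ ID: "1", name: "first" });
    col.add({ ID: "2", name: "second" });
    col.EndUpdate();                    // mark the end of the batched update
    var second = col.FindItemByID("2"); // null when no element has that ID
}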
export class TList<T> extends collections.LinkedList<T>{
public locked: boolean = false;
/**
     * Signals the start of an update operation.
     * Call BeginUpdate before starting an operation that performs changes to the list. After completing all the changes, call EndUpdate to signal the end of the operation.
*/
public BeginUpdate() {
this.locked = true;
}
/**
     * Signals the end of an update operation.
     * Call EndUpdate after completing an operation that was preceded by a call to the BeginUpdate method.
*/
public EndUpdate() {
this.locked = false;
}
public refresh() {
}
public FindItemByID(ID: string): T {
var rc: T = null;
this.forEach(function (item: T) {
if ((<any>item).ID == ID) {
rc = item;
return false;
}
return true;
});
return rc;
}
}
function clone(obj) {
var target = {};
for (var i in obj) {
if (obj.hasOwnProperty(i)) {
target[i] = obj[i];
}
}
return target;
}<|fim▁end|>
|
* @function
*/
export function isFunction(func): boolean {
|
<|file_name|>mode-tex.js<|end_file_name|><|fim▁begin|>ace.define("ace/mode/tex_highlight_rules",["require","exports","module","ace/lib/oop","ace/lib/lang","ace/mode/text_highlight_rules"], function(require, exports, module) {
"use strict";
var oop = require("../lib/oop");
var lang = require("../lib/lang");
var TextHighlightRules = require("./text_highlight_rules").TextHighlightRules;
var TexHighlightRules = function(textClass) {
if (!textClass)
textClass = "text";
this.$rules = {
"start" : [
{
token : "comment",
regex : "%.*$"
}, {
token : textClass, // non-command
regex : "\\\\[$&%#\\{\\}]"
}, {
token : "keyword", // command
regex : "\\\\(?:documentclass|usepackage|newcounter|setcounter|addtocounter|value|arabic|stepcounter|newenvironment|renewenvironment|ref|vref|eqref|pageref|label|cite[a-zA-Z]*|tag|begin|end|bibitem)\\b",
next : "nospell"
}, {
token : "keyword", // command
regex : "\\\\(?:[a-zA-Z0-9]+|[^a-zA-Z0-9])"
}, {
token : "paren.keyword.operator",
regex : "[[({]"
}, {
token : "paren.keyword.operator",
regex : "[\\])}]"
}, {
token : textClass,
regex : "\\s+"
}
],
"nospell" : [
{
token : "comment",
regex : "%.*$",
next : "start"
}, {
token : "nospell." + textClass, // non-command
regex : "\\\\[$&%#\\{\\}]"
}, {
token : "keyword", // command
regex : "\\\\(?:documentclass|usepackage|newcounter|setcounter|addtocounter|value|arabic|stepcounter|newenvironment|renewenvironment|ref|vref|eqref|pageref|label|cite[a-zA-Z]*|tag|begin|end|bibitem)\\b"
}, {
token : "keyword", // command
regex : "\\\\(?:[a-zA-Z0-9]+|[^a-zA-Z0-9])",
next : "start"
}, {
token : "paren.keyword.operator",
regex : "[[({]"
}, {
token : "paren.keyword.operator",
regex : "[\\])]"
}, {
token : "paren.keyword.operator",
regex : "}",
next : "start"
}, {
token : "nospell." + textClass,
regex : "\\s+"
}, {
token : "nospell." + textClass,
regex : "\\w+"
}
]
};
};
oop.inherits(TexHighlightRules, TextHighlightRules);
exports.TexHighlightRules = TexHighlightRules;
});
ace.define("ace/mode/matching_brace_outdent",["require","exports","module","ace/range"], function(require, exports, module) {
"use strict";
var Range = require("../range").Range;
var MatchingBraceOutdent = function() {};
(function() {
this.checkOutdent = function(line, input) {
if (! /^\s+$/.test(line))
return false;
return /^\s*\}/.test(input);
};
this.autoOutdent = function(doc, row) {
var line = doc.getLine(row);
var match = line.match(/^(\s*\})/);
if (!match) return 0;
var column = match[1].length;
var openBracePos = doc.findMatchingBracket({row: row, column: column});
if (!openBracePos || openBracePos.row == row) return 0;
var indent = this.$getIndent(doc.getLine(openBracePos.row));
doc.replace(new Range(row, 0, row, column-1), indent);
};
this.$getIndent = function(line) {
return line.match(/^\s*/)[0];
};
}).call(MatchingBraceOutdent.prototype);
exports.MatchingBraceOutdent = MatchingBraceOutdent;
});
ace.define("ace/mode/tex",["require","exports","module","ace/lib/oop","ace/mode/text","ace/mode/text_highlight_rules","ace/mode/tex_highlight_rules","ace/mode/matching_brace_outdent"], function(require, exports, module) {
"use strict";
var oop = require("../lib/oop");
var TextMode = require("./text").Mode;
var TextHighlightRules = require("./text_highlight_rules").TextHighlightRules;
var TexHighlightRules = require("./tex_highlight_rules").TexHighlightRules;
var MatchingBraceOutdent = require("./matching_brace_outdent").MatchingBraceOutdent;
<|fim▁hole|> if (suppressHighlighting)
this.HighlightRules = TextHighlightRules;
else
this.HighlightRules = TexHighlightRules;
this.$outdent = new MatchingBraceOutdent();
this.$behaviour = this.$defaultBehaviour;
};
oop.inherits(Mode, TextMode);
(function() {
this.lineCommentStart = "%";
this.getNextLineIndent = function(state, line, tab) {
return this.$getIndent(line);
};
this.allowAutoInsert = function() {
return false;
};
this.$id = "ace/mode/tex";
}).call(Mode.prototype);
exports.Mode = Mode;
});
(function() {
ace.require(["ace/mode/tex"], function(m) {
if (typeof module == "object" && typeof exports == "object" && module) {
module.exports = m;
}
});
})();<|fim▁end|>
|
var Mode = function(suppressHighlighting) {
|
<|file_name|>issue-6458-2.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT<|fim▁hole|>// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate debug;
fn main() {
// Unconstrained type:
format!("{:?}", None);
//~^ ERROR type annotations required
}<|fim▁end|>
|
// file at the top-level directory of this distribution and at
|
<|file_name|>ModelDisplay.java<|end_file_name|><|fim▁begin|>import java.awt.Graphics;
/**
* Interface to used to communicate between a model
* and its display
*
* Copyright Georgia Institute of Technology 2004
* @author Barb Ericson [email protected]
*/
public interface ModelDisplay
{
  /** method to notify the display that
   *  the model has changed */
public void modelChanged();
/** method to add the model to the world
* @param model the model object to add */
public void addModel(Object model);
/**
* Method to remove the model from the world
* @param model the model object to remove */
public void remove(Object model);
<|fim▁hole|> * for this model display
* @return the graphics context
*/
public Graphics getGraphics();
/**
* Method to clear the background
*/
public void clearBackground();
/** Method to get the width of the display
* @return the width in pixels of the display
*/
public int getWidth();
/** Method to get the height of the display
* @return the height in pixels of the display
*/
public int getHeight();
}<|fim▁end|>
|
/**
* Method that returns the graphics context
|
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>// instanciando módulos
var gulp = require('gulp');
var watch = require('gulp-watch');
var del = require('del');
var shell = require('gulp-shell');
var connect = require('gulp-connect');
gulp.task('run:pelican', shell.task([
'pelican content -s pelicanconf.py'
]));
gulp.task('clean:pelican', function () {
return del([
'output/*'
]);
});
<|fim▁hole|> livereload: true
});
});
gulp.task('reload:output', function () {
connect.reload();
});
gulp.task('watch', function(){
// watch('content/*', gulp.series('run:pelican', 'reload:output'));
// watch('*', gulp.series('run:pelican', 'reload:output'));
// watch('theme/*', gulp.series('run:pelican', 'reload:output'));
// watch('theme/templates/*', gulp.series('run:pelican', 'reload:output'));
// watch('theme/static/css/*', gulp.series('run:pelican', 'reload:output'));
// watch('theme/static/js/*', gulp.series('run:pelican', 'reload:output'));
})
gulp.task('serve', gulp.parallel('connect','run:pelican', 'watch'));<|fim▁end|>
|
gulp.task('connect', function(){
connect.server({
root: ['output'],
port: 1337,
|
<|file_name|>parallel.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Implements parallel traversals over the DOM and flow trees.
//!
//! This code is highly unsafe. Keep this file small and easy to audit.
#![allow(unsafe_code)]
use context::{LayoutContext, SharedLayoutContext};
use flow::{self, Flow, MutableFlowUtils, PostorderFlowTraversal, PreorderFlowTraversal};
use flow_ref::{self, FlowRef};
use profile_traits::time::{self, TimerMetadata, profile};
use std::mem;
use std::sync::atomic::{AtomicIsize, Ordering};
use style::dom::{TNode, UnsafeNode};
use style::parallel::{CHUNK_SIZE, WorkQueueData};
use style::parallel::{run_queue_with_custom_work_data_type};
use traversal::AssignBSizes;
use traversal::{AssignISizes, BubbleISizes, PostorderNodeMutTraversal};
use util::opts;
use util::workqueue::{WorkQueue, WorkUnit, WorkerProxy};
pub use style::parallel::traverse_dom;
#[allow(dead_code)]
fn static_assertion(node: UnsafeNode) {
unsafe {
let _: UnsafeFlow = ::std::intrinsics::transmute(node);
}
}
/// Vtable + pointer representation of a Flow trait object.
pub type UnsafeFlow = (usize, usize);
fn null_unsafe_flow() -> UnsafeFlow {
(0, 0)
}
pub fn mut_owned_flow_to_unsafe_flow(flow: *mut FlowRef) -> UnsafeFlow {
unsafe {
mem::transmute::<&Flow, UnsafeFlow>(&**flow)
}
}
pub fn borrowed_flow_to_unsafe_flow(flow: &Flow) -> UnsafeFlow {
unsafe {
mem::transmute::<&Flow, UnsafeFlow>(flow)
}
}
pub type UnsafeFlowList = (Box<Vec<UnsafeNode>>, usize);
pub type ChunkedFlowTraversalFunction =
extern "Rust" fn(UnsafeFlowList, &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>);
pub type FlowTraversalFunction = extern "Rust" fn(UnsafeFlow, &SharedLayoutContext);
/// Information that we need stored in each flow.
pub struct FlowParallelInfo {
/// The number of children that still need work done.
pub children_count: AtomicIsize,
/// The address of the parent flow.
pub parent: UnsafeFlow,
}
impl FlowParallelInfo {
pub fn new() -> FlowParallelInfo {
FlowParallelInfo {
children_count: AtomicIsize::new(0),
parent: null_unsafe_flow(),
}
}
}
/// A parallel bottom-up flow traversal.
trait ParallelPostorderFlowTraversal : PostorderFlowTraversal {
/// Process current flow and potentially traverse its ancestors.
///
/// If we are the last child that finished processing, recursively process
/// our parent. Else, stop. Also, stop at the root.
///
/// Thus, if we start with all the leaves of a tree, we end up traversing
/// the whole tree bottom-up because each parent will be processed exactly
/// once (by the last child that finishes processing).
///
/// The only communication between siblings is that they both
/// fetch-and-subtract the parent's children count.
fn run_parallel(&self, mut unsafe_flow: UnsafeFlow) {
loop {
// Get a real flow.
let flow: &mut Flow = unsafe {
mem::transmute(unsafe_flow)
};
// Perform the appropriate traversal.
if self.should_process(flow) {
self.process(flow);
}
let base = flow::mut_base(flow);
// Reset the count of children for the next layout traversal.
base.parallel.children_count.store(base.children.len() as isize,
Ordering::Relaxed);
// Possibly enqueue the parent.
let unsafe_parent = base.parallel.parent;
if unsafe_parent == null_unsafe_flow() {
// We're done!
break
}
// No, we're not at the root yet. Then are we the last child
// of our parent to finish processing? If so, we can continue
// on with our parent; otherwise, we've gotta wait.
let parent: &mut Flow = unsafe {
mem::transmute(unsafe_parent)
};
let parent_base = flow::mut_base(parent);
if parent_base.parallel.children_count.fetch_sub(1, Ordering::Relaxed) == 1 {
// We were the last child of our parent. Reflow our parent.
unsafe_flow = unsafe_parent
} else {
// Stop.
break
}
}
}
}
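/// Illustrative sketch (not Servo's real flow types): the same bottom-up
/// hand-off expressed over a plain arena of nodes. Each finishing child does a
/// `fetch_sub(1)` on its parent's counter; only the child that observes the
/// previous value `1` keeps walking upward, so every parent is processed
/// exactly once. `ExampleNode` and `example_finish_bottom_up` are hypothetical.
#[allow(dead_code)]
struct ExampleNode {
    pending_children: AtomicIsize,
    parent: Option<usize>, // index of the parent in the arena; None for the root
}
#[allow(dead_code)]
fn example_finish_bottom_up(nodes: &[ExampleNode], mut index: usize) {
    loop {
        // ... process nodes[index] here ...
        match nodes[index].parent {
            None => break, // reached the root
            Some(parent_index) => {
                // fetch_sub returns the previous value; 1 means we were the
                // last outstanding child, so we continue with the parent.
                if nodes[parent_index].pending_children.fetch_sub(1, Ordering::Relaxed) == 1 {
                    index = parent_index;
                } else {
                    break;
                }
            }
        }
    }
}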
/// A parallel top-down flow traversal.
trait ParallelPreorderFlowTraversal : PreorderFlowTraversal {
fn run_parallel(&self,
unsafe_flows: UnsafeFlowList,
proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>);
fn should_record_thread_ids(&self) -> bool;
#[inline(always)]
fn run_parallel_helper(&self,
unsafe_flows: UnsafeFlowList,
proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>,
top_down_func: ChunkedFlowTraversalFunction,
bottom_up_func: FlowTraversalFunction) {
let mut discovered_child_flows = Vec::new();
for unsafe_flow in *unsafe_flows.0 {
let mut had_children = false;
unsafe {
// Get a real flow.
let flow: &mut Flow = mem::transmute(unsafe_flow);
if self.should_record_thread_ids() {
flow::mut_base(flow).thread_id = proxy.worker_index();
}
if self.should_process(flow) {
// Perform the appropriate traversal.
self.process(flow);
}
// Possibly enqueue the children.
for kid in flow::child_iter(flow) {
had_children = true;
discovered_child_flows.push(borrowed_flow_to_unsafe_flow(kid));
}
}
// If there were no more children, start assigning block-sizes.
if !had_children {
bottom_up_func(unsafe_flow, proxy.user_data())
}
}
for chunk in discovered_child_flows.chunks(CHUNK_SIZE) {<|fim▁hole|> }
}
}
impl<'a> ParallelPreorderFlowTraversal for AssignISizes<'a> {
fn run_parallel(&self,
unsafe_flows: UnsafeFlowList,
proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>) {
self.run_parallel_helper(unsafe_flows,
proxy,
assign_inline_sizes,
assign_block_sizes_and_store_overflow)
}
fn should_record_thread_ids(&self) -> bool {
true
}
}
impl<'a> ParallelPostorderFlowTraversal for AssignBSizes<'a> {}
fn assign_inline_sizes(unsafe_flows: UnsafeFlowList,
proxy: &mut WorkerProxy<SharedLayoutContext, UnsafeFlowList>) {
let shared_layout_context = proxy.user_data();
let layout_context = LayoutContext::new(shared_layout_context);
let assign_inline_sizes_traversal = AssignISizes {
layout_context: &layout_context,
};
assign_inline_sizes_traversal.run_parallel(unsafe_flows, proxy)
}
fn assign_block_sizes_and_store_overflow(
unsafe_flow: UnsafeFlow,
shared_layout_context: &SharedLayoutContext) {
let layout_context = LayoutContext::new(shared_layout_context);
let assign_block_sizes_traversal = AssignBSizes {
layout_context: &layout_context,
};
assign_block_sizes_traversal.run_parallel(unsafe_flow)
}
pub fn traverse_flow_tree_preorder(
root: &mut FlowRef,
profiler_metadata: Option<TimerMetadata>,
time_profiler_chan: time::ProfilerChan,
shared_layout_context: &SharedLayoutContext,
queue: &mut WorkQueue<SharedLayoutContext, WorkQueueData>) {
if opts::get().bubble_inline_sizes_separately {
let layout_context = LayoutContext::new(shared_layout_context);
let bubble_inline_sizes = BubbleISizes { layout_context: &layout_context };
flow_ref::deref_mut(root).traverse_postorder(&bubble_inline_sizes);
}
run_queue_with_custom_work_data_type(queue, |queue| {
profile(time::ProfilerCategory::LayoutParallelWarmup, profiler_metadata,
time_profiler_chan, || {
queue.push(WorkUnit {
fun: assign_inline_sizes,
data: (box vec![mut_owned_flow_to_unsafe_flow(root)], 0),
})
});
}, shared_layout_context);
}<|fim▁end|>
|
proxy.push(WorkUnit {
fun: top_down_func,
data: (box chunk.iter().cloned().collect(), 0),
});
|
<|file_name|>app.component.ts<|end_file_name|><|fim▁begin|>import { OutroModelo } from './modelos/outro-modelo.model';
import { Armario } from './modelos/armario.model';
import { Component } from '@angular/core';
@Component({<|fim▁hole|> templateUrl: './app.component.html',
styleUrls: ['./app.component.css']
})
export class AppComponent {
title = 'app works!';
armario: Armario | OutroModelo;
}<|fim▁end|>
|
selector: 'app-root',
|
<|file_name|>jquery.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for jQuery 1.10.x / 2.0.x
// Project: http://jquery.com/
// Definitions by: Boris Yankov <https://github.com/borisyankov/>, Christian Hoffmeister <https://github.com/choffmeister>, Steve Fenton <https://github.com/Steve-Fenton>, Diullei Gomes <https://github.com/Diullei>, Tass Iliopoulos <https://github.com/tasoili>, Jason Swearingen <https://github.com/jasons-novaleaf>, Sean Hill <https://github.com/seanski>, Guus Goossens <https://github.com/Guuz>, Kelly Summerlin <https://github.com/ksummerlin>, Basarat Ali Syed <https://github.com/basarat>, Nicholas Wolverson <https://github.com/nwolverson>, Derek Cicerone <https://github.com/derekcicerone>, Andrew Gaspar <https://github.com/AndrewGaspar>, James Harrison Fisher <https://github.com/jameshfisher>, Seikichi Kondo <https://github.com/seikichi>, Benjamin Jackman <https://github.com/benjaminjackman>, Poul Sorensen <https://github.com/s093294>, Josh Strobl <https://github.com/JoshStrobl>, John Reilly <https://github.com/johnnyreilly/>, Dick van den Brink <https://github.com/DickvdBrink>
// Definitions: https://github.com/borisyankov/DefinitelyTyped
/* *****************************************************************************
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
this file except in compliance with the License. You may obtain a copy of the
License at http://www.apache.org/licenses/LICENSE-2.0
THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
MERCHANTABLITY OR NON-INFRINGEMENT.
See the Apache Version 2.0 License for specific language governing permissions
and limitations under the License.
***************************************************************************** */
/**
* Interface for the AJAX setting that will configure the AJAX request
*/
interface JQueryAjaxSettings {
/**
* The content type sent in the request header that tells the server what kind of response it will accept in return. If the accepts setting needs modification, it is recommended to do so once in the $.ajaxSetup() method.
*/
accepts?: any;
/**
* By default, all requests are sent asynchronously (i.e. this is set to true by default). If you need synchronous requests, set this option to false. Cross-domain requests and dataType: "jsonp" requests do not support synchronous operation. Note that synchronous requests may temporarily lock the browser, disabling any actions while the request is active. As of jQuery 1.8, the use of async: false with jqXHR ($.Deferred) is deprecated; you must use the success/error/complete callback options instead of the corresponding methods of the jqXHR object such as jqXHR.done() or the deprecated jqXHR.success().
*/
async?: boolean;
/**
* A pre-request callback function that can be used to modify the jqXHR (in jQuery 1.4.x, XMLHTTPRequest) object before it is sent. Use this to set custom headers, etc. The jqXHR and settings objects are passed as arguments. This is an Ajax Event. Returning false in the beforeSend function will cancel the request. As of jQuery 1.5, the beforeSend option will be called regardless of the type of request.
*/
beforeSend? (jqXHR: JQueryXHR, settings: JQueryAjaxSettings): any;
/**
* If set to false, it will force requested pages not to be cached by the browser. Note: Setting cache to false will only work correctly with HEAD and GET requests. It works by appending "_={timestamp}" to the GET parameters. The parameter is not needed for other types of requests, except in IE8 when a POST is made to a URL that has already been requested by a GET.
*/
cache?: boolean;
/**
* A function to be called when the request finishes (after success and error callbacks are executed). The function gets passed two arguments: The jqXHR (in jQuery 1.4.x, XMLHTTPRequest) object and a string categorizing the status of the request ("success", "notmodified", "error", "timeout", "abort", or "parsererror"). As of jQuery 1.5, the complete setting can accept an array of functions. Each function will be called in turn. This is an Ajax Event.
*/
complete? (jqXHR: JQueryXHR, textStatus: string): any;
/**
* An object of string/regular-expression pairs that determine how jQuery will parse the response, given its content type. (version added: 1.5)
*/
contents?: { [key: string]: any; };
//According to jQuery.ajax source code, ajax's option actually allows contentType to set to "false"
// https://github.com/borisyankov/DefinitelyTyped/issues/742
/**
* When sending data to the server, use this content type. Default is "application/x-www-form-urlencoded; charset=UTF-8", which is fine for most cases. If you explicitly pass in a content-type to $.ajax(), then it is always sent to the server (even if no data is sent). The W3C XMLHttpRequest specification dictates that the charset is always UTF-8; specifying another charset will not force the browser to change the encoding.
*/
contentType?: any;
/**
* This object will be made the context of all Ajax-related callbacks. By default, the context is an object that represents the ajax settings used in the call ($.ajaxSettings merged with the settings passed to $.ajax).
*/
context?: any;
/**
* An object containing dataType-to-dataType converters. Each converter's value is a function that returns the transformed value of the response. (version added: 1.5)
*/
converters?: { [key: string]: any; };
/**
* If you wish to force a crossDomain request (such as JSONP) on the same domain, set the value of crossDomain to true. This allows, for example, server-side redirection to another domain. (version added: 1.5)
*/
crossDomain?: boolean;
/**
* Data to be sent to the server. It is converted to a query string, if not already a string. It's appended to the url for GET-requests. See processData option to prevent this automatic processing. Object must be Key/Value pairs. If value is an Array, jQuery serializes multiple values with same key based on the value of the traditional setting (described below).
*/
data?: any;
/**
* A function to be used to handle the raw response data of XMLHttpRequest.This is a pre-filtering function to sanitize the response. You should return the sanitized data. The function accepts two arguments: The raw data returned from the server and the 'dataType' parameter.
*/
dataFilter? (data: any, ty: any): any;
/**
* The type of data that you're expecting back from the server. If none is specified, jQuery will try to infer it based on the MIME type of the response (an XML MIME type will yield XML, in 1.4 JSON will yield a JavaScript object, in 1.4 script will execute the script, and anything else will be returned as a string).
*/
dataType?: string;
/**
* A function to be called if the request fails. The function receives three arguments: The jqXHR (in jQuery 1.4.x, XMLHttpRequest) object, a string describing the type of error that occurred and an optional exception object, if one occurred. Possible values for the second argument (besides null) are "timeout", "error", "abort", and "parsererror". When an HTTP error occurs, errorThrown receives the textual portion of the HTTP status, such as "Not Found" or "Internal Server Error." As of jQuery 1.5, the error setting can accept an array of functions. Each function will be called in turn. Note: This handler is not called for cross-domain script and cross-domain JSONP requests. This is an Ajax Event.
*/
error? (jqXHR: JQueryXHR, textStatus: string, errorThrown: string): any;
/**
* Whether to trigger global Ajax event handlers for this request. The default is true. Set to false to prevent the global handlers like ajaxStart or ajaxStop from being triggered. This can be used to control various Ajax Events.
*/
global?: boolean;
/**
* An object of additional header key/value pairs to send along with requests using the XMLHttpRequest transport. The header X-Requested-With: XMLHttpRequest is always added, but its default XMLHttpRequest value can be changed here. Values in the headers setting can also be overwritten from within the beforeSend function. (version added: 1.5)
*/
headers?: { [key: string]: any; };
/**
* Allow the request to be successful only if the response has changed since the last request. This is done by checking the Last-Modified header. Default value is false, ignoring the header. In jQuery 1.4 this technique also checks the 'etag' specified by the server to catch unmodified data.
*/
ifModified?: boolean;
/**
* Allow the current environment to be recognized as "local," (e.g. the filesystem), even if jQuery does not recognize it as such by default. The following protocols are currently recognized as local: file, *-extension, and widget. If the isLocal setting needs modification, it is recommended to do so once in the $.ajaxSetup() method. (version added: 1.5.1)
*/
isLocal?: boolean;
/**
* Override the callback function name in a jsonp request. This value will be used instead of 'callback' in the 'callback=?' part of the query string in the url. So {jsonp:'onJSONPLoad'} would result in 'onJSONPLoad=?' passed to the server. As of jQuery 1.5, setting the jsonp option to false prevents jQuery from adding the "?callback" string to the URL or attempting to use "=?" for transformation. In this case, you should also explicitly set the jsonpCallback setting. For example, { jsonp: false, jsonpCallback: "callbackName" }
*/
jsonp?: any;
/**
* Specify the callback function name for a JSONP request. This value will be used instead of the random name automatically generated by jQuery. It is preferable to let jQuery generate a unique name as it'll make it easier to manage the requests and provide callbacks and error handling. You may want to specify the callback when you want to enable better browser caching of GET requests. As of jQuery 1.5, you can also use a function for this setting, in which case the value of jsonpCallback is set to the return value of that function.
*/
jsonpCallback?: any;
/**
* A mime type to override the XHR mime type. (version added: 1.5.1)
*/
mimeType?: string;
/**
* A password to be used with XMLHttpRequest in response to an HTTP access authentication request.
*/
password?: string;
/**
* By default, data passed in to the data option as an object (technically, anything other than a string) will be processed and transformed into a query string, fitting to the default content-type "application/x-www-form-urlencoded". If you want to send a DOMDocument, or other non-processed data, set this option to false.
*/
processData?: boolean;
/**
* Only applies when the "script" transport is used (e.g., cross-domain requests with "jsonp" or "script" dataType and "GET" type). Sets the charset attribute on the script tag used in the request. Used when the character set on the local page is not the same as the one on the remote script.
*/
scriptCharset?: string;
/**
     * An object of numeric HTTP codes and functions to be called when the response has the corresponding code. If the request is successful, the status code functions take the same parameters as the success callback; if it results in an error (including 3xx redirect), they take the same parameters as the error callback. (version added: 1.5)
*/
statusCode?: { [key: string]: any; };
/**
* A function to be called if the request succeeds. The function gets passed three arguments: The data returned from the server, formatted according to the dataType parameter; a string describing the status; and the jqXHR (in jQuery 1.4.x, XMLHttpRequest) object. As of jQuery 1.5, the success setting can accept an array of functions. Each function will be called in turn. This is an Ajax Event.
*/
success? (data: any, textStatus: string, jqXHR: JQueryXHR): any;
/**
* Set a timeout (in milliseconds) for the request. This will override any global timeout set with $.ajaxSetup(). The timeout period starts at the point the $.ajax call is made; if several other requests are in progress and the browser has no connections available, it is possible for a request to time out before it can be sent. In jQuery 1.4.x and below, the XMLHttpRequest object will be in an invalid state if the request times out; accessing any object members may throw an exception. In Firefox 3.0+ only, script and JSONP requests cannot be cancelled by a timeout; the script will run even if it arrives after the timeout period.
*/
timeout?: number;
/**
* Set this to true if you wish to use the traditional style of param serialization.
*/
traditional?: boolean;
/**
* The type of request to make ("POST" or "GET"), default is "GET". Note: Other HTTP request methods, such as PUT and DELETE, can also be used here, but they are not supported by all browsers.
*/
type?: string;
/**
* A string containing the URL to which the request is sent.
*/
url?: string;
/**
* A username to be used with XMLHttpRequest in response to an HTTP access authentication request.
*/
username?: string;
/**
* Callback for creating the XMLHttpRequest object. Defaults to the ActiveXObject when available (IE), the XMLHttpRequest otherwise. Override to provide your own implementation for XMLHttpRequest or enhancements to the factory.
*/
xhr?: any;
/**
* An object of fieldName-fieldValue pairs to set on the native XHR object. For example, you can use it to set withCredentials to true for cross-domain requests if needed. In jQuery 1.5, the withCredentials property was not propagated to the native XHR and thus CORS requests requiring it would ignore this flag. For this reason, we recommend using jQuery 1.5.1+ should you require the use of it. (version added: 1.5.1)
*/
xhrFields?: { [key: string]: any; };
}
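// Illustrative usage sketch (kept as a comment because statements are not valid
// in a declaration file): how a handful of the settings above are typically
// combined in consuming code. The URL and payload are hypothetical.
//
//     $.ajax({
//         url: "/api/items",                       // hypothetical endpoint
//         type: "POST",
//         data: { name: "example" },
//         dataType: "json",
//         success: function (data, textStatus, jqXHR) { console.log(data); },
//         error: function (jqXHR, textStatus, errorThrown) { console.error(errorThrown); }
//     });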
/**
* Interface for the jqXHR object
*/
interface JQueryXHR extends XMLHttpRequest, JQueryPromise<any> {
/**
* The .overrideMimeType() method may be used in the beforeSend() callback function, for example, to modify the response content-type header. As of jQuery 1.5.1, the jqXHR object also contains the overrideMimeType() method (it was available in jQuery 1.4.x, as well, but was temporarily removed in jQuery 1.5).
*/
overrideMimeType(mimeType: string): any;
/**
* Cancel the request.
*
* @param statusText A string passed as the textStatus parameter for the done callback. Default value: "canceled"
*/
abort(statusText?: string): void;
/**
* Incorporates the functionality of the .done() and .fail() methods, allowing (as of jQuery 1.8) the underlying Promise to be manipulated. Refer to deferred.then() for implementation details.
*/
then(doneCallback: (data: any, textStatus: string, jqXHR: JQueryXHR) => void, failCallback?: (jqXHR: JQueryXHR, textStatus: string, errorThrown: any) => void): JQueryPromise<any>;
/**
* Property containing the parsed response if the response Content-Type is json
*/
responseJSON?: any;
}
/**
* Interface for the JQuery callback
*/
interface JQueryCallback {
/**
* Add a callback or a collection of callbacks to a callback list.
*
* @param callbacks A function, or array of functions, that are to be added to the callback list.
*/
add(callbacks: Function): JQueryCallback;
/**
* Add a callback or a collection of callbacks to a callback list.
*
* @param callbacks A function, or array of functions, that are to be added to the callback list.
*/
add(callbacks: Function[]): JQueryCallback;
/**
* Disable a callback list from doing anything more.
*/
disable(): JQueryCallback;
/**
* Determine if the callbacks list has been disabled.
*/
disabled(): boolean;
/**
* Remove all of the callbacks from a list.
*/
empty(): JQueryCallback;
/**
* Call all of the callbacks with the given arguments
*
* @param arguments The argument or list of arguments to pass back to the callback list.
*/
fire(...arguments: any[]): JQueryCallback;
/**
* Determine if the callbacks have already been called at least once.
*/
fired(): boolean;
/**
* Call all callbacks in a list with the given context and arguments.
*
* @param context A reference to the context in which the callbacks in the list should be fired.
* @param arguments An argument, or array of arguments, to pass to the callbacks in the list.
*/
fireWith(context?: any, ...args: any[]): JQueryCallback;
/**
* Determine whether a supplied callback is in a list
*
* @param callback The callback to search for.
*/
has(callback: Function): boolean;
/**
* Lock a callback list in its current state.
*/
lock(): JQueryCallback;
/**
* Determine if the callbacks list has been locked.
*/
locked(): boolean;
/**
* Remove a callback or a collection of callbacks from a callback list.
*
* @param callbacks A function, or array of functions, that are to be removed from the callback list.
*/
remove(callbacks: Function): JQueryCallback;
/**
* Remove a callback or a collection of callbacks from a callback list.
*
* @param callbacks A function, or array of functions, that are to be removed from the callback list.
*/
remove(callbacks: Function[]): JQueryCallback;
}
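// Illustrative usage sketch (comment only; statements are not valid in a
// declaration file): a callback list created with $.Callbacks(), as described
// by the interface above. The handler name and flag are hypothetical choices.
//
//     var list = $.Callbacks("unique");   // optional space-separated flags
//     var greet = function (name: string) { console.log("hello " + name); };
//     list.add(greet);
//     list.fire("world");                 // runs every callback with "world"
//     list.remove(greet);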
/**
* Allows jQuery Promises to interop with non-jQuery promises
*/
interface JQueryGenericPromise<T> {
/**
* Add handlers to be called when the Deferred object is resolved, rejected, or still in progress.
*
* @param doneFilter A function that is called when the Deferred is resolved.
* @param failFilter An optional function that is called when the Deferred is rejected.
*/
then<U>(doneFilter: (value: T, ...values: any[]) => U|JQueryPromise<U>, failFilter?: (...reasons: any[]) => U|JQueryPromise<U>, progressFilter?: (...progression: any[]) => any): JQueryPromise<U>;
/**
* Determine the current state of a Deferred object.
*/
state(): string;
// Deprecated - given no typings
pipe(doneFilter?: (x: any) => any, failFilter?: (x: any) => any, progressFilter?: (x: any) => any): JQueryPromise<any>;
}
/**
* Interface for the JQuery promise/deferred callbacks
*/
interface JQueryPromiseCallback<T> {
(value?: T, ...args: any[]): void;
}
interface JQueryPromiseOperator<T, U> {
(callback1: JQueryPromiseCallback<T>|JQueryPromiseCallback<T>[], ...callbacksN: Array<JQueryPromiseCallback<any>|JQueryPromiseCallback<any>[]>): JQueryPromise<U>;
}
/**
* Interface for the JQuery promise, part of callbacks
*/
interface JQueryPromise<T> extends JQueryGenericPromise<T> {
/**
* Add handlers to be called when the Deferred object is either resolved or rejected.
*
* @param alwaysCallbacks1 A function, or array of functions, that is called when the Deferred is resolved or rejected.
* @param alwaysCallbacks2 Optional additional functions, or arrays of functions, that are called when the Deferred is resolved or rejected.
*/
always(alwaysCallback1?: JQueryPromiseCallback<any>|JQueryPromiseCallback<any>[], ...alwaysCallbacksN: Array<JQueryPromiseCallback<any>|JQueryPromiseCallback<any>[]>): JQueryPromise<T>;
/**
* Add handlers to be called when the Deferred object is resolved.
*
* @param doneCallbacks1 A function, or array of functions, that are called when the Deferred is resolved.
* @param doneCallbacks2 Optional additional functions, or arrays of functions, that are called when the Deferred is resolved.
*/
done(doneCallback1?: JQueryPromiseCallback<T>|JQueryPromiseCallback<T>[], ...doneCallbackN: Array<JQueryPromiseCallback<T>|JQueryPromiseCallback<T>[]>): JQueryPromise<T>;
/**
* Add handlers to be called when the Deferred object is rejected.
*
* @param failCallbacks1 A function, or array of functions, that are called when the Deferred is rejected.
* @param failCallbacks2 Optional additional functions, or arrays of functions, that are called when the Deferred is rejected.
*/
fail(failCallback1?: JQueryPromiseCallback<any>|JQueryPromiseCallback<any>[], ...failCallbacksN: Array<JQueryPromiseCallback<any>|JQueryPromiseCallback<any>[]>): JQueryPromise<T>;
/**
* Add handlers to be called when the Deferred object generates progress notifications.
*
* @param progressCallbacks A function, or array of functions, to be called when the Deferred generates progress notifications.
*/
progress(progressCallback1?: JQueryPromiseCallback<any>|JQueryPromiseCallback<any>[], ...progressCallbackN: Array<JQueryPromiseCallback<any>|JQueryPromiseCallback<any>[]>): JQueryPromise<T>;
}
/**
* Interface for the JQuery deferred, part of callbacks
*/
interface JQueryDeferred<T> extends JQueryGenericPromise<T> {
/**
* Add handlers to be called when the Deferred object is either resolved or rejected.
*
* @param alwaysCallbacks1 A function, or array of functions, that is called when the Deferred is resolved or rejected.
* @param alwaysCallbacks2 Optional additional functions, or arrays of functions, that are called when the Deferred is resolved or rejected.
*/
always(alwaysCallback1?: JQueryPromiseCallback<any>|JQueryPromiseCallback<any>[], ...alwaysCallbacksN: Array<JQueryPromiseCallback<any>|JQueryPromiseCallback<any>[]>): JQueryDeferred<T>;
/**
* Add handlers to be called when the Deferred object is resolved.
*
* @param doneCallbacks1 A function, or array of functions, that are called when the Deferred is resolved.
* @param doneCallbacks2 Optional additional functions, or arrays of functions, that are called when the Deferred is resolved.
*/
done(doneCallback1?: JQueryPromiseCallback<T>|JQueryPromiseCallback<T>[], ...doneCallbackN: Array<JQueryPromiseCallback<T>|JQueryPromiseCallback<T>[]>): JQueryDeferred<T>;
/**
* Add handlers to be called when the Deferred object is rejected.
*
* @param failCallbacks1 A function, or array of functions, that are called when the Deferred is rejected.
* @param failCallbacks2 Optional additional functions, or arrays of functions, that are called when the Deferred is rejected.
*/
fail(failCallback1?: JQueryPromiseCallback<any>|JQueryPromiseCallback<any>[], ...failCallbacksN: Array<JQueryPromiseCallback<any>|JQueryPromiseCallback<any>[]>): JQueryDeferred<T>;
/**
* Add handlers to be called when the Deferred object generates progress notifications.
*
* @param progressCallbacks A function, or array of functions, to be called when the Deferred generates progress notifications.
*/
progress(progressCallback1?: JQueryPromiseCallback<any>|JQueryPromiseCallback<any>[], ...progressCallbackN: Array<JQueryPromiseCallback<any>|JQueryPromiseCallback<any>[]>): JQueryDeferred<T>;
/**
* Call the progressCallbacks on a Deferred object with the given args.
*
* @param args Optional arguments that are passed to the progressCallbacks.
*/
notify(value?: any, ...args: any[]): JQueryDeferred<T>;
/**
* Call the progressCallbacks on a Deferred object with the given context and args.
*
* @param context Context passed to the progressCallbacks as the this object.
* @param args Optional arguments that are passed to the progressCallbacks.
*/
notifyWith(context: any, value?: any, ...args: any[]): JQueryDeferred<T>;
/**
* Reject a Deferred object and call any failCallbacks with the given args.
*
* @param args Optional arguments that are passed to the failCallbacks.
*/
reject(value?: any, ...args: any[]): JQueryDeferred<T>;
/**
* Reject a Deferred object and call any failCallbacks with the given context and args.
*
* @param context Context passed to the failCallbacks as the this object.
* @param args An optional array of arguments that are passed to the failCallbacks.
*/
rejectWith(context: any, value?: any, ...args: any[]): JQueryDeferred<T>;
/**
* Resolve a Deferred object and call any doneCallbacks with the given args.
*
* @param value First argument passed to doneCallbacks.
* @param args Optional subsequent arguments that are passed to the doneCallbacks.
*/
resolve(value?: T, ...args: any[]): JQueryDeferred<T>;
/**
* Resolve a Deferred object and call any doneCallbacks with the given context and args.
*
* @param context Context passed to the doneCallbacks as the this object.
* @param args An optional array of arguments that are passed to the doneCallbacks.
*/
resolveWith(context: any, value?: T, ...args: any[]): JQueryDeferred<T>;
/**
* Return a Deferred's Promise object.
*
* @param target Object onto which the promise methods have to be attached
*/
promise(target?: any): JQueryPromise<T>;
}
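// Illustrative usage sketch (comment only; statements are not valid in a
// declaration file): the usual produce/consume split between a Deferred and the
// Promise it hands out, assuming the $.Deferred factory declared elsewhere in
// this file. The timing and values are hypothetical.
//
//     function waitABit(): JQueryPromise<string> {
//         var deferred = $.Deferred<string>();
//         setTimeout(function () { deferred.resolve("done"); }, 100);
//         return deferred.promise();      // consumers only see the Promise side
//     }
//     waitABit().done(function (value) { console.log(value); })
//               .fail(function (reason) { console.error(reason); });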
/**
* Interface of the JQuery extension of the W3C event object
*/
interface BaseJQueryEventObject extends Event {
data: any;
delegateTarget: Element;
isDefaultPrevented(): boolean;
isImmediatePropagationStopped(): boolean;
isPropagationStopped(): boolean;
namespace: string;
originalEvent: Event;
preventDefault(): any;
relatedTarget: Element;
result: any;
stopImmediatePropagation(): void;
stopPropagation(): void;
target: Element;
pageX: number;
pageY: number;
which: number;
metaKey: boolean;
}
interface JQueryInputEventObject extends BaseJQueryEventObject {
altKey: boolean;
ctrlKey: boolean;
metaKey: boolean;
shiftKey: boolean;
}
interface JQueryMouseEventObject extends JQueryInputEventObject {
button: number;
clientX: number;
clientY: number;
offsetX: number;
offsetY: number;
pageX: number;
pageY: number;
screenX: number;
screenY: number;
}
interface JQueryKeyEventObject extends JQueryInputEventObject {
char: any;
charCode: number;
key: any;
keyCode: number;
}
interface JQueryEventObject extends BaseJQueryEventObject, JQueryInputEventObject, JQueryMouseEventObject, JQueryKeyEventObject {
}
/*
Collection of properties of the current browser
*/
interface JQuerySupport {
ajax?: boolean;
boxModel?: boolean;
changeBubbles?: boolean;
checkClone?: boolean;
checkOn?: boolean;
cors?: boolean;
cssFloat?: boolean;
hrefNormalized?: boolean;
htmlSerialize?: boolean;
leadingWhitespace?: boolean;
noCloneChecked?: boolean;
noCloneEvent?: boolean;
opacity?: boolean;
optDisabled?: boolean;
optSelected?: boolean;
scriptEval? (): boolean;
style?: boolean;
submitBubbles?: boolean;
tbody?: boolean;
}
interface JQueryParam {
/**
* Create a serialized representation of an array or object, suitable for use in a URL query string or Ajax request.
*
* @param obj An array or object to serialize.
*/
(obj: any): string;
/**
* Create a serialized representation of an array or object, suitable for use in a URL query string or Ajax request.
*
* @param obj An array or object to serialize.
* @param traditional A Boolean indicating whether to perform a traditional "shallow" serialization.
*/
(obj: any, traditional: boolean): string;
}
/**
* The interface used to construct jQuery events (with $.Event). It is
* defined separately instead of inline in JQueryStatic to allow
* overriding the construction function with specific strings
* returning specific event objects.
*/
interface JQueryEventConstructor {
(name: string, eventProperties?: any): JQueryEventObject;
new (name: string, eventProperties?: any): JQueryEventObject;
}
/**
* The interface used to specify coordinates.
*/
interface JQueryCoordinates {
left: number;
top: number;
}
/**
* Elements in the array returned by serializeArray()
*/
interface JQuerySerializeArrayElement {
name: string;
value: string;
}
interface JQueryAnimationOptions {
/**
* A string or number determining how long the animation will run.
*/
duration?: any;
/**
* A string indicating which easing function to use for the transition.
*/
easing?: string;
/**
* A function to call once the animation is complete.
*/
complete?: Function;
/**
* A function to be called for each animated property of each animated element. This function provides an opportunity to modify the Tween object to change the value of the property before it is set.
*/
step?: (now: number, tween: any) => any;
/**
* A function to be called after each step of the animation, only once per animated element regardless of the number of animated properties. (version added: 1.8)
*/
progress?: (animation: JQueryPromise<any>, progress: number, remainingMs: number) => any;
/**
* A function to call when the animation begins. (version added: 1.8)
*/
start?: (animation: JQueryPromise<any>) => any;
/**
* A function to be called when the animation completes (its Promise object is resolved). (version added: 1.8)
*/
done?: (animation: JQueryPromise<any>, jumpedToEnd: boolean) => any;
/**
* A function to be called when the animation fails to complete (its Promise object is rejected). (version added: 1.8)
*/
fail?: (animation: JQueryPromise<any>, jumpedToEnd: boolean) => any;
/**
* A function to be called when the animation completes or stops without completing (its Promise object is either resolved or rejected). (version added: 1.8)
*/
always?: (animation: JQueryPromise<any>, jumpedToEnd: boolean) => any;
/**
* A Boolean indicating whether to place the animation in the effects queue. If false, the animation will begin immediately. As of jQuery 1.7, the queue option can also accept a string, in which case the animation is added to the queue represented by that string. When a custom queue name is used the animation does not automatically start; you must call .dequeue("queuename") to start it.
*/
queue?: any;
/**
* A map of one or more of the CSS properties defined by the properties argument and their corresponding easing functions. (version added: 1.4)
*/
specialEasing?: Object;
}
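// Illustrative usage sketch (comment only; statements are not valid in a
// declaration file): passing an options object of the shape above to
// .animate(). The selector and property values are hypothetical.
//
//     $("#panel").animate({ opacity: 0.5, height: "toggle" }, {
//         duration: 400,
//         easing: "swing",
//         complete: function () { console.log("animation finished"); }
//     });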
/**
* Static members of jQuery (those on $ and jQuery themselves)
*/
interface JQueryStatic {
/**
* Perform an asynchronous HTTP (Ajax) request.
*
* @param settings A set of key/value pairs that configure the Ajax request. All settings are optional. A default can be set for any option with $.ajaxSetup().
*/
ajax(settings: JQueryAjaxSettings): JQueryXHR;
/**
* Perform an asynchronous HTTP (Ajax) request.
*
* @param url A string containing the URL to which the request is sent.
* @param settings A set of key/value pairs that configure the Ajax request. All settings are optional. A default can be set for any option with $.ajaxSetup().
*/
ajax(url: string, settings?: JQueryAjaxSettings): JQueryXHR;
/**
* Handle custom Ajax options or modify existing options before each request is sent and before they are processed by $.ajax().
*
* @param dataTypes An optional string containing one or more space-separated dataTypes
* @param handler A handler to set default values for future Ajax requests.
*/
ajaxPrefilter(dataTypes: string, handler: (opts: any, originalOpts: JQueryAjaxSettings, jqXHR: JQueryXHR) => any): void;
/**
* Handle custom Ajax options or modify existing options before each request is sent and before they are processed by $.ajax().
*
* @param handler A handler to set default values for future Ajax requests.
*/
ajaxPrefilter(handler: (opts: any, originalOpts: JQueryAjaxSettings, jqXHR: JQueryXHR) => any): void;
ajaxSettings: JQueryAjaxSettings;
/**
* Set default values for future Ajax requests. Its use is not recommended.
*
* @param options A set of key/value pairs that configure the default Ajax request. All options are optional.
*/
ajaxSetup(options: JQueryAjaxSettings): void;
/**
* Load data from the server using a HTTP GET request.
*
* @param url A string containing the URL to which the request is sent.
* @param success A callback function that is executed if the request succeeds.
* @param dataType The type of data expected from the server. Default: Intelligent Guess (xml, json, script, or html).
*/
get(url: string, success?: (data: any, textStatus: string, jqXHR: JQueryXHR) => any, dataType?: string): JQueryXHR;
/**
* Load data from the server using a HTTP GET request.
*
* @param url A string containing the URL to which the request is sent.
* @param data A plain object or string that is sent to the server with the request.
* @param success A callback function that is executed if the request succeeds.
* @param dataType The type of data expected from the server. Default: Intelligent Guess (xml, json, script, or html).
*/
get(url: string, data?: Object|string, success?: (data: any, textStatus: string, jqXHR: JQueryXHR) => any, dataType?: string): JQueryXHR;
/**
* Load JSON-encoded data from the server using a GET HTTP request.
*
* @param url A string containing the URL to which the request is sent.
* @param success A callback function that is executed if the request succeeds.
*/
getJSON(url: string, success?: (data: any, textStatus: string, jqXHR: JQueryXHR) => any): JQueryXHR;
/**
* Load JSON-encoded data from the server using a GET HTTP request.
*
* @param url A string containing the URL to which the request is sent.
* @param data A plain object or string that is sent to the server with the request.
* @param success A callback function that is executed if the request succeeds.
*/
getJSON(url: string, data?: Object|string, success?: (data: any, textStatus: string, jqXHR: JQueryXHR) => any): JQueryXHR;
/**
* Load a JavaScript file from the server using a GET HTTP request, then execute it.
*
* @param url A string containing the URL to which the request is sent.
* @param success A callback function that is executed if the request succeeds.
*/
getScript(url: string, success?: (script: string, textStatus: string, jqXHR: JQueryXHR) => any): JQueryXHR;
/**
* Create a serialized representation of an array or object, suitable for use in a URL query string or Ajax request.
*/
param: JQueryParam;
/**
* Load data from the server using a HTTP POST request.
*
* @param url A string containing the URL to which the request is sent.
* @param success A callback function that is executed if the request succeeds. Required if dataType is provided, but can be null in that case.
* @param dataType The type of data expected from the server. Default: Intelligent Guess (xml, json, script, text, html).
*/
post(url: string, success?: (data: any, textStatus: string, jqXHR: JQueryXHR) => any, dataType?: string): JQueryXHR;
/**
* Load data from the server using a HTTP POST request.
*
* @param url A string containing the URL to which the request is sent.
* @param data A plain object or string that is sent to the server with the request.
* @param success A callback function that is executed if the request succeeds. Required if dataType is provided, but can be null in that case.
* @param dataType The type of data expected from the server. Default: Intelligent Guess (xml, json, script, text, html).
*/
post(url: string, data?: Object|string, success?: (data: any, textStatus: string, jqXHR: JQueryXHR) => any, dataType?: string): JQueryXHR;
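    // Illustrative usage sketch (comment only): the shorthand helpers above as
    // used from consuming code. The endpoints and payload are hypothetical.
    //
    //     $.getJSON("/api/items", function (data) { console.log(data); });
    //     $.post("/api/items", { name: "example" }, function (data) { console.log(data); }, "json");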
/**
* A multi-purpose callbacks list object that provides a powerful way to manage callback lists.
*
* @param flags An optional list of space-separated flags that change how the callback list behaves.
*/
Callbacks(flags?: string): JQueryCallback;
/**
* Holds or releases the execution of jQuery's ready event.
*
* @param hold Indicates whether the ready hold is being requested or released
*/
holdReady(hold: boolean): void;
/**
* Accepts a string containing a CSS selector which is then used to match a set of elements.
*
* @param selector A string containing a selector expression
* @param context A DOM Element, Document, or jQuery to use as context
*/
(selector: string, context?: Element|JQuery): JQuery;
/**
* Accepts a DOM element to wrap in a jQuery object.
*
* @param element A DOM element to wrap in a jQuery object.
*/
(element: Element): JQuery;
/**
* Accepts an array of DOM elements to wrap in a jQuery object.
*
* @param elementArray An array containing a set of DOM elements to wrap in a jQuery object.
*/
(elementArray: Element[]): JQuery;
/**
* Accepts a plain object to wrap in a jQuery object.
*
* @param object A plain object to wrap in a jQuery object.
*/
(object: {}): JQuery;
/**
* Accepts an existing jQuery object to clone.
*
* @param object An existing jQuery object to clone.
*/
(object: JQuery): JQuery;
/**
* Returns an empty jQuery set.
*/
(): JQuery;
/**
* Creates DOM elements on the fly from the provided string of raw HTML.
*
* @param html A string of HTML to create on the fly. Note that this parses HTML, not XML.
* @param ownerDocument A document in which the new elements will be created.
*/
(html: string, ownerDocument?: Document): JQuery;
/**
* Creates DOM elements on the fly from the provided string of raw HTML.
*
* @param html A string defining a single, standalone, HTML element (e.g. <div/> or <div></div>).
* @param attributes An object of attributes, events, and methods to call on the newly-created element.
*/
(html: string, attributes: Object): JQuery;
/**
* Binds a function to be executed when the DOM has finished loading.
*
* @param callback A function to execute after the DOM is ready.
*/
(callback: Function): JQuery;
/**
* Relinquish jQuery's control of the $ variable.
*
* @param removeAll A Boolean indicating whether to remove all jQuery variables from the global scope (including jQuery itself).
*/
noConflict(removeAll?: boolean): Object;
/**
* Provides a way to execute callback functions based on one or more objects, usually Deferred objects that represent asynchronous events.
*
* @param deferreds One or more Deferred objects, or plain JavaScript objects.
*/
when<T>(...deferreds: Array<T|JQueryPromise<T>/* as JQueryDeferred<T> */>): JQueryPromise<T>;
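/*
* Illustrative use of $.when with two Ajax promises (hypothetical URLs). Each
* resolved argument is the [data, textStatus, jqXHR] array of its request:
*
*     $.when($.get("/api/a"), $.get("/api/b")).then(function (resultA, resultB) {
*         console.log(resultA[0], resultB[0]);
*     });
*/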
/**
* Hook directly into jQuery to override how particular CSS properties are retrieved or set, normalize CSS property naming, or create custom properties.
*/
cssHooks: { [key: string]: any; };
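/**
* An object containing all CSS properties that may be used without a unit. The .css() method uses this object to see if it may append px to unitless values.
*/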
cssNumber: any;
/**
* Store arbitrary data associated with the specified element. Returns the value that was set.
*
* @param element The DOM element to associate with the data.
* @param key A string naming the piece of data to set.
* @param value The new data value.
*/
data<T>(element: Element, key: string, value: T): T;
/**
* Returns value at named data store for the element, as set by jQuery.data(element, name, value), or the full data store for the element.
*
* @param element The DOM element to associate with the data.
* @param key A string naming the piece of data to set.
*/
data(element: Element, key: string): any;
/**
* Returns value at named data store for the element, as set by jQuery.data(element, name, value), or the full data store for the element.
*
* @param element The DOM element to associate with the data.
*/
data(element: Element): any;
/**
* Execute the next function on the queue for the matched element.
*
* @param element A DOM element from which to remove and execute a queued function.
* @param queueName A string containing the name of the queue. Defaults to fx, the standard effects queue.
*/
dequeue(element: Element, queueName?: string): void;
/**
* Determine whether an element has any jQuery data associated with it.
*
* @param element A DOM element to be checked for data.
*/
hasData(element: Element): boolean;
/**
* Show the queue of functions to be executed on the matched element.
*
* @param element A DOM element to inspect for an attached queue.
* @param queueName A string containing the name of the queue. Defaults to fx, the standard effects queue.
*/
queue(element: Element, queueName?: string): any[];
/**
* Manipulate the queue of functions to be executed on the matched element.
*
* @param element A DOM element where the array of queued functions is attached.
* @param queueName A string containing the name of the queue. Defaults to fx, the standard effects queue.
* @param newQueue An array of functions to replace the current queue contents.
*/
queue(element: Element, queueName: string, newQueue: Function[]): JQuery;
/**
* Manipulate the queue of functions to be executed on the matched element.
*
* @param element A DOM element on which to add a queued function.
* @param queueName A string containing the name of the queue. Defaults to fx, the standard effects queue.
* @param callback The new function to add to the queue.
*/
queue(element: Element, queueName: string, callback: Function): JQuery;
/**
* Remove a previously-stored piece of data.
*
* @param element A DOM element from which to remove data.
* @param name A string naming the piece of data to remove.
*/
removeData(element: Element, name?: string): JQuery;
/**
* A constructor function that returns a chainable utility object with methods to register multiple callbacks into callback queues, invoke callback queues, and relay the success or failure state of any synchronous or asynchronous function.
*
* @param beforeStart A function that is called just before the constructor returns.
*/
Deferred<T>(beforeStart?: (deferred: JQueryDeferred<T>) => any): JQueryDeferred<T>;
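/*
* Minimal sketch of wrapping an asynchronous operation in a Deferred; waitFor is
* a hypothetical helper, not part of jQuery:
*
*     function waitFor(ms) {
*         var deferred = $.Deferred();
*         setTimeout(function () { deferred.resolve(ms); }, ms);
*         return deferred.promise();
*     }
*     waitFor(500).done(function (elapsed) { console.log("done after", elapsed); });
*/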
/**
* Effects
*/
fx: {
tick: () => void;
/**
* The rate (in milliseconds) at which animations fire.
*/
interval: number;
stop: () => void;
speeds: { slow: number; fast: number; };
/**
* Globally disable all animations.
*/
off: boolean;
step: any;
};
/**
* Takes a function and returns a new one that will always have a particular context.
*
* @param fnction The function whose context will be changed.
* @param context The object to which the context (this) of the function should be set.
* @param additionalArguments Any number of arguments to be passed to the function referenced in the function argument.
*/
proxy(fnction: (...args: any[]) => any, context: Object, ...additionalArguments: any[]): any;
/**
* Takes a function and returns a new one that will always have a particular context.
*
* @param context The object to which the context (this) of the function should be set.
* @param name The name of the function whose context will be changed (should be a property of the context object).
* @param additionalArguments Any number of arguments to be passed to the function named in the name argument.
*/
proxy(context: Object, name: string, ...additionalArguments: any[]): any;
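/*
* Illustrative use of $.proxy to keep "this" bound to an object when one of its
* methods is used as an event handler (the selector is hypothetical):
*
*     var counter = { count: 0, increment: function () { this.count++; } };
*     $("#button").on("click", $.proxy(counter.increment, counter));
*/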
Event: JQueryEventConstructor;
/**
* Takes a string and throws an exception containing it.
*
* @param message The message to send out.
*/
error(message: any): JQuery;
expr: any;
fn: any; //TODO: Decide how we want to type this
isReady: boolean;
// Properties
support: JQuerySupport;
/**
* Check to see if a DOM element is a descendant of another DOM element.
*
* @param container The DOM element that may contain the other element.
* @param contained The DOM element that may be contained by (a descendant of) the other element.
*/
contains(container: Element, contained: Element): boolean;
/**
* A generic iterator function, which can be used to seamlessly iterate over both objects and arrays. Arrays and array-like objects with a length property (such as a function's arguments object) are iterated by numeric index, from 0 to length-1. Other objects are iterated via their named properties.
*
* @param collection The object or array to iterate over.
* @param callback The function that will be executed on every object.
*/
each<T>(
collection: T[],
callback: (indexInArray: number, valueOfElement: T) => any
): any;
/**
* A generic iterator function, which can be used to seamlessly iterate over both objects and arrays. Arrays and array-like objects with a length property (such as a function's arguments object) are iterated by numeric index, from 0 to length-1. Other objects are iterated via their named properties.
*
* @param collection The object or array to iterate over.
* @param callback The function that will be executed on every object.
*/
each(
collection: any,
callback: (indexInArray: any, valueOfElement: any) => any
): any;
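/*
* Illustrative use of $.each over an array (numeric indices) and a plain object
* (named properties):
*
*     $.each([10, 20, 30], function (index, value) { console.log(index, value); });
*     $.each({ a: 1, b: 2 }, function (key, value) { console.log(key, value); });
*/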
/**
* Merge the contents of two or more objects together into the first object.
*
* @param target An object that will receive the new properties if additional objects are passed in or that will extend the jQuery namespace if it is the sole argument.
* @param object1 An object containing additional properties to merge in.
* @param objectN Additional objects containing properties to merge in.
*/
extend(target: any, object1?: any, ...objectN: any[]): any;
/**
* Merge the contents of two or more objects together into the first object.
*
* @param deep If true, the merge becomes recursive (aka. deep copy).
* @param target The object to extend. It will receive the new properties.
* @param object1 An object containing additional properties to merge in.
* @param objectN Additional objects containing properties to merge in.
*/
extend(deep: boolean, target: any, object1?: any, ...objectN: any[]): any;
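/*
* Illustrative use of $.extend; passing true as the first argument requests a
* deep (recursive) merge, and the empty target object keeps "defaults" unmodified:
*
*     var defaults = { depth: 1, render: { color: "red" } };
*     var options  = { render: { color: "blue" } };
*     var settings = $.extend(true, {}, defaults, options);
*     // settings => { depth: 1, render: { color: "blue" } }
*/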
/**
* Execute some JavaScript code globally.
*
* @param code The JavaScript code to execute.
*/
globalEval(code: string): any;
/**
* Finds the elements of an array which satisfy a filter function. The original array is not affected.
*
* @param array The array to search through.
* @param func The function to process each item against. The first argument to the function is the item, and the second argument is the index. The function should return a Boolean value. this will be the global window object.
* @param invert If "invert" is false, or not provided, then the function returns an array consisting of all elements for which "callback" returns true. If "invert" is true, then the function returns an array consisting of all elements for which "callback" returns false.
*/
grep<T>(array: T[], func: (elementOfArray: T, indexInArray: number) => boolean, invert?: boolean): T[];
/**
* Search for a specified value within an array and return its index (or -1 if not found).
*
* @param value The value to search for.
* @param array An array through which to search.
* @param fromIndex The index of the array at which to begin the search. The default is 0, which will search the whole array.
*/
inArray<T>(value: T, array: T[], fromIndex?: number): number;
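/*
* Illustrative use of $.grep and $.inArray:
*
*     var evens = $.grep([1, 2, 3, 4], function (n) { return n % 2 === 0; }); // [2, 4]
*     var index = $.inArray(3, [1, 2, 3, 4]);                                 // 2
*/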
/**
* Determine whether the argument is an array.
*
* @param obj Object to test whether or not it is an array.
*/
isArray(obj: any): boolean;
/**
* Check to see if an object is empty (contains no enumerable properties).
*
* @param obj The object that will be checked to see if it's empty.
*/
isEmptyObject(obj: any): boolean;
/**
* Determine if the argument passed is a JavaScript function object.
*
* @param obj Object to test whether or not it is a function.
*/
isFunction(obj: any): boolean;
/**
* Determines whether its argument is a number.
*
* @param value The value to be tested.
*/
isNumeric(value: any): boolean;
/**
* Check to see if an object is a plain object (created using "{}" or "new Object").
*
* @param obj The object that will be checked to see if it's a plain object.
*/
isPlainObject(obj: any): boolean;
/**
* Determine whether the argument is a window.
*
* @param obj Object to test whether or not it is a window.
*/
isWindow(obj: any): boolean;
/**
* Check to see if a DOM node is within an XML document (or is an XML document).
*
* @param node The DOM node that will be checked to see if it's in an XML document.
*/
isXMLDoc(node: Node): boolean;
/**
* Convert an array-like object into a true JavaScript array.
*
* @param obj Any object to turn into a native Array.
*/
makeArray(obj: any): any[];
/**
* Translate all items in an array or object to new array of items.
*
* @param array The Array to translate.
* @param callback The function to process each item against. The first argument to the function is the array item, the second argument is the index in array The function can return any value. Within the function, this refers to the global (window) object.
*/
map<T, U>(array: T[], callback: (elementOfArray: T, indexInArray: number) => U): U[];
/**
* Translate all items in an array or object to new array of items.
*
* @param arrayOrObject The Array or Object to translate.
* @param callback The function to process each item against. The first argument to the function is the value; the second argument is the index or key of the array or object property. The function can return any value to add to the array. A returned array will be flattened into the resulting array. Within the function, this refers to the global (window) object.
*/
map(arrayOrObject: any, callback: (value: any, indexOrKey: any) => any): any;
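/*
* Illustrative use of $.map; returning null or undefined from the callback drops
* the item, and returned arrays are flattened into the result:
*
*     var doubled = $.map([1, 2, 3], function (value, index) { return value * 2; }); // [2, 4, 6]
*/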
/**
* Merge the contents of two arrays together into the first array.
*
* @param first The first array to merge, the elements of second added.
* @param second The second array to merge into the first, unaltered.
*/
merge<T>(first: T[], second: T[]): T[];
/**
* An empty function.
*/
noop(): any;
/**
* Return a number representing the current time.
*/
now(): number;
/**
* Takes a well-formed JSON string and returns the resulting JavaScript object.
*
* @param json The JSON string to parse.
*/
parseJSON(json: string): any;
/**
* Parses a string into an XML document.
*
* @param data a well-formed XML string to be parsed
*/
parseXML(data: string): XMLDocument;
/**
* Remove the whitespace from the beginning and end of a string.
*
* @param str The string to trim.
*/
trim(str: string): string;
/**
* Determine the internal JavaScript [[Class]] of an object.
*
* @param obj Object to get the internal JavaScript [[Class]] of.
*/
type(obj: any): string;
/**
* Sorts an array of DOM elements, in place, with the duplicates removed. Note that this only works on arrays of DOM elements, not strings or numbers.
*
* @param array The Array of DOM elements.
*/
unique(array: Element[]): Element[];
/**
* Parses a string into an array of DOM nodes.
*
* @param data HTML string to be parsed
* @param context DOM element to serve as the context in which the HTML fragment will be created
* @param keepScripts A Boolean indicating whether to include scripts passed in the HTML string
*/
parseHTML(data: string, context?: HTMLElement, keepScripts?: boolean): any[];
/**
* Parses a string into an array of DOM nodes.
*
* @param data HTML string to be parsed
* @param context DOM element to serve as the context in which the HTML fragment will be created
* @param keepScripts A Boolean indicating whether to include scripts passed in the HTML string
*/
parseHTML(data: string, context?: Document, keepScripts?: boolean): any[];
}
/**
* The jQuery instance members
*/
interface JQuery {
/**
* Register a handler to be called when Ajax requests complete. This is an AjaxEvent.
*
* @param handler The function to be invoked.
*/
ajaxComplete(handler: (event: JQueryEventObject, XMLHttpRequest: XMLHttpRequest, ajaxOptions: any) => any): JQuery;
/**
* Register a handler to be called when Ajax requests complete with an error. This is an Ajax Event.
*
* @param handler The function to be invoked.
*/
ajaxError(handler: (event: JQueryEventObject, jqXHR: JQueryXHR, ajaxSettings: JQueryAjaxSettings, thrownError: any) => any): JQuery;
/**
* Attach a function to be executed before an Ajax request is sent. This is an Ajax Event.
*
* @param handler The function to be invoked.
*/
ajaxSend(handler: (event: JQueryEventObject, jqXHR: JQueryXHR, ajaxOptions: JQueryAjaxSettings) => any): JQuery;
/**
* Register a handler to be called when the first Ajax request begins. This is an Ajax Event.
*
* @param handler The function to be invoked.
*/
ajaxStart(handler: () => any): JQuery;
/**
* Register a handler to be called when all Ajax requests have completed. This is an Ajax Event.
*
* @param handler The function to be invoked.
*/
ajaxStop(handler: () => any): JQuery;
/**
* Attach a function to be executed whenever an Ajax request completes successfully. This is an Ajax Event.
*
* @param handler The function to be invoked.
*/
ajaxSuccess(handler: (event: JQueryEventObject, XMLHttpRequest: XMLHttpRequest, ajaxOptions: JQueryAjaxSettings) => any): JQuery;
/**
* Load data from the server and place the returned HTML into the matched element.
*
* @param url A string containing the URL to which the request is sent.
* @param data A plain object or string that is sent to the server with the request.
* @param complete A callback function that is executed when the request completes.
*/
load(url: string, data?: string|Object, complete?: (responseText: string, textStatus: string, XMLHttpRequest: XMLHttpRequest) => any): JQuery;
/**
* Encode a set of form elements as a string for submission.
*/
serialize(): string;
/**
* Encode a set of form elements as an array of names and values.
*/
serializeArray(): JQuerySerializeArrayElement[];
/**
* Adds the specified class(es) to each of the set of matched elements.
*
* @param className One or more space-separated classes to be added to the class attribute of each matched element.
*/
addClass(className: string): JQuery;
/**
* Adds the specified class(es) to each of the set of matched elements.
*
* @param func A function returning one or more space-separated class names to be added to the existing class name(s). Receives the index position of the element in the set and the existing class name(s) as arguments. Within the function, this refers to the current element in the set.
*/
addClass(func: (index: number, className: string) => string): JQuery;
/**
* Add the previous set of elements on the stack to the current set, optionally filtered by a selector.
*/
addBack(selector?: string): JQuery;
/**
* Get the value of an attribute for the first element in the set of matched elements.
*
* @param attributeName The name of the attribute to get.
*/
attr(attributeName: string): string;
/**
* Set one or more attributes for the set of matched elements.
*
* @param attributeName The name of the attribute to set.
* @param value A value to set for the attribute.
*/
attr(attributeName: string, value: string|number): JQuery;
/**
* Set one or more attributes for the set of matched elements.
*
* @param attributeName The name of the attribute to set.
* @param func A function returning the value to set. this is the current element. Receives the index position of the element in the set and the old attribute value as arguments.
*/
attr(attributeName: string, func: (index: number, attr: string) => string|number): JQuery;
/**
* Set one or more attributes for the set of matched elements.
*
* @param attributes An object of attribute-value pairs to set.
*/
attr(attributes: Object): JQuery;
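/*
* Illustrative use of .attr() as a getter and a setter (selectors are
* hypothetical):
*
*     var href = $("a").first().attr("href");               // read from the first match
*     $("img.thumb").attr({ width: 80, alt: "thumbnail" }); // set several attributes at once
*/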
/**
* Determine whether any of the matched elements are assigned the given class.
*
* @param className The class name to search for.
*/
hasClass(className: string): boolean;
/**
* Get the HTML contents of the first element in the set of matched elements.
*/
html(): string;
/**
* Set the HTML contents of each element in the set of matched elements.
*
* @param htmlString A string of HTML to set as the content of each matched element.
*/
html(htmlString: string): JQuery;
/**
* Set the HTML contents of each element in the set of matched elements.
*
* @param func A function returning the HTML content to set. Receives the index position of the element in the set and the old HTML value as arguments. jQuery empties the element before calling the function; use the oldhtml argument to reference the previous content. Within the function, this refers to the current element in the set.
*/
html(func: (index: number, oldhtml: string) => string): JQuery;
/**
* Get the value of a property for the first element in the set of matched elements.
*
* @param propertyName The name of the property to get.
*/
prop(propertyName: string): any;
/**
* Set one or more properties for the set of matched elements.
*
* @param propertyName The name of the property to set.
* @param value A value to set for the property.
*/
prop(propertyName: string, value: string|number|boolean): JQuery;
/**
* Set one or more properties for the set of matched elements.
*
* @param properties An object of property-value pairs to set.
*/
prop(properties: Object): JQuery;
/**
* Set one or more properties for the set of matched elements.
*
* @param propertyName The name of the property to set.
* @param func A function returning the value to set. Receives the index position of the element in the set and the old property value as arguments. Within the function, the keyword this refers to the current element.
*/
prop(propertyName: string, func: (index: number, oldPropertyValue: any) => any): JQuery;
/**
* Remove an attribute from each element in the set of matched elements.
*
* @param attributeName An attribute to remove; as of version 1.7, it can be a space-separated list of attributes.
*/
removeAttr(attributeName: string): JQuery;
/**
* Remove a single class, multiple classes, or all classes from each element in the set of matched elements.
*
* @param className One or more space-separated classes to be removed from the class attribute of each matched element.
*/
removeClass(className?: string): JQuery;
/**
* Remove a single class, multiple classes, or all classes from each element in the set of matched elements.
*
* @param func A function returning one or more space-separated class names to be removed. Receives the index position of the element in the set and the old class value as arguments.
*/
removeClass(func: (index: number, className: string) => string): JQuery;
/**
* Remove a property for the set of matched elements.
*
* @param propertyName The name of the property to remove.
*/
removeProp(propertyName: string): JQuery;
/**
* Add or remove one or more classes from each element in the set of matched elements, depending on either the class's presence or the value of the switch argument.
*
* @param className One or more class names (separated by spaces) to be toggled for each element in the matched set.
* @param swtch A Boolean (not just truthy/falsy) value to determine whether the class should be added or removed.
*/
toggleClass(className: string, swtch?: boolean): JQuery;
/**
* Add or remove one or more classes from each element in the set of matched elements, depending on either the class's presence or the value of the switch argument.
*
* @param swtch A boolean value to determine whether the class should be added or removed.
*/
toggleClass(swtch?: boolean): JQuery;
/**
* Add or remove one or more classes from each element in the set of matched elements, depending on either the class's presence or the value of the switch argument.
*
* @param func A function that returns class names to be toggled in the class attribute of each element in the matched set. Receives the index position of the element in the set, the old class value, and the switch as arguments.
* @param swtch A boolean value to determine whether the class should be added or removed.
*/
toggleClass(func: (index: number, className: string, swtch: boolean) => string, swtch?: boolean): JQuery;
/**
* Get the current value of the first element in the set of matched elements.
*/
val(): any;
/**
* Set the value of each element in the set of matched elements.
*
* @param value A string of text or an array of strings corresponding to the value of each matched element to set as selected/checked.
*/
val(value: string|string[]): JQuery;
/**
* Set the value of each element in the set of matched elements.
*
* @param func A function returning the value to set. this is the current element. Receives the index position of the element in the set and the old value as arguments.
*/
val(func: (index: number, value: string) => string): JQuery;
/**
* Get the value of style properties for the first element in the set of matched elements.
*
* @param propertyName A CSS property.
*/
css(propertyName: string): string;
/**
* Set one or more CSS properties for the set of matched elements.
*
* @param propertyName A CSS property name.
* @param value A value to set for the property.
*/
css(propertyName: string, value: string|number): JQuery;
/**
* Set one or more CSS properties for the set of matched elements.
*
* @param propertyName A CSS property name.
* @param value A function returning the value to set. this is the current element. Receives the index position of the element in the set and the old value as arguments.
*/
css(propertyName: string, value: (index: number, value: string) => string|number): JQuery;
/**
* Set one or more CSS properties for the set of matched elements.
*
* @param properties An object of property-value pairs to set.
*/
css(properties: Object): JQuery;
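/*
* Illustrative use of .css() to read a computed style and to set several
* properties at once (the selector is hypothetical):
*
*     var color = $("#panel").css("background-color");
*     $("#panel").css({ opacity: 0.5, "margin-top": "10px" });
*/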
/**
* Get the current computed height for the first element in the set of matched elements.
*/
height(): number;
/**
* Set the CSS height of every matched element.
*
* @param value An integer representing the number of pixels, or an integer with an optional unit of measure appended (as a string).
*/
height(value: number|string): JQuery;
/**
* Set the CSS height of every matched element.
*
* @param func A function returning the height to set. Receives the index position of the element in the set and the old height as arguments. Within the function, this refers to the current element in the set.
*/
height(func: (index: number, height: number) => number|string): JQuery;
/**
* Get the current computed height for the first element in the set of matched elements, including padding but not border.
*/
innerHeight(): number;
/**
* Sets the inner height on elements in the set of matched elements, including padding but not border.
*
* @param height An integer representing the number of pixels, or an integer along with an optional unit of measure appended (as a string).
*/
innerHeight(height: number|string): JQuery;
/**
* Get the current computed width for the first element in the set of matched elements, including padding but not border.
*/
innerWidth(): number;
/**
* Sets the inner width on elements in the set of matched elements, including padding but not border.
*
* @param width An integer representing the number of pixels, or an integer along with an optional unit of measure appended (as a string).
*/
innerWidth(width: number|string): JQuery;
/**
* Get the current coordinates of the first element in the set of matched elements, relative to the document.
*/
offset(): JQueryCoordinates;
/**
* Set the current coordinates of every element in the set of matched elements, relative to the document.
*
* @param coordinates An object containing the properties top and left, which are integers indicating the new top and left coordinates for the elements.
*/
offset(coordinates: JQueryCoordinates): JQuery;
/**
* Set the current coordinates of every element in the set of matched elements, relative to the document.
*
* @param func A function to return the coordinates to set. Receives the index of the element in the collection as the first argument and the current coordinates as the second argument. The function should return an object with the new top and left properties.
*/
offset(func: (index: number, coords: JQueryCoordinates) => JQueryCoordinates): JQuery;
/**
* Get the current computed height for the first element in the set of matched elements, including padding, border, and optionally margin. Returns an integer (without "px") representation of the value or null if called on an empty set of elements.
*
* @param includeMargin A Boolean indicating whether to include the element's margin in the calculation.
*/
outerHeight(includeMargin?: boolean): number;
/**
* Sets the outer height on elements in the set of matched elements, including padding and border.
*
* @param height An integer representing the number of pixels, or an integer along with an optional unit of measure appended (as a string).
*/
outerHeight(height: number|string): JQuery;
/**
* Get the current computed width for the first element in the set of matched elements, including padding and border.
*
* @param includeMargin A Boolean indicating whether to include the element's margin in the calculation.
*/
outerWidth(includeMargin?: boolean): number;
/**
* Sets the outer width on elements in the set of matched elements, including padding and border.
*
* @param width An integer representing the number of pixels, or an integer along with an optional unit of measure appended (as a string).
*/
outerWidth(width: number|string): JQuery;
/**
* Get the current coordinates of the first element in the set of matched elements, relative to the offset parent.
*/
position(): JQueryCoordinates;
/**
* Get the current horizontal position of the scroll bar for the first element in the set of matched elements or set the horizontal position of the scroll bar for every matched element.
*/
scrollLeft(): number;
/**
* Set the current horizontal position of the scroll bar for each of the set of matched elements.
*
* @param value An integer indicating the new position to set the scroll bar to.
*/
scrollLeft(value: number): JQuery;
/**
* Get the current vertical position of the scroll bar for the first element in the set of matched elements or set the vertical position of the scroll bar for every matched element.
*/
scrollTop(): number;
/**
* Set the current vertical position of the scroll bar for each of the set of matched elements.
*
* @param value An integer indicating the new position to set the scroll bar to.
*/
scrollTop(value: number): JQuery;
/**
* Get the current computed width for the first element in the set of matched elements.
*/
width(): number;
/**
* Set the CSS width of each element in the set of matched elements.
*
* @param value An integer representing the number of pixels, or an integer along with an optional unit of measure appended (as a string).
*/
width(value: number|string): JQuery;
/**
* Set the CSS width of each element in the set of matched elements.
*
* @param func A function returning the width to set. Receives the index position of the element in the set and the old width as arguments. Within the function, this refers to the current element in the set.
*/
width(func: (index: number, width: number) => number|string): JQuery;
/**
* Remove from the queue all items that have not yet been run.
*
* @param queueName A string containing the name of the queue. Defaults to fx, the standard effects queue.
*/
clearQueue(queueName?: string): JQuery;
/**
* Store arbitrary data associated with the matched elements.
*
* @param key A string naming the piece of data to set.
* @param value The new data value; it can be any JavaScript type including Array or Object.
*/
data(key: string, value: any): JQuery;
/**
* Store arbitrary data associated with the matched elements.
*
* @param obj An object of key-value pairs of data to update.
*/
data(obj: { [key: string]: any; }): JQuery;
/**
* Return the value at the named data store for the first element in the jQuery collection, as set by data(name, value) or by an HTML5 data-* attribute.
*
* @param key Name of the data stored.
*/
data(key: string): any;
/**
* Return the value at the named data store for the first element in the jQuery collection, as set by data(name, value) or by an HTML5 data-* attribute.
*/
data(): any;
/**
* Execute the next function on the queue for the matched elements.
*
* @param queueName A string containing the name of the queue. Defaults to fx, the standard effects queue.
*/
dequeue(queueName?: string): JQuery;
/**
* Remove a previously-stored piece of data.
*
* @param name A string naming the piece of data to delete or space-separated string naming the pieces of data to delete.
*/
removeData(name: string): JQuery;
/**
* Remove a previously-stored piece of data.
*
* @param list An array of strings naming the pieces of data to delete.
*/
removeData(list: string[]): JQuery;
/**
* Return a Promise object to observe when all actions of a certain type bound to the collection, queued or not, have finished.
*
* @param type The type of queue that needs to be observed. (default: fx)
* @param target Object onto which the promise methods have to be attached
*/
promise(type?: string, target?: Object): JQueryPromise<any>;
/**
* Perform a custom animation of a set of CSS properties.
*
* @param properties An object of CSS properties and values that the animation will move toward.
* @param duration A string or number determining how long the animation will run.
* @param complete A function to call once the animation is complete.
*/
animate(properties: Object, duration?: string|number, complete?: Function): JQuery;
/**
* Perform a custom animation of a set of CSS properties.
*
* @param properties An object of CSS properties and values that the animation will move toward.
* @param duration A string or number determining how long the animation will run.
* @param easing A string indicating which easing function to use for the transition. (default: swing)
* @param complete A function to call once the animation is complete.
*/
animate(properties: Object, duration?: string|number, easing?: string, complete?: Function): JQuery;
/**
* Perform a custom animation of a set of CSS properties.
*
* @param properties An object of CSS properties and values that the animation will move toward.
* @param options A map of additional options to pass to the method.
*/
animate(properties: Object, options: JQueryAnimationOptions): JQuery;
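/*
* Illustrative use of .animate() with an options map (the selector is
* hypothetical); "+=50" animates relative to the current value:
*
*     $("#panel").animate({ opacity: 0.25, left: "+=50" }, {
*         duration: 400,
*         easing: "swing",
*         complete: function () { console.log("animation finished"); }
*     });
*/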
/**
* Set a timer to delay execution of subsequent items in the queue.
*
* @param duration An integer indicating the number of milliseconds to delay execution of the next item in the queue.
* @param queueName A string containing the name of the queue. Defaults to fx, the standard effects queue.
*/
delay(duration: number, queueName?: string): JQuery;
/**
* Display the matched elements by fading them to opaque.
*
* @param duration A string or number determining how long the animation will run.
* @param complete A function to call once the animation is complete.
*/
fadeIn(duration?: number|string, complete?: Function): JQuery;
/**
* Display the matched elements by fading them to opaque.
*
* @param duration A string or number determining how long the animation will run.
* @param easing A string indicating which easing function to use for the transition.
* @param complete A function to call once the animation is complete.
*/
fadeIn(duration?: number|string, easing?: string, complete?: Function): JQuery;
/**
* Display the matched elements by fading them to opaque.
*
* @param options A map of additional options to pass to the method.
*/
fadeIn(options: JQueryAnimationOptions): JQuery;
/**
* Hide the matched elements by fading them to transparent.
*
* @param duration A string or number determining how long the animation will run.
* @param complete A function to call once the animation is complete.
*/
fadeOut(duration?: number|string, complete?: Function): JQuery;
/**
* Hide the matched elements by fading them to transparent.
*
* @param duration A string or number determining how long the animation will run.
* @param easing A string indicating which easing function to use for the transition.
* @param complete A function to call once the animation is complete.
*/
fadeOut(duration?: number|string, easing?: string, complete?: Function): JQuery;
/**
* Hide the matched elements by fading them to transparent.
*
* @param options A map of additional options to pass to the method.
*/
fadeOut(options: JQueryAnimationOptions): JQuery;
/**
* Adjust the opacity of the matched elements.
*
* @param duration A string or number determining how long the animation will run.
* @param opacity A number between 0 and 1 denoting the target opacity.
* @param complete A function to call once the animation is complete.
*/
fadeTo(duration: string|number, opacity: number, complete?: Function): JQuery;
/**
* Adjust the opacity of the matched elements.
*
* @param duration A string or number determining how long the animation will run.
* @param opacity A number between 0 and 1 denoting the target opacity.
* @param easing A string indicating which easing function to use for the transition.
* @param complete A function to call once the animation is complete.
*/
fadeTo(duration: string|number, opacity: number, easing?: string, complete?: Function): JQuery;
/**
* Display or hide the matched elements by animating their opacity.
*
* @param duration A string or number determining how long the animation will run.
* @param complete A function to call once the animation is complete.
*/
fadeToggle(duration?: number|string, complete?: Function): JQuery;
/**
* Display or hide the matched elements by animating their opacity.
*
* @param duration A string or number determining how long the animation will run.
* @param easing A string indicating which easing function to use for the transition.
* @param complete A function to call once the animation is complete.
*/
fadeToggle(duration?: number|string, easing?: string, complete?: Function): JQuery;
/**
* Display or hide the matched elements by animating their opacity.
*
* @param options A map of additional options to pass to the method.
*/
fadeToggle(options: JQueryAnimationOptions): JQuery;
/**
* Stop the currently-running animation, remove all queued animations, and complete all animations for the matched elements.
*
* @param queue The name of the queue in which to stop animations.
*/
finish(queue?: string): JQuery;
/**
* Hide the matched elements.
*
* @param duration A string or number determining how long the animation will run.
* @param complete A function to call once the animation is complete.
*/
hide(duration?: number|string, complete?: Function): JQuery;
/**
* Hide the matched elements.
*
* @param duration A string or number determining how long the animation will run.
* @param easing A string indicating which easing function to use for the transition.
* @param complete A function to call once the animation is complete.
*/
hide(duration?: number|string, easing?: string, complete?: Function): JQuery;
/**
* Hide the matched elements.
*
* @param options A map of additional options to pass to the method.
*/
hide(options: JQueryAnimationOptions): JQuery;
/**
* Display the matched elements.
*
* @param duration A string or number determining how long the animation will run.
* @param complete A function to call once the animation is complete.
*/
show(duration?: number|string, complete?: Function): JQuery;
/**
* Display the matched elements.
*
* @param duration A string or number determining how long the animation will run.
* @param easing A string indicating which easing function to use for the transition.
* @param complete A function to call once the animation is complete.
*/
show(duration?: number|string, easing?: string, complete?: Function): JQuery;
/**
* Display the matched elements.
*
* @param options A map of additional options to pass to the method.
*/
show(options: JQueryAnimationOptions): JQuery;
/**
* Display the matched elements with a sliding motion.
*
* @param duration A string or number determining how long the animation will run.
* @param complete A function to call once the animation is complete.
*/
slideDown(duration?: number|string, complete?: Function): JQuery;
/**
* Display the matched elements with a sliding motion.
*
* @param duration A string or number determining how long the animation will run.
* @param easing A string indicating which easing function to use for the transition.
* @param complete A function to call once the animation is complete.
*/
slideDown(duration?: number|string, easing?: string, complete?: Function): JQuery;
/**
* Display the matched elements with a sliding motion.
*
* @param options A map of additional options to pass to the method.
*/
slideDown(options: JQueryAnimationOptions): JQuery;
/**
* Display or hide the matched elements with a sliding motion.
*
* @param duration A string or number determining how long the animation will run.
* @param complete A function to call once the animation is complete.
*/
slideToggle(duration?: number|string, complete?: Function): JQuery;
/**
* Display or hide the matched elements with a sliding motion.
*
* @param duration A string or number determining how long the animation will run.
* @param easing A string indicating which easing function to use for the transition.
* @param complete A function to call once the animation is complete.
*/
slideToggle(duration?: number|string, easing?: string, complete?: Function): JQuery;
/**
* Display or hide the matched elements with a sliding motion.
*
* @param options A map of additional options to pass to the method.
*/
slideToggle(options: JQueryAnimationOptions): JQuery;
/**
* Hide the matched elements with a sliding motion.
*
* @param duration A string or number determining how long the animation will run.
* @param complete A function to call once the animation is complete.
*/
slideUp(duration?: number|string, complete?: Function): JQuery;
/**
* Hide the matched elements with a sliding motion.
*
* @param duration A string or number determining how long the animation will run.
* @param easing A string indicating which easing function to use for the transition.
* @param complete A function to call once the animation is complete.
*/
slideUp(duration?: number|string, easing?: string, complete?: Function): JQuery;
/**
* Hide the matched elements with a sliding motion.
*
* @param options A map of additional options to pass to the method.
*/
slideUp(options: JQueryAnimationOptions): JQuery;
/**
* Stop the currently-running animation on the matched elements.
*
* @param clearQueue A Boolean indicating whether to remove queued animation as well. Defaults to false.
* @param jumpToEnd A Boolean indicating whether to complete the current animation immediately. Defaults to false.
*/
stop(clearQueue?: boolean, jumpToEnd?: boolean): JQuery;
/**
* Stop the currently-running animation on the matched elements.
*
* @param queue The name of the queue in which to stop animations.
* @param clearQueue A Boolean indicating whether to remove queued animation as well. Defaults to false.
* @param jumpToEnd A Boolean indicating whether to complete the current animation immediately. Defaults to false.
*/
stop(queue?: string, clearQueue?: boolean, jumpToEnd?: boolean): JQuery;
/**
* Display or hide the matched elements.
*
* @param duration A string or number determining how long the animation will run.
* @param complete A function to call once the animation is complete.
*/
toggle(duration?: number|string, complete?: Function): JQuery;
/**
* Display or hide the matched elements.
*
* @param duration A string or number determining how long the animation will run.
* @param easing A string indicating which easing function to use for the transition.
* @param complete A function to call once the animation is complete.
*/
toggle(duration?: number|string, easing?: string, complete?: Function): JQuery;
/**
* Display or hide the matched elements.
*
* @param options A map of additional options to pass to the method.
*/
toggle(options: JQueryAnimationOptions): JQuery;
/**
* Display or hide the matched elements.
*
* @param showOrHide A Boolean indicating whether to show or hide the elements.
*/
toggle(showOrHide: boolean): JQuery;
/**
* Attach a handler to an event for the elements.
*
* @param eventType A string containing one or more DOM event types, such as "click" or "submit," or custom event names.
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute each time the event is triggered.
*/
bind(eventType: string, eventData: any, handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Attach a handler to an event for the elements.
*
* @param eventType A string containing one or more DOM event types, such as "click" or "submit," or custom event names.
* @param handler A function to execute each time the event is triggered.
*/
bind(eventType: string, handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Attach a handler to an event for the elements.
*
* @param eventType A string containing one or more DOM event types, such as "click" or "submit," or custom event names.
* @param eventData An object containing data that will be passed to the event handler.
* @param preventBubble Setting the third argument to false will attach a function that prevents the default action from occurring and stops the event from bubbling. The default is true.
*/
bind(eventType: string, eventData: any, preventBubble: boolean): JQuery;
/**
* Attach a handler to an event for the elements.
*
* @param eventType A string containing one or more DOM event types, such as "click" or "submit," or custom event names.
* @param preventBubble Setting this argument to false will attach a function that prevents the default action from occurring and stops the event from bubbling. The default is true.
*/
bind(eventType: string, preventBubble: boolean): JQuery;
/**
* Attach a handler to an event for the elements.
*
* @param events An object containing one or more DOM event types and functions to execute for them.
*/
bind(events: any): JQuery;
/**
* Trigger the "blur" event on an element
*/
blur(): JQuery;
/**
* Bind an event handler to the "blur" JavaScript event
*
* @param handler A function to execute each time the event is triggered.
*/
blur(handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Bind an event handler to the "blur" JavaScript event
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute each time the event is triggered.
*/
blur(eventData?: any, handler?: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Trigger the "change" event on an element.
*/
change(): JQuery;
/**
* Bind an event handler to the "change" JavaScript event
*
* @param handler A function to execute each time the event is triggered.
*/
change(handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Bind an event handler to the "change" JavaScript event
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute each time the event is triggered.
*/
change(eventData?: any, handler?: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Trigger the "click" event on an element.
*/
click(): JQuery;
/**
* Bind an event handler to the "click" JavaScript event
*
* @param handler A function to execute each time the event is triggered.
*/
click(handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Bind an event handler to the "click" JavaScript event
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute each time the event is triggered.
*/
click(eventData?: any, handler?: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Trigger the "dblclick" event on an element.
*/
dblclick(): JQuery;
/**
* Bind an event handler to the "dblclick" JavaScript event
*
* @param handler A function to execute each time the event is triggered.
*/
dblclick(handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Bind an event handler to the "dblclick" JavaScript event
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute each time the event is triggered.
*/
dblclick(eventData?: any, handler?: (eventObject: JQueryEventObject) => any): JQuery;
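/**
* Attach a handler to one or more events for all elements that match the selector, now or in the future, based on a specific set of root elements. As of jQuery 1.7, .on() is preferred.
*
* @param selector A selector to filter the elements that trigger the event.
* @param eventType A string containing one or more space-separated JavaScript event types, such as "click" or "keydown," or custom event names.
* @param handler A function to execute at the time the event is triggered.
*/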
delegate(selector: any, eventType: string, handler: (eventObject: JQueryEventObject) => any): JQuery;
delegate(selector: any, eventType: string, eventData: any, handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Trigger the "focus" event on an element.
*/
focus(): JQuery;
/**
* Bind an event handler to the "focus" JavaScript event
*
* @param handler A function to execute each time the event is triggered.
*/
focus(handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Bind an event handler to the "focus" JavaScript event
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute each time the event is triggered.
*/
focus(eventData?: any, handler?: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Bind an event handler to the "focusin" JavaScript event
*
* @param handler A function to execute each time the event is triggered.
*/
focusin(handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Bind an event handler to the "focusin" JavaScript event
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute each time the event is triggered.
*/
focusin(eventData: Object, handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Bind an event handler to the "focusout" JavaScript event
*
* @param handler A function to execute each time the event is triggered.
*/
focusout(handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Bind an event handler to the "focusout" JavaScript event
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute each time the event is triggered.
*/
focusout(eventData: Object, handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Bind two handlers to the matched elements, to be executed when the mouse pointer enters and leaves the elements.
*
* @param handlerIn A function to execute when the mouse pointer enters the element.
* @param handlerOut A function to execute when the mouse pointer leaves the element.
*/
hover(handlerIn: (eventObject: JQueryEventObject) => any, handlerOut: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Bind a single handler to the matched elements, to be executed when the mouse pointer enters or leaves the elements.
*
* @param handlerInOut A function to execute when the mouse pointer enters or leaves the element.
*/
hover(handlerInOut: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Trigger the "keydown" event on an element.
*/
keydown(): JQuery;
/**
* Bind an event handler to the "keydown" JavaScript event
*
* @param handler A function to execute each time the event is triggered.
*/
keydown(handler: (eventObject: JQueryKeyEventObject) => any): JQuery;
/**
* Bind an event handler to the "keydown" JavaScript event
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute each time the event is triggered.
*/
keydown(eventData?: any, handler?: (eventObject: JQueryKeyEventObject) => any): JQuery;
/**
* Trigger the "keypress" event on an element.
*/
keypress(): JQuery;
/**
* Bind an event handler to the "keypress" JavaScript event
*
* @param handler A function to execute each time the event is triggered.
*/
keypress(handler: (eventObject: JQueryKeyEventObject) => any): JQuery;
/**
* Bind an event handler to the "keypress" JavaScript event
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute each time the event is triggered.
*/
keypress(eventData?: any, handler?: (eventObject: JQueryKeyEventObject) => any): JQuery;
/**
* Trigger the "keyup" event on an element.
*/
keyup(): JQuery;
/**
* Bind an event handler to the "keyup" JavaScript event
*
* @param handler A function to execute each time the event is triggered.
*/
keyup(handler: (eventObject: JQueryKeyEventObject) => any): JQuery;
/**
* Bind an event handler to the "keyup" JavaScript event
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute each time the event is triggered.
*/
keyup(eventData?: any, handler?: (eventObject: JQueryKeyEventObject) => any): JQuery;
/**
* Bind an event handler to the "load" JavaScript event.
*
* @param handler A function to execute when the event is triggered.
*/
load(handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Bind an event handler to the "load" JavaScript event.
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute when the event is triggered.
*/
load(eventData?: any, handler?: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Trigger the "mousedown" event on an element.
*/
mousedown(): JQuery;
/**
* Bind an event handler to the "mousedown" JavaScript event.
*
* @param handler A function to execute when the event is triggered.
*/
mousedown(handler: (eventObject: JQueryMouseEventObject) => any): JQuery;
/**
* Bind an event handler to the "mousedown" JavaScript event.
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute when the event is triggered.
*/
mousedown(eventData: Object, handler: (eventObject: JQueryMouseEventObject) => any): JQuery;
/**
* Trigger the "mouseenter" event on an element.
*/
mouseenter(): JQuery;
/**
* Bind an event handler to be fired when the mouse enters an element.
*
* @param handler A function to execute when the event is triggered.
*/
mouseenter(handler: (eventObject: JQueryMouseEventObject) => any): JQuery;
/**
* Bind an event handler to be fired when the mouse enters an element.
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute when the event is triggered.
*/
mouseenter(eventData: Object, handler: (eventObject: JQueryMouseEventObject) => any): JQuery;
/**
* Trigger the "mouseleave" event on an element.
*/
mouseleave(): JQuery;
/**
* Bind an event handler to be fired when the mouse leaves an element.
*
* @param handler A function to execute when the event is triggered.
*/
mouseleave(handler: (eventObject: JQueryMouseEventObject) => any): JQuery;
/**
* Bind an event handler to be fired when the mouse leaves an element.
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute when the event is triggered.
*/
mouseleave(eventData: Object, handler: (eventObject: JQueryMouseEventObject) => any): JQuery;
/**
* Trigger the "mousemove" event on an element.
*/
mousemove(): JQuery;
/**
* Bind an event handler to the "mousemove" JavaScript event.
*
* @param handler A function to execute when the event is triggered.
*/
mousemove(handler: (eventObject: JQueryMouseEventObject) => any): JQuery;
/**
* Bind an event handler to the "mousemove" JavaScript event.
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute when the event is triggered.
*/
mousemove(eventData: Object, handler: (eventObject: JQueryMouseEventObject) => any): JQuery;
/**
* Trigger the "mouseout" event on an element.
*/
mouseout(): JQuery;
/**
* Bind an event handler to the "mouseout" JavaScript event.
*
* @param handler A function to execute when the event is triggered.
*/
mouseout(handler: (eventObject: JQueryMouseEventObject) => any): JQuery;
/**
* Bind an event handler to the "mouseout" JavaScript event.
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute when the event is triggered.
*/
mouseout(eventData: Object, handler: (eventObject: JQueryMouseEventObject) => any): JQuery;
/**
* Trigger the "mouseover" event on an element.
*/
mouseover(): JQuery;
/**
* Bind an event handler to the "mouseover" JavaScript event.
*
* @param handler A function to execute when the event is triggered.
*/
mouseover(handler: (eventObject: JQueryMouseEventObject) => any): JQuery;
/**
* Bind an event handler to the "mouseover" JavaScript event.
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute when the event is triggered.
*/
mouseover(eventData: Object, handler: (eventObject: JQueryMouseEventObject) => any): JQuery;
/**
* Trigger the "mouseup" event on an element.
*/
mouseup(): JQuery;
/**
* Bind an event handler to the "mouseup" JavaScript event.
*
* @param handler A function to execute when the event is triggered.
*/
mouseup(handler: (eventObject: JQueryMouseEventObject) => any): JQuery;
/**
* Bind an event handler to the "mouseup" JavaScript event.
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute when the event is triggered.
*/
mouseup(eventData: Object, handler: (eventObject: JQueryMouseEventObject) => any): JQuery;
/**
* Remove an event handler.
*/
off(): JQuery;
/**
* Remove an event handler.
*
* @param events One or more space-separated event types and optional namespaces, or just namespaces, such as "click", "keydown.myPlugin", or ".myPlugin".
* @param selector A selector which should match the one originally passed to .on() when attaching event handlers.
* @param handler A handler function previously attached for the event(s), or the special value false.
*/
off(events: string, selector?: string, handler?: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Remove an event handler.
*
* @param events One or more space-separated event types and optional namespaces, or just namespaces, such as "click", "keydown.myPlugin", or ".myPlugin".
* @param handler A handler function previously attached for the event(s), or the special value false.
*/
off(events: string, handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Remove an event handler.
*
* @param events An object where the string keys represent one or more space-separated event types and optional namespaces, and the values represent handler functions previously attached for the event(s).
* @param selector A selector which should match the one originally passed to .on() when attaching event handlers.
*/
off(events: { [key: string]: any; }, selector?: string): JQuery;
/**
* Attach an event handler function for one or more events to the selected elements.
*
* @param events One or more space-separated event types and optional namespaces, such as "click" or "keydown.myPlugin".
* @param handler A function to execute when the event is triggered. The value false is also allowed as a shorthand for a function that simply does return false. Rest parameter args is for optional parameters passed to jQuery.trigger(). Note that the actual parameters on the event handler function must be marked as optional (? syntax).
*/
on(events: string, handler: (eventObject: JQueryEventObject, ...args: any[]) => any): JQuery;
/**
* Attach an event handler function for one or more events to the selected elements.
*
* @param events One or more space-separated event types and optional namespaces, such as "click" or "keydown.myPlugin".
* @param data Data to be passed to the handler in event.data when an event is triggered.
* @param handler A function to execute when the event is triggered. The value false is also allowed as a shorthand for a function that simply does return false.
*/
on(events: string, data: any, handler: (eventObject: JQueryEventObject, ...args: any[]) => any): JQuery;
/**
* Attach an event handler function for one or more events to the selected elements.
*
* @param events One or more space-separated event types and optional namespaces, such as "click" or "keydown.myPlugin".
* @param selector A selector string to filter the descendants of the selected elements that trigger the event. If the selector is null or omitted, the event is always triggered when it reaches the selected element.
* @param handler A function to execute when the event is triggered. The value false is also allowed as a shorthand for a function that simply does return false.
*/
on(events: string, selector: string, handler: (eventObject: JQueryEventObject, ...eventData: any[]) => any): JQuery;
/**
* Attach an event handler function for one or more events to the selected elements.
*
* @param events One or more space-separated event types and optional namespaces, such as "click" or "keydown.myPlugin".
* @param selector A selector string to filter the descendants of the selected elements that trigger the event. If the selector is null or omitted, the event is always triggered when it reaches the selected element.
* @param data Data to be passed to the handler in event.data when an event is triggered.
* @param handler A function to execute when the event is triggered. The value false is also allowed as a shorthand for a function that simply does return false.
*/
on(events: string, selector: string, data: any, handler: (eventObject: JQueryEventObject, ...eventData: any[]) => any): JQuery;
/**
* Attach an event handler function for one or more events to the selected elements.
*
* @param events An object in which the string keys represent one or more space-separated event types and optional namespaces, and the values represent a handler function to be called for the event(s).
* @param selector A selector string to filter the descendants of the selected elements that will call the handler. If the selector is null or omitted, the handler is always called when it reaches the selected element.
* @param data Data to be passed to the handler in event.data when an event occurs.
*/
on(events: { [key: string]: any; }, selector?: string, data?: any): JQuery;
/**
* Attach an event handler function for one or more events to the selected elements.
*
* @param events An object in which the string keys represent one or more space-separated event types and optional namespaces, and the values represent a handler function to be called for the event(s).
* @param data Data to be passed to the handler in event.data when an event occurs.
*/
on(events: { [key: string]: any; }, data?: any): JQuery;
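    /**
     * Illustrative usage sketch (editorial addition, not part of the original declaration file);
     * the selectors, event data, and namespace below are hypothetical examples:
     *
     *     // Direct binding, passing event data through to the handler:
     *     $('#save').on('click', { draft: true }, function (e) { console.log(e.data.draft); });
     *
     *     // Delegated binding: one handler on the list also covers <li> elements added later:
     *     $('#list').on('click', 'li', function () { $(this).toggleClass('done'); });
     *
     *     // Remove only the handlers bound under a namespace:
     *     $('#list').off('click.myPlugin');
     */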
/**
* Attach a handler to an event for the elements. The handler is executed at most once per element per event type.
*
* @param events A string containing one or more JavaScript event types, such as "click" or "submit," or custom event names.
* @param handler A function to execute at the time the event is triggered.
*/
one(events: string, handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Attach a handler to an event for the elements. The handler is executed at most once per element per event type.
*
* @param events A string containing one or more JavaScript event types, such as "click" or "submit," or custom event names.
* @param data An object containing data that will be passed to the event handler.
* @param handler A function to execute at the time the event is triggered.
*/
one(events: string, data: Object, handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Attach a handler to an event for the elements. The handler is executed at most once per element per event type.
*
* @param events One or more space-separated event types and optional namespaces, such as "click" or "keydown.myPlugin".
* @param selector A selector string to filter the descendants of the selected elements that trigger the event. If the selector is null or omitted, the event is always triggered when it reaches the selected element.
* @param handler A function to execute when the event is triggered. The value false is also allowed as a shorthand for a function that simply does return false.
*/
one(events: string, selector: string, handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Attach a handler to an event for the elements. The handler is executed at most once per element per event type.
*
* @param events One or more space-separated event types and optional namespaces, such as "click" or "keydown.myPlugin".
* @param selector A selector string to filter the descendants of the selected elements that trigger the event. If the selector is null or omitted, the event is always triggered when it reaches the selected element.
* @param data Data to be passed to the handler in event.data when an event is triggered.
* @param handler A function to execute when the event is triggered. The value false is also allowed as a shorthand for a function that simply does return false.
*/
one(events: string, selector: string, data: any, handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Attach a handler to an event for the elements. The handler is executed at most once per element per event type.
*
* @param events An object in which the string keys represent one or more space-separated event types and optional namespaces, and the values represent a handler function to be called for the event(s).
* @param selector A selector string to filter the descendants of the selected elements that will call the handler. If the selector is null or omitted, the handler is always called when it reaches the selected element.
* @param data Data to be passed to the handler in event.data when an event occurs.
*/
one(events: { [key: string]: any; }, selector?: string, data?: any): JQuery;
/**
* Attach a handler to an event for the elements. The handler is executed at most once per element per event type.
*
* @param events An object in which the string keys represent one or more space-separated event types and optional namespaces, and the values represent a handler function to be called for the event(s).
* @param data Data to be passed to the handler in event.data when an event occurs.
*/
one(events: { [key: string]: any; }, data?: any): JQuery;
/**
* Specify a function to execute when the DOM is fully loaded.
*
* @param handler A function to execute after the DOM is ready.
*/
ready(handler: Function): JQuery;
/**
* Trigger the "resize" event on an element.
*/
resize(): JQuery;
/**
* Bind an event handler to the "resize" JavaScript event.
*
* @param handler A function to execute each time the event is triggered.
*/
resize(handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Bind an event handler to the "resize" JavaScript event.
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute each time the event is triggered.
*/
resize(eventData: Object, handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Trigger the "scroll" event on an element.
*/
scroll(): JQuery;
/**
* Bind an event handler to the "scroll" JavaScript event.
*
* @param handler A function to execute each time the event is triggered.
*/
scroll(handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Bind an event handler to the "scroll" JavaScript event.
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute each time the event is triggered.
*/
scroll(eventData: Object, handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Trigger the "select" event on an element.
*/
select(): JQuery;
/**
* Bind an event handler to the "select" JavaScript event.
*
* @param handler A function to execute each time the event is triggered.
*/
select(handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Bind an event handler to the "select" JavaScript event.
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute each time the event is triggered.
*/
select(eventData: Object, handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Trigger the "submit" event on an element.
*/
submit(): JQuery;
/**
* Bind an event handler to the "submit" JavaScript event
*
* @param handler A function to execute each time the event is triggered.
*/
submit(handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Bind an event handler to the "submit" JavaScript event
*
* @param eventData An object containing data that will be passed to the event handler.
* @param handler A function to execute each time the event is triggered.
*/
submit(eventData?: any, handler?: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Execute all handlers and behaviors attached to the matched elements for the given event type.
*
* @param eventType A string containing a JavaScript event type, such as click or submit.
* @param extraParameters Additional parameters to pass along to the event handler.
*/
trigger(eventType: string, extraParameters?: any[]|Object): JQuery;
/**
* Execute all handlers and behaviors attached to the matched elements for the given event type.
*
* @param event A jQuery.Event object.
* @param extraParameters Additional parameters to pass along to the event handler.
*/
trigger(event: JQueryEventObject, extraParameters?: any[]|Object): JQuery;
/**
* Execute all handlers attached to an element for an event.
*
* @param eventType A string containing a JavaScript event type, such as click or submit.
* @param extraParameters An array of additional parameters to pass along to the event handler.
*/
triggerHandler(eventType: string, ...extraParameters: any[]): Object;
/**
* Remove a previously-attached event handler from the elements.
*
* @param eventType A string containing a JavaScript event type, such as click or submit.
* @param handler The function that is to be no longer executed.
*/
unbind(eventType?: string, handler?: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Remove a previously-attached event handler from the elements.
*
* @param eventType A string containing a JavaScript event type, such as click or submit.
* @param fls Unbinds the corresponding 'return false' function that was bound using .bind( eventType, false ).
*/
unbind(eventType: string, fls: boolean): JQuery;
/**
* Remove a previously-attached event handler from the elements.
*
* @param evt A JavaScript event object as passed to an event handler.
*/
unbind(evt: any): JQuery;
/**
* Remove a handler from the event for all elements which match the current selector, based upon a specific set of root elements.
*/
undelegate(): JQuery;
/**
* Remove a handler from the event for all elements which match the current selector, based upon a specific set of root elements.
*
* @param selector A selector which will be used to filter the event results.
* @param eventType A string containing a JavaScript event type, such as "click" or "keydown"
* @param handler A function to execute at the time the event is triggered.
*/
undelegate(selector: string, eventType: string, handler?: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Remove a handler from the event for all elements which match the current selector, based upon a specific set of root elements.
*
* @param selector A selector which will be used to filter the event results.
* @param events An object of one or more event types and previously bound functions to unbind from them.
*/
undelegate(selector: string, events: Object): JQuery;
/**
* Remove a handler from the event for all elements which match the current selector, based upon a specific set of root elements.
*
* @param namespace A string containing a namespace to unbind all events from.
*/
undelegate(namespace: string): JQuery;
/**
* Bind an event handler to the "unload" JavaScript event. (DEPRECATED from v1.8)
*
* @param handler A function to execute when the event is triggered.
*/
unload(handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Bind an event handler to the "unload" JavaScript event. (DEPRECATED from v1.8)
*
* @param eventData A plain object of data that will be passed to the event handler.
* @param handler A function to execute when the event is triggered.
*/
unload(eventData?: any, handler?: (eventObject: JQueryEventObject) => any): JQuery;
/**
* The DOM node context originally passed to jQuery(); if none was passed then context will likely be the document. (DEPRECATED from v1.10)
*/
context: Element;
jquery: string;
/**
* Bind an event handler to the "error" JavaScript event. (DEPRECATED from v1.8)
*
* @param handler A function to execute when the event is triggered.
*/
error(handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Bind an event handler to the "error" JavaScript event. (DEPRECATED from v1.8)
*
* @param eventData A plain object of data that will be passed to the event handler.
* @param handler A function to execute when the event is triggered.
*/
error(eventData: any, handler: (eventObject: JQueryEventObject) => any): JQuery;
/**
* Add a collection of DOM elements onto the jQuery stack.
*
* @param elements An array of elements to push onto the stack and make into a new jQuery object.
*/
pushStack(elements: any[]): JQuery;
/**
* Add a collection of DOM elements onto the jQuery stack.
*
* @param elements An array of elements to push onto the stack and make into a new jQuery object.
* @param name The name of a jQuery method that generated the array of elements.
* @param arguments The arguments that were passed in to the jQuery method (for serialization).
*/
pushStack(elements: any[], name: string, arguments: any[]): JQuery;
/**
* Insert content, specified by the parameter, after each element in the set of matched elements.
*
     * @param content1 HTML string, DOM element, array of elements, or jQuery object to insert after each element in the set of matched elements.
     * @param content2 One or more additional DOM elements, arrays of elements, HTML strings, or jQuery objects to insert after each element in the set of matched elements.
*/
after(content1: JQuery|any[]|Element|Text|string, ...content2: any[]): JQuery;
/**
* Insert content, specified by the parameter, after each element in the set of matched elements.
*
     * @param func A function that returns an HTML string, DOM element(s), or jQuery object to insert after each element in the set of matched elements. Receives the index position of the element in the set as an argument. Within the function, this refers to the current element in the set.
*/
after(func: (index: number, html: string) => string|Element|JQuery): JQuery;
/**
* Insert content, specified by the parameter, to the end of each element in the set of matched elements.
*
     * @param content1 DOM element, array of elements, HTML string, or jQuery object to insert at the end of each element in the set of matched elements.
     * @param content2 One or more additional DOM elements, arrays of elements, HTML strings, or jQuery objects to insert at the end of each element in the set of matched elements.
*/
append(content1: JQuery|any[]|Element|Text|string, ...content2: any[]): JQuery;
/**
* Insert content, specified by the parameter, to the end of each element in the set of matched elements.
*
     * @param func A function that returns an HTML string, DOM element(s), or jQuery object to insert at the end of each element in the set of matched elements. Receives the index position of the element in the set and the old HTML value of the element as arguments. Within the function, this refers to the current element in the set.
*/
append(func: (index: number, html: string) => string|Element|JQuery): JQuery;
/**
* Insert every element in the set of matched elements to the end of the target.
*
* @param target A selector, element, HTML string, array of elements, or jQuery object; the matched set of elements will be inserted at the end of the element(s) specified by this parameter.
*/
appendTo(target: JQuery|any[]|Element|string): JQuery;
/**
* Insert content, specified by the parameter, before each element in the set of matched elements.
*
     * @param content1 HTML string, DOM element, array of elements, or jQuery object to insert before each element in the set of matched elements.
     * @param content2 One or more additional DOM elements, arrays of elements, HTML strings, or jQuery objects to insert before each element in the set of matched elements.
*/
before(content1: JQuery|any[]|Element|Text|string, ...content2: any[]): JQuery;
/**
* Insert content, specified by the parameter, before each element in the set of matched elements.
*
     * @param func A function that returns an HTML string, DOM element(s), or jQuery object to insert before each element in the set of matched elements. Receives the index position of the element in the set as an argument. Within the function, this refers to the current element in the set.
*/
before(func: (index: number, html: string) => string|Element|JQuery): JQuery;
/**
* Create a deep copy of the set of matched elements.
*
     * @param withDataAndEvents A Boolean indicating whether event handlers and data should be copied along with the elements. The default value is false.
     * @param deepWithDataAndEvents A Boolean indicating whether event handlers and data for all children of the cloned element should be copied. By default its value matches the first argument's value (which defaults to false).
*/
clone(withDataAndEvents?: boolean, deepWithDataAndEvents?: boolean): JQuery;
/**
* Remove the set of matched elements from the DOM.
*
     * @param selector A selector expression that filters the set of matched elements to be removed.
*/
detach(selector?: string): JQuery;
/**
* Remove all child nodes of the set of matched elements from the DOM.
*/
empty(): JQuery;
/**
* Insert every element in the set of matched elements after the target.
*
     * @param target A selector, element, array of elements, HTML string, or jQuery object; the matched set of elements will be inserted after the element(s) specified by this parameter.
*/
insertAfter(target: JQuery|any[]|Element|Text|string): JQuery;
/**
* Insert every element in the set of matched elements before the target.
*
     * @param target A selector, element, array of elements, HTML string, or jQuery object; the matched set of elements will be inserted before the element(s) specified by this parameter.
*/
insertBefore(target: JQuery|any[]|Element|Text|string): JQuery;
/**
* Insert content, specified by the parameter, to the beginning of each element in the set of matched elements.
*
     * @param content1 DOM element, array of elements, HTML string, or jQuery object to insert at the beginning of each element in the set of matched elements.
     * @param content2 One or more additional DOM elements, arrays of elements, HTML strings, or jQuery objects to insert at the beginning of each element in the set of matched elements.
*/
prepend(content1: JQuery|any[]|Element|Text|string, ...content2: any[]): JQuery;
/**
* Insert content, specified by the parameter, to the beginning of each element in the set of matched elements.
*
     * @param func A function that returns an HTML string, DOM element(s), or jQuery object to insert at the beginning of each element in the set of matched elements. Receives the index position of the element in the set and the old HTML value of the element as arguments. Within the function, this refers to the current element in the set.
*/
prepend(func: (index: number, html: string) => string|Element|JQuery): JQuery;
/**
* Insert every element in the set of matched elements to the beginning of the target.
*
* @param target A selector, element, HTML string, array of elements, or jQuery object; the matched set of elements will be inserted at the beginning of the element(s) specified by this parameter.
*/
prependTo(target: JQuery|any[]|Element|string): JQuery;
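    /**
     * Illustrative sketch of the insertion pair-methods above (editorial addition;
     * the element IDs and markup are hypothetical):
     *
     *     $('#log').append('<p>done</p>');         // target first, content as argument
     *     $('<p>done</p>').appendTo('#log');       // content first, target as argument
     *     $('<li>first</li>').prependTo('#list');  // insert at the beginning of the target
     */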
/**
* Remove the set of matched elements from the DOM.
*
* @param selector A selector expression that filters the set of matched elements to be removed.
*/
remove(selector?: string): JQuery;
/**
* Replace each target element with the set of matched elements.
*
* @param target A selector string, jQuery object, DOM element, or array of elements indicating which element(s) to replace.
*/
replaceAll(target: JQuery|any[]|Element|string): JQuery;
/**
* Replace each element in the set of matched elements with the provided new content and return the set of elements that was removed.
*
     * @param newContent The content to insert. May be an HTML string, DOM element, array of DOM elements, or jQuery object.
*/
replaceWith(newContent: JQuery|any[]|Element|Text|string): JQuery;
/**
* Replace each element in the set of matched elements with the provided new content and return the set of elements that was removed.
*
     * @param func A function that returns content with which to replace the set of matched elements.
*/
replaceWith(func: () => Element|JQuery): JQuery;
/**
* Get the combined text contents of each element in the set of matched elements, including their descendants.
*/
text(): string;
/**
* Set the content of each element in the set of matched elements to the specified text.
*
* @param text The text to set as the content of each matched element. When Number or Boolean is supplied, it will be converted to a String representation.
*/
text(text: string|number|boolean): JQuery;
/**
* Set the content of each element in the set of matched elements to the specified text.
*
* @param func A function returning the text content to set. Receives the index position of the element in the set and the old text value as arguments.
*/
text(func: (index: number, text: string) => string): JQuery;
/**
* Retrieve all the elements contained in the jQuery set, as an array.
*/
toArray(): any[];
/**
* Remove the parents of the set of matched elements from the DOM, leaving the matched elements in their place.
*/
unwrap(): JQuery;
/**
* Wrap an HTML structure around each element in the set of matched elements.
*
* @param wrappingElement A selector, element, HTML string, or jQuery object specifying the structure to wrap around the matched elements.
*/
wrap(wrappingElement: JQuery|Element|string): JQuery;
/**
* Wrap an HTML structure around each element in the set of matched elements.
*
* @param func A callback function returning the HTML content or jQuery object to wrap around the matched elements. Receives the index position of the element in the set as an argument. Within the function, this refers to the current element in the set.
*/
wrap(func: (index: number) => string|JQuery): JQuery;
/**
* Wrap an HTML structure around all elements in the set of matched elements.
*
* @param wrappingElement A selector, element, HTML string, or jQuery object specifying the structure to wrap around the matched elements.
*/
wrapAll(wrappingElement: JQuery|Element|string): JQuery;
wrapAll(func: (index: number) => string): JQuery;
/**
* Wrap an HTML structure around the content of each element in the set of matched elements.
*
* @param wrappingElement An HTML snippet, selector expression, jQuery object, or DOM element specifying the structure to wrap around the content of the matched elements.
*/
wrapInner(wrappingElement: JQuery|Element|string): JQuery;
/**
* Wrap an HTML structure around the content of each element in the set of matched elements.
*
* @param func A callback function which generates a structure to wrap around the content of the matched elements. Receives the index position of the element in the set as an argument. Within the function, this refers to the current element in the set.
*/
wrapInner(func: (index: number) => string): JQuery;
/**
* Iterate over a jQuery object, executing a function for each matched element.
*
* @param func A function to execute for each matched element.
*/
each(func: (index: number, elem: Element) => any): JQuery;
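    /**
     * Illustrative sketch (editorial addition; the selector is hypothetical):
     *
     *     $('li').each(function (index, elem) {
     *         console.log(index, $(elem).text());
     *     });
     */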
/**
* Retrieve one of the elements matched by the jQuery object.
*
* @param index A zero-based integer indicating which element to retrieve.
*/
get(index: number): HTMLElement;
/**
* Retrieve the elements matched by the jQuery object.
*/
get(): any[];
/**
* Search for a given element from among the matched elements.
*/
index(): number;
/**
* Search for a given element from among the matched elements.
*
* @param selector A selector representing a jQuery collection in which to look for an element.
*/
index(selector: string|JQuery|Element): number;
/**
* The number of elements in the jQuery object.
*/
length: number;
/**
* A selector representing selector passed to jQuery(), if any, when creating the original set.
* version deprecated: 1.7, removed: 1.9
*/
selector: string;
[index: string]: any;
[index: number]: HTMLElement;
/**
* Add elements to the set of matched elements.
*
* @param selector A string representing a selector expression to find additional elements to add to the set of matched elements.
* @param context The point in the document at which the selector should begin matching; similar to the context argument of the $(selector, context) method.
*/
add(selector: string, context?: Element): JQuery;
/**
* Add elements to the set of matched elements.
*
* @param elements One or more elements to add to the set of matched elements.
*/
add(...elements: Element[]): JQuery;
/**
* Add elements to the set of matched elements.
*
* @param html An HTML fragment to add to the set of matched elements.
*/
add(html: string): JQuery;
/**
* Add elements to the set of matched elements.
*
* @param obj An existing jQuery object to add to the set of matched elements.
*/
add(obj: JQuery): JQuery;
/**
* Get the children of each element in the set of matched elements, optionally filtered by a selector.
*
* @param selector A string containing a selector expression to match elements against.
*/
children(selector?: string): JQuery;
/**
* For each element in the set, get the first element that matches the selector by testing the element itself and traversing up through its ancestors in the DOM tree.
*
* @param selector A string containing a selector expression to match elements against.
*/
closest(selector: string): JQuery;
/**
* For each element in the set, get the first element that matches the selector by testing the element itself and traversing up through its ancestors in the DOM tree.
*
* @param selector A string containing a selector expression to match elements against.
* @param context A DOM element within which a matching element may be found. If no context is passed in then the context of the jQuery set will be used instead.
*/
closest(selector: string, context?: Element): JQuery;
/**
* For each element in the set, get the first element that matches the selector by testing the element itself and traversing up through its ancestors in the DOM tree.
*
* @param obj A jQuery object to match elements against.
*/
closest(obj: JQuery): JQuery;
/**
* For each element in the set, get the first element that matches the selector by testing the element itself and traversing up through its ancestors in the DOM tree.
*
* @param element An element to match elements against.
*/
closest(element: Element): JQuery;
/**
* Get an array of all the elements and selectors matched against the current element up through the DOM tree.
*
* @param selectors An array or string containing a selector expression to match elements against (can also be a jQuery object).
* @param context A DOM element within which a matching element may be found. If no context is passed in then the context of the jQuery set will be used instead.
*/
closest(selectors: any, context?: Element): any[];
/**
* Get the children of each element in the set of matched elements, including text and comment nodes.
*/
contents(): JQuery;
/**
* End the most recent filtering operation in the current chain and return the set of matched elements to its previous state.
*/
end(): JQuery;
/**
* Reduce the set of matched elements to the one at the specified index.
*
* @param index An integer indicating the 0-based position of the element. OR An integer indicating the position of the element, counting backwards from the last element in the set.
*
*/
eq(index: number): JQuery;
/**
* Reduce the set of matched elements to those that match the selector or pass the function's test.
*
* @param selector A string containing a selector expression to match the current set of elements against.
*/
filter(selector: string): JQuery;
/**
* Reduce the set of matched elements to those that match the selector or pass the function's test.
*
* @param func A function used as a test for each element in the set. this is the current DOM element.
*/
filter(func: (index: number, element: Element) => any): JQuery;
/**
* Reduce the set of matched elements to those that match the selector or pass the function's test.
*
* @param element An element to match the current set of elements against.
*/
filter(element: Element): JQuery;
/**
* Reduce the set of matched elements to those that match the selector or pass the function's test.
*
* @param obj An existing jQuery object to match the current set of elements against.
*/
filter(obj: JQuery): JQuery;
/**
* Get the descendants of each element in the current set of matched elements, filtered by a selector, jQuery object, or element.
*
* @param selector A string containing a selector expression to match elements against.
*/
find(selector: string): JQuery;
/**
* Get the descendants of each element in the current set of matched elements, filtered by a selector, jQuery object, or element.
*
* @param element An element to match elements against.
*/
find(element: Element): JQuery;
/**
* Get the descendants of each element in the current set of matched elements, filtered by a selector, jQuery object, or element.
*
* @param obj A jQuery object to match elements against.
*/
find(obj: JQuery): JQuery;
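    /**
     * Illustrative traversal sketch combining the methods above (editorial addition;
     * selectors and class names are hypothetical):
     *
     *     $('#menu')
     *         .find('li')           // all descendant list items
     *         .filter('.active')    // keep only the active ones
     *         .closest('ul')        // walk back up to their containing list(s)
     *         .addClass('open');
     */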
/**
* Reduce the set of matched elements to the first in the set.
*/
first(): JQuery;
/**
* Reduce the set of matched elements to those that have a descendant that matches the selector or DOM element.
*
* @param selector A string containing a selector expression to match elements against.
*/
has(selector: string): JQuery;
/**
* Reduce the set of matched elements to those that have a descendant that matches the selector or DOM element.
*
* @param contained A DOM element to match elements against.
*/
has(contained: Element): JQuery;
/**
* Check the current matched set of elements against a selector, element, or jQuery object and return true if at least one of these elements matches the given arguments.
*
* @param selector A string containing a selector expression to match elements against.
*/
is(selector: string): boolean;
/**
* Check the current matched set of elements against a selector, element, or jQuery object and return true if at least one of these elements matches the given arguments.
*
* @param func A function used as a test for the set of elements. It accepts one argument, index, which is the element's index in the jQuery collection.Within the function, this refers to the current DOM element.
*/
is(func: (index: number, element: Element) => boolean): boolean;
/**
* Check the current matched set of elements against a selector, element, or jQuery object and return true if at least one of these elements matches the given arguments.
*
* @param obj An existing jQuery object to match the current set of elements against.
*/
is(obj: JQuery): boolean;
/**
* Check the current matched set of elements against a selector, element, or jQuery object and return true if at least one of these elements matches the given arguments.
*
* @param elements One or more elements to match the current set of elements against.
*/
is(elements: any): boolean;
/**
* Reduce the set of matched elements to the final one in the set.
*/
last(): JQuery;
/**
* Pass each element in the current matched set through a function, producing a new jQuery object containing the return values.
*
* @param callback A function object that will be invoked for each element in the current set.
*/
map(callback: (index: number, domElement: Element) => any): JQuery;
/**
* Get the immediately following sibling of each element in the set of matched elements. If a selector is provided, it retrieves the next sibling only if it matches that selector.
*
* @param selector A string containing a selector expression to match elements against.
*/
next(selector?: string): JQuery;
/**
* Get all following siblings of each element in the set of matched elements, optionally filtered by a selector.
*
* @param selector A string containing a selector expression to match elements against.
*/
nextAll(selector?: string): JQuery;
/**
* Get all following siblings of each element up to but not including the element matched by the selector, DOM node, or jQuery object passed.
*
* @param selector A string containing a selector expression to indicate where to stop matching following sibling elements.
* @param filter A string containing a selector expression to match elements against.
*/
nextUntil(selector?: string, filter?: string): JQuery;
/**
* Get all following siblings of each element up to but not including the element matched by the selector, DOM node, or jQuery object passed.
*
* @param element A DOM node or jQuery object indicating where to stop matching following sibling elements.
* @param filter A string containing a selector expression to match elements against.
*/
nextUntil(element?: Element, filter?: string): JQuery;
/**
* Get all following siblings of each element up to but not including the element matched by the selector, DOM node, or jQuery object passed.
*
* @param obj A DOM node or jQuery object indicating where to stop matching following sibling elements.
* @param filter A string containing a selector expression to match elements against.
*/
nextUntil(obj?: JQuery, filter?: string): JQuery;
/**
* Remove elements from the set of matched elements.
*
* @param selector A string containing a selector expression to match elements against.
*/
not(selector: string): JQuery;
/**
* Remove elements from the set of matched elements.
*
* @param func A function used as a test for each element in the set. this is the current DOM element.
*/
not(func: (index: number, element: Element) => boolean): JQuery;
/**
* Remove elements from the set of matched elements.
*
* @param elements One or more DOM elements to remove from the matched set.
*/
not(...elements: Element[]): JQuery;
/**
* Remove elements from the set of matched elements.
*
* @param obj An existing jQuery object to match the current set of elements against.
*/
not(obj: JQuery): JQuery;
/**
* Get the closest ancestor element that is positioned.
*/
offsetParent(): JQuery;
/**
* Get the parent of each element in the current set of matched elements, optionally filtered by a selector.
*
* @param selector A string containing a selector expression to match elements against.
*/
parent(selector?: string): JQuery;
/**
* Get the ancestors of each element in the current set of matched elements, optionally filtered by a selector.
*
* @param selector A string containing a selector expression to match elements against.
*/
parents(selector?: string): JQuery;
/**
* Get the ancestors of each element in the current set of matched elements, up to but not including the element matched by the selector, DOM node, or jQuery object.
*
* @param selector A string containing a selector expression to indicate where to stop matching ancestor elements.
* @param filter A string containing a selector expression to match elements against.
*/
parentsUntil(selector?: string, filter?: string): JQuery;
/**
* Get the ancestors of each element in the current set of matched elements, up to but not including the element matched by the selector, DOM node, or jQuery object.
*
* @param element A DOM node or jQuery object indicating where to stop matching ancestor elements.
* @param filter A string containing a selector expression to match elements against.
*/
parentsUntil(element?: Element, filter?: string): JQuery;
/**
* Get the ancestors of each element in the current set of matched elements, up to but not including the element matched by the selector, DOM node, or jQuery object.
*
* @param obj A DOM node or jQuery object indicating where to stop matching ancestor elements.
* @param filter A string containing a selector expression to match elements against.
*/
parentsUntil(obj?: JQuery, filter?: string): JQuery;
/**
* Get the immediately preceding sibling of each element in the set of matched elements, optionally filtered by a selector.
*
* @param selector A string containing a selector expression to match elements against.
*/
prev(selector?: string): JQuery;
/**
* Get all preceding siblings of each element in the set of matched elements, optionally filtered by a selector.
*
* @param selector A string containing a selector expression to match elements against.
*/
prevAll(selector?: string): JQuery;
/**
* Get all preceding siblings of each element up to but not including the element matched by the selector, DOM node, or jQuery object.
*
* @param selector A string containing a selector expression to indicate where to stop matching preceding sibling elements.
* @param filter A string containing a selector expression to match elements against.
*/
prevUntil(selector?: string, filter?: string): JQuery;
/**
* Get all preceding siblings of each element up to but not including the element matched by the selector, DOM node, or jQuery object.
*
* @param element A DOM node or jQuery object indicating where to stop matching preceding sibling elements.
* @param filter A string containing a selector expression to match elements against.
*/
prevUntil(element?: Element, filter?: string): JQuery;
/**
* Get all preceding siblings of each element up to but not including the element matched by the selector, DOM node, or jQuery object.
*
* @param obj A DOM node or jQuery object indicating where to stop matching preceding sibling elements.
* @param filter A string containing a selector expression to match elements against.
*/
prevUntil(obj?: JQuery, filter?: string): JQuery;
/**
* Get the siblings of each element in the set of matched elements, optionally filtered by a selector.
*
* @param selector A string containing a selector expression to match elements against.
*/
siblings(selector?: string): JQuery;
/**
* Reduce the set of matched elements to a subset specified by a range of indices.
*
* @param start An integer indicating the 0-based position at which the elements begin to be selected. If negative, it indicates an offset from the end of the set.
* @param end An integer indicating the 0-based position at which the elements stop being selected. If negative, it indicates an offset from the end of the set. If omitted, the range continues until the end of the set.
*/
slice(start: number, end?: number): JQuery;
/**
* Show the queue of functions to be executed on the matched elements.
*
* @param queueName A string containing the name of the queue. Defaults to fx, the standard effects queue.
*/
queue(queueName?: string): any[];
/**
* Manipulate the queue of functions to be executed, once for each matched element.
*
* @param newQueue An array of functions to replace the current queue contents.
*/
queue(newQueue: Function[]): JQuery;
/**
* Manipulate the queue of functions to be executed, once for each matched element.
*
* @param callback The new function to add to the queue, with a function to call that will dequeue the next item.
*/
queue(callback: Function): JQuery;
/**
* Manipulate the queue of functions to be executed, once for each matched element.
*
* @param queueName A string containing the name of the queue. Defaults to fx, the standard effects queue.
* @param newQueue An array of functions to replace the current queue contents.
*/
queue(queueName: string, newQueue: Function[]): JQuery;
/**
* Manipulate the queue of functions to be executed, once for each matched element.
*
* @param queueName A string containing the name of the queue. Defaults to fx, the standard effects queue.
* @param callback The new function to add to the queue, with a function to call that will dequeue the next item.
*/
queue(queueName: string, callback: Function): JQuery;
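    /**
     * Illustrative queue sketch (editorial addition; the selector and durations are hypothetical):
     *
     *     $('#box')
     *         .fadeOut(400)
     *         .queue(function (next) {
     *             console.log('runs between the two animations');
     *             next();                         // dequeue the following item
     *         })
     *         .fadeIn(400);
     *     console.log($('#box').queue().length);  // inspect the pending "fx" queue
     */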
}
declare module "jquery" {
export = $;
}
declare var jQuery: JQueryStatic;
declare var $: JQueryStatic;<|fim▁end|>
| |
<|file_name|>session.py<|end_file_name|><|fim▁begin|>import math
import unicodedata as uda
from binascii import unhexlify, hexlify
from torba.rpc.jsonrpc import RPCError
from torba.server.hash import hash_to_hex_str
from torba.server.session import ElectrumX
from torba.server import util
from lbry.schema.result import Outputs
from lbry.schema.url import URL
from lbry.wallet.server.block_processor import LBRYBlockProcessor
from lbry.wallet.server.db import LBRYDB
class LBRYElectrumX(ElectrumX):
PROTOCOL_MIN = (0, 0) # temporary, for supporting 0.10 protocol
max_errors = math.inf # don't disconnect people for errors! let them happen...
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# fixme: this is a rebase hack, we need to go through ChainState instead later
self.daemon = self.session_mgr.daemon
self.bp: LBRYBlockProcessor = self.session_mgr.bp
self.db: LBRYDB = self.bp.db
def set_request_handlers(self, ptuple):
super().set_request_handlers(ptuple)
handlers = {
'blockchain.transaction.get_height': self.transaction_get_height,
'blockchain.claimtrie.search': self.claimtrie_search,
'blockchain.claimtrie.resolve': self.claimtrie_resolve,
'blockchain.claimtrie.getclaimbyid': self.claimtrie_getclaimbyid,
'blockchain.claimtrie.getclaimsforname': self.claimtrie_getclaimsforname,
'blockchain.claimtrie.getclaimsbyids': self.claimtrie_getclaimsbyids,
'blockchain.claimtrie.getvalue': self.claimtrie_getvalue,
'blockchain.claimtrie.getnthclaimforname': self.claimtrie_getnthclaimforname,
'blockchain.claimtrie.getclaimsintx': self.claimtrie_getclaimsintx,
'blockchain.claimtrie.getclaimssignedby': self.claimtrie_getclaimssignedby,
'blockchain.claimtrie.getclaimssignedbynthtoname': self.claimtrie_getclaimssignedbynthtoname,
'blockchain.claimtrie.getvalueforuri': self.claimtrie_getvalueforuri,
'blockchain.claimtrie.getvaluesforuris': self.claimtrie_getvalueforuris,
'blockchain.claimtrie.getclaimssignedbyid': self.claimtrie_getclaimssignedbyid,
'blockchain.block.get_server_height': self.get_server_height,
}
self.request_handlers.update(handlers)
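    # Illustrative sketch (editorial addition): the handlers above are reached through the
    # usual ElectrumX JSON-RPC envelope; the URL below is a made-up example.
    #
    #   --> {"id": 1, "method": "blockchain.claimtrie.resolve", "params": ["lbry://@channel/stream"]}
    #   <-- {"id": 1, "result": "<base64-encoded Outputs protobuf>"}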
async def claimtrie_search(self, **kwargs):
if 'claim_id' in kwargs:
self.assert_claim_id(kwargs['claim_id'])
return Outputs.to_base64(*self.db.sql.search(kwargs))
async def claimtrie_resolve(self, *urls):
return Outputs.to_base64(*self.db.sql.resolve(urls))
async def get_server_height(self):
return self.bp.height
async def transaction_get_height(self, tx_hash):
self.assert_tx_hash(tx_hash)
transaction_info = await self.daemon.getrawtransaction(tx_hash, True)
if transaction_info and 'hex' in transaction_info and 'confirmations' in transaction_info:
# an unconfirmed transaction from lbrycrdd will not have a 'confirmations' field
return (self.db.db_height - transaction_info['confirmations']) + 1
elif transaction_info and 'hex' in transaction_info:
return -1
return None
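    # Worked example for the height calculation above (editorial addition, numbers are
    # illustrative): with db_height == 1000 and a transaction reporting 10 confirmations,
    # (1000 - 10) + 1 == 991, i.e. the height of the block that confirmed it; -1 marks a
    # transaction that lbrycrdd knows about but that is still unconfirmed.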
async def claimtrie_getclaimssignedby(self, name):
winning_claim = await self.daemon.getvalueforname(name)
if winning_claim:
return await self.claimtrie_getclaimssignedbyid(winning_claim['claimId'])
async def claimtrie_getclaimssignedbyid(self, certificate_id):
claim_ids = self.get_claim_ids_signed_by(certificate_id)
return await self.batched_formatted_claims_from_daemon(claim_ids)
def claimtrie_getclaimssignedbyidminimal(self, certificate_id):
claim_ids = self.get_claim_ids_signed_by(certificate_id)
ret = []
for claim_id in claim_ids:
raw_claim_id = unhexlify(claim_id)[::-1]
info = self.db.get_claim_info(raw_claim_id)
if info:
ret.append({
'claim_id': claim_id,
'height': info.height,
'name': info.name.decode()
})
return ret
def get_claim_ids_signed_by(self, certificate_id):
raw_certificate_id = unhexlify(certificate_id)[::-1]
raw_claim_ids = self.db.get_signed_claim_ids_by_cert_id(raw_certificate_id)
return list(map(hash_to_hex_str, raw_claim_ids))
<|fim▁hole|> return await self.claimtrie_getclaimssignedbyid(hash_to_hex_str(claim['claim_id']))
async def claimtrie_getclaimsintx(self, txid):
# TODO: this needs further discussion.
# Code on lbryum-server is wrong and we need to gather what we clearly expect from this command
claim_ids = [claim['claimId'] for claim in (await self.daemon.getclaimsfortx(txid)) if 'claimId' in claim]
return await self.batched_formatted_claims_from_daemon(claim_ids)
async def claimtrie_getvalue(self, name, block_hash=None):
proof = await self.daemon.getnameproof(name, block_hash)
result = {'proof': proof, 'supports': []}
if proof_has_winning_claim(proof):
tx_hash, nout = proof['txhash'], int(proof['nOut'])
transaction_info = await self.daemon.getrawtransaction(tx_hash, True)
result['transaction'] = transaction_info['hex'] # should have never included this (or the call to get it)
raw_claim_id = self.db.get_claim_id_from_outpoint(unhexlify(tx_hash)[::-1], nout)
claim_id = hexlify(raw_claim_id[::-1]).decode()
claim = await self.claimtrie_getclaimbyid(claim_id)
result.update(claim)
return result
async def claimtrie_getnthclaimforname(self, name, n):
n = int(n)
result = await self.claimtrie_getclaimsforname(name)
if 'claims' in result and len(result['claims']) > n >= 0:
# TODO: revist this after lbrycrd_#209 to see if we can sort by claim_sequence at this point
result['claims'].sort(key=lambda c: (int(c['height']), int(c['nout'])))
result['claims'][n]['claim_sequence'] = n
return result['claims'][n]
async def claimtrie_getpartialmatch(self, name, part):
result = await self.claimtrie_getclaimsforname(name)
if 'claims' in result:
            return next(filter(lambda x: x['claim_id'].startswith(part), result['claims']), None)
async def claimtrie_getclaimsforname(self, name):
claims = await self.daemon.getclaimsforname(name)
if claims:
claims['claims'] = [self.format_claim_from_daemon(claim, name) for claim in claims['claims']]
claims['supports_without_claims'] = [] # fixme temporary
del claims['supports without claims']
claims['last_takeover_height'] = claims['nLastTakeoverHeight']
del claims['nLastTakeoverHeight']
return claims
return {}
async def batched_formatted_claims_from_daemon(self, claim_ids):
claims = await self.daemon.getclaimsbyids(claim_ids)
result = []
for claim in claims:
if claim and claim.get('value'):
result.append(self.format_claim_from_daemon(claim))
return result
def format_claim_from_daemon(self, claim, name=None):
"""Changes the returned claim data to the format expected by lbry and adds missing fields."""
if not claim:
return {}
# this ISO-8859 nonsense stems from a nasty form of encoding extended characters in lbrycrd
# it will be fixed after the lbrycrd upstream merge to v17 is done
# it originated as a fear of terminals not supporting unicode. alas, they all do
if 'name' in claim:
name = claim['name'].encode('ISO-8859-1').decode()
info = self.db.sql.get_claims(claim_id=claim['claimId'])
if not info:
# raise RPCError("Lbrycrd has {} but not lbryumx, please submit a bug report.".format(claim_id))
return {}
address = info.address.decode()
# fixme: temporary
#supports = self.format_supports_from_daemon(claim.get('supports', []))
supports = []
amount = get_from_possible_keys(claim, 'amount', 'nAmount')
height = get_from_possible_keys(claim, 'height', 'nHeight')
effective_amount = get_from_possible_keys(claim, 'effective amount', 'nEffectiveAmount')
valid_at_height = get_from_possible_keys(claim, 'valid at height', 'nValidAtHeight')
result = {
"name": name,
"claim_id": claim['claimId'],
"txid": claim['txid'],
"nout": claim['n'],
"amount": amount,
"depth": self.db.db_height - height + 1,
"height": height,
"value": hexlify(claim['value'].encode('ISO-8859-1')).decode(),
"address": address, # from index
"supports": supports,
"effective_amount": effective_amount,
"valid_at_height": valid_at_height
}
if 'claim_sequence' in claim:
# TODO: ensure that lbrycrd #209 fills in this value
result['claim_sequence'] = claim['claim_sequence']
else:
result['claim_sequence'] = -1
if 'normalized_name' in claim:
result['normalized_name'] = claim['normalized_name'].encode('ISO-8859-1').decode()
return result
def format_supports_from_daemon(self, supports):
return [[support['txid'], support['n'], get_from_possible_keys(support, 'amount', 'nAmount')] for
support in supports]
async def claimtrie_getclaimbyid(self, claim_id):
self.assert_claim_id(claim_id)
claim = await self.daemon.getclaimbyid(claim_id)
return self.format_claim_from_daemon(claim)
async def claimtrie_getclaimsbyids(self, *claim_ids):
claims = await self.batched_formatted_claims_from_daemon(claim_ids)
return dict(zip(claim_ids, claims))
def assert_tx_hash(self, value):
'''Raise an RPCError if the value is not a valid transaction
hash.'''
try:
if len(util.hex_to_bytes(value)) == 32:
return
except Exception:
pass
raise RPCError(1, f'{value} should be a transaction hash')
def assert_claim_id(self, value):
'''Raise an RPCError if the value is not a valid claim id
hash.'''
try:
if len(util.hex_to_bytes(value)) == 20:
return
except Exception:
pass
raise RPCError(1, f'{value} should be a claim id hash')
def normalize_name(self, name):
# this is designed to match lbrycrd; change it here if it changes there
return uda.normalize('NFD', name).casefold()
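    # Illustrative behaviour (editorial addition, example values assumed):
    #   normalize_name("LBRY") -> "lbry"   (casefold lower-cases the name)
    #   normalize_name("Café") -> "café" in NFD form, i.e. the accent becomes a separate
    #   combining character, which is what lbrycrd compares against.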
def claim_matches_name(self, claim, name):
if not name:
return False
if 'normalized_name' in claim:
return self.normalize_name(name) == claim['normalized_name']
return name == claim['name']
async def claimtrie_getvalueforuri(self, block_hash, uri, known_certificates=None):
# TODO: this thing is huge, refactor
CLAIM_ID = "claim_id"
WINNING = "winning"
SEQUENCE = "sequence"
try:
parsed_uri = URL.parse(uri)
except ValueError as err:
return {'error': err.args[0]}
result = {}
if parsed_uri.has_channel:
certificate = None
# TODO: this is also done on the else, refactor
if parsed_uri.channel.claim_id:
if len(parsed_uri.channel.claim_id) < 40:
certificate_info = self.claimtrie_getpartialmatch(
parsed_uri.channel.name, parsed_uri.channel.claim_id)
else:
certificate_info = await self.claimtrie_getclaimbyid(parsed_uri.channel.claim_id)
if certificate_info and self.claim_matches_name(certificate_info, parsed_uri.channel.name):
certificate = {'resolution_type': CLAIM_ID, 'result': certificate_info}
elif parsed_uri.claim_sequence:
certificate_info = await self.claimtrie_getnthclaimforname(parsed_uri.name, parsed_uri.claim_sequence)
if certificate_info:
certificate = {'resolution_type': SEQUENCE, 'result': certificate_info}
else:
certificate_info = await self.claimtrie_getvalue(parsed_uri.name, block_hash)
if certificate_info:
certificate = {'resolution_type': WINNING, 'result': certificate_info}
if certificate and 'claim_id' not in certificate['result']:
return result
if certificate:
result['certificate'] = certificate
channel_id = certificate['result']['claim_id']
claims_in_channel = self.claimtrie_getclaimssignedbyidminimal(channel_id)
if not parsed_uri.path:
result['unverified_claims_in_channel'] = {claim['claim_id']: (claim['name'], claim['height'])
for claim in claims_in_channel}
else:
# making an assumption that there aren't case conflicts on an existing channel
norm_path = self.normalize_name(parsed_uri.path)
result['unverified_claims_for_name'] = {claim['claim_id']: (claim['name'], claim['height'])
for claim in claims_in_channel
if self.normalize_name(claim['name']) == norm_path}
else:
claim = None
if parsed_uri.claim_id:
if len(parsed_uri.claim_id) < 40:
claim_info = self.claimtrie_getpartialmatch(parsed_uri.name, parsed_uri.claim_id)
else:
claim_info = await self.claimtrie_getclaimbyid(parsed_uri.claim_id)
if claim_info and self.claim_matches_name(claim_info, parsed_uri.name):
claim = {'resolution_type': CLAIM_ID, 'result': claim_info}
elif parsed_uri.claim_sequence:
claim_info = await self.claimtrie_getnthclaimforname(parsed_uri.name, parsed_uri.claim_sequence)
if claim_info:
claim = {'resolution_type': SEQUENCE, 'result': claim_info}
else:
claim_info = await self.claimtrie_getvalue(parsed_uri.name, block_hash)
if claim_info:
claim = {'resolution_type': WINNING, 'result': claim_info}
if (claim and
# is not an unclaimed winning name
(claim['resolution_type'] != WINNING or proof_has_winning_claim(claim['result']['proof']))):
raw_claim_id = unhexlify(claim['result']['claim_id'])[::-1]
raw_certificate_id = self.db.get_claim_info(raw_claim_id).cert_id
if raw_certificate_id:
certificate_id = hash_to_hex_str(raw_certificate_id)
certificate = await self.claimtrie_getclaimbyid(certificate_id)
if certificate:
certificate = {'resolution_type': CLAIM_ID,
'result': certificate}
result['certificate'] = certificate
result['claim'] = claim
return result
async def claimtrie_getvalueforuris(self, block_hash, *uris):
MAX_BATCH_URIS = 500
if len(uris) > MAX_BATCH_URIS:
raise Exception("Exceeds max batch uris of {}".format(MAX_BATCH_URIS))
return {uri: await self.claimtrie_getvalueforuri(block_hash, uri) for uri in uris}
# TODO: get it all concurrently when lbrycrd pending changes goes into a stable release
#async def getvalue(uri):
# value = await self.claimtrie_getvalueforuri(block_hash, uri)
# return uri, value,
#return dict([await asyncio.gather(*tuple(getvalue(uri) for uri in uris))][0])
def proof_has_winning_claim(proof):
return {'txhash', 'nOut'}.issubset(proof.keys())
def get_from_possible_keys(dictionary, *keys):
for key in keys:
if key in dictionary:
return dictionary[key]<|fim▁end|>
|
async def claimtrie_getclaimssignedbynthtoname(self, name, n):
claim = self.claimtrie_getnthclaimforname(name, n)
if claim and 'claim_id' in claim:
|
<|file_name|>zabbix.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# (c) 2013, Greg Buehler
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
######################################################################
"""
Zabbix Server external inventory script.
========================================
Returns hosts and hostgroups from Zabbix Server.
If you want to run with --limit against a host group with a space in the
name, use an asterisk. For example --limit="Linux*servers".
Configuration is read from `zabbix.ini`.<|fim▁hole|>"""
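# A minimal zabbix.ini sketch (illustrative values; the section and option names
# match what read_settings() below looks for):
#
# [zabbix]
# server = http://zabbix.example.com/zabbix
# username = admin
# password = zabbix
# validate_certs = True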
from __future__ import print_function
import os
import sys
import argparse
try:
import ConfigParser as configparser
except ImportError:
import configparser
try:
from zabbix_api import ZabbixAPI
except:
print("Error: Zabbix API library must be installed: pip install zabbix-api.",
file=sys.stderr)
sys.exit(1)
import json
class ZabbixInventory(object):
def read_settings(self):
config = configparser.SafeConfigParser()
conf_path = './zabbix.ini'
if not os.path.exists(conf_path):
conf_path = os.path.dirname(os.path.realpath(__file__)) + '/zabbix.ini'
if os.path.exists(conf_path):
config.read(conf_path)
# server
if config.has_option('zabbix', 'server'):
self.zabbix_server = config.get('zabbix', 'server')
# login
if config.has_option('zabbix', 'username'):
self.zabbix_username = config.get('zabbix', 'username')
if config.has_option('zabbix', 'password'):
self.zabbix_password = config.get('zabbix', 'password')
# ssl certs
if config.has_option('zabbix', 'validate_certs'):
if config.get('zabbix', 'validate_certs') in ['false', 'False', False]:
self.validate_certs = False
def read_cli(self):
parser = argparse.ArgumentParser()
parser.add_argument('--host')
parser.add_argument('--list', action='store_true')
self.options = parser.parse_args()
def hoststub(self):
return {
'hosts': []
}
def get_host(self, api, name):
data = {'ansible_ssh_host': name}
return data
def get_list(self, api):
hostsData = api.host.get({'output': 'extend', 'selectGroups': 'extend'})
data = {}
data[self.defaultgroup] = self.hoststub()
for host in hostsData:
hostname = host['name']
data[self.defaultgroup]['hosts'].append(hostname)
for group in host['groups']:
groupname = group['name']
if groupname not in data:
data[groupname] = self.hoststub()
data[groupname]['hosts'].append(hostname)
# Prevents Ansible from calling this script for each server with --host
data['_meta'] = {'hostvars': self.meta}
return data
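        # Roughly, the structure returned above looks like (illustrative sketch):
        #   {"group_all": {"hosts": ["web01", ...]},
        #    "<hostgroup name>": {"hosts": ["web01", ...]},
        #    "_meta": {"hostvars": {}}}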
def __init__(self):
self.defaultgroup = 'group_all'
self.zabbix_server = None
self.zabbix_username = None
self.zabbix_password = None
self.validate_certs = True
self.meta = {}
self.read_settings()
self.read_cli()
if self.zabbix_server and self.zabbix_username:
try:
api = ZabbixAPI(server=self.zabbix_server, validate_certs=self.validate_certs)
api.login(user=self.zabbix_username, password=self.zabbix_password)
except BaseException as e:
print("Error: Could not login to Zabbix server. Check your zabbix.ini.", file=sys.stderr)
sys.exit(1)
if self.options.host:
data = self.get_host(api, self.options.host)
print(json.dumps(data, indent=2))
elif self.options.list:
data = self.get_list(api)
print(json.dumps(data, indent=2))
else:
print("usage: --list ..OR.. --host <hostname>", file=sys.stderr)
sys.exit(1)
else:
print("Error: Configuration of server and credentials are required. See zabbix.ini.", file=sys.stderr)
sys.exit(1)
ZabbixInventory()<|fim▁end|>
|
Tested with Zabbix Server 2.0.6 and 3.2.3.
|
<|file_name|>chrome_render_frame_observer_browsertest.cc<|end_file_name|><|fim▁begin|>// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/renderer/chrome_render_frame_observer.h"
#include "base/test/histogram_tester.h"
#include "chrome/test/base/chrome_render_view_test.h"
#include "components/translate/content/common/translate_messages.h"
#include "components/translate/content/renderer/translate_helper.h"
#include "components/translate/core/common/translate_constants.h"
#include "content/public/renderer/render_frame.h"
#include "content/public/renderer/render_view.h"
#include "third_party/WebKit/public/web/WebView.h"
// Constants for UMA statistic collection.
static const char kTranslateCaptureText[] = "Translate.CaptureText";
class ChromeRenderFrameObserverTest : public ChromeRenderViewTest {};<|fim▁hole|> "<!DOCTYPE html><body>"
"This is a main document"
"<iframe srcdoc=\"This a document in an iframe.\">"
"</body>");
view_->GetWebView()->updateAllLifecyclePhases();
const IPC::Message* message = render_thread_->sink().GetUniqueMessageMatching(
ChromeFrameHostMsg_TranslateLanguageDetermined::ID);
ASSERT_NE(static_cast<IPC::Message*>(NULL), message);
ChromeFrameHostMsg_TranslateLanguageDetermined::Param params;
ChromeFrameHostMsg_TranslateLanguageDetermined::Read(message, ¶ms);
EXPECT_TRUE(base::get<1>(params)) << "Page should be translatable.";
// Should have 2 samples: one for preliminary capture, one for final capture.
// If there are more, then subframes are being captured more than once.
histogram_tester.ExpectTotalCount(kTranslateCaptureText, 2);
}<|fim▁end|>
|
TEST_F(ChromeRenderFrameObserverTest, SkipCapturingSubFrames) {
base::HistogramTester histogram_tester;
LoadHTML(
|
<|file_name|>atoi.py<|end_file_name|><|fim▁begin|>"""
Implement atoi() in Python (given a string, return a number).
Assume all the strings are always valid.
"""
import unittest
def atoi(string):
l = len(string)
t = 0
v = 10 ** (l - 1)
for ch in string:
t += v * int(ch)
v /= 10
return t
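    # Worked example: atoi("123") -> v takes the values 100, 10, 1,
    # so t = 1*100 + 2*10 + 3*1 = 123.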
<|fim▁hole|> l, t = len(string), 0
for idx, ch in enumerate(string):
t += int(ch) * (10 ** (l - idx - 1))
return t
def atoi3(string):
l = len(string)
return sum([
int(ch) * (10 ** (l - idx - 1))
for idx, ch in enumerate(string)
])
class AtoITest(unittest.TestCase):
def test_atoi(self):
self.assertEqual(12345, atoi("12345"))
self.assertEqual(1234, atoi("1234"))
self.assertEqual(123, atoi("123"))
self.assertEqual(12, atoi("12"))
self.assertEqual(1, atoi("1"))
self.assertEqual(0, atoi("0"))
def test_atoi2(self):
self.assertEqual(12345, atoi2("12345"))
self.assertEqual(1234, atoi2("1234"))
self.assertEqual(123, atoi2("123"))
self.assertEqual(12, atoi2("12"))
self.assertEqual(1, atoi2("1"))
self.assertEqual(0, atoi2("0"))
def test_atoi3(self):
self.assertEqual(12345, atoi3("12345"))
self.assertEqual(1234, atoi3("1234"))
self.assertEqual(123, atoi3("123"))
self.assertEqual(12, atoi3("12"))
self.assertEqual(1, atoi3("1"))
self.assertEqual(0, atoi3("0"))<|fim▁end|>
|
def atoi2(string):
|
<|file_name|>IgnoredExceptionsTest.py<|end_file_name|><|fim▁begin|>##########################################################################
#
# Copyright (c) 2010, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
from __future__ import with_statement
import unittest
import IECore
class IgnoredExceptionsTest( unittest.TestCase ) :
def test( self ) :
def f( toRaise, toIgnore ) :
with IECore.IgnoredExceptions( toIgnore ) :
raise toRaise
self.assertRaises( RuntimeError, f, RuntimeError, KeyError )
self.assertRaises( RuntimeError, f, RuntimeError, ( KeyError, IndexError ) )
f( KeyError, KeyError )
f( KeyError, ( KeyError, IndexError ) )
f( IndexError, ( KeyError, IndexError ) )
c = IECore.CompoundObject()
with IECore.IgnoredExceptions( KeyError ) :
c["d"]
with IECore.IgnoredExceptions( Exception ) :
c["d"]
p = IECore.Parameterised( "" )
with IECore.IgnoredExceptions( Exception ) :
p["d"]
def testNoExceptions( self ) :
with IECore.IgnoredExceptions( Exception ) :
pass
if __name__ == "__main__":<|fim▁hole|><|fim▁end|>
|
unittest.main()
|
<|file_name|>units.py<|end_file_name|><|fim▁begin|>import re
import quantities as pq
from numbers import NumberService
class ConversionService(object):
__exponents__ = {
'square': 2,
'squared': 2,
'cubed': 3
}
def _preprocess(self, input):
def handleExponents(input):
m = re.search(r'\bsquare (\w+)', input)
if m and self.isValidUnit(m.group(1)):
input = re.sub(r'\bsquare (\w+)', r'\g<1>^2', input)
m = re.search(r'\bsquared (\w+)', input)
if m and self.isValidUnit(m.group(1)):
input = re.sub(r'\bsquared (\w+)', r'\g<1>^2', input)
m = re.search(r'\b(\w+) squared', input)
if m and self.isValidUnit(m.group(1)):
input = re.sub(r'\b(\w+) squared', r'\g<1>^2', input)
m = re.search(r'\bsq (\w+)', input)
if m and self.isValidUnit(m.group(1)):
input = re.sub(r'\bsq (\w+)', r'\g<1>^2', input)
m = re.search(r'\b(\w+) cubed', input)
if m and self.isValidUnit(m.group(1)):
input = re.sub(r'\b(\w+) cubed', r'\g<1>^3', input)
m = re.search(r'\bcubic (\w+)', input)
if m and self.isValidUnit(m.group(1)):
input = re.sub(r'\bcubic (\w+)', r'\g<1>^3', input)
service = NumberService()
m = re.search(r'\b(\w+) to the (\w+)( power)?', input)
if m and self.isValidUnit(m.group(1)):
if m.group(2) in service.__ordinals__:
exp = service.parseMagnitude(m.group(2))
input = re.sub(r'\b(\w+) to the (\w+)( power)?',
r'\g<1>^' + str(exp), input)
return input
input = re.sub(r'\sper\s', r' / ', input)
input = handleExponents(input)
return input
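        # For example (a sketch of the rewrites above): "metres per second squared"
        # becomes "metres / second^2" before unit extraction.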
def parseUnits(self, input):
"""Carries out a conversion (represented as a string) and returns the
result as a human-readable string.
Args:
input (str): Text representing a unit conversion, which should
include a magnitude, a description of the initial units,
and a description of the target units to which the quantity
should be converted.
Returns:
            A human-readable string representing the converted quantity and its new
units.
"""
quantity = self.convert(input)
units = ' '.join(str(quantity.units).split(' ')[1:])
return NumberService.parseMagnitude(quantity.item()) + " " + units
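        # Usage sketch (assuming the quantities and NumberService dependencies are
        # available): ConversionService().parseUnits("fifty kilograms to pounds")
        # returns a human-readable magnitude followed by the target units.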
def isValidUnit(self, w):
"""Checks if a string represents a valid quantities unit.
Args:
w (str): A string to be tested against the set of valid
quantities units.
Returns:
True if the string can be used as a unit in the quantities
module.
"""
bad = set(['point', 'a'])
if w in bad:
return False
try:
pq.Quantity(0.0, w)
return True
except:
return w == '/'
def extractUnits(self, input):
"""Collects all the valid units from an input string. Works by
        appending consecutive words from the string and cross-referencing
them with a set of valid units.
Args:
input (str): Some text which hopefully contains descriptions
of different units.
Returns:
A list of strings, each entry in which is a valid quantities
unit.
"""
input = self._preprocess(input)
units = []
description = ""
for w in input.split(' '):
if self.isValidUnit(w) or w == '/':
if description:
description += " "
description += w
else:
if description:
units.append(description)
description = ""
if description:
units.append(description)
return units
def convert(self, input):
"""Converts a string representation of some quantity of units into a
quantities object.
Args:
input (str): A textual representation of some quantity of units,
e.g., "fifty kilograms".<|fim▁hole|>
Returns:
A quantities object representing the described quantity and its
units.
"""
input = self._preprocess(input)
n = NumberService().longestNumber(input)
units = self.extractUnits(input)
# Convert to quantity object, attempt conversion
quantity = pq.Quantity(float(n), units[0])
quantity.units = units[1]
return quantity<|fim▁end|>
| |
<|file_name|>list_file.py<|end_file_name|><|fim▁begin|># This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# list_files.py
# Copyright (C) 2015 Fracpete (pythonwekawrapper at gmail dot com)
import traceback
import tempfile
import weka.core.jvm as jvm
from weka.flow.control import Flow
from weka.flow.source import ListFiles
from weka.flow.sink import Console
def main():
"""
Just runs some example code.
"""
# setup the flow
flow = Flow(name="list files")
# flow.print_help()
listfiles = ListFiles()
listfiles.config["dir"] = str(tempfile.gettempdir())
listfiles.config["list_files"] = True
listfiles.config["list_dirs"] = False
listfiles.config["recursive"] = False
listfiles.config["regexp"] = ".*r.*"
# listfiles.print_help()
flow.actors.append(listfiles)
console = Console()<|fim▁hole|>
# run the flow
msg = flow.setup()
if msg is None:
print("\n" + flow.tree + "\n")
msg = flow.execute()
if msg is not None:
print("Error executing flow:\n" + msg)
else:
print("Error setting up flow:\n" + msg)
flow.wrapup()
flow.cleanup()
if __name__ == "__main__":
try:
jvm.start()
main()
except Exception, e:
print(traceback.format_exc())
finally:
jvm.stop()<|fim▁end|>
|
console.config["prefix"] = "Match: "
# console.print_help()
flow.actors.append(console)
|
<|file_name|>bitcounter-range.py<|end_file_name|><|fim▁begin|>from microbit_stub import *
while True:
if button_a.is_pressed():
for i in range(5):<|fim▁hole|> else:
display.set_pixel(i, 0, 9)
sleep(200)
break
elif button_b.is_pressed():
n = 0
for i in range(5):
if display.get_pixel(i, 0):
n = n + 2 ** i
display.clear()
display.show(str(n))
sleep(1000)
display.clear()
break
else:
sleep(50)<|fim▁end|>
|
if display.get_pixel(i, 0):
display.set_pixel(i, 0, 0)
sleep(10)
|
<|file_name|>processes.py<|end_file_name|><|fim▁begin|># stdlib
from collections import namedtuple
# project
from resources import (
agg,
ResourcePlugin,
SnapshotDescriptor,
SnapshotField,
)
from utils.subprocess_output import get_subprocess_output
class Processes(ResourcePlugin):
RESOURCE_KEY = "processes"
FLUSH_INTERVAL = 1 # in minutes
def describe_snapshot(self):
return SnapshotDescriptor(
1,
SnapshotField("user", 'str', aggregator=agg.append, temporal_aggregator=agg.append),
SnapshotField("pct_cpu", 'float'),
SnapshotField("pct_mem", 'float'),
SnapshotField("vsz", 'int'),
SnapshotField("rss", 'int'),
SnapshotField("family", 'str', aggregator=None, temporal_aggregator=None,
group_on=True, temporal_group_on=True),
SnapshotField("ps_count", 'int'))
def _get_proc_list(self):
# Get output from ps
try:
process_exclude_args = self.config.get('exclude_process_args', False)
if process_exclude_args:
ps_arg = 'aux'
else:
ps_arg = 'auxww'
output, _, _ = get_subprocess_output(['ps', ps_arg], self.log)
processLines = output.splitlines() # Also removes a trailing empty line
except Exception:
self.log.exception('Cannot get process list')
raise
del processLines[0] # Removes the headers
processes = []
for line in processLines:
line = line.split(None, 10)
processes.append(map(lambda s: s.strip(), line))
return processes
@staticmethod
def group_by_family(o):
return o[5]
@staticmethod
def filter_by_usage(o):
# keep everything over 1% (cpu or ram)
return o[0] > 1 or o[1] > 1
def _parse_proc_list(self, processes):
def _compute_family(command):
if command.startswith('['):
return 'kernel'
else:
return (command.split()[0]).split('/')[-1]
PSLine = namedtuple("PSLine", "user,pid,pct_cpu,pct_mem,vsz,rss,tty,stat,started,time,command")
self.start_snapshot()
for line in processes:
try:
psl = PSLine(*line)
self.add_to_snapshot([psl.user,
float(psl.pct_cpu),
float(psl.pct_mem),
int(psl.vsz),
int(psl.rss),
_compute_family(psl.command),
1])
except Exception:
pass<|fim▁hole|> group_by=self.group_by_family,
filter_by=self.filter_by_usage)
def check(self):
self._parse_proc_list(self._get_proc_list())<|fim▁end|>
|
self.end_snapshot(group_by=self.group_by_family)
def flush_snapshots(self, snapshot_group):
self._flush_snapshots(snapshot_group=snapshot_group,
|
<|file_name|>test.py<|end_file_name|><|fim▁begin|>import sched, time
s = sched.scheduler(time.time, time.sleep)
def print_time():
print ("From print_time", time.time())
<|fim▁hole|> s.enter(5, 1, print_time, ())
s.enter(10, 1, print_time, ())
s.run()
print (time.time())
print_some_times()<|fim▁end|>
|
def print_some_times():
print (time.time())
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.utils.translation import ugettext as _<|fim▁hole|>
class Superhero(ModelBase):
name = models.CharField(max_length=256, editable=False)
class Meta:
verbose_name_plural = _("Superheroes")<|fim▁end|>
|
from django.db import models
from jmbo.models import ModelBase
|
<|file_name|>publish-docker.ts<|end_file_name|><|fim▁begin|>import { cd, exec } from 'shelljs';
import * as inquirer from 'inquirer';
import * as semver from 'semver';
const prompt = inquirer.createPromptModule();
async function release() {
cd(`${__dirname}/docker`);
const { channel } = await prompt([{
type: 'list',
name: 'channel',
message: 'To which channel would you like to release?',
choices: ['dev', 'next', 'latest'],
filter: function(val) {
return val.toLowerCase();
}
}]);
console.log(channel);
const result = exec(`npm show vaultage@${channel} version`);
const rawVersion = result.stdout.split('\n')[0];
const version = semver.valid(rawVersion);
if (version === null) {<|fim▁hole|>
console.log(`Last version on channel ${channel} is ${version}`);
exec(`docker build --rm --no-cache --build-arg VAULTAGE_CHANNEL=${channel} --tag hmil/vaultage:${version} .`);
exec(`docker tag hmil/vaultage:${version} hmil/vaultage:${channel}`);
exec(`docker push hmil/vaultage:${channel}`);
exec(`docker push hmil/vaultage:${version}`);
}
release();<|fim▁end|>
|
throw new Error(`Invalid version: ${version}`)
}
|
<|file_name|>3_prime_palindrome.py<|end_file_name|><|fim▁begin|>import math
def is_palindrome(n):
s = str(n)
return s == s[::-1]
def is_prime(n):
if n <= 1:
return False
if n % 2 == 0 and n != 2:
return False
if n == 2:<|fim▁hole|> while i <= root:
if n % i == 0:
return False
i += 2
return True
i = 999
while i > 0:
if not is_palindrome(i):
i -= 1
continue
if not is_prime(i):
i -= 1
continue
print i
break<|fim▁end|>
|
return True
root = math.sqrt(n)
i = 3
|
<|file_name|>makinframe.py<|end_file_name|><|fim▁begin|>from PySide import QtCore, QtGui
class MakinFrame(QtGui.QFrame):
mousegeser = QtCore.Signal(int,int)
def __init__(self,parent=None):
super(MakinFrame,self).__init__(parent)
self.setMouseTracking(True)
def setMouseTracking(self, flag):
def recursive_set(parent):
for child in parent.findChildren(QtCore.QObject):
try:
child.setMouseTracking(flag)
except:
pass
recursive_set(child)
QtGui.QWidget.setMouseTracking(self,flag)
recursive_set(self)
def mouseMoveEvent(self, me):
a = QtGui.QFrame.mouseMoveEvent(self,me)
self.mousegeser.emit(me.x(), me.y())<|fim▁hole|> return a<|fim▁end|>
| |
<|file_name|>issue-24780.rs<|end_file_name|><|fim▁begin|>// Verify that '>' is not both expected and found at the same time, as it used
// to happen in #24780. For example, following should be an error:<|fim▁hole|>// expected one of ..., `>`, ... found `>`.
fn foo() -> Vec<usize>> { //~ ERROR expected one of `!`, `+`, `::`, `;`, `where`, or `{`, found `>`
Vec::new()
}
fn main() {}<|fim▁end|>
| |
<|file_name|>1.0.0_Player.js<|end_file_name|><|fim▁begin|>/* *************************
* "CLASS": Player
* *************************/
function Player(x, y){
/* ### ATTRIBUTES ### */
Entity.call(this, x, y);
this.vx = 0;
this.vy = 0;
this.currentMaxHealth = 2;
this.health = 2;
this.sprite = new Sprite('res/spritesheet.png', [0, 0], [32,32] , 12, [0,1,2,3,4,5,6,7]);
this.speed = STARTING_PLAYER_SPEED;
this.isBlocking = false;
this.blockRadius = (PLAYER_SPRITE_WIDTH/2) + BLOCK_RADIUS;
this.radius = PLAYER_SPRITE_WIDTH/2;
this.handle = PLAYER_HANDLE; // the ability to turn better
this.teleportRange = 100;
this.bulletRange = 180;
this.fireDelay = 3*1000000; // in microseconds
/* METHODS */
this.checkBoundaries = function(){
if(this.x + this.sprite.width >= canvas.width){
this.x = canvas.width - this.sprite.width;
this.vx /= 2;
}
else if(this.x <= 0){
this.x = 0;
this.vx /= 2;
}
if(this.y + this.sprite.height >= canvas.height){
this.y = canvas.height - this.sprite.height;
this.vy /= 2;
}
else if(this.y <= 0){
this.y = 0;
this.vy /= 2;
}
}
this.update = function(dt){
this.sprite.update(dt);
this.checkEnemiesCollision();<|fim▁hole|>
this.vx *= PLAYER_FRICTION;
this.vy *= PLAYER_FRICTION;
this.x += this.vx;
this.y += this.vy;
this.checkBoundaries();
};
this.render = function(){
renderEntity(this);
drawBar(this.x, this.y-12, this.sprite.width, 6, this.health, this.currentMaxHealth, true, "green");
//posx, posy, size, width, state, maxState, horizontal, colorInside
};
this.checkEnemiesCollision = function(){
for(var i = 0; i<enemies.length; i++){
var enemy = enemies[i];
if(circleCollision(this, enemy) ){
enemy.destroy();
createExplosion(enemy.x, enemy.y);
this.health--;
this.checkHealth();
}
}
};
this.checkHealth = function(){
if(this.health <= 0){
//todo upgrade menu, so game over instead
alert("Game over! You survived for " + gameTime.toFixed(2) + " seconds!");
location.reload(true);
}
}
/*this.block = function(){
this.isBlocking = true;
var blockX = this.x + (this.sprite.width/2);
var blockY = this.y + (this.sprite.height/2);
daux.beginPath();
daux.arc(blockX, blockY, this.blockRadius, 0, Math.PI*2, true);
daux.stroke();
setTimeout(function(){
daux.clearRect(0, 0, auxcanvas.width, auxcanvas.height);
}, 50);
setTimeout(function(){
player.isBlocking = false;
}, BLOCK_DELAY);
};*/
return this;
}
var PLAYER_START_X = (canvas.width/2) - 32/2;
var PLAYER_START_Y = (canvas.height/2) - 32/2;
var player = new Player(PLAYER_START_X, PLAYER_START_Y);<|fim▁end|>
| |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>document.getElementById('input_search').onfocus = function () {<|fim▁hole|> document.getElementById('search').classList.add('activeSearch');
};
document.getElementById('input_search').onblur = function () {
document.getElementById('search').classList.remove('activeSearch');
};
try {
window.$ = window.jQuery = require('jquery');
require('./navbar');
require('./horizontalScroll');
} catch (e) {}<|fim▁end|>
| |
<|file_name|>object_safety.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! "Object safety" refers to the ability for a trait to be converted
//! to an object. In general, traits may only be converted to an
//! object if all of their methods meet certain criteria. In particular,
//! they must:
//!
//! - have a suitable receiver from which we can extract a vtable;
//! - not reference the erased type `Self` except for in this receiver;
//! - not have generic type parameters
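//!
//! For example (illustrative only): `trait Draw { fn draw(&self); }` meets all
//! three criteria and can be made into an object, while
//! `trait Cmp { fn cmp(&self, other: Self) -> bool; }` mentions the erased type
//! `Self` outside the receiver and therefore cannot.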
use super::supertraits;
use super::elaborate_predicates;
use middle::subst::{self, SelfSpace, TypeSpace};
use middle::traits;
use middle::ty::{self, ToPolyTraitRef, Ty};
use std::rc::Rc;
use syntax::ast;
#[derive(Debug)]
pub enum ObjectSafetyViolation<'tcx> {
/// Self : Sized declared on the trait
SizedSelf,
    /// Supertrait reference references `Self` in an illegal location
/// (e.g. `trait Foo : Bar<Self>`)
SupertraitSelf,
/// Method has something illegal
Method(Rc<ty::Method<'tcx>>, MethodViolationCode),
}
/// Reasons a method might not be object-safe.
#[derive(Copy,Clone,Debug)]
pub enum MethodViolationCode {
/// e.g., `fn foo()`
StaticMethod,
/// e.g., `fn foo(&self, x: Self)` or `fn foo(&self) -> Self`
ReferencesSelf,
/// e.g., `fn foo<A>()`
Generic,
}
pub fn is_object_safe<'tcx>(tcx: &ty::ctxt<'tcx>,
trait_def_id: ast::DefId)
-> bool
{
// Because we query yes/no results frequently, we keep a cache:
let def = tcx.lookup_trait_def(trait_def_id);
let result = def.object_safety().unwrap_or_else(|| {
let result = object_safety_violations(tcx, trait_def_id).is_empty();
// Record just a yes/no result in the cache; this is what is
// queried most frequently. Note that this may overwrite a
// previous result, but always with the same thing.
def.set_object_safety(result);
result
});
debug!("is_object_safe({:?}) = {}", trait_def_id, result);
result
}
pub fn object_safety_violations<'tcx>(tcx: &ty::ctxt<'tcx>,
trait_def_id: ast::DefId)
-> Vec<ObjectSafetyViolation<'tcx>>
{
traits::supertrait_def_ids(tcx, trait_def_id)
.flat_map(|def_id| object_safety_violations_for_trait(tcx, def_id))
.collect()
}
fn object_safety_violations_for_trait<'tcx>(tcx: &ty::ctxt<'tcx>,
trait_def_id: ast::DefId)
-> Vec<ObjectSafetyViolation<'tcx>>
{
// Check methods for violations.
let mut violations: Vec<_> =
tcx.trait_items(trait_def_id).iter()
.flat_map(|item| {
match *item {
ty::MethodTraitItem(ref m) => {
object_safety_violation_for_method(tcx, trait_def_id, &**m)
.map(|code| ObjectSafetyViolation::Method(m.clone(), code))
.into_iter()
}
_ => None.into_iter(),
}
})
.collect();
// Check the trait itself.
if trait_has_sized_self(tcx, trait_def_id) {
violations.push(ObjectSafetyViolation::SizedSelf);
}
if supertraits_reference_self(tcx, trait_def_id) {
violations.push(ObjectSafetyViolation::SupertraitSelf);
}
debug!("object_safety_violations_for_trait(trait_def_id={:?}) = {:?}",
trait_def_id,
violations);
<|fim▁hole|> trait_def_id: ast::DefId)
-> bool
{
let trait_def = tcx.lookup_trait_def(trait_def_id);
let trait_ref = trait_def.trait_ref.clone();
let trait_ref = trait_ref.to_poly_trait_ref();
let predicates = tcx.lookup_super_predicates(trait_def_id);
predicates
.predicates
.into_iter()
.map(|predicate| predicate.subst_supertrait(tcx, &trait_ref))
.any(|predicate| {
match predicate {
ty::Predicate::Trait(ref data) => {
// In the case of a trait predicate, we can skip the "self" type.
data.0.trait_ref.substs.types.get_slice(TypeSpace)
.iter()
.cloned()
.any(is_self)
}
ty::Predicate::Projection(..) |
ty::Predicate::TypeOutlives(..) |
ty::Predicate::RegionOutlives(..) |
ty::Predicate::Equate(..) => {
false
}
}
})
}
fn trait_has_sized_self<'tcx>(tcx: &ty::ctxt<'tcx>,
trait_def_id: ast::DefId)
-> bool
{
let trait_def = tcx.lookup_trait_def(trait_def_id);
let trait_predicates = tcx.lookup_predicates(trait_def_id);
generics_require_sized_self(tcx, &trait_def.generics, &trait_predicates)
}
fn generics_require_sized_self<'tcx>(tcx: &ty::ctxt<'tcx>,
generics: &ty::Generics<'tcx>,
predicates: &ty::GenericPredicates<'tcx>)
-> bool
{
let sized_def_id = match tcx.lang_items.sized_trait() {
Some(def_id) => def_id,
None => { return false; /* No Sized trait, can't require it! */ }
};
// Search for a predicate like `Self : Sized` amongst the trait bounds.
let free_substs = tcx.construct_free_substs(generics, ast::DUMMY_NODE_ID);
let predicates = predicates.instantiate(tcx, &free_substs).predicates.into_vec();
elaborate_predicates(tcx, predicates)
.any(|predicate| {
match predicate {
ty::Predicate::Trait(ref trait_pred) if trait_pred.def_id() == sized_def_id => {
is_self(trait_pred.0.self_ty())
}
ty::Predicate::Projection(..) |
ty::Predicate::Trait(..) |
ty::Predicate::Equate(..) |
ty::Predicate::RegionOutlives(..) |
ty::Predicate::TypeOutlives(..) => {
false
}
}
})
}
/// Returns `Some(_)` if this method makes the containing trait not object safe.
fn object_safety_violation_for_method<'tcx>(tcx: &ty::ctxt<'tcx>,
trait_def_id: ast::DefId,
method: &ty::Method<'tcx>)
-> Option<MethodViolationCode>
{
// Any method that has a `Self : Sized` requisite is otherwise
// exempt from the regulations.
if generics_require_sized_self(tcx, &method.generics, &method.predicates) {
return None;
}
virtual_call_violation_for_method(tcx, trait_def_id, method)
}
/// We say a method is *vtable safe* if it can be invoked on a trait
/// object. Note that object-safe traits can have some
/// non-vtable-safe methods, so long as they require `Self:Sized` or
/// otherwise ensure that they cannot be used when `Self=Trait`.
pub fn is_vtable_safe_method<'tcx>(tcx: &ty::ctxt<'tcx>,
trait_def_id: ast::DefId,
method: &ty::Method<'tcx>)
-> bool
{
virtual_call_violation_for_method(tcx, trait_def_id, method).is_none()
}
/// Returns `Some(_)` if this method cannot be called on a trait
/// object; this does not necessarily imply that the enclosing trait
/// is not object safe, because the method might have a where clause
/// `Self:Sized`.
fn virtual_call_violation_for_method<'tcx>(tcx: &ty::ctxt<'tcx>,
trait_def_id: ast::DefId,
method: &ty::Method<'tcx>)
-> Option<MethodViolationCode>
{
// The method's first parameter must be something that derefs (or
// autorefs) to `&self`. For now, we only accept `self`, `&self`
// and `Box<Self>`.
match method.explicit_self {
ty::StaticExplicitSelfCategory => {
return Some(MethodViolationCode::StaticMethod);
}
ty::ByValueExplicitSelfCategory |
ty::ByReferenceExplicitSelfCategory(..) |
ty::ByBoxExplicitSelfCategory => {
}
}
// The `Self` type is erased, so it should not appear in list of
// arguments or return type apart from the receiver.
let ref sig = method.fty.sig;
for &input_ty in &sig.0.inputs[1..] {
if contains_illegal_self_type_reference(tcx, trait_def_id, input_ty) {
return Some(MethodViolationCode::ReferencesSelf);
}
}
if let ty::FnConverging(result_type) = sig.0.output {
if contains_illegal_self_type_reference(tcx, trait_def_id, result_type) {
return Some(MethodViolationCode::ReferencesSelf);
}
}
// We can't monomorphize things like `fn foo<A>(...)`.
if !method.generics.types.is_empty_in(subst::FnSpace) {
return Some(MethodViolationCode::Generic);
}
None
}
fn contains_illegal_self_type_reference<'tcx>(tcx: &ty::ctxt<'tcx>,
trait_def_id: ast::DefId,
ty: Ty<'tcx>)
-> bool
{
// This is somewhat subtle. In general, we want to forbid
// references to `Self` in the argument and return types,
// since the value of `Self` is erased. However, there is one
// exception: it is ok to reference `Self` in order to access
// an associated type of the current trait, since we retain
// the value of those associated types in the object type
// itself.
//
// ```rust
// trait SuperTrait {
// type X;
// }
//
// trait Trait : SuperTrait {
// type Y;
// fn foo(&self, x: Self) // bad
// fn foo(&self) -> Self // bad
// fn foo(&self) -> Option<Self> // bad
// fn foo(&self) -> Self::Y // OK, desugars to next example
// fn foo(&self) -> <Self as Trait>::Y // OK
// fn foo(&self) -> Self::X // OK, desugars to next example
// fn foo(&self) -> <Self as SuperTrait>::X // OK
// }
// ```
//
// However, it is not as simple as allowing `Self` in a projected
// type, because there are illegal ways to use `Self` as well:
//
// ```rust
// trait Trait : SuperTrait {
// ...
// fn foo(&self) -> <Self as SomeOtherTrait>::X;
// }
// ```
//
// Here we will not have the type of `X` recorded in the
// object type, and we cannot resolve `Self as SomeOtherTrait`
// without knowing what `Self` is.
let mut supertraits: Option<Vec<ty::PolyTraitRef<'tcx>>> = None;
let mut error = false;
ty.maybe_walk(|ty| {
match ty.sty {
ty::TyParam(ref param_ty) => {
if param_ty.space == SelfSpace {
error = true;
}
false // no contained types to walk
}
ty::TyProjection(ref data) => {
// This is a projected type `<Foo as SomeTrait>::X`.
// Compute supertraits of current trait lazily.
if supertraits.is_none() {
let trait_def = tcx.lookup_trait_def(trait_def_id);
let trait_ref = ty::Binder(trait_def.trait_ref.clone());
supertraits = Some(traits::supertraits(tcx, trait_ref).collect());
}
// Determine whether the trait reference `Foo as
// SomeTrait` is in fact a supertrait of the
// current trait. In that case, this type is
// legal, because the type `X` will be specified
// in the object type. Note that we can just use
// direct equality here because all of these types
// are part of the formal parameter listing, and
// hence there should be no inference variables.
let projection_trait_ref = ty::Binder(data.trait_ref.clone());
let is_supertrait_of_current_trait =
supertraits.as_ref().unwrap().contains(&projection_trait_ref);
if is_supertrait_of_current_trait {
false // do not walk contained types, do not report error, do collect $200
} else {
true // DO walk contained types, POSSIBLY reporting an error
}
}
_ => true, // walk contained types, if any
}
});
error
}
fn is_self<'tcx>(ty: Ty<'tcx>) -> bool {
match ty.sty {
ty::TyParam(ref data) => data.space == subst::SelfSpace,
_ => false,
}
}<|fim▁end|>
|
violations
}
fn supertraits_reference_self<'tcx>(tcx: &ty::ctxt<'tcx>,
|
<|file_name|>app.js<|end_file_name|><|fim▁begin|>;(function() {
var ValueDirective = function() {
};
_.extend(ValueDirective.prototype, {
matcher: function($el) {
return $el.data('value');
},
run: function($el) {
$el.html($el.data('value'));
}
});
window.app = new xin.App({
el: xin.$('body'),
directives: {
'[data-role=app]': xin.directive.AppDirective,
'[data-role]': xin.directive.RoleDirective,
'[data-uri]': xin.directive.URIDirective,
'[data-bind]': xin.directive.BindDirective,
'[data-value]': ValueDirective,
'[data-background]': xin.directive.BackgroundDirective
},
middlewares: {
'AuthMiddleware': AuthMiddleware
},
providers: {
}
});<|fim▁hole|> db: null,
user: {},
config: function(param) {
if(param) {
return window.config[param];
} else {
return window.config;
}
},
invoke: function(api, param, cb) {
api = api.split('.');
if(typeof(param) == "function") {
window.API[api[0]][api[1]](param);
} else {
delete arguments[0];
var opt = [],
j = 0;
for (var i in arguments) {
opt.push(arguments[i]);
}
window.API[api[0]][api[1]].apply(this, opt);
}
},
loading: {
show: function(options) {
ActivityIndicator.show(options);
},
hide: function() {
ActivityIndicator.hide();
}
},
storage: function(type, key, value, cb) {
if (!key && !value) return;
if(key && !value) {
var res = window[type].getItem(key);
try {
res = JSON.parse(res);
} catch(e) {}
if(cb) cb(res);
} else if(key && value) {
if(typeof value !== "string") value = JSON.stringify(value);
window[type].setItem(key, value);
}
},
sessionStorage: function(key, value) {
if(typeof value === 'function') {
this.storage('sessionStorage', key, undefined, value);
} else {
this.storage('sessionStorage', key, value);
}
},
localStorage: function(key, value) {
if(typeof value === 'function') {
this.storage('localStorage', key, undefined, value);
} else {
this.storage('localStorage', key, value);
}
},
clearStorage: function(type) { // type = localStorage || sessionStorage
if(!type) {
localStorage.clear();
sessionStorage.clear();
return;
}
if(type === 'localStorage' || type === 'sessionStorage') {
window[type].clear();
}
}
});
})();<|fim▁end|>
|
_.extend(app, {
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Linguistic and other taggers.
Tagging each token in a sentence with supplementary information,
such as its part-of-speech (POS) tag, and named entity (NE) tag.
"""
__all__ = [
"PerceptronTagger",
"pos_tag",
"pos_tag_sents",
"tag_provinces",
"chunk_parse",
"NER",
]
from pythainlp.tag.locations import tag_provinces
from pythainlp.tag.pos_tag import pos_tag, pos_tag_sents
from pythainlp.tag._tag_perceptron import PerceptronTagger<|fim▁hole|>from pythainlp.tag.named_entity import NER<|fim▁end|>
|
from pythainlp.tag.chunk import chunk_parse
|
<|file_name|>inputs.py<|end_file_name|><|fim▁begin|>import numpy as np
import tensorflow as tf
import os
def get_inputs(split, config):
split_dir = config['split_dir']
data_dir = config['data_dir']
dataset = config['dataset']
split_file = os.path.join(split_dir, dataset, split + '.lst')
filename_queue = get_filename_queue(split_file, os.path.join(data_dir, dataset))
if dataset == 'mnist':
image = get_inputs_mnist(filename_queue, config)
config['output_size'] = 28
config['c_dim'] = 1
elif dataset == "cifar-10":
image = get_inputs_cifar10(filename_queue, config)
config['output_size'] = 32
config['c_dim'] = 3
else:
image = get_inputs_image(filename_queue, config)
image_batch = create_batch([image], config['batch_size'])
return image_batch
def get_inputs_image(filename_queue, config):
output_size = config['output_size']
image_size = config['image_size']
c_dim = config['c_dim']
# Read a record, getting filenames from the filename_queue.
reader = tf.WholeFileReader()
key, value = reader.read(filename_queue)
image = tf.image.decode_image(value, channels=c_dim)
image = tf.cast(image, tf.float32)/255.
image_shape = tf.shape(image)
image_height, image_width = image_shape[0], image_shape[1]
offset_height = tf.cast((image_height - image_size)/2, tf.int32)
offset_width = tf.cast((image_width - image_size)/2, tf.int32)
image = tf.image.crop_to_bounding_box(image, offset_height, offset_width, image_size, image_size)
image = tf.image.resize_images(image, [output_size, output_size])
image.set_shape([output_size, output_size, c_dim])
return image
def get_inputs_mnist(filename_queue, config):
reader = tf.TFRecordReader()
_, serialized_example = reader.read(filename_queue)
features = tf.parse_single_example(
serialized_example,
# Defaults are not specified since all keys are required.
features={
'height': tf.FixedLenFeature([], tf.int64),
'width': tf.FixedLenFeature([], tf.int64),
'depth': tf.FixedLenFeature([], tf.int64),
'label': tf.FixedLenFeature([], tf.int64),
'image_raw': tf.FixedLenFeature([], tf.string),
})
image = tf.decode_raw(features['image_raw'], tf.uint8)
image.set_shape([784])
image = tf.reshape(image, [28, 28, 1])
image = tf.cast(image, tf.float32) / 255.
# Convert label from a scalar uint8 tensor to an int32 scalar.
label = tf.cast(features['label'], tf.int32)
binary_image = (tf.random_uniform(image.get_shape()) <= image)
binary_image = tf.cast(binary_image, tf.float32)
return binary_image
def get_inputs_cifar10(filename_queue, config):
output_size = config['output_size']
image_size = config['image_size']
c_dim = config['c_dim']
# Dimensions of the images in the CIFAR-10 dataset.
# See http://www.cs.toronto.edu/~kriz/cifar.html for a description of the
# input format.
label_bytes = 1 # 2 for CIFAR-100
image_bytes = 32 * 32 * 3
# Every record consists of a label followed by the image, with a
# fixed number of bytes for each.
record_bytes = label_bytes + image_bytes
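    # For CIFAR-10 this works out to 1 + 32*32*3 = 3073 bytes per record.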
# Read a record, getting filenames from the filename_queue.
reader = tf.FixedLengthRecordReader(record_bytes=record_bytes)
key, value = reader.read(filename_queue)
record = tf.decode_raw(value, tf.uint8)
# The first bytes represent the label, which we convert from uint8->int32.
label = tf.cast(record[0], tf.int32)
# The remaining bytes after the label represent the image, which we reshape
# from [depth * height * width] to [depth, height, width].
#tf.strided_slice(record, [label_bytes], [label_bytes + image_bytes])
image = tf.reshape(record[label_bytes:label_bytes+image_bytes], [3, 32, 32])
image = tf.cast(image, tf.float32)/255.
# Convert from [depth, height, width] to [height, width, depth].
image = tf.transpose(image, [1, 2, 0])
return image<|fim▁hole|> filenames = f.readlines()
filenames = [os.path.join(data_dir, f.strip()) for f in filenames]
for f in filenames:
if not os.path.exists(f):
raise ValueError('Failed to find file: ' + f)
filename_queue = tf.train.string_input_producer(filenames)
return filename_queue
def create_batch(inputs, batch_size=64, min_queue_examples=1000, num_preprocess_threads=12, enqueue_many=False):
# Generate a batch of images and labels by building up a queue of examples.
batch = tf.train.shuffle_batch(
inputs,
batch_size=batch_size,
num_threads=num_preprocess_threads,
capacity=min_queue_examples + 3 * batch_size,
min_after_dequeue=min_queue_examples,
enqueue_many=enqueue_many,
)
return batch<|fim▁end|>
|
def get_filename_queue(split_file, data_dir):
with open(split_file, 'r') as f:
|
<|file_name|>golangDepsUpdate.py<|end_file_name|><|fim▁begin|>#!/bin/python
# -*- coding: utf-8 -*-
# ####################################################################
# gofed-ng - Golang system
# Copyright (C) 2016 Fridolin Pokorny, [email protected]
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# ####################################################################
import sys
from tqdm import tqdm
from common.helpers.output import log
from scenario import Scenario, SwitchAttr, Flag
class GolangDepsUpdate(Scenario):
''' update dependencies of Golang projects packaged in Fedora '''
max_depth = SwitchAttr("--max-depth", int, default=None,
help="specify commit depth limit")
since_date = SwitchAttr("--since-date", str, default=None,
help="specify since date")
skip_errors = Flag("--skip-errors",
help="errors will be reported, but the computation will not be interrupted")
def main(self):
with self.get_system() as system:
golang_pkgs = system.async_call.goland_package_listing()
stored_projects = system.async_call.deps_project_listing()
for pkg in golang_pkgs.result:
if not pkg['name'].startswith('golang-github-'):
log.warning("Skipping %s" % pkg['name'])
                    # TODO: remove once support for mercurial and full package->upstream translation is available
continue
try:
raise ValueError("value error")
print("Inspecting '%s'" % pkg['name'])
upstream_url = system.async_call.golang_package2upstream(pkg['name'])
if pkg['name'] in stored_projects.result:
stored_commits = system.async_call.deps_project_commit_listing(pkg['name'])
else:
stored_commits = None
scm_log = system.async_call.scm_log(upstream_url.result,
max_depth=self.max_depth,
since_date=self.since_date)
for commit in tqdm(scm_log.result):
log.debug("Commit %s project %s" % (commit['hash'], pkg['name']))
if not stored_commits or commit not in stored_commits.result:
file_id = system.async_call.scm_store(upstream_url.result, commit['hash'])
deps = system.async_call.deps_analysis(file_id.result)
system.async_call.deps_store_project(pkg['name'], commit['hash'], commit['time'],
deps.result, deps.meta)
except:
exc_info = sys.exc_info()
if self.skip_errors:
log.error(exc_info[2].print_exc())<|fim▁hole|> else:
raise exc_info
if __name__ == '__main__':
sys.exit(1)<|fim▁end|>
| |
<|file_name|>push.go<|end_file_name|><|fim▁begin|>package distribution // import "github.com/tiborvass/docker/distribution"
import (
"bufio"
"compress/gzip"
"context"
"fmt"
"io"
"github.com/docker/distribution/reference"
"github.com/tiborvass/docker/distribution/metadata"
"github.com/tiborvass/docker/pkg/progress"
"github.com/tiborvass/docker/registry"
"github.com/sirupsen/logrus"
)
// Pusher is an interface that abstracts pushing for different API versions.
type Pusher interface {
// Push tries to push the image configured at the creation of Pusher.
// Push returns an error if any, as well as a boolean that determines whether to retry Push on the next configured endpoint.
//
// TODO(tiborvass): have Push() take a reference to repository + tag, so that the pusher itself is repository-agnostic.
Push(ctx context.Context) error
}
const compressionBufSize = 32768
// NewPusher creates a new Pusher interface that will push to either a v1 or v2
// registry. The endpoint argument contains a Version field that determines
// whether a v1 or v2 pusher will be created. The other parameters are passed
// through to the underlying pusher implementation for use during the actual
// push operation.
func NewPusher(ref reference.Named, endpoint registry.APIEndpoint, repoInfo *registry.RepositoryInfo, imagePushConfig *ImagePushConfig) (Pusher, error) {
switch endpoint.Version {
case registry.APIVersion2:
return &v2Pusher{
v2MetadataService: metadata.NewV2MetadataService(imagePushConfig.MetadataStore),
ref: ref,
endpoint: endpoint,
repoInfo: repoInfo,
config: imagePushConfig,
}, nil
case registry.APIVersion1:
return nil, fmt.Errorf("protocol version %d no longer supported. Please contact admins of registry %s", endpoint.Version, endpoint.URL)
}
return nil, fmt.Errorf("unknown version %d for registry %s", endpoint.Version, endpoint.URL)
}
// Push initiates a push operation on ref.
// ref is the specific variant of the image to be pushed.
// If no tag is provided, all tags will be pushed.
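// For example (illustrative values), a ref such as "docker.io/library/busybox:1.31"
// names a single tagged variant, while "docker.io/library/busybox" pushes all
// local tags for that repository.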
func Push(ctx context.Context, ref reference.Named, imagePushConfig *ImagePushConfig) error {
// FIXME: Allow to interrupt current push when new push of same image is done.
// Resolve the Repository name from fqn to RepositoryInfo
repoInfo, err := imagePushConfig.RegistryService.ResolveRepository(ref)
if err != nil {
return err
}
endpoints, err := imagePushConfig.RegistryService.LookupPushEndpoints(reference.Domain(repoInfo.Name))
if err != nil {
return err
}
progress.Messagef(imagePushConfig.ProgressOutput, "", "The push refers to repository [%s]", repoInfo.Name.Name())
associations := imagePushConfig.ReferenceStore.ReferencesByName(repoInfo.Name)
if len(associations) == 0 {
return fmt.Errorf("An image does not exist locally with the tag: %s", reference.FamiliarName(repoInfo.Name))
}
var (
lastErr error
// confirmedV2 is set to true if a push attempt managed to
// confirm that it was talking to a v2 registry. This will
// prevent fallback to the v1 protocol.
confirmedV2 bool
<|fim▁hole|> // confirmedTLSRegistries is a map indicating which registries
// are known to be using TLS. There should never be a plaintext
// retry for any of these.
confirmedTLSRegistries = make(map[string]struct{})
)
for _, endpoint := range endpoints {
if imagePushConfig.RequireSchema2 && endpoint.Version == registry.APIVersion1 {
continue
}
if confirmedV2 && endpoint.Version == registry.APIVersion1 {
logrus.Debugf("Skipping v1 endpoint %s because v2 registry was detected", endpoint.URL)
continue
}
if endpoint.URL.Scheme != "https" {
if _, confirmedTLS := confirmedTLSRegistries[endpoint.URL.Host]; confirmedTLS {
logrus.Debugf("Skipping non-TLS endpoint %s for host/port that appears to use TLS", endpoint.URL)
continue
}
}
logrus.Debugf("Trying to push %s to %s %s", repoInfo.Name.Name(), endpoint.URL, endpoint.Version)
pusher, err := NewPusher(ref, endpoint, repoInfo, imagePushConfig)
if err != nil {
lastErr = err
continue
}
if err := pusher.Push(ctx); err != nil {
// Was this push cancelled? If so, don't try to fall
// back.
select {
case <-ctx.Done():
default:
if fallbackErr, ok := err.(fallbackError); ok {
confirmedV2 = confirmedV2 || fallbackErr.confirmedV2
if fallbackErr.transportOK && endpoint.URL.Scheme == "https" {
confirmedTLSRegistries[endpoint.URL.Host] = struct{}{}
}
err = fallbackErr.err
lastErr = err
logrus.Infof("Attempting next endpoint for push after error: %v", err)
continue
}
}
logrus.Errorf("Not continuing with push after error: %v", err)
return err
}
imagePushConfig.ImageEventLogger(reference.FamiliarString(ref), reference.FamiliarName(repoInfo.Name), "push")
return nil
}
if lastErr == nil {
lastErr = fmt.Errorf("no endpoints found for %s", repoInfo.Name.Name())
}
return lastErr
}
// compress returns an io.ReadCloser which will supply a compressed version of
// the provided Reader. The caller must close the ReadCloser after reading the
// compressed data.
//
// Note that this function returns a reader instead of taking a writer as an
// argument so that it can be used with httpBlobWriter's ReadFrom method.
// Using httpBlobWriter's Write method would send a PATCH request for every
// Write call.
//
// The second return value is a channel that gets closed when the goroutine
// is finished. This allows the caller to make sure the goroutine finishes
// before it releases any resources connected with the reader that was
// passed in.
func compress(in io.Reader) (io.ReadCloser, chan struct{}) {
compressionDone := make(chan struct{})
pipeReader, pipeWriter := io.Pipe()
// Use a bufio.Writer to avoid excessive chunking in HTTP request.
bufWriter := bufio.NewWriterSize(pipeWriter, compressionBufSize)
compressor := gzip.NewWriter(bufWriter)
go func() {
_, err := io.Copy(compressor, in)
if err == nil {
err = compressor.Close()
}
if err == nil {
err = bufWriter.Flush()
}
if err != nil {
pipeWriter.CloseWithError(err)
} else {
pipeWriter.Close()
}
close(compressionDone)
}()
return pipeReader, compressionDone
}<|fim▁end|>
| |
<|file_name|>validator.py<|end_file_name|><|fim▁begin|>import os
from zeroinstall.injector import namespaces
from zeroinstall.injector.reader import InvalidInterface, load_feed
from xml.dom import minidom, Node, XMLNS_NAMESPACE
import tempfile
from logging import warn, info
group_impl_attribs = ['version', 'version-modifier', 'released', 'main', 'stability', 'arch', 'license', 'doc-dir', 'self-test', 'langs', 'local-path']
known_elements = {
'interface' : ['uri', 'min-injector-version', 'main'], # (main is deprecated)
'name' : [],
'summary' : [],
'description' : [],
'needs-terminal' : [],
'homepage' : [],
'category' : ['type'],
'icon' : ['type', 'href'],
'feed' : ['src', 'arch'],
'feed-for' : ['interface'],
'group' : group_impl_attribs,
'implementation' : ['id'] + group_impl_attribs,
'package-implementation' : ['package', 'main', 'distributions'],
'manifest-digest' : ['sha1new', 'sha256'],
'command' : ['name', 'path', 'shell-command'],
'arg' : [],
'archive' : ['href', 'size', 'extract', 'type', 'start-offset'],
'recipe' : [],
'requires' : ['interface', 'use'],
'runner' : ['interface', 'use', 'command'],
'version' : ['not-before', 'before'],
'environment' : ['name', 'insert', 'value', 'default', 'mode'],
'executable-in-var' : ['name', 'command'],
'executable-in-path' : ['name', 'command'],
#'overlay' : ['src', 'mount-point'],
}
def checkElement(elem):
if elem.namespaceURI != namespaces.XMLNS_IFACE:
info("Note: Skipping unknown (but namespaced) element <%s>", elem.localName)
return # Namespaces elements are OK
if elem.localName not in known_elements:
warn("Unknown Zero Install element <%s>.\nNon Zero-Install elements should be namespaced.", elem.localName)
return
known_attrs = known_elements[elem.localName]
for (uri, name), value in elem.attributes.itemsNS():
if uri == XMLNS_NAMESPACE:
continue # Namespace declarations are fine
if uri:
info("Note: Skipping unknown (but namespaced) attribute '%s'", name)
continue
if name not in known_attrs:
warn("Unknown Zero Install attribute '%s' on <%s>.\nNon Zero-Install attributes should be namespaced.",
name, elem.localName)
for child in elem.childNodes:
if child.nodeType == Node.ELEMENT_NODE:
checkElement(child)
def check(data, warnings = True, implementation_id_alg=None, generate_sizes=False):
fd, tmp_name = tempfile.mkstemp(prefix = '0publish-validate-')
os.close(fd)
try:
tmp_file = file(tmp_name, 'w')
tmp_file.write(data)
tmp_file.close()
try:
feed = load_feed(tmp_name, local=True, implementation_id_alg=implementation_id_alg, generate_sizes=generate_sizes)<|fim▁hole|> raise
except Exception, ex:
warn("Internal error: %s", ex)
raise InvalidInterface(str(ex))
finally:
os.unlink(tmp_name)
if warnings:
doc = minidom.parseString(data)
checkElement(doc.documentElement)
return feed<|fim▁end|>
|
except InvalidInterface, ex:
|
<|file_name|>log.py<|end_file_name|><|fim▁begin|>def read_logfile_by_line(logfile):
"""generator function that yields the log file content line by line"""
with open(logfile, 'r') as f:
for line in f:
yield line
yield None
def parse_commands(log_content):
"""
parse cwl commands from the line-by-line generator of log file content and
returns the commands as a list of command line lists, each corresponding to a step run.
"""
command_list = []
command = []
in_command = False
line = next(log_content)
while(line):
line = line.strip('\n')
if '[job' in line and line.endswith('docker \\'):
line = 'docker \\' # remove the other stuff<|fim▁hole|> in_command = False
command_list.append(command)
command = []
line = next(log_content)
return(command_list)<|fim▁end|>
|
in_command = True
if in_command:
command.append(line.strip('\\').rstrip(' '))
if not line.endswith('\\'):
|
<|file_name|>sale_order.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# © 2017 Comunitea
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import models, fields, api, _
from odoo.tools import float_compare
from lxml import etree
class SaleOrderLineTemplate(models.Model):
_name = 'sale.order.line.template'
_inherit = 'sale.order.line'
product_template = fields.Many2one(
'product.template', string='Product',
domain=[('sale_ok', '=', True), ('product_attribute_count', '=', 0)],
change_default=True, ondelete='restrict', required=True)
order_lines = fields.One2many('sale.order.line', 'template_line',
copy=True)
lines_qty = fields.Integer(compute='_compute_order_lines_qty')
price_subtotal = fields.Monetary(
compute='_compute_amount', string='Subtotal', readonly=True,
store=True)
global_available_stock = fields.\
Float('Stock', related='product_template.global_available_stock')
@api.depends('order_lines.price_subtotal')
def _compute_amount(self):
for line in self:
line.price_subtotal = sum(
[x.price_subtotal for x in line.order_lines])
@api.multi
def unlink(self):
if not self._context.get('unlink_product_line', False):
ctx = self._context.copy()
ctx.update(unlink_template_line=True)
self.mapped('order_lines').with_context(ctx).unlink()
return super(SaleOrderLineTemplate, self).unlink()
@api.multi
def write(self, vals):
for template in self:
line_vals = vals.copy()
if template.lines_qty > 1:
line_vals.pop('product_id', False)
#line_vals.pop('price_unit', False)
line_vals.pop('product_uom_qty', False)
line_vals.pop('purchase_price', False)
line_vals.pop('name', False)
line_vals.pop('sequence', False)
template.order_lines.write(line_vals)
return super(models.Model, self).write(vals)
@api.model
def create(self, vals):
        # Handle create() with order_lines because, when an order is duplicated,
        # the line vals arrive without an order_id
if vals.get('order_lines', False):
for line_vals in vals['order_lines']:
if line_vals[0] == 0:
line_vals[2]['order_id'] = vals.get('order_id', False)
if not self._context.get('no_create_line', False):
            # Make sure the sale.order.line name is the correct one
            # (with the variant reference and attributes)
line_vals = vals.copy()<|fim▁hole|> line_vals['product_id'])
line_vals['name'] = product_vals.display_name
new_line = self.env['sale.order.line'].with_context(
no_create_template_line=True).create(line_vals)
vals['order_lines'] = [(6, 0, [new_line.id])]
vals['name'] = template_product.display_name
return super(
SaleOrderLineTemplate,
self.with_context(no_create_template_line=True)).create(vals)
@api.model
def create_mal(self, vals):
        ## TODO: REVIEW (KIKO). The price of the first variant is not carried over
ctx = self._context.copy()
        # Handle create() with order_lines because, when an order is duplicated,
        # the line vals arrive without an order_id
order_id = vals.get('order_id', False)
if vals.get('order_lines', False):
for line_vals in vals['order_lines']:
if line_vals[0] == 0:
line_vals[2]['order_id'] = vals.get('order_id', False)
if not self._context.get('no_create_line', False):
            # Make sure the sale.order.line name is the correct one
            # (with the variant reference and attributes)
ctx.update(no_create_template_line=True)
line_vals = vals.copy()
orig = True
if orig:
line_vals = vals.copy()
template_product = self.env['product.template'].browse(vals['product_template'])
if template_product.display_name == line_vals['name']:
product_vals = self.env['product.product'].browse(
line_vals['product_id'])
line_vals['name'] = product_vals.display_name
new_line = self.env['sale.order.line'].with_context(ctx).create(line_vals)
vals['order_lines'] = [(6, 0, [new_line.id])]
else:
new_line_ids = self.env['sale.order.line']
template_product = self.env['product.template'].browse(vals['product_template'])
product_id = self.env['product.product'].browse(line_vals['product_id'])
if template_product.display_name == line_vals['name']:
line_vals['name'] = product_id.display_name
line_vals.update({
'product_id': product_id.id,
'product_uom': product_id.uom_id,
'order_id': order_id,
})
order_line = self.env['sale.order.line'].with_context(ctx).new(line_vals)
order_line.product_id_change()
order_line_vals = order_line._convert_to_write(order_line._cache)
new_line_ids |= new_line_ids.with_context(ctx).create(order_line_vals)
vals['order_lines'] = [(6, 0, new_line_ids.ids)]
return super(
SaleOrderLineTemplate,
self.with_context(no_create_template_line=True)).create(vals)
def _compute_order_lines_qty(self):
for template in self:
template.lines_qty = len(template.order_lines)
@api.onchange('product_template')
def onchange_template(self):
if not self.product_template:
return
self.product_id = self.product_template.product_variant_ids[0]
# @api.onchange('product_uom_qty', 'product_uom', 'route_id')
# def _onchange_product_id_check_availability(self):
# return
#
# @api.onchange('product_id')
# def _onchange_product_id_uom_check_availability(self):
# return
#
# @api.onchange('product_uom_qty')
# def _onchange_product_uom_qty(self):
# return
#
# @api.onchange('product_id')
# def _onchange_product_id_set_customer_lead(self):
# return
class SaleOrderLine(models.Model):
_inherit = 'sale.order.line'
@api.multi
@api.depends('product_id')
def _get_global_stock(self):
for line in self:
if line.product_id:
line.global_available_stock = \
line.product_id.web_global_stock
else:
line.global_available_stock = 0.0
template_line = fields.Many2one('sale.order.line.template')
global_available_stock = fields.Float('Stock', readonly=True,
compute="_get_global_stock",
store=True)
note = fields.Text("Notas")
partner_id = fields.Many2one(related='order_id.partner_id', string='partner', store=True, readonly=True)
pricelist_id = fields.Many2one(related='order_id.pricelist_id', string='partner', store=True, readonly=True)
check_edit = fields.Boolean(compute='_compute_check_edit')
@api.depends('template_line.lines_qty', 'product_id.product_tmpl_id.product_attribute_count')
def _compute_check_edit(self):
for line in self:
check_edit = True
if line.product_id.product_tmpl_id.product_attribute_count > 0:
check_edit = False
if line.template_line.lines_qty > 1:
check_edit = False
line.check_edit = check_edit
@api.model
def create(self, vals):
if self._context.get('template_line', False):
vals['template_line'] = self._context.get('template_line', False)
if not vals.get('template_line', False) and not \
self._context.get('no_create_template_line', False):
product = self.env['product.product'].browse(
vals.get('product_id'))
vals['product_template'] = product.product_tmpl_id.id
new_template = self.env['sale.order.line.template'].with_context(
no_create_template_line=True, no_create_line=True).create(vals)
vals.pop('product_template')
vals['template_line'] = new_template.id
return super(SaleOrderLine, self).create(vals)
@api.onchange('product_uom_qty', 'product_uom', 'route_id')
def _onchange_product_id_check_availability(self):
res = super(SaleOrderLine, self).\
_onchange_product_id_check_availability()
if not self.product_id or self.product_id.type != 'product':
return res
precision = self.env['decimal.precision'].\
precision_get('Product Unit of Measure')
product_qty = self.product_uom.\
_compute_quantity(self.product_uom_qty,
self.product_id.uom_id)
if float_compare(self.product_id.web_global_stock,
product_qty, precision_digits=precision) == -1:
warning_mess = {
'title': _('Not enough inventory!'),
'message':
_('You plan to sell %s %s but you only have %s %s '
'available!\nThe stock on hand is %s %s.') %
(self.product_uom_qty, self.product_uom.name,
self.product_id.web_global_stock,
self.product_id.uom_id.name,
self.product_id.web_global_stock,
self.product_id.uom_id.name)}
res['warning'] = warning_mess
elif res.get('warning'):
del res['warning']
return res
@api.multi
def show_details(self):
view_id = self.env.ref('custom_sale_order_variant_mgmt.sale_order_line_custom_form_note').id
return {
'name': _('Sale order line details'),
'type': 'ir.actions.act_window',
'view_type': 'form',
'view_mode': 'form',
'res_model': 'sale.order.line',
'views': [(view_id, 'form')],
'view_id': view_id,
'target': 'new',
'res_id': self.ids[0],
'context': self.env.context}
@api.multi
def _action_procurement_create(self):
if self._name == 'sale.order.line':
return super(SaleOrderLine, self)._action_procurement_create()
@api.multi
def unlink(self):
templates = self.mapped('template_line')
res = super(SaleOrderLine, self).unlink()
if not self._context.get('unlink_template_line', False):
templates_tu = templates.filtered(lambda x: not x.order_lines)
if templates_tu:
ctx = self._context.copy()
ctx.update(unlink_product_line=True)
templates_tu.with_context(ctx).unlink()
return res
class SaleOrder(models.Model):
_inherit = 'sale.order'
template_lines = fields.One2many('sale.order.line.template', 'order_id',
copy=True)
order_line = fields.One2many(copy=False)
sale_order_line_count = fields.Integer(
compute='_compute_sale_order_line_count')
@api.model
def fields_view_get(self, view_id=None, view_type='form', toolbar=False,
submenu=False):
"""
        Override to remove the order_line field from the sale order form view,
        since the lines are managed through the template_lines field instead
"""
res = super(SaleOrder, self).fields_view_get(
view_id=view_id, view_type=view_type, toolbar=toolbar,
submenu=submenu)
if view_type == 'form':
doc = etree.XML(res['arch'])
for node in doc.xpath("//field[@name='order_line']"):
                # Remove the order_line field from the form view
node.getparent().remove(node)
res['arch'] = etree.tostring(doc)
return res
@api.depends('order_line')
def _compute_sale_order_line_count(self):
for order in self:
order.sale_order_line_count = len(order.order_line)
@api.multi
def action_view_order_lines(self):
action = self.env.ref(
'custom_sale_order_variant_mgmt.sale_order_line_action').read()[0]
action['domain'] = [('id', 'in', self.order_line.ids)]
action['context'] = {
'default_order_id': self.id,
}
return action
@api.multi
    def copy(self, default=None):
return super(
SaleOrder,
self.with_context(no_create_line=True,
no_create_template_line=True)).copy(default)
def clear_existing_promotion_lines(self):
order = self
order_line_obj = self.env['sale.order.line']
# Delete all template lines related with promotion sale order lines
domain = [('order_id', '=', order.id), ('promotion_line', '=', True)]
order_line_objs = order_line_obj.search(domain)
related_template_lines = order_line_objs.mapped('template_line')
related_template_lines.unlink()
res = super(SaleOrder, self).clear_existing_promotion_lines()
return res<|fim▁end|>
|
template_product = self.env['product.template'].browse(vals['product_template'])
if template_product.display_name == line_vals['name']:
product_vals = self.env['product.product'].browse(
|
<|file_name|>lookup_util.cc<|end_file_name|><|fim▁begin|>/* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/core/kernels/lookup_util.h"
#include "tensorflow/core/framework/tensor.h"
#include "tensorflow/core/framework/tensor_shape.h"
#include "tensorflow/core/lib/core/errors.h"
#include "tensorflow/core/lib/io/inputbuffer.h"
namespace tensorflow {
namespace lookup {
namespace {
static const int kInputBufferSize = 1 * 1024 * 1024; /* bytes */
static const int kLineNumber = -1;
static const int kWholeLine = -2;
Status GetNumLinesInTextFile(Env* env, const string& vocab_file,
int64* num_lines) {
std::unique_ptr<RandomAccessFile> file;
TF_RETURN_IF_ERROR(env->NewRandomAccessFile(vocab_file, &file));
io::InputBuffer input_buffer(file.get(), kInputBufferSize);
string line;
Status s = input_buffer.ReadLine(&line);
int64 next_id = 0;
while (s.ok()) {
next_id++;
s = input_buffer.ReadLine(&line);
}
if (!errors::IsOutOfRange(s)) {
return s;
}
*num_lines = next_id;
return Status::OK();
}
// Iterator that reads a text file. Each iteration processes one line; it parses
// the line and populates the keys and values tensors used for initialization
// with a single key and corresponding value.
//
// What information of the line to populate the key or values is specified by
// providing key_index and value_index.
class TextFileLineIterator
: public InitializableLookupTable::InitTableIterator {
public:
TextFileLineIterator()
: valid_(false),
vocab_size_(-1),
status_(errors::FailedPrecondition("Not initialized")) {}
// Initialize iterator.
//
// Prepares the file 'filename' and sets the data types to return the keys and
// values tensors. It requires the indices of the tokens in the line given a
// delimiter to specify where to pick the data from.
//
// - Index -2 means the entire line as string.
// - Index -1 means the line number stored in int64.
  // - Index >= 0 represents the index (starting at zero) of the split line based on
// delimiter.
Status Init(const string& filename, int64 vocab_size, char delimiter,
DataType key_dtype, int64 key_index, DataType value_dtype,
int64 value_index, Env* env) {
if (vocab_size == -1) {
TF_RETURN_IF_ERROR(GetNumLinesInTextFile(env, filename, &vocab_size));
}
filename_ = filename;
vocab_size_ = vocab_size;
delimiter_ = delimiter;
key_ = Tensor(key_dtype, TensorShape({}));
value_ = Tensor(value_dtype, TensorShape({}));
key_index_ = key_index;
value_index_ = value_index;
status_ = env->NewRandomAccessFile(filename_, &file_);
if (!status_.ok()) return status_;
input_buffer_.reset(new io::InputBuffer(file_.get(), kInputBufferSize));
valid_ = true;
next_id_ = 0;
ignore_split_ = std::max(key_index_, value_index_) < 0;
Next();
return status_;
}
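  // Example (sketch): initializing from a "token<TAB>id" vocabulary file, with
  // column 0 as a DT_STRING key and column 1 as a DT_INT64 value; passing a
  // vocab_size of -1 lets Init() count the lines itself.
  //
  //   TextFileLineIterator iter;
  //   TF_RETURN_IF_ERROR(iter.Init("/path/vocab.txt", -1, '\t', DT_STRING,
  //                                /*key_index=*/0, DT_INT64,
  //                                /*value_index=*/1, env));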
void Next() override {
if (!valid_) return;
string line;
status_ = input_buffer_->ReadLine(&line);
if (!status_.ok()) {
if (errors::IsOutOfRange(status_) && next_id_ != vocab_size_) {
status_ = errors::InvalidArgument("Invalid vocab_size in ", filename_,
": expected ", vocab_size_,
" but got ", next_id_);
}
valid_ = false;
return;
}
if (next_id_ >= vocab_size_) {
LOG(WARNING) << "Truncated " << filename_ << " before its end at "
<< vocab_size_ << " records.";
LOG(WARNING) << "next_id_ : " << next_id_;
status_ = errors::OutOfRange("Finished reading ", vocab_size_,
" of lines from ", filename_);
valid_ = false;
return;
}
if (line.empty()) {
status_ = errors::InvalidArgument("Invalid content in ", filename_,
": empty line found at position ",
input_buffer_->Tell(), ".");
valid_ = false;
return;
}
std::vector<string> tokens;
if (!ignore_split_) {
tokens = str_util::Split(line, delimiter_);
if (std::max(key_index_, value_index_) >= tokens.size()) {
status_ = errors::InvalidArgument(
"Invalid number of columns in ", filename_, " line ", next_id_,
" (", line, ") : expected ", std::max(key_index_, value_index_),
" got ", tokens.size());
valid_ = false;<|fim▁hole|> }
}
status_ = SetValue(line, tokens, key_index_, &key_);
if (!status_.ok()) {
valid_ = false;
return;
}
status_ = SetValue(line, tokens, value_index_, &value_);
if (!status_.ok()) {
valid_ = false;
return;
}
next_id_++;
}
bool Valid() const override { return valid_; }
const Tensor& keys() const override { return key_; }
const Tensor& values() const override { return value_; }
Status status() const override { return status_; }
int64 total_size() const override { return vocab_size_; }
private:
Tensor key_;
Tensor value_;
bool valid_; // true if the iterator points to an existing range.
int64 key_index_;
int64 value_index_;
int64 next_id_;
int64 vocab_size_;
string filename_;
char delimiter_;
Status status_;
bool ignore_split_;
std::unique_ptr<RandomAccessFile> file_; // must outlive input_buffer_
std::unique_ptr<io::InputBuffer> input_buffer_;
  // Set the corresponding value from the line or tokens, based on 'index', into
  // the output 'tensor'. The value is converted to the tensor's data type.
Status SetValue(const string& line, const std::vector<string>& tokens,
int64 index, Tensor* tensor) {
if (index == kLineNumber) {
tensor->flat<int64>()(0) = next_id_;
return Status::OK();
}
const string& token = (index == kWholeLine) ? line : tokens[index];
const DataType& dtype = tensor->dtype();
switch (dtype) {
case DT_INT32: {
int32 value;
if (!strings::safe_strto32(token.c_str(), &value)) {
valid_ = false;
return errors::InvalidArgument("Field ", token, " in line ", next_id_,
" is not a valid int32.");
}
tensor->flat<int32>()(0) = value;
} break;
case DT_INT64: {
int64 value;
if (!strings::safe_strto64(token.c_str(), &value)) {
valid_ = false;
return errors::InvalidArgument("Field ", token, " in line ", next_id_,
" is not a valid int64.");
}
tensor->flat<int64>()(0) = value;
} break;
case DT_FLOAT: {
float value;
if (!strings::safe_strtof(token.c_str(), &value)) {
valid_ = false;
return errors::InvalidArgument("Field ", token, " in line ", next_id_,
" is not a valid float.");
}
tensor->flat<float>()(0) = value;
} break;
case DT_DOUBLE: {
double value;
if (!strings::safe_strtod(token.c_str(), &value)) {
valid_ = false;
return errors::InvalidArgument("Field ", token, " in line ", next_id_,
" is not a valid double.");
}
tensor->flat<double>()(0) = value;
} break;
case DT_STRING:
tensor->flat<string>()(0) = token;
break;
default:
valid_ = false;
return errors::InvalidArgument("Data type ", dtype, " not supported.");
}
return Status::OK();
}
TF_DISALLOW_COPY_AND_ASSIGN(TextFileLineIterator);
};
Status GetTableHandle(const string& input_name, OpKernelContext* ctx,
string* container, string* table_handle) {
{
mutex* mu;
TF_RETURN_IF_ERROR(ctx->input_ref_mutex(input_name, &mu));
mutex_lock l(*mu);
Tensor tensor;
TF_RETURN_IF_ERROR(ctx->mutable_input(input_name, &tensor, true));
if (tensor.NumElements() != 2) {
return errors::InvalidArgument(
"Lookup table handle must be scalar, but had shape: ",
tensor.shape().DebugString());
}
auto h = tensor.flat<string>();
*container = h(0);
*table_handle = h(1);
}
return Status::OK();
}
} // namespace
Status GetLookupTable(const string& input_name, OpKernelContext* ctx,
LookupInterface** table) {
string container;
string table_handle;
DataType handle_dtype;
TF_RETURN_IF_ERROR(ctx->input_dtype(input_name, &handle_dtype));
if (handle_dtype == DT_RESOURCE) {
ResourceHandle handle;
TF_RETURN_IF_ERROR(HandleFromInput(ctx, input_name, &handle));
return LookupResource(ctx, handle, table);
} else {
TF_RETURN_IF_ERROR(
GetTableHandle(input_name, ctx, &container, &table_handle));
return ctx->resource_manager()->Lookup(container, table_handle, table);
}
}
Status GetInitializableLookupTable(const string& input_name,
OpKernelContext* ctx,
InitializableLookupTable** table) {
LookupInterface* lookup_table;
DataType handle_dtype;
TF_RETURN_IF_ERROR(ctx->input_dtype(input_name, &handle_dtype));
if (handle_dtype == DT_RESOURCE) {
ResourceHandle handle;
TF_RETURN_IF_ERROR(HandleFromInput(ctx, input_name, &handle));
TF_RETURN_IF_ERROR(LookupResource(ctx, handle, &lookup_table));
*table = lookup_table->GetInitializableLookupTable();
if (*table == nullptr) {
lookup_table->Unref();
return errors::InvalidArgument("Table ", handle.container(), " ",
handle.name(), " is not initializable");
}
} else {
string container;
string table_handle;
TF_RETURN_IF_ERROR(
GetTableHandle(input_name, ctx, &container, &table_handle));
TF_RETURN_IF_ERROR(ctx->resource_manager()->Lookup(container, table_handle,
&lookup_table));
*table = lookup_table->GetInitializableLookupTable();
if (*table == nullptr) {
lookup_table->Unref();
return errors::InvalidArgument("Table ", container, " ", table_handle,
" is not initializable");
}
}
return Status::OK();
}
Status CheckTableDataTypes(const LookupInterface& table, DataType key_dtype,
DataType value_dtype, const string& table_name) {
if (table.key_dtype() != key_dtype || table.value_dtype() != value_dtype) {
return errors::InvalidArgument(
"Conflicting key/value dtypes ", key_dtype, "->", value_dtype, " with ",
table.key_dtype(), "-", table.value_dtype(), " for table ", table_name);
}
return Status::OK();
}
// Helper function to initialize an InitializableLookupTable from a text file.
Status InitializeTableFromTextFile(const string& filename, int64 vocab_size,
char delimiter, int32 key_index,
int32 value_index, Env* env,
InitializableLookupTable* table) {
if (key_index == kLineNumber && table->key_dtype() != DT_INT64) {
return errors::InvalidArgument(
"Key index for line number requires table key dtype of int64, got ",
table->key_dtype());
}
const DataType& key_dtype = table->key_dtype();
const DataType& value_dtype = table->value_dtype();
if (key_index == kWholeLine && !DataTypeIsInteger(key_dtype) &&
key_dtype != DT_STRING) {
return errors::InvalidArgument(
"Key index for whole line requires string or integer table key, got ",
table->key_dtype());
}
if (value_index == kLineNumber && value_dtype != DT_INT64) {
return errors::InvalidArgument(
"Value index for line number requires table value dtype of int64, got ",
table->value_dtype());
}
if (value_index == kWholeLine && value_dtype != DT_STRING) {
return errors::InvalidArgument(
"Value index for whole line requires table value dtype of string, got ",
table->value_dtype());
}
TextFileLineIterator iter;
TF_RETURN_IF_ERROR(iter.Init(filename, vocab_size, delimiter, key_dtype,
key_index, value_dtype, value_index, env));
// For initialization from files, ignore if the table is already
// initialized. The table shared name should contain the filename to
// avoid trying to initialize the same table from the same file at the same
// time.
Status s = table->Initialize(iter);
if (errors::IsFailedPrecondition(s) && table->is_initialized()) {
LOG(INFO) << "Table trying to initialize from file " << filename
<< " is already initialized.";
return Status::OK();
}
return s;
}
} // namespace lookup
} // namespace tensorflow<|fim▁end|>
|
return;
|
<|file_name|>VirtualMachineScaleSetBootDiagnosticsTests.java<|end_file_name|><|fim▁begin|>// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.resourcemanager.compute;
import com.azure.core.http.HttpPipeline;
import com.azure.resourcemanager.compute.fluent.models.VirtualMachineScaleSetInner;
import com.azure.resourcemanager.compute.models.KnownLinuxVirtualMachineImage;
import com.azure.resourcemanager.compute.models.VirtualMachineScaleSet;
import com.azure.resourcemanager.compute.models.VirtualMachineScaleSetSkuTypes;
import com.azure.resourcemanager.network.models.LoadBalancer;
import com.azure.resourcemanager.network.models.LoadBalancerSkuType;
import com.azure.resourcemanager.network.models.Network;
import com.azure.resourcemanager.resources.models.ResourceGroup;
import com.azure.core.management.Region;
import com.azure.resourcemanager.resources.fluentcore.model.Creatable;
import com.azure.core.management.profile.AzureProfile;
import com.azure.resourcemanager.storage.models.StorageAccount;
import java.util.ArrayList;
import java.util.List;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
public class VirtualMachineScaleSetBootDiagnosticsTests extends ComputeManagementTest {
private String rgName = "";
private final Region region = locationOrDefault(Region.US_SOUTH_CENTRAL);
private final String vmName = "javavm";
@Override
protected void initializeClients(HttpPipeline httpPipeline, AzureProfile profile) {
rgName = generateRandomResourceName("javacsmrg", 15);
super.initializeClients(httpPipeline, profile);
}
@Override
protected void cleanUpResources() {
resourceManager.resourceGroups().beginDeleteByName(rgName);
}
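    // Each test below provisions a scale set with boot diagnostics configured via a
    // different storage option (implicit, creatable, explicit or managed storage) and
    // then asserts isBootDiagnosticsEnabled() and bootDiagnosticsStorageUri().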
@Test
public void canEnableBootDiagnosticsWithImplicitStorageOnManagedVMSSCreation() throws Exception {
final String vmssName = generateRandomResourceName("vmss", 10);
ResourceGroup resourceGroup = this.resourceManager.resourceGroups().define(rgName).withRegion(region).create();
Network network =
this
.networkManager
.networks()
.define("vmssvnet")
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withAddressSpace("10.0.0.0/28")
.withSubnet("subnet1", "10.0.0.0/28")
.create();
LoadBalancer publicLoadBalancer =
createInternetFacingLoadBalancer(region, resourceGroup, "1", LoadBalancerSkuType.BASIC);
List<String> backends = new ArrayList<>();
for (String backend : publicLoadBalancer.backends().keySet()) {
backends.add(backend);
}
Assertions.assertTrue(backends.size() == 2);
VirtualMachineScaleSet virtualMachineScaleSet =
this
.computeManager
.virtualMachineScaleSets()
.define(vmssName)
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withSku(VirtualMachineScaleSetSkuTypes.STANDARD_A0)
.withExistingPrimaryNetworkSubnet(network, "subnet1")
.withExistingPrimaryInternetFacingLoadBalancer(publicLoadBalancer)
.withPrimaryInternetFacingLoadBalancerBackends(backends.get(0), backends.get(1))
.withoutPrimaryInternalLoadBalancer()
.withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
.withRootUsername("jvuser")
.withSsh(sshPublicKey())
.withBootDiagnostics()
.create();
Assertions.assertNotNull(virtualMachineScaleSet);
Assertions.assertTrue(virtualMachineScaleSet.isBootDiagnosticsEnabled());
Assertions.assertNotNull(virtualMachineScaleSet.bootDiagnosticsStorageUri());
}
@Test
public void canEnableBootDiagnosticsWithCreatableStorageOnManagedVMSSCreation() throws Exception {
final String vmssName = generateRandomResourceName("vmss", 10);
final String storageName = generateRandomResourceName("st", 14);
ResourceGroup resourceGroup = this.resourceManager.resourceGroups().define(rgName).withRegion(region).create();
Network network =
this
.networkManager
.networks()
.define("vmssvnet")
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withAddressSpace("10.0.0.0/28")
.withSubnet("subnet1", "10.0.0.0/28")
.create();
LoadBalancer publicLoadBalancer =
createInternetFacingLoadBalancer(region, resourceGroup, "1", LoadBalancerSkuType.BASIC);
List<String> backends = new ArrayList<>();
for (String backend : publicLoadBalancer.backends().keySet()) {
backends.add(backend);
}
Assertions.assertTrue(backends.size() == 2);
Creatable<StorageAccount> creatableStorageAccount =
storageManager.storageAccounts().define(storageName).withRegion(region).withExistingResourceGroup(rgName);
VirtualMachineScaleSet virtualMachineScaleSet =
this
.computeManager
.virtualMachineScaleSets()
.define(vmssName)
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withSku(VirtualMachineScaleSetSkuTypes.STANDARD_A0)
.withExistingPrimaryNetworkSubnet(network, "subnet1")
.withExistingPrimaryInternetFacingLoadBalancer(publicLoadBalancer)
.withPrimaryInternetFacingLoadBalancerBackends(backends.get(0), backends.get(1))
.withoutPrimaryInternalLoadBalancer()
.withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
.withRootUsername("jvuser")
.withSsh(sshPublicKey())
.withBootDiagnostics(creatableStorageAccount)
.create();
Assertions.assertNotNull(virtualMachineScaleSet);
Assertions.assertTrue(virtualMachineScaleSet.isBootDiagnosticsEnabled());
Assertions.assertNotNull(virtualMachineScaleSet.bootDiagnosticsStorageUri());
Assertions.assertTrue(virtualMachineScaleSet.bootDiagnosticsStorageUri().contains(storageName));
}
@Test
public void canEnableBootDiagnosticsWithExplicitStorageOnManagedVMSSCreation() throws Exception {
final String vmssName = generateRandomResourceName("vmss", 10);
final String storageName = generateRandomResourceName("st", 14);
ResourceGroup resourceGroup = this.resourceManager.resourceGroups().define(rgName).withRegion(region).create();
Network network =
this
.networkManager
.networks()
.define("vmssvnet")
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withAddressSpace("10.0.0.0/28")
.withSubnet("subnet1", "10.0.0.0/28")
.create();
LoadBalancer publicLoadBalancer =
createInternetFacingLoadBalancer(region, resourceGroup, "1", LoadBalancerSkuType.BASIC);
List<String> backends = new ArrayList<>();
for (String backend : publicLoadBalancer.backends().keySet()) {
backends.add(backend);
}
Assertions.assertTrue(backends.size() == 2);
StorageAccount storageAccount =
storageManager
.storageAccounts()
.define(storageName)
.withRegion(region)
.withNewResourceGroup(rgName)
.create();
VirtualMachineScaleSet virtualMachineScaleSet =
this
.computeManager
.virtualMachineScaleSets()
.define(vmssName)
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withSku(VirtualMachineScaleSetSkuTypes.STANDARD_A0)
.withExistingPrimaryNetworkSubnet(network, "subnet1")
.withExistingPrimaryInternetFacingLoadBalancer(publicLoadBalancer)
.withPrimaryInternetFacingLoadBalancerBackends(backends.get(0), backends.get(1))
.withoutPrimaryInternalLoadBalancer()
.withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
.withRootUsername("jvuser")
.withSsh(sshPublicKey())
.withBootDiagnostics(storageAccount)
.create();
Assertions.assertNotNull(virtualMachineScaleSet);
Assertions.assertTrue(virtualMachineScaleSet.isBootDiagnosticsEnabled());
Assertions.assertNotNull(virtualMachineScaleSet.bootDiagnosticsStorageUri());
Assertions.assertTrue(virtualMachineScaleSet.bootDiagnosticsStorageUri().contains(storageName));
}
@Test
public void canDisableVMSSBootDiagnostics() throws Exception {
final String vmssName = generateRandomResourceName("vmss", 10);
ResourceGroup resourceGroup = this.resourceManager.resourceGroups().define(rgName).withRegion(region).create();
Network network =
this
.networkManager
.networks()
.define("vmssvnet")
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withAddressSpace("10.0.0.0/28")
.withSubnet("subnet1", "10.0.0.0/28")
.create();
LoadBalancer publicLoadBalancer =
createInternetFacingLoadBalancer(region, resourceGroup, "1", LoadBalancerSkuType.BASIC);
List<String> backends = new ArrayList<>();
for (String backend : publicLoadBalancer.backends().keySet()) {
backends.add(backend);
}
Assertions.assertTrue(backends.size() == 2);
VirtualMachineScaleSet virtualMachineScaleSet =
this
.computeManager
.virtualMachineScaleSets()
.define(vmssName)
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withSku(VirtualMachineScaleSetSkuTypes.STANDARD_A0)
.withExistingPrimaryNetworkSubnet(network, "subnet1")
.withExistingPrimaryInternetFacingLoadBalancer(publicLoadBalancer)
.withPrimaryInternetFacingLoadBalancerBackends(backends.get(0), backends.get(1))
.withoutPrimaryInternalLoadBalancer()
.withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
.withRootUsername("jvuser")
.withSsh(sshPublicKey())
.withBootDiagnostics()
.create();
Assertions.assertNotNull(virtualMachineScaleSet);
Assertions.assertTrue(virtualMachineScaleSet.isBootDiagnosticsEnabled());
Assertions.assertNotNull(virtualMachineScaleSet.bootDiagnosticsStorageUri());
virtualMachineScaleSet.update().withoutBootDiagnostics().apply();
Assertions.assertFalse(virtualMachineScaleSet.isBootDiagnosticsEnabled());
// Disabling boot diagnostics will not remove the storage uri from the vm payload.
Assertions.assertNotNull(virtualMachineScaleSet.bootDiagnosticsStorageUri());
}
@Test
public void bootDiagnosticsShouldUsesVMSSOSUnManagedDiskImplicitStorage() throws Exception {
final String vmssName = generateRandomResourceName("vmss", 10);
ResourceGroup resourceGroup = this.resourceManager.resourceGroups().define(rgName).withRegion(region).create();
Network network =
this
.networkManager
.networks()
.define("vmssvnet")
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withAddressSpace("10.0.0.0/28")
.withSubnet("subnet1", "10.0.0.0/28")
.create();
LoadBalancer publicLoadBalancer =
createInternetFacingLoadBalancer(region, resourceGroup, "1", LoadBalancerSkuType.BASIC);
List<String> backends = new ArrayList<>();
for (String backend : publicLoadBalancer.backends().keySet()) {
backends.add(backend);
}
Assertions.assertTrue(backends.size() == 2);
VirtualMachineScaleSet virtualMachineScaleSet =
this
.computeManager
.virtualMachineScaleSets()
.define(vmssName)
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withSku(VirtualMachineScaleSetSkuTypes.STANDARD_A0)
.withExistingPrimaryNetworkSubnet(network, "subnet1")
.withExistingPrimaryInternetFacingLoadBalancer(publicLoadBalancer)
.withPrimaryInternetFacingLoadBalancerBackends(backends.get(0), backends.get(1))
.withoutPrimaryInternalLoadBalancer()
.withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
.withRootUsername("jvuser")
.withSsh(sshPublicKey())
.withUnmanagedDisks()
.withBootDiagnostics()
.create();
Assertions.assertNotNull(virtualMachineScaleSet);
Assertions.assertTrue(virtualMachineScaleSet.isBootDiagnosticsEnabled());
Assertions.assertNotNull(virtualMachineScaleSet.bootDiagnosticsStorageUri());
VirtualMachineScaleSetInner inner = virtualMachineScaleSet.innerModel();
Assertions.assertNotNull(inner);
Assertions.assertNotNull(inner.virtualMachineProfile());
Assertions.assertNotNull(inner.virtualMachineProfile().storageProfile());
Assertions.assertNotNull(inner.virtualMachineProfile().storageProfile().osDisk());
List<String> containers = inner.virtualMachineProfile().storageProfile().osDisk().vhdContainers();
Assertions.assertFalse(containers.isEmpty());
// Boot diagnostics should share storage used for os/disk containers
boolean found = false;
for (String containerStorageUri : containers) {
if (containerStorageUri
.toLowerCase()
.startsWith(virtualMachineScaleSet.bootDiagnosticsStorageUri().toLowerCase())) {
found = true;
break;
}
}
Assertions.assertTrue(found);
}
@Test
public void bootDiagnosticsShouldUseVMSSUnManagedDisksExplicitStorage() throws Exception {
final String storageName = generateRandomResourceName("st", 14);
final String vmssName = generateRandomResourceName("vmss", 10);
ResourceGroup resourceGroup = this.resourceManager.resourceGroups().define(rgName).withRegion(region).create();
Network network =
this
.networkManager
.networks()
.define("vmssvnet")
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withAddressSpace("10.0.0.0/28")
.withSubnet("subnet1", "10.0.0.0/28")
.create();
LoadBalancer publicLoadBalancer =
createInternetFacingLoadBalancer(region, resourceGroup, "1", LoadBalancerSkuType.BASIC);
List<String> backends = new ArrayList<>();
for (String backend : publicLoadBalancer.backends().keySet()) {
backends.add(backend);
}
Assertions.assertTrue(backends.size() == 2);
StorageAccount storageAccount =
storageManager
.storageAccounts()
.define(storageName)
.withRegion(region)
.withNewResourceGroup(rgName)
.create();
VirtualMachineScaleSet virtualMachineScaleSet =
this
.computeManager
.virtualMachineScaleSets()
.define(vmssName)
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withSku(VirtualMachineScaleSetSkuTypes.STANDARD_A0)
.withExistingPrimaryNetworkSubnet(network, "subnet1")
.withExistingPrimaryInternetFacingLoadBalancer(publicLoadBalancer)
.withPrimaryInternetFacingLoadBalancerBackends(backends.get(0), backends.get(1))
.withoutPrimaryInternalLoadBalancer()
.withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
.withRootUsername("jvuser")
.withSsh(sshPublicKey())
.withUnmanagedDisks()
.withBootDiagnostics()
.withExistingStorageAccount(
storageAccount) // This storage account must be shared by disk and boot diagnostics
.create();
Assertions.assertNotNull(virtualMachineScaleSet);
Assertions.assertTrue(virtualMachineScaleSet.isBootDiagnosticsEnabled());
Assertions.assertNotNull(virtualMachineScaleSet.bootDiagnosticsStorageUri());
Assertions.assertTrue(virtualMachineScaleSet.bootDiagnosticsStorageUri().contains(storageName));
VirtualMachineScaleSetInner inner = virtualMachineScaleSet.innerModel();
Assertions.assertNotNull(inner);
Assertions.assertNotNull(inner.virtualMachineProfile());
Assertions.assertNotNull(inner.virtualMachineProfile().storageProfile());
Assertions.assertNotNull(inner.virtualMachineProfile().storageProfile().osDisk());
List<String> containers = inner.virtualMachineProfile().storageProfile().osDisk().vhdContainers();
Assertions.assertFalse(containers.isEmpty());
}
@Test
public void canEnableBootDiagnosticsWithCreatableStorageOnUnManagedVMSSCreation() throws Exception {
final String storageName = generateRandomResourceName("st", 14);
final String vmssName = generateRandomResourceName("vmss", 10);
ResourceGroup resourceGroup = this.resourceManager.resourceGroups().define(rgName).withRegion(region).create();
Network network =
this
.networkManager
.networks()
.define("vmssvnet")
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withAddressSpace("10.0.0.0/28")
.withSubnet("subnet1", "10.0.0.0/28")
.create();
LoadBalancer publicLoadBalancer =
createInternetFacingLoadBalancer(region, resourceGroup, "1", LoadBalancerSkuType.BASIC);
List<String> backends = new ArrayList<>();
for (String backend : publicLoadBalancer.backends().keySet()) {
backends.add(backend);
}
Assertions.assertTrue(backends.size() == 2);
Creatable<StorageAccount> creatableStorageAccount =
storageManager.storageAccounts().define(storageName).withRegion(region).withExistingResourceGroup(rgName);
VirtualMachineScaleSet virtualMachineScaleSet =
this
.computeManager
.virtualMachineScaleSets()
.define(vmssName)
.withRegion(region)
.withExistingResourceGroup(resourceGroup)
.withSku(VirtualMachineScaleSetSkuTypes.STANDARD_A0)
.withExistingPrimaryNetworkSubnet(network, "subnet1")
.withExistingPrimaryInternetFacingLoadBalancer(publicLoadBalancer)
.withPrimaryInternetFacingLoadBalancerBackends(backends.get(0), backends.get(1))
.withoutPrimaryInternalLoadBalancer()
.withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
.withRootUsername("jvuser")
.withSsh(sshPublicKey())
.withUnmanagedDisks()
.withBootDiagnostics(
creatableStorageAccount) // This storage account should be used for BDiagnostics not OS disk storage
// account
.create();
Assertions.assertNotNull(virtualMachineScaleSet);
Assertions.assertTrue(virtualMachineScaleSet.isBootDiagnosticsEnabled());
Assertions.assertNotNull(virtualMachineScaleSet.bootDiagnosticsStorageUri());
Assertions.assertTrue(virtualMachineScaleSet.bootDiagnosticsStorageUri().contains(storageName));
// There should be a different storage account created for VMSS OS Disk
VirtualMachineScaleSetInner inner = virtualMachineScaleSet.innerModel();
Assertions.assertNotNull(inner);
Assertions.assertNotNull(inner.virtualMachineProfile());
Assertions.assertNotNull(inner.virtualMachineProfile().storageProfile());
Assertions.assertNotNull(inner.virtualMachineProfile().storageProfile().osDisk());
List<String> containers = inner.virtualMachineProfile().storageProfile().osDisk().vhdContainers();
Assertions.assertFalse(containers.isEmpty());
boolean notFound = true;
for (String containerStorageUri : containers) {
if (containerStorageUri
.toLowerCase()
.startsWith(virtualMachineScaleSet.bootDiagnosticsStorageUri().toLowerCase())) {
notFound = false;
break;
}
}
Assertions.assertTrue(notFound);
}
@Test
public void canEnableBootDiagnosticsOnManagedStorageAccount() {
Network network =
this
.networkManager
.networks()
.define("vmssvnet")
.withRegion(region)
.withNewResourceGroup(rgName)
.withAddressSpace("10.0.0.0/28")
.withSubnet("subnet1", "10.0.0.0/28")
.create();
VirtualMachineScaleSet virtualMachineScaleSet =
this
.computeManager
.virtualMachineScaleSets()
.define(vmName)
.withRegion(region)<|fim▁hole|> .withoutPrimaryInternetFacingLoadBalancer()
.withoutPrimaryInternalLoadBalancer()
.withPopularLinuxImage(KnownLinuxVirtualMachineImage.UBUNTU_SERVER_16_04_LTS)
.withRootUsername("jvuser")
.withSsh(sshPublicKey())
.withBootDiagnosticsOnManagedStorageAccount()
.create();
Assertions.assertNotNull(virtualMachineScaleSet);
Assertions.assertTrue(virtualMachineScaleSet.isBootDiagnosticsEnabled());
Assertions.assertNull(virtualMachineScaleSet.bootDiagnosticsStorageUri());
}
}<|fim▁end|>
|
.withExistingResourceGroup(rgName)
.withSku(VirtualMachineScaleSetSkuTypes.STANDARD_A0)
.withExistingPrimaryNetworkSubnet(network, "subnet1")
|
<|file_name|>globalProperties.ts<|end_file_name|><|fim▁begin|>import {app} from 'electron'
import * as os from 'os'
/**
* creates string with path to folder, depending on OS
* @todo add other OSs
*/<|fim▁hole|> return 'C:\\Users\\' + os.userInfo().username + '\\AppData\\Roaming\\ToDoElectron\\'
case 'darwin':
// TODO
break
case 'linux':
return '/home/' + os.userInfo().username + '/.todoelectron/'
}
}
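// Example (sketch): getPath() returns a base directory that already ends with a
// platform separator, so callers can append a file name directly, e.g.
// getPath() + 'settings.json'.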
export const getAppVersion = () => {
return app.getVersion()
}
export const getAppName = () => {
return app.getName()
}<|fim▁end|>
|
export const getPath = () => {
switch (process.platform) {
case 'win32':
|
<|file_name|>subprod-render.component.spec.ts<|end_file_name|><|fim▁begin|>import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { SubprodRenderComponent } from './subprod-render.component';
describe('SubprodRenderComponent', () => {
let component: SubprodRenderComponent;
let fixture: ComponentFixture<SubprodRenderComponent>;
beforeEach(async(() => {
TestBed.configureTestingModule({<|fim▁hole|>
beforeEach(() => {
fixture = TestBed.createComponent(SubprodRenderComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
});<|fim▁end|>
|
declarations: [ SubprodRenderComponent ]
})
.compileComponents();
}));
|
<|file_name|>rasterizer.cc<|end_file_name|><|fim▁begin|>/* Copyright 2020 The TensorFlow Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "rasterizer.h"
Rasterizer::Rasterizer(
std::unique_ptr<gl_utils::Program>&& program,
std::unique_ptr<gl_utils::RenderTargets>&& render_targets, float clear_red,
float clear_green, float clear_blue, float clear_alpha, float clear_depth,
bool enable_cull_face)
: program_(std::move(program)),
render_targets_(std::move(render_targets)),
clear_red_(clear_red),
clear_green_(clear_green),
clear_blue_(clear_blue),
clear_alpha_(clear_alpha),
clear_depth_(clear_depth),
enable_cull_face_(enable_cull_face) {}
Rasterizer::~Rasterizer() {}
void Rasterizer::Reset() {
program_.reset();
render_targets_.reset();
for (auto&& buffer : shader_storage_buffers_) buffer.second.reset();
}
tensorflow::Status Rasterizer::Render(int num_points,
absl::Span<float> result) {
return RenderImpl(num_points, result);
}
tensorflow::Status Rasterizer::Render(int num_points,
absl::Span<unsigned char> result) {
return RenderImpl(num_points, result);
}
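// Example (sketch) of calling SetUniformMatrix below, assuming the shader
// declares `uniform mat4 view_projection;` (hypothetical uniform name):
//
//   const std::array<float, 16> view_projection = /* column-major values */;
//   TF_RETURN_IF_ERROR(rasterizer->SetUniformMatrix(
//       "view_projection", /*num_columns=*/4, /*num_rows=*/4,
//       /*transpose=*/true, absl::MakeConstSpan(view_projection)));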
tensorflow::Status Rasterizer::SetUniformMatrix(
const std::string& name, int num_columns, int num_rows, bool transpose,
absl::Span<const float> matrix) {
if (size_t(num_rows * num_columns) != matrix.size())
return TFG_INTERNAL_ERROR("num_rows * num_columns != matrix.size()");
typedef void (*setter_fn)(GLint location, GLsizei count, GLboolean transpose,
const GLfloat* value);
static const auto type_mapping =
std::unordered_map<int, std::tuple<int, int, setter_fn>>({
{GL_FLOAT_MAT2, std::make_tuple(2, 2, glUniformMatrix2fv)},
{GL_FLOAT_MAT3, std::make_tuple(3, 3, glUniformMatrix3fv)},
{GL_FLOAT_MAT4, std::make_tuple(4, 4, glUniformMatrix4fv)},
{GL_FLOAT_MAT2x3, std::make_tuple(2, 3, glUniformMatrix2x3fv)},
{GL_FLOAT_MAT2x4, std::make_tuple(2, 4, glUniformMatrix2x4fv)},
{GL_FLOAT_MAT3x2, std::make_tuple(3, 2, glUniformMatrix3x2fv)},<|fim▁hole|> {GL_FLOAT_MAT4x2, std::make_tuple(4, 2, glUniformMatrix4x2fv)},
{GL_FLOAT_MAT4x3, std::make_tuple(4, 3, glUniformMatrix4x3fv)},
});
GLint uniform_type;
GLenum property = GL_TYPE;
TF_RETURN_IF_ERROR(program_->GetResourceProperty(
name, GL_UNIFORM, 1, &property, 1, &uniform_type));
// Is a resource active under that name?
if (uniform_type == GLint(GL_INVALID_INDEX))
return TFG_INTERNAL_ERROR("GL_INVALID_INDEX");
auto type_info = type_mapping.find(uniform_type);
if (type_info == type_mapping.end())
return TFG_INTERNAL_ERROR("Unsupported type");
if (std::get<0>(type_info->second) != num_columns ||
std::get<1>(type_info->second) != num_rows)
return TFG_INTERNAL_ERROR("Invalid dimensions");
GLint uniform_location;
property = GL_LOCATION;
TF_RETURN_IF_ERROR(program_->GetResourceProperty(
name, GL_UNIFORM, 1, &property, 1, &uniform_location));
TF_RETURN_IF_ERROR(program_->Use());
auto program_cleanup = MakeCleanup([this]() { return program_->Detach(); });
// Specify the value of the uniform in the current program.
TFG_RETURN_IF_GL_ERROR(std::get<2>(type_info->second)(
uniform_location, 1, transpose ? GL_TRUE : GL_FALSE, matrix.data()));
// Cleanup the program; no program is active at this point.
return tensorflow::Status::OK();
}<|fim▁end|>
|
{GL_FLOAT_MAT3x4, std::make_tuple(3, 4, glUniformMatrix3x4fv)},
|
<|file_name|>hooks.tsx<|end_file_name|><|fim▁begin|>import { useCallback, useState, useEffect } from 'react';
import { KeyStore } from './keystore';
import type { Accounts, Hooks as HooksType } from './../types';
<|fim▁hole|> const [error, setError] = useState<string>();
useEffect(() => {
async function getAccounts() {
try {
setAccounts(await KeyStore.getAccounts());
} catch (err: any) {
setError(err);
}
}
getAccounts();
}, []);
const newAccount = useCallback((passphrase: string) => {
async function addAccount() {
try {
await KeyStore.newAccount(passphrase);
setAccounts(await KeyStore.getAccounts());
} catch (err: any) {
setError(err);
}
}
addAccount();
}, []);
return { accounts, error, newAccount };
},
useEthereumClient: () => {},
} as HooksType;<|fim▁end|>
|
export const Hooks = {
useKeyStore: () => {
const [accounts, setAccounts] = useState<Accounts>();
|
<|file_name|>event_creation.rs<|end_file_name|><|fim▁begin|>//! Endpoints for creating events.
use std::convert::TryInto;
use bodyparser;
use diesel::{Connection, ExecuteDsl, insert};
use diesel::pg::PgConnection;
use iron::{Chain, Handler, IronError, IronResult, Plugin, Request, Response, status};
use router::Router;
use ruma_events::call::answer::AnswerEvent;
use ruma_events::call::candidates::CandidatesEvent;
use ruma_events::call::hangup::HangupEvent;
use ruma_events::call::invite::InviteEvent;
use ruma_events::room::avatar::AvatarEvent;
use ruma_events::room::canonical_alias::CanonicalAliasEvent;
use ruma_events::room::guest_access::GuestAccessEvent;
use ruma_events::room::history_visibility::HistoryVisibilityEvent;
use ruma_events::room::join_rules::JoinRulesEvent;
use ruma_events::room::message::MessageEvent;
use ruma_events::room::name::NameEvent;
use ruma_events::room::power_levels::PowerLevelsEvent;
use ruma_events::room::third_party_invite::ThirdPartyInviteEvent;
use ruma_events::room::topic::TopicEvent;
use ruma_events::{CustomRoomEvent, CustomStateEvent, EventType};
use ruma_identifiers::{RoomId, EventId};
use serde::Deserialize;
use serde_json::{Value, from_value};
use db::DB;
use config::Config;
use error::{ApiError, MapApiError};
use middleware::{
AccessTokenAuth,
EventTypeParam,
JsonRequest,
MiddlewareChain,
RoomIdParam,
TransactionIdParam,
};
use models::event::NewEvent;
use models::room::Room;
use models::room_membership::RoomMembership;
use models::user::User;
use modifier::SerializableResponse;
use schema::events;
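// The `room_event!` and `state_event!` macros below build the strongly typed
// ruma_events struct for the requested event type from the deserialized request
// body and then convert it into a `NewEvent` database row via `TryInto`.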
macro_rules! room_event {
(
$ty:ident,
$event_content:ident,
$event_type:ident,
$event_id:ident,
$room_id:ident,
$user:ident
) => {
$ty {
content: extract_event_content($event_content, &$event_type)?,
event_id: $event_id.clone(),
event_type: $event_type.clone(),
room_id: $room_id.clone(),
unsigned: None,
user_id: $user.id.clone(),
}.try_into().map_err(ApiError::from)?
};
}
macro_rules! state_event {
(
$ty:ident,
$event_content:ident,
$event_type:ident,
$event_id:ident,
$room_id:ident,
$state_key:ident,
$user:ident
) => {
$ty {
content: extract_event_content($event_content, &$event_type)?,
event_id: $event_id.clone(),
event_type: $event_type.clone(),
prev_content: None,
room_id: $room_id.clone(),
state_key: $state_key.to_string(),
unsigned: None,
user_id: $user.id.clone(),
}.try_into().map_err(ApiError::from)?
};
}
#[derive(Debug, Serialize)]
struct EventResponse {
/// A unique identifier for the event.
event_id: String,
}
/// The `/rooms/:room_id/send/:event_type/:transaction_id` endpoint.
pub struct SendMessageEvent;
middleware_chain!(SendMessageEvent, [JsonRequest, RoomIdParam, EventTypeParam, TransactionIdParam, AccessTokenAuth]);
impl Handler for SendMessageEvent {
fn handle(&self, request: &mut Request) -> IronResult<Response> {
let room_id = request.extensions.get::<RoomIdParam>()
.expect("Should have been required by RoomIdParam.").clone();
let event_type = request.extensions.get::<EventTypeParam>()
.expect("EventTypeParam should ensure an EventType").clone();
request.extensions.get::<TransactionIdParam>()
.expect("TransactionIdParam should ensure a TransactionId").clone();
let user = request.extensions.get::<User>()
.expect("AccessTokenAuth should ensure a user").clone();
let event_content = request
.get::<bodyparser::Json>()
.expect("JsonRequest verifies the Result is Ok")
.expect("JsonRequest verifies the Option is Some");
let config = Config::from_request(request)?;
let event_id = EventId::new(&config.domain).map_api_err(|_| {
ApiError::unknown("Failed to generated event ID for the new event.".to_string())
})?;
let room_event: NewEvent = match event_type {
EventType::CallAnswer => {
room_event!(AnswerEvent, event_content, event_type, event_id, room_id, user)
}
EventType::CallCandidates => {
room_event!(CandidatesEvent, event_content, event_type, event_id, room_id, user)
}
EventType::CallHangup => {
room_event!(HangupEvent, event_content, event_type, event_id, room_id, user)
}
EventType::CallInvite => {
room_event!(InviteEvent, event_content, event_type, event_id, room_id, user)
}
EventType::RoomMessage => {
room_event!(MessageEvent, event_content, event_type, event_id, room_id, user)
}
EventType::Custom(ref custom_event_type) => {
CustomRoomEvent {
content: event_content,
event_id: event_id.clone(),
event_type: EventType::Custom(custom_event_type.clone()),
room_id: room_id.clone(),
unsigned: None,
user_id: user.id.clone(),
}.try_into().map_err(ApiError::from)?
}
_ => {
let error = ApiError::bad_event(
format!("Events of type {} cannot be created with this API.", event_type)
);
return Err(IronError::from(error));
}
};
let connection = DB::from_request(request)?;
connection.transaction(|| {
verify_permissions(&connection, &room_id, &user, &event_type)?;
insert(&room_event)
.into(events::table)
.execute(&*connection)
.map_err(ApiError::from)
}).map_err(ApiError::from)?;
let response = EventResponse {
event_id: event_id.opaque_id().to_string(),
};
Ok(Response::with((status::Ok, SerializableResponse(response))))
}
}
/// The `/rooms/:room_id/state/:event_type/:state_key` and
/// `/rooms/:room_id/state/:event_type` endpoints.
pub struct StateMessageEvent;
middleware_chain!(StateMessageEvent, [JsonRequest, RoomIdParam, EventTypeParam, AccessTokenAuth]);
impl Handler for StateMessageEvent {
fn handle(&self, request: &mut Request) -> IronResult<Response> {
let params = request.extensions.get::<Router>().expect("Params object is missing").clone();
let room_id = request.extensions.get::<RoomIdParam>()
.expect("Should have been required by RoomIdParam.")
.clone();
let event_type = request.extensions.get::<EventTypeParam>()
.expect("EventTypeParam should ensure an EventType").clone();
let state_key = params
.find("state_key")
.unwrap_or("");
let user = request.extensions.get::<User>()
.expect("AccessTokenAuth should ensure a user").clone();
let event_content = request
.get::<bodyparser::Json>()
.expect("JsonRequest verifies the Result is Ok")
.expect("JsonRequest verifies the Option is Some");
let config = Config::from_request(request)?;
let event_id = EventId::new(&config.domain).map_api_err(|_| {
ApiError::unknown("Failed to generated event ID for the new event.".to_string())
})?;
let state_event: NewEvent = match event_type {
EventType::RoomAvatar => {
ensure_empty_state_key(state_key, &event_type)?;
state_event!(
AvatarEvent,
event_content,
event_type,
event_id,
room_id,
state_key,
user
)
}
EventType::RoomCanonicalAlias => {
ensure_empty_state_key(state_key, &event_type)?;
state_event!(
CanonicalAliasEvent,
event_content,
event_type,
event_id,
room_id,
state_key,
user
)
}
EventType::RoomGuestAccess => {
ensure_empty_state_key(state_key, &event_type)?;
state_event!(
GuestAccessEvent,
event_content,
event_type,
event_id,
room_id,
state_key,
user
)
}
EventType::RoomHistoryVisibility => {
ensure_empty_state_key(state_key, &event_type)?;
state_event!(
HistoryVisibilityEvent,
event_content,
event_type,
event_id,
room_id,
state_key,
user
)
}
EventType::RoomJoinRules => {
ensure_empty_state_key(state_key, &event_type)?;
state_event!(
JoinRulesEvent,
event_content,
event_type,
event_id,
room_id,
state_key,
user
)
}
EventType::RoomName => {
ensure_empty_state_key(state_key, &event_type)?;
state_event!(
NameEvent,
event_content,
event_type,
event_id,
room_id,
state_key,
user
)
}
EventType::RoomPowerLevels => {
ensure_empty_state_key(state_key, &event_type)?;
state_event!(
PowerLevelsEvent,
event_content,
event_type,
event_id,
room_id,
state_key,
user
)
}
EventType::RoomThirdPartyInvite => {
state_event!(
ThirdPartyInviteEvent,
event_content,
event_type,
event_id,
room_id,
state_key,
user
)
}
EventType::RoomTopic => {
ensure_empty_state_key(state_key, &event_type)?;
state_event!(
TopicEvent,
event_content,
event_type,
event_id,
room_id,
state_key,
user
)
}
EventType::Custom(ref custom_event_type) => {
CustomStateEvent {
content: event_content,
event_id: event_id.clone(),
event_type: EventType::Custom(custom_event_type.clone()),
prev_content: None,
room_id: room_id.clone(),
state_key: state_key.to_string(),
unsigned: None,
user_id: user.id.clone(),
}.try_into().map_err(ApiError::from)?
}
_ => {
let error = ApiError::bad_event(
format!("Events of type {} cannot be created with this API.", event_type)
);
return Err(IronError::from(error));
}
};
let connection = DB::from_request(request)?;
connection.transaction(|| {
verify_permissions(&connection, &room_id, &user, &event_type)?;
insert(&state_event)
.into(events::table)
.execute(&*connection)
.map_err(ApiError::from)
}).map_err(ApiError::from)?;
let response = EventResponse {
event_id: event_id.opaque_id().to_string(),
};
Ok(Response::with((status::Ok, SerializableResponse(response))))
}
}
/// Check if a `User` has permission to create an event in a given `Room`.
fn verify_permissions(connection: &PgConnection, room_id: &RoomId, user: &User, event_type: &EventType)
-> Result<(), ApiError> {
let room = match Room::find(connection, room_id)? {
Some(room) => room,
None => Err(ApiError::unauthorized("The room was not found on this server".to_string()))?,
};
match RoomMembership::find(connection, room_id, &user.id)? {
Some(membership) => {
if membership.membership != "join" {
Err(ApiError::unauthorized(
format!("The user {} has not joined the room", user.id)
))?
}
},
None => {
Err(ApiError::unauthorized(
format!("The user {} is not a member of the room", user.id)
))?
}
}
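    // The event is allowed only if the sender's power level (falling back to
    // `users_default`) is at least the level required for this event type
    // (falling back to `events_default`).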
let power_levels = room.current_power_levels(&*connection)?;
let user_power_level = power_levels
.users
.get(&user.id)<|fim▁hole|> .unwrap_or(&power_levels.users_default);
let required_power_level = power_levels
.events
.get(event_type)
.unwrap_or(&power_levels.events_default);
if required_power_level > user_power_level {
return Err(
ApiError::unauthorized("Insufficient power level to create this event.".to_string())
);
}
Ok(())
}
/// Enforces an empty state key for an event type that requires it.
fn ensure_empty_state_key(state_key: &str, event_type: &EventType) -> Result<(), IronError> {
if state_key == "" {
Ok(())
} else {
Err(ApiError::bad_event(format!("Events of type {} must have an empty state key.", event_type)))?
}
}
/// Convert the JSON from the request into the correct type for the event's `content` field.
fn extract_event_content<T: Deserialize>(event_content: Value, event_type: &EventType)
-> Result<T, ApiError> {
from_value(event_content).map_api_err(|_| {
ApiError::bad_event(
format!(
"Event content did not match expected structure for event of type {}.",
event_type
)
)
})
}
#[cfg(test)]
mod tests {
use test::Test;
use iron::status::Status;
#[test]
fn create_message_event() {
let test = Test::new();
let user = test.create_user();
let room_id = test.create_room(&user.token);
let create_event_path = format!(
"/_matrix/client/r0/rooms/{}/send/m.room.message/1?access_token={}",
room_id,
user.token
);
let response = test.put(&create_event_path, r#"{"body":"Hi","msgtype":"m.text"}"#);
assert!(response.json().find("event_id").unwrap().as_str().is_some());
}
#[test]
fn event_content_does_not_match_event_type() {
let test = Test::new();
let user = test.create_user();
let room_id = test.create_room(&user.token);
let create_event_path = format!(
"/_matrix/client/r0/rooms/{}/send/m.call.answer/1?access_token={}",
room_id,
user.token
);
let response = test.put(&create_event_path, r#"{"body":"Hi","msgtype":"m.text"}"#);
let json = response.json();
assert_eq!(json.find("errcode").unwrap().as_str().unwrap(), "IO_RUMA_BAD_EVENT");
assert_eq!(
json.find("error").unwrap().as_str().unwrap(),
"Event content did not match expected structure for event of type m.call.answer."
);
}
#[test]
fn non_message_event_type() {
let test = Test::new();
let user = test.create_user();
let room_id = test.create_room(&user.token);
let create_event_path = format!(
"/_matrix/client/r0/rooms/{}/send/m.room.topic/1?access_token={}",
room_id,
user.token
);
let response = test.put(&create_event_path, r#"{"topic":"fail"}"#);
let json = response.json();
assert_eq!(json.find("errcode").unwrap().as_str().unwrap(), "IO_RUMA_BAD_EVENT");
assert_eq!(
json.find("error").unwrap().as_str().unwrap(),
"Events of type m.room.topic cannot be created with this API."
);
}
#[test]
fn custom_message_event() {
let test = Test::new();
let user = test.create_user();
let room_id = test.create_room(&user.token);
let create_event_path = format!(
"/_matrix/client/r0/rooms/{}/send/io.ruma.test/1?access_token={}",
room_id,
user.token
);
let response = test.put(&create_event_path, r#"{"foo":"bar"}"#);
assert!(response.json().find("event_id").unwrap().as_str().is_some());
}
#[test]
fn send_message_to_non_existent_room() {
let test = Test::new();
let user = test.create_user();
let room_id = "!random:ruma.test";
let create_event_path = format!(
"/_matrix/client/r0/rooms/{}/send/m.room.message/1?access_token={}",
room_id,
user.token
);
let response = test.put(&create_event_path, r#"{"body":"Hi","msgtype":"m.text"}"#);
assert_eq!(response.status, Status::Forbidden);
}
#[test]
fn send_message_without_room_membership() {
let test = Test::new();
let alice = test.create_user();
let bob = test.create_user();
let room_id = test.create_room(&alice.token);
let response = test.send_message(&bob.token, &room_id, "Hello");
assert_eq!(response.status, Status::Forbidden);
assert_eq!(
response.json().find("error").unwrap().as_str().unwrap(),
format!("The user {} is not a member of the room", bob.id));
}
#[test]
fn send_message_without_joining() {
let test = Test::new();
let alice = test.create_user();
let bob = test.create_user();
let room_options = format!(r#"{{ "invite": [ "{}" ] }}"#, bob.id);
let room_id = test.create_room_with_params(&alice.token, &room_options);
let response = test.send_message(&bob.token, &room_id, "Hello");
assert_eq!(response.status, Status::Forbidden);
assert_eq!(
response.json().find("error").unwrap().as_str().unwrap(),
format!("The user {} has not joined the room", bob.id));
}
#[test]
fn overwrite_state_event() {
let test = Test::new();
let alice = test.create_user();
let bob = test.create_user();
let room_options = format!(r#"{{ "invite": [ "{}" ] }}"#, bob.id);
let room_id = test.create_room_with_params(&alice.token, &room_options);
assert_eq!(test.join_room(&bob.token, &room_id).status, Status::Ok);
let state_event_path = format!(
"/_matrix/client/r0/rooms/{}/state/m.room.power_levels?access_token={}",
room_id,
alice.token
);
let event_content = format!(r#"{{
"ban": 100,"events": {{
"m.room.message": 100
}},
"events_default": 0,
"invite": 100,
"kick": 100,
"redact": 0,
"state_default": 0,
"users": {{
"{}": 50
}},
"users_default": 0
}}"#, bob.id);
let response = test.put(&state_event_path, &event_content);
assert_eq!(response.status, Status::Ok);
let response = test.send_message(&bob.token, &room_id, "Hello");
assert_eq!(response.status, Status::Forbidden);
assert_eq!(
response.json().find("error").unwrap().as_str().unwrap(),
"Insufficient power level to create this event."
);
let event_content = format!(r#"{{
"ban": 100,
"events": {{
"m.room.message": 0
}},
"events_default": 0,
"invite": 100,
"kick": 100,
"redact": 0,
"state_default": 0,
"users": {{
"{}": 50
}},
"users_default": 0
}}"#, bob.id);
// Now everyone can send messages
let response = test.put(&state_event_path, &event_content);
assert_eq!(response.status, Status::Ok);
let response = test.send_message(&bob.token, &room_id, "Hello again");
assert_eq!(response.status, Status::Ok);
}
}<|fim▁end|>
| |
<|file_name|>weather.py<|end_file_name|><|fim▁begin|>__author__ = 'Alexandre Menai [email protected]'
'''
The weather module intends to grab pilot weather from the aviation weather services in the USA
'''
#TODO put the following global parameters into a configuration file later on
WEAHTER_HOSTNAME="aviationweather.gov"
METAR_PATH="/adds/dataserver_current/httpparam?dataSource=metars&requestType=retrieve&format=xml&stationString="
#end global parameters
import httplib
import utilities.XML2Py as XML2Py
class Weather:
def __init__(self, start_date=1490468686,duration=86400,departure_airfield='KBED',destination_airfield='KRKD'):
"""
initializes a weather object.
:param start_date: the start date in epoch
:param duration: the duration of the flight in seconds
:param departure_airfield: is the 4 letter ICAO identifier of the departure airfield
:param destination_airfield: is the 4 letter ICAO identifier of the destination airfield
:return: the handle for the Object.
"""
self.start_date=start_date
self.duration=duration
self.departure_airfield=departure_airfield
self.destination_airfield=destination_airfield<|fim▁hole|> #construct the path and params
requestPath=METAR_PATH+airfield+"&hoursBeforeNow="+str(hours_before_now)
#initiate the connection
connection=httplib.HTTPConnection(WEAHTER_HOSTNAME)
# Get the METAR
connection.request('GET',requestPath)
metar_xml=connection.getresponse().read()
#close the connection
connection.close()
deserialized_metar = XML2Py.XML2Py().parse(metar_xml)
return deserialized_metar
#TODO scale down the useless information<|fim▁end|>
|
def grab_airfield_weather(self, airfield='KBED', hours_before_now=1):
|
<|file_name|>api_dispatch.rs<|end_file_name|><|fim▁begin|>/*#[cfg(feature = "window")]
pub use api::x11::{Window, WindowProxy, MonitorID, get_available_monitors, get_primary_monitor};
#[cfg(feature = "window")]
pub use api::x11::{WaitEventsIterator, PollEventsIterator};*/
use std::collections::VecDeque;
use std::sync::Arc;
use BuilderAttribs;
use ContextError;
use CreationError;
use CursorState;
use Event;
use GlContext;
use MouseCursor;
use PixelFormat;
use libc;
use api::wayland;
use api::x11;
use api::x11::XConnection;
enum Backend {
X(Arc<XConnection>),
Wayland
}
lazy_static!(
static ref BACKEND: Backend = {
// Wayland backend is not production-ready yet so we disable it
if false && wayland::is_available() {
Backend::Wayland
} else {
Backend::X(Arc::new(XConnection::new().unwrap()))
}
};
);
pub enum Window {
#[doc(hidden)]
X(x11::Window),
#[doc(hidden)]
Wayland(wayland::Window)
}
#[derive(Clone)]
pub enum WindowProxy {
#[doc(hidden)]
X(x11::WindowProxy),
#[doc(hidden)]
Wayland(wayland::WindowProxy)
}
impl WindowProxy {
pub fn wakeup_event_loop(&self) {
match self {
&WindowProxy::X(ref wp) => wp.wakeup_event_loop(),
&WindowProxy::Wayland(ref wp) => wp.wakeup_event_loop()
}
}
}
pub enum MonitorID {
#[doc(hidden)]
X(x11::MonitorID),
#[doc(hidden)]
Wayland(wayland::MonitorID)
}
pub fn get_available_monitors() -> VecDeque<MonitorID> {
match *BACKEND {
Backend::Wayland => wayland::get_available_monitors()
.into_iter()
.map(MonitorID::Wayland)
.collect(),
Backend::X(ref connec) => x11::get_available_monitors(connec)
.into_iter()
.map(MonitorID::X)
.collect(),
}
}
pub fn get_primary_monitor() -> MonitorID {
match *BACKEND {
Backend::Wayland => MonitorID::Wayland(wayland::get_primary_monitor()),
Backend::X(ref connec) => MonitorID::X(x11::get_primary_monitor(connec)),
}
}
impl MonitorID {
pub fn get_name(&self) -> Option<String> {
match self {
&MonitorID::X(ref m) => m.get_name(),
&MonitorID::Wayland(ref m) => m.get_name()
}
}
pub fn get_native_identifier(&self) -> ::native_monitor::NativeMonitorId {
match self {
&MonitorID::X(ref m) => m.get_native_identifier(),
&MonitorID::Wayland(ref m) => m.get_native_identifier()
}
}
pub fn get_dimensions(&self) -> (u32, u32) {
match self {
&MonitorID::X(ref m) => m.get_dimensions(),
&MonitorID::Wayland(ref m) => m.get_dimensions()
}
}
}
pub enum PollEventsIterator<'a> {
#[doc(hidden)]
X(x11::PollEventsIterator<'a>),
#[doc(hidden)]
Wayland(wayland::PollEventsIterator<'a>)
}
impl<'a> Iterator for PollEventsIterator<'a> {
type Item = Event;
fn next(&mut self) -> Option<Event> {
match self {
&mut PollEventsIterator::X(ref mut it) => it.next(),
&mut PollEventsIterator::Wayland(ref mut it) => it.next()
}
}
}
pub enum WaitEventsIterator<'a> {
#[doc(hidden)]
X(x11::WaitEventsIterator<'a>),
#[doc(hidden)]
Wayland(wayland::WaitEventsIterator<'a>)
}
impl<'a> Iterator for WaitEventsIterator<'a> {
type Item = Event;
fn next(&mut self) -> Option<Event> {
match self {
&mut WaitEventsIterator::X(ref mut it) => it.next(),
&mut WaitEventsIterator::Wayland(ref mut it) => it.next()
}
}
}
impl Window {
pub fn new(builder: BuilderAttribs) -> Result<Window, CreationError> {
match *BACKEND {
Backend::Wayland => wayland::Window::new(builder).map(Window::Wayland),
Backend::X(ref connec) => x11::Window::new(connec, builder).map(Window::X),
}
}
pub fn set_title(&self, title: &str) {
match self {
&Window::X(ref w) => w.set_title(title),
&Window::Wayland(ref w) => w.set_title(title)
}
}
pub fn show(&self) {
match self {
&Window::X(ref w) => w.show(),
&Window::Wayland(ref w) => w.show()
}
}
pub fn hide(&self) {
match self {
&Window::X(ref w) => w.hide(),
&Window::Wayland(ref w) => w.hide()
}
}
pub fn get_position(&self) -> Option<(i32, i32)> {
match self {
&Window::X(ref w) => w.get_position(),
&Window::Wayland(ref w) => w.get_position()
}
}
pub fn set_position(&self, x: i32, y: i32) {
match self {
&Window::X(ref w) => w.set_position(x, y),
&Window::Wayland(ref w) => w.set_position(x, y)
}
}
pub fn get_inner_size(&self) -> Option<(u32, u32)> {
match self {
&Window::X(ref w) => w.get_inner_size(),
&Window::Wayland(ref w) => w.get_inner_size()
}
}
pub fn get_outer_size(&self) -> Option<(u32, u32)> {
match self {
&Window::X(ref w) => w.get_outer_size(),
&Window::Wayland(ref w) => w.get_outer_size()
}
}
pub fn set_inner_size(&self, x: u32, y: u32) {
match self {
&Window::X(ref w) => w.set_inner_size(x, y),
&Window::Wayland(ref w) => w.set_inner_size(x, y)
}
}
pub fn create_window_proxy(&self) -> WindowProxy {
match self {
&Window::X(ref w) => WindowProxy::X(w.create_window_proxy()),
&Window::Wayland(ref w) => WindowProxy::Wayland(w.create_window_proxy())
}
}
pub fn poll_events(&self) -> PollEventsIterator {
match self {
&Window::X(ref w) => PollEventsIterator::X(w.poll_events()),
&Window::Wayland(ref w) => PollEventsIterator::Wayland(w.poll_events())
}
}
pub fn wait_events(&self) -> WaitEventsIterator {
match self {
&Window::X(ref w) => WaitEventsIterator::X(w.wait_events()),
&Window::Wayland(ref w) => WaitEventsIterator::Wayland(w.wait_events())
}
}
pub fn set_window_resize_callback(&mut self, callback: Option<fn(u32, u32)>) {
match self {
&mut Window::X(ref mut w) => w.set_window_resize_callback(callback),
&mut Window::Wayland(ref mut w) => w.set_window_resize_callback(callback)
}
}
pub fn set_cursor(&self, cursor: MouseCursor) {
match self {
&Window::X(ref w) => w.set_cursor(cursor),
&Window::Wayland(ref w) => w.set_cursor(cursor)
}
}
pub fn set_cursor_state(&self, state: CursorState) -> Result<(), String> {
match self {
&Window::X(ref w) => w.set_cursor_state(state),
&Window::Wayland(ref w) => w.set_cursor_state(state)
}
}
pub fn hidpi_factor(&self) -> f32 {
match self {
&Window::X(ref w) => w.hidpi_factor(),
&Window::Wayland(ref w) => w.hidpi_factor()
}
}
pub fn set_cursor_position(&self, x: i32, y: i32) -> Result<(), ()> {
match self {
&Window::X(ref w) => w.set_cursor_position(x, y),
&Window::Wayland(ref w) => w.set_cursor_position(x, y)
}
}
pub fn platform_display(&self) -> *mut libc::c_void {
match self {
&Window::X(ref w) => w.platform_display(),
&Window::Wayland(ref w) => w.platform_display()
}
}
pub fn platform_window(&self) -> *mut libc::c_void {<|fim▁hole|> match self {
&Window::X(ref w) => w.platform_window(),
&Window::Wayland(ref w) => w.platform_window()
}
}
}
impl GlContext for Window {
unsafe fn make_current(&self) -> Result<(), ContextError> {
match self {
&Window::X(ref w) => w.make_current(),
&Window::Wayland(ref w) => w.make_current()
}
}
fn is_current(&self) -> bool {
match self {
&Window::X(ref w) => w.is_current(),
&Window::Wayland(ref w) => w.is_current()
}
}
fn get_proc_address(&self, addr: &str) -> *const libc::c_void {
match self {
&Window::X(ref w) => w.get_proc_address(addr),
&Window::Wayland(ref w) => w.get_proc_address(addr)
}
}
fn swap_buffers(&self) -> Result<(), ContextError> {
match self {
&Window::X(ref w) => w.swap_buffers(),
&Window::Wayland(ref w) => w.swap_buffers()
}
}
fn get_api(&self) -> ::Api {
match self {
&Window::X(ref w) => w.get_api(),
&Window::Wayland(ref w) => w.get_api()
}
}
fn get_pixel_format(&self) -> PixelFormat {
match self {
&Window::X(ref w) => w.get_pixel_format(),
&Window::Wayland(ref w) => w.get_pixel_format()
}
}
}<|fim▁end|>
| |
<|file_name|>textbox.js<|end_file_name|><|fim▁begin|>/*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*
*/
// #ifdef __AMLTEXTBOX || __AMLSECRET || __AMLTEXTAREA || __AMLINPUT || __INC_ALL
//@todo DOCUMENT the modules too
/**
* Element displaying a rectangular area which allows a
* user to type information. The information typed can be
* restricted by using this.$masking. The information can also
* be hidden from view when used in password mode. By adding an
* {@link element.autocomplete autocomplete element} as a child the
* value for the textbox can be looked up as you type. By setting the
* {@link element.textbox.attribute.mask mask attribute}, complex data input
* validation is done while the user types.
*
* @constructor
* @define input, secret, textarea, textbox
* @allowchild autocomplete, {smartbinding}<|fim▁hole|> * @inherits apf.XForms
*
* @author Ruben Daniels (ruben AT ajax DOT org)
* @version %I%, %G%
* @since 0.1
*
* @binding value Determines the way the value for the element is retrieved
* from the bound data.
* Example:
* Sets the value based on data loaded into this component.
* <code>
* <a:model id="mdlTextbox">
* <data name="Lukasz"></data>
* </a:model>
* <a:textbox model="mdlTextbox" value="[@name]" />
* </code>
* Example:
* A shorter way to write this is:
* <code>
* <a:model id="mdlTextbox">
* <data name="Lukasz"></data>
* </a:model>
* <a:textbox value="[mdlTextbox::@name]" />
* </code>
*
* @event click Fires when the user presses a mousebutton while over this element and then lets the mousebutton go.
* @event mouseup Fires when the user lets go of a mousebutton while over this element.
* @event mousedown Fires when the user presses a mousebutton while over this element.
* @event keyup Fires when the user lets go of a keyboard button while this element is focussed.
* object:
* {Number} keyCode which key was pressed. This is an ascii number.
* @event clear Fires when the content of this element is cleared.
*/
apf.input = function(struct, tagName){
this.$init(tagName || "input", apf.NODE_VISIBLE, struct);
};
apf.secret = function(struct, tagName){
this.$init(tagName || "secret", apf.NODE_VISIBLE, struct);
};
apf.password = function(struct, tagName){
this.$init(tagName || "password", apf.NODE_VISIBLE, struct);
};
apf.textarea = function(struct, tagName){
this.$init(tagName || "textarea", apf.NODE_VISIBLE, struct);
this.multiline = true;
};
// HTML5 email element
apf.email = function(struct, tagName){
this.$init(tagName || "email", apf.NODE_VISIBLE, struct);
};
apf.textbox = function(struct, tagName){
this.$init(tagName || "textbox", apf.NODE_VISIBLE, struct);
};
(function(){
this.implement(
//#ifdef __WITH_DATAACTION
apf.DataAction
//#endif
//#ifdef __WITH_XFORMS
//,apf.XForms
//#endif
);
this.$focussable = true; // This object can get the focus
this.$masking = false;
this.$autoComplete = false;
this.$childProperty = "value";
//this.realtime = false;
this.value = "";
this.$isTextInput = true;
this.multiline = false;
/**
* @attribute {Boolean} realtime whether the value of the bound data is
* updated as the user types it, or only when this element looses focus or
* the user presses enter.
*/
this.$booleanProperties["focusselect"] = true;
this.$booleanProperties["realtime"] = true;
this.$supportedProperties.push("value", "mask", "initial-message",
"focusselect", "realtime", "type");
/**
* @attribute {String} value the text of this element
* @todo apf3.0 check use of this.$propHandlers["value"].call
*/
this.$propHandlers["value"] = function(value, prop, force, initial){
if (!this.$input || !initial && this.getValue() == value)
return;
// Set Value
if (!initial && !value && !this.hasFocus()) //@todo apf3.x research the use of clear
return this.$clear();
else if (this.isHTMLBox) {
if (this.$input.innerHTML != value)
this.$input.innerHTML = value;
}
else if (this.$input.value != value)
this.$input.value = value;
if (!initial)
apf.setStyleClass(this.$ext, "", [this.$baseCSSname + "Initial"]);
if (this.$button)
this.$button.style.display = value && !initial ? "block" : "none";
};
//See validation
//var oldPropHandler = this.$propHandlers["maxlength"];
this.addEventListener("prop.maxlength", function(e){
//Special validation support using nativate max-length browser support
if (this.$input.tagName.toLowerCase().match(/input|textarea/))
this.$input.maxLength = parseInt(e.value) || null;
});
this.addEventListener("prop.editable", function(e){
if (apf.isIE)
this.$input.unselectable = e.value ? "On" : "Off";
else {
if (e.value)
apf.addListener(this.$input, "mousedown", apf.preventDefault);
else
apf.removeListener(this.$input, "mousedown", apf.preventDefault);
}
});
/**
* @attribute {String} mask a complex input pattern that the user should
* adhere to. This is a string which is a combination of special and normal
* characters. Two further options follow, separated by semicolons. The first option
* specifies whether the non-input characters (the chars not typed by the
* user) are in the value of this element. The second option specifies the
* character that is displayed when the user hasn't yet filled in a
* character.
* Characters:
* 0 Any digit
* 1 The number 1 or 2.
* 9 Any digit or a space.
* # User can enter a digit, space, plus or minus sign.
* L Any alpha character, case insensitive.
* ? Any alpha character, case insensitive or space.
* A Any alphanumeric character.
* a Any alphanumeric character or space.
* X Hexadecimal character, case insensitive.
* x Hexadecimal character, case insensitive or space.
* & Any whitespace.
* C Any character.
* ! Causes the input mask to fill from left to right instead of from right to left.
* ' The start or end of a literal part.
* " The start or end of a literal part.
* > Converts all characters that follow to uppercase.
* < Converts all characters that follow to lowercase.
* \ Cancel the special meaning of a character.
* Example:
* An american style phone number.
* <code>
* <a:textbox mask="(000)0000-0000;;_" />
* </code>
* Example:
* A dutch postal code
* <code>
* <a:textbox mask="0000 AA;;_" />
* </code>
* Example:
* A date
* <code>
* <a:textbox mask="00-00-0000;;_" datatype="xsd:date" />
* </code>
* Example:
* A serial number
* <code>
* <a:textbox mask="'WCS74'0000-00000;1;_" />
* </code>
* Example:
* A MAC address
* <code>
* <a:textbox mask="XX-XX-XX-XX-XX-XX;;_" />
* </code>
*/
this.$propHandlers["mask"] = function(value){
if (this.mask.toLowerCase() == "password")// || !apf.hasMsRangeObject)
return;
if (!value) {
throw new Error("Not Implemented");
}
if (!this.$masking) {
this.$masking = true;
this.implement(apf.textbox.masking);
this.focusselect = false;
//this.realtime = false;
}
this.setMask(this.mask);
};
//this.$propHandlers["ref"] = function(value) {
// this.$input.setAttribute("name", value.split("/").pop().split("::").pop()
// .replace(/[\@\.\(\)]*/g, ""));
//};
/**
* @attribute {String} initial-message the message displayed by this element
* when it doesn't have a value set. This property is inherited from parent
* nodes. When none is found it is looked for on the appsettings element.
*/
this.$propHandlers["initial-message"] = function(value){
if (value) {
//#ifdef __WITH_WINDOW_FOCUS
if (apf.hasFocusBug)
this.$input.onblur();
//#endif
//this.$propHandlers["value"].call(this, value, null, true);
}
if (!this.value)
this.$clear(true);
if (this.type == "password" && this.$inputInitFix) {
this.$inputInitFix.innerHTML = value;
apf.setStyleClass(this.$inputInitFix, "initFxEnabled");
}
};
/**
* @attribute {Boolean} focusselect whether the text in this element is
* selected when this element receives focus.
*/
this.$propHandlers["focusselect"] = function(value){
var _self = this;
this.$input.onmousedown = function(){
_self.focusselect = false;
};
this.$input.onmouseup =
this.$input.onmouseout = function(){
_self.focusselect = value;
};
};
/**
* @attribute {String} type the type or function this element represents.
* This can be any arbitrary name, although there are some special values.
* Possible values:
* username this element is used to type in the name part of login credentials.
* password this element is used to type in the password part of login credentials.
*/
this.$propHandlers["type"] = function(value){
if (value && "password|username".indexOf(value) > -1
&& typeof this.focusselect == "undefined") {
this.focusselect = true;
this.$propHandlers["focusselect"].call(this, true);
}
};
this.$isTextInput = function(e){
return true;
};
/**** Public Methods ****/
//#ifdef __WITH_CONVENIENCE_API
/**
* Sets the value of this element. This should be one of the values
* specified in the values attribute.
* @param {String} value the new value of this element
*/
this.setValue = function(value){
return this.setProperty("value", value, false, true);
};
this.clear = function(){
this.setProperty("value", "");
}
//@todo cleanup and put initial-message behaviour in one location
this.$clear = function(noEvent){
if (this["initial-message"]) {
apf.setStyleClass(this.$ext, this.$baseCSSname + "Initial");
this.$propHandlers["value"].call(this, this["initial-message"], null, null, true);
}
else {
this.$propHandlers["value"].call(this, "", null, null, true);
}
if (!noEvent)
this.dispatchEvent("clear");//@todo this should work via value change
}
/**
* Returns the current value of this element.
* @return {String}
*/
this.getValue = function(){
var v = this.isHTMLBox ? this.$input.innerHTML : this.$input.value;
return v == this["initial-message"] ? "" : v.replace(/\r/g, "");
};
//#endif
/**
* Selects the text in this element.
*/
this.select = function(){
try {
this.$input.select();
}
catch(e){}
};
/**
* Deselects the text in this element.
*/
this.deselect = function(){this.$input.deselect();};
/**** Private Methods *****/
this.$enable = function(){this.$input.disabled = false;};
this.$disable = function(){this.$input.disabled = true;};
this.$insertData = function(str){
return this.setValue(str);
};
/**
* @private
*/
this.insert = function(text){
if (apf.hasMsRangeObject) {
try {
this.$input.focus();
}
catch(e) {}
var range = document.selection.createRange();
if (this.oninsert)
text = this.oninsert(text);
range.pasteHTML(text);
range.collapse(true);
range.select();
}
else {
this.$input.value += text;
}
};
this.addEventListener("$clear", function(){
this.value = "";//@todo what about property binding?
if (this["initial-message"] && apf.document.activeElement != this) {
this.$propHandlers["value"].call(this, this["initial-message"], null, null, true);
apf.setStyleClass(this.$ext, this.$baseCSSname + "Initial");
}
else {
this.$propHandlers["value"].call(this, "");
}
if (!this.$input.tagName.toLowerCase().match(/input|textarea/i)) {
if (apf.hasMsRangeObject) {
try {
var range = document.selection.createRange();
range.moveStart("sentence", -1);
//range.text = "";
range.select();
}
catch(e) {}
}
}
this.dispatchEvent("clear"); //@todo apf3.0
});
this.$keyHandler = function(key, ctrlKey, shiftKey, altKey, e){
if (this.$button && key == 27) {
//this.$clear();
if (this.value) {
this.change("");
e.stopPropagation();
}
//this.focus({mouse:true});
}
/*if (this.dispatchEvent("keydown", {
keyCode : key,
ctrlKey : ctrlKey,
shiftKey : shiftKey,
altKey : altKey,
htmlEvent : e}) === false)
return false;
// @todo: revisit this IF statement - dead code?
if (false && apf.isIE && (key == 86 && ctrlKey || key == 45 && shiftKey)) {
var text = window.clipboardData.getData("Text");
if ((text = this.dispatchEvent("keydown", {
text : this.onpaste(text)}) === false))
return false;
if (!text)
text = window.clipboardData.getData("Text");
this.$input.focus();
var range = document.selection.createRange();
range.text = "";
range.collapse();
range.pasteHTML(text.replace(/\n/g, "<br />").replace(/\t/g, " "));
return false;
}*/
};
this.$registerElement = function(oNode) {
if (!oNode) return;
if (oNode.localName == "autocomplete")
this.$autoComplete = oNode;
};
var fTimer;
this.$focus = function(e){
if (!this.$ext || this.$ext.disabled)
return;
this.$setStyleClass(this.$ext, this.$baseCSSname + "Focus");
if (this["initial-message"] && this.$input.value == this["initial-message"]) {
this.$propHandlers["value"].call(this, "", null, null, true);
apf.setStyleClass(this.$ext, "", [this.$baseCSSname + "Initial"]);
}
var _self = this;
function delay(){
try {
if (!fTimer || document.activeElement != _self.$input) {
_self.$input.focus();
}
else {
clearInterval(fTimer);
return;
}
}
catch(e) {}
if (_self.$masking)
_self.setPosition();
if (_self.focusselect)
_self.select();
};
if ((!e || e.mouse) && apf.isIE) {
clearInterval(fTimer);
fTimer = setInterval(delay, 1);
}
else
delay();
};
this.$blur = function(e){
if (!this.$ext)
return;
if (!this.realtime)
this.change(this.getValue());
this.$setStyleClass(this.$ext, "", [this.$baseCSSname + "Focus", "capsLock"]);
if (this["initial-message"] && this.$input.value == "") {
this.$propHandlers["value"].call(this, this["initial-message"], null, null, true);
apf.setStyleClass(this.$ext, this.$baseCSSname + "Initial");
}
/*if (apf.hasMsRangeObject) {
var r = this.$input.createTextRange();
r.collapse();
r.select();
}*/
try {
if (apf.isIE || !e || e.srcElement != apf.window)
this.$input.blur();
}
catch(e) {}
// check if we clicked on the oContainer. ifso dont hide it
if (this.oContainer) {
$setTimeout("var o = apf.lookup(" + this.$uniqueId + ");\
o.oContainer.style.display = 'none'", 100);
}
clearInterval(fTimer);
};
/**** Init ****/
this.$draw = function(){
var _self = this,
typedBefore = false;
//#ifdef __AMLCODEEDITOR
if (this.localName == "codeeditor") {
this.skin = "textarea";
this.$loadSkin();
}
//#endif
//Build Main Skin
this.$ext = this.$getExternal(null, null, function(oExt){
var mask = this.getAttribute("mask");
if ((typeof mask == "string" && mask.toLowerCase() == "password")
|| "secret|password".indexOf(this.localName) > -1) {
this.type = "password";
this.$getLayoutNode("main", "input").setAttribute("type", "password");
}
//#ifdef __WITH_HTML5
else if (this.localName == "email") {
this.datatype = (this.prefix ? this.prefix + ":" : "") + "email";
this.$propHandlers["datatype"].call(this, this.datatype, "datatype");
}
else if (this.localName == "url") {
this.datatype = (this.prefix ? this.prefix + ":" : "") + "url";
this.$propHandlers["datatype"].call(this, this.datatype, "datatype");
}
//#endif
oExt.setAttribute("onmousedown", "if (!this.host.disabled) \
this.host.dispatchEvent('mousedown', {htmlEvent : event});");
oExt.setAttribute("onmouseup", "if (!this.host.disabled) \
this.host.dispatchEvent('mouseup', {htmlEvent : event});");
oExt.setAttribute("onclick", "if (!this.host.disabled) \
this.host.dispatchEvent('click', {htmlEvent : event});");
});
this.$input = this.$getLayoutNode("main", "input", this.$ext);
this.$button = this.$getLayoutNode("main", "button", this.$ext);
this.$inputInitFix = this.$getLayoutNode("main", "initialfix", this.$ext);
if (this.type == "password")
this.$propHandlers["type"].call(this, "password");
if (!apf.hasContentEditable && "input|textarea".indexOf(this.$input.tagName.toLowerCase()) == -1) {
var node = this.$input;
this.$input = node.parentNode.insertBefore(document.createElement("textarea"), node);
node.parentNode.removeChild(node);
this.$input.className = node.className;
if (this.$ext == node)
this.$ext = this.$input;
}
if (this.$button) {
this.$button.onmousedown = function(){
_self.$clear(); //@todo why are both needed for doc filter
_self.change(""); //@todo only this one should be needed
_self.focus({mouse:true});
}
}
//@todo for skin switching this should be removed
if (this.$input.tagName.toLowerCase() == "textarea") {
this.addEventListener("focus", function(e){
//if (this.multiline != "optional")
//e.returnValue = false
});
}
this.$input.onselectstart = function(e){
if (!e) e = event;
e.cancelBubble = true;
}
this.$input.host = this;
this.$input.onkeydown = function(e){
e = e || window.event;
if (this.host.disabled) {
e.returnValue = false;
return false;
}
//Change
if (!_self.realtime) {
var value = _self.getValue();
if (e.keyCode == 13 && value != _self.value)
_self.change(value);
}
else if (apf.isWebkit && _self.xmlRoot && _self.getValue() != _self.value) //safari issue (only old??)
$setTimeout("var o = apf.lookup(" + _self.$uniqueId + ");\
o.change(o.getValue())");
if (_self.multiline == "optional" && e.keyCode == 13 && !e.shiftKey
|| e.ctrlKey && (e.keyCode == 66 || e.keyCode == 73
|| e.keyCode == 85)) {
e.returnValue = false;
return false;
}
if (typedBefore && this.getAttribute("type") == "password" && this.value != "") {
var hasClass = (_self.$ext.className.indexOf("capsLock") > -1),
capsKey = (e.keyCode === 20);
if (capsKey) // caps off
apf.setStyleClass(_self.$ext, hasClass ? null : "capsLock", hasClass ? ["capsLock"] : null);
}
//Autocomplete
if (_self.$autoComplete || _self.oContainer) {
var keyCode = e.keyCode;
$setTimeout(function(){
if (_self.$autoComplete)
_self.$autoComplete.fillAutocomplete(keyCode);
else
_self.fillAutocomplete(keyCode);
});
}
//Non this.$masking
if (!_self.mask) {
return _self.$keyHandler(e.keyCode, e.ctrlKey,
e.shiftKey, e.altKey, e);
}
};
this.$input.onkeyup = function(e){
if (!e)
e = event;
if (this.host.disabled)
return false;
var keyCode = e.keyCode;
if (_self.$button)
_self.$button.style.display = this.value ? "block" : "none";
if (_self.realtime) {
$setTimeout(function(){
var v;
if (!_self.mask && (v = _self.getValue()) != _self.value)
_self.change(v);
_self.dispatchEvent("keyup", {keyCode : keyCode});//@todo
});
}
else {
_self.dispatchEvent("keyup", {keyCode : keyCode});//@todo
}
//#ifdef __WITH_VALIDATION
if (_self.isValid && _self.isValid() && e.keyCode != 13 && e.keyCode != 17)
_self.clearError();
//#endif
};
//#ifdef __WITH_WINDOW_FOCUS
if (apf.hasFocusBug)
apf.sanitizeTextbox(this.$input);
//#endif
if (apf.hasAutocompleteXulBug)
this.$input.setAttribute("autocomplete", "off");
if ("INPUT|TEXTAREA".indexOf(this.$input.tagName) == -1) {
this.isHTMLBox = true;
this.$input.unselectable = "Off";
this.$input.contentEditable = true;
this.$input.style.width = "1px";
this.$input.select = function(){
var r = document.selection.createRange();
r.moveToElementText(this);
r.select();
}
};
this.$input.deselect = function(){
if (!document.selection) return;
var r = document.selection.createRange();
r.collapse();
r.select();
};
var f;
apf.addListener(this.$input, "keypress", f = function(e) {
if (_self.$input.getAttribute("type") != "password")
return apf.removeListener(_self.$input, "keypress", f);
e = e || window.event;
// get key pressed
var which = -1;
if (e.which)
which = e.which;
else if (e.keyCode)
which = e.keyCode;
// get shift status
var shift_status = false;
if (e.shiftKey)
shift_status = e.shiftKey;
else if (e.modifiers)
shift_status = !!(e.modifiers & 4);
if (((which >= 65 && which <= 90) && !shift_status) ||
((which >= 97 && which <= 122) && shift_status)) {
// uppercase, no shift key
apf.setStyleClass(_self.$ext, "capsLock");
}
else {
apf.setStyleClass(_self.$ext, null, ["capsLock"]);
}
typedBefore = true;
});
};
this.$loadAml = function() {
if (typeof this["initial-message"] == "undefined")
this.$setInheritedAttribute("initial-message");
if (typeof this.realtime == "undefined")
this.$setInheritedAttribute("realtime");
}
this.addEventListener("DOMNodeRemovedFromDocument", function(){
if (this.$button)
this.$button.onmousedown = null;
if (this.$input) {
this.$input.onkeypress =
this.$input.onmouseup =
this.$input.onmouseout =
this.$input.onmousedown =
this.$input.onkeydown =
this.$input.onkeyup =
this.$input.onselectstart = null;
}
});
// #ifdef __WITH_DATABINDING
}).call(apf.textbox.prototype = new apf.StandardBinding());
/* #else
}).call(apf.textbox.prototype = new apf.Presentation());
#endif*/
apf.config.$inheritProperties["initial-message"] = 1;
apf.config.$inheritProperties["realtime"] = 1;
apf.input.prototype =
apf.secret.prototype =
apf.password.prototype =
apf.textarea.prototype =
apf.email.prototype = apf.textbox.prototype;
apf.aml.setElement("input", apf.input);
apf.aml.setElement("secret", apf.secret);
apf.aml.setElement("password", apf.password);
apf.aml.setElement("textarea", apf.textarea);
apf.aml.setElement("textbox", apf.textbox);
// #endif<|fim▁end|>
|
* @addnode elements
*
* @inherits apf.StandardBinding
|
<|file_name|>raytracer_test.go<|end_file_name|><|fim▁begin|>//Copyright (c) 2014 Michael Heier 8311689, Patrick Dahlke 2458357
package main
import (
"de/vorlesung/projekt/raytracer/Helper"
scene "de/vorlesung/projekt/raytracer/Raytracing"
objects "de/vorlesung/projekt/raytracer/SceneObjects"
"image/color/palette"
"log"
"os"
"path"
"runtime"
"testing"
"time"
)
func BenchmarkRaytracer(be *testing.B) {
for i := 0; i < 5; i++ {
numcpu := runtime.NumCPU()
runtime.GOMAXPROCS(numcpu)
h := new(Helper.Helper)
width := 640
height := 480
filename := path.Join(os.TempDir(), "out_"+time.Now().Format("20060102150405")+".png")
log.Println("Start rendering: ", filename)
sphere1 := objects.NewSphere(objects.NewVector(0.0, 0.0, 1.0), 1.0)
sphere2 := objects.NewSphere(objects.NewVector(float64(-i)-2.5, 0.0, -0.75), 1.0+0.2*float64(i))
plane := objects.NewPlane(objects.NewVector(0.0, -1.0, 0.0), objects.NewVector(0.0, 1.0, 0.0))
r1, g1, b1, _ := palette.Plan9[i*2].RGBA()
r2, g2, b2, _ := palette.Plan9[20-i*2-1].RGBA()
color1 := objects.NewVector(float64(r1)/255, float64(g1)/255, float64(b1)/255)
color2 := objects.NewVector(float64(r2)/255, float64(g2)/255, float64(b2)/255)
grid1 := scene.NewGrid(objects.NewVector(2.0, 1.00, -1.0), objects.NewVector(2.0, -0.50, 1.0))
ball1 := scene.NewBall(sphere1, color1, 0.9, 4.0, 30.0, 0.125)
ball2 := scene.NewBall(sphere2, color2, 0.9, 4.0, 30.0, 0.125)
surface1 := scene.NewSurface(plane, objects.NewVector(1.0, 1.0, 1.0), 1.0, 1.0, 8.0, 0.05)
light1 := scene.NewLight(objects.NewVector(1.0, 4.0, 0.5), objects.NewVector(1.0, 1.0, 1.0))
colorSky := objects.NewVector(0.85, 0.85, 0.95)
currentScene := scene.NewScene(objects.NewVector(4.0, 0.5, 0.0), grid1)
currentScene.AddElement(scene.SceneObject(ball1))
currentScene.AddElement(scene.SceneObject(ball2))<|fim▁hole|> currentScene.AddElement(scene.SceneObject(surface1))
currentScene.SetAmbient(objects.NewVector(0.25, 0.25, 0.3))
currentScene.SetLight(light1)
currentScene.SetSkyColor(colorSky)
//benchmark
t1 := time.Now()
//get better result with 3 or 4 instead of 1
//i := currentScene.Render(width, height, 5)
i := currentScene.Render(width, height, 3)
log.Println("Rendering time: ", time.Since(t1))
err := h.ImageWriter(filename, i)
if err != nil {
log.Println("Error in image write: ", err)
}
}
}<|fim▁end|>
| |
<|file_name|>package.py<|end_file_name|><|fim▁begin|>##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *<|fim▁hole|>class XapianCore(AutotoolsPackage):
"""Xapian is a highly adaptable toolkit which allows developers to easily
add advanced indexing and search facilities to their own applications.
It supports the Probabilistic Information Retrieval model and also
supports a rich set of boolean query operators."""
homepage = "https://xapian.org"
url = "http://oligarchy.co.uk/xapian/1.4.3/xapian-core-1.4.3.tar.xz"
version('1.4.3', '143f72693219f7fc5913815ed858f295')
depends_on('zlib')<|fim▁end|>
| |
<|file_name|>topology_util.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Copyright 2013 cloudysunny14.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from ryu.exception import RyuException
def find_all_paths(graph, start, end, path=[]):
path = path + [start]
if start == end:
return [path]
if not graph.has_key(start):
return []
paths = []
for node in graph[start]:
if node not in path:
newpaths = find_all_paths(graph, node, end, path)
for newpath in newpaths:
paths.append(newpath)
paths.sort(key = len)
return paths
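# --- Illustrative sketch (not part of the original module) -----------------
# A minimal usage example of find_all_paths() on a hand-made adjacency dict;
# the node ids below are invented for demonstration only.
def _example_find_all_paths():
    sample_graph = {1: [2, 3], 2: [3, 4], 3: [4]}
    # Every loop-free route from 1 to 4, shortest first:
    # [[1, 2, 4], [1, 3, 4], [1, 2, 3, 4]]
    return find_all_paths(sample_graph, 1, 4)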
class LinkedPorts(object):
def __init__(self):
self.link = {}
def addLink(self, link):
link_roots = self.link.get(link.src.dpid, [])
link_roots.append(link)
self.link[link.src.dpid] = link_roots
def getLink(self, src_dpid, dst_dpid):
link_roots = self.link[src_dpid]
for link in link_roots:
if link.dst.dpid == dst_dpid:
return link
return None
class PathList(object):
class IllegalLink(RyuException):
message = '%(msg)s'
def __init__(self, link_list):
self.link_list = link_list<|fim▁hole|> def _createGraph(self, link_list):
graph = {}
for link in link_list:
self.linked_ports.addLink(link)
src_dpid = link.src.dpid
dst_dpid = link.dst.dpid
linked_nodes = graph.get(src_dpid, [])
linked_nodes.append(dst_dpid)
graph[src_dpid] = linked_nodes
return graph
def createWholePath(self, src_dpid, dst_dpid):
graph = self._createGraph(self.link_list)
paths = find_all_paths(graph, src_dpid, dst_dpid)
path_ports = []
for path in paths:
ports = []
for index in range(len(path)-1):
link = self.linked_ports.getLink(path[index],
path[index+1])
if link is None:
raise PathList.IllegalLink(
msg='Illegal link found. Can\'t create paths %s' % link)
else:
ports.append(link.src)
ports.append(link.dst)
path_ports.append(ports)
return path_ports<|fim▁end|>
|
self.ports = {}
self.linked_ports = LinkedPorts()
|
<|file_name|>general.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
def python_print(*values, sep=' '):
joined = sep.join((str(v) for v in values))
print(joined)
def python_list(*args):
return args
def error(text=''):
raise RuntimeError(text)<|fim▁end|>
|
def python_evaluate(text):
return eval(str(text))
|
<|file_name|>test_metric.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import mxnet as mx
import numpy as np
import json
def check_metric(metric, *args, **kwargs):
metric = mx.metric.create(metric, *args, **kwargs)
str_metric = json.dumps(metric.get_config())
metric2 = mx.metric.create(str_metric)
assert metric.get_config() == metric2.get_config()
<|fim▁hole|> check_metric('f1')
check_metric('perplexity', -1)
check_metric('pearsonr')
check_metric('nll_loss')
composite = mx.metric.create(['acc', 'f1'])
check_metric(composite)
def test_nll_loss():
metric = mx.metric.create('nll_loss')
pred = mx.nd.array([[0.2, 0.3, 0.5], [0.6, 0.1, 0.3]])
label = mx.nd.array([2, 1])
metric.update([label], [pred])
_, loss = metric.get()
expected_loss = 0.0
expected_loss = -(np.log(pred[0][2].asscalar()) + np.log(pred[1][1].asscalar())) / 2
assert loss == expected_loss
if __name__ == '__main__':
import nose
nose.runmodule()<|fim▁end|>
|
def test_metrics():
check_metric('acc', axis=0)
|
<|file_name|>test.py<|end_file_name|><|fim▁begin|>def test1():
SINK(SOURCE)
def test2():
s = SOURCE
SINK(s)
def source():
return SOURCE
def sink(arg):
SINK(arg)
def test3():
t = source()
SINK(t)
def test4():
t = SOURCE
sink(t)
def test5():
t = source()
sink(t)
def test6(cond):
if cond:
t = "Safe"
else:
t = SOURCE
if cond:
SINK(t)
def test7(cond):
if cond:
t = SOURCE
else:
t = "Safe"
if cond:
SINK(t)
def source2(arg):
return source(arg)
def sink2(arg):
sink(arg)
def sink3(cond, arg):
if cond:
sink(arg)
def test8(cond):
t = source2()
sink2(t)
#False positive
def test9(cond):
if cond:
t = "Safe"
else:
t = SOURCE
sink3(cond, t)
def test10(cond):
if cond:
t = SOURCE
else:
t = "Safe"
sink3(cond, t)
def hub(arg):
return arg
def test11():
t = SOURCE
t = hub(t)
SINK(t)
def test12():
t = "safe"
t = hub(t)
SINK(t)
import module
def test13():
t = module.dangerous
SINK(t)
def test14():
t = module.safe
SINK(t)
def test15():
t = module.safe2
SINK(t)
def test16():
t = module.dangerous_func()
SINK(t)
def test20(cond):
if cond:
t = CUSTOM_SOURCE
else:
t = SOURCE
if cond:
CUSTOM_SINK(t)
else:
SINK(t)
def test21(cond):
if cond:
t = CUSTOM_SOURCE
else:
t = SOURCE
if not cond:
CUSTOM_SINK(t)
else:
SINK(t)
def test22(cond):
if cond:
t = CUSTOM_SOURCE
else:
t = SOURCE
t = TAINT_FROM_ARG(t)
if cond:
CUSTOM_SINK(t)
else:
SINK(t)
from module import dangerous as unsafe
SINK(unsafe)
def test23():
with SOURCE as t:
SINK(t)
def test24():
s = SOURCE<|fim▁hole|>
def test_update_extend(x, y):
l = [SOURCE]
d = {"key" : SOURCE}
x.extend(l)
y.update(d)
SINK(x[0])
SINK(y["key"])
l2 = list(l)
d2 = dict(d)
def test_truth():
t = SOURCE
if t:
SINK(t)
else:
SINK(t)
if not t:
SINK(t)
else:
SINK(t)<|fim▁end|>
|
SANITIZE(s)
SINK(s)
|
<|file_name|>issue_9123.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|> fn x() {
fn f() { }
f();
}
fn dummy(&self) { }
}<|fim▁end|>
|
#![crate_type = "lib"]
pub trait X {
|
<|file_name|>resourcetool.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (C) 2017
# ASTRON (Netherlands Institute for Radio Astronomy)
# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
#
# This file is part of the LOFAR software suite.<|fim▁hole|># The LOFAR software suite is free software: you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# The LOFAR software suite is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
#
# $Id$
"""
resourcetool.py
Simple utility to list or update RADB resource availability values.
Essentially a tool around RADB getResources(), updateResourceAvailability(), getResourceClaims() and (parts of) updateResourceClaims().
Can also figure out available capacity for a mounted storage resource and update it in the RADB (-U/--update-available-storage-capacity option).
Can also update storage claim endtime to its task endtime (if ended) in the RADB (-E/--end-past-tasks-storage-claims option).
Examples (source lofarinit.sh to set LOFARROOT, PYTHONPATH, ...):
- Update available (local) storage capacity and set storage claim endtimes to task endtimes (if ended) for an observation storage node, e.g. via cron in operations:
source /opt/lofar/lofarinit.sh; LOFARENV=PRODUCTION /opt/lofar/bin/resourcetool --broker=scu001.control.lofar --end-past-tasks-storage-claims --update-available-storage-capacity
- Show all DRAGNET resources on the test system RADB:
LOFARENV=TEST resourcetool --broker=scu199.control.lofar --resource-group-root=DRAGNET
- Deactivate 2 storage resources in operations, because disks from both storage areas are found to be faulty (then still need to re-schedule tasks):
LOFARENV=PRODUCTION resourcetool --broker=scu001.control.lofar drg01_storage:/data1=False drg01_storage:/data2=False
NOTES:
! Be careful what system (operations or test) this command applies to! This can be set using the env vars LOFARENV=TEST or LOFARENV=PRODUCTION
Operations vs Test (vs Development) can be seen from the default RADB_BUSNAME in the usage info: lofar.* vs test.lofar.* vs devel.lofar.*
! By default, listed or updateable resources are restricted to resources under the localhost's resource group.
This is on purpose to make -U work correctly. The -G/--resource-group-root option can be used to widen the resource group scope for listing
or explicit command-line updates, but non-default -G with -U is rejected: it is too easy to mass-update other resources with local filesystem info.
"""
import logging
from datetime import datetime, timedelta
from lofar.messaging import DEFAULT_BROKER, DEFAULT_BUSNAME
from lofar.sas.resourceassignment.resourceassignmentservice.rpc import RADBRPC
from lofar.common.util import humanreadablesize
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.WARN)
logger = logging.getLogger(__name__)
def printResources(resources, scaled_units=True):
""" E.g.: resources = [{u'total_capacity': 3774873600, u'name': u'dragproc_bandwidth:/data', u'type_id': 3,
u'available_capacity': 3774873600, u'type_name': u'bandwidth', u'unit_id': 3,
u'active': True, u'used_capacity': 0, u'id': 118, u'unit': u'bits/second',
'claimable_capacity': 3774873600}, ...] # this key was added (not from RADB); it can be negative!
"""
header = {'id': 'RId', 'name': 'Resource Name', 'active': 'Active',
'available_capacity': ' Avail. Capacity', 'claimable_capacity': ' Claimable Cap.',
'total_capacity': ' Total Capacity', 'unit': 'Unit'}
print(('{id:4s} {name:24s} {active:6s} {available_capacity} {claimable_capacity} {total_capacity} {unit}'.format(**header)))
print('===================================================================================================')
resources.sort(key=lambda r: r['id']) # SQL could have done this better
for res in resources:
res['active'] = 'True' if res['active'] else 'False' # to solve bool formatting issue
if scaled_units and (res['type_name'] == 'storage' or res['type_name'] == 'bandwidth'):
unit_base = 1024 if res['type_name'] == 'storage' else 1000 # check type_name instead of unit as in printClaims()
res['available_capacity'] = humanreadablesize(res['available_capacity'], '', unit_base)
res['claimable_capacity'] = humanreadablesize(res['claimable_capacity'], '', unit_base)
res['total_capacity'] = humanreadablesize(res['total_capacity'] , '', unit_base)
cap_conv = '>16s'
else:
cap_conv = '16d'
print((('{id:4d} {name:24s} {active:6s} {available_capacity:' + cap_conv +
'} {claimable_capacity:' + cap_conv + '} {total_capacity:' + cap_conv + '} {unit}').format(**res)))
if not resources:
print('<no resources>')
def printClaims(claims, scaled_units=True):
""" E.g.: claims = [{u'claim_size': 76441190400, u'endtime': datetime.datetime(2018, 6, 13, 17, 40),
u'id': 67420, u'resource_id': 122, u'resource_name': u'drg01_storage:/data1',
u'resource_type_id': 5, u'resource_type_name': u'storage',
u'starttime': datetime.datetime(2017, 6, 13, 17, 30),
u'status': u'claimed', u'status_id': 1, u'task_id': 75409, ...}, ...]
"""
header = {'id': 'ClId', 'resource_name': 'Resource Name', 'starttime': 'Start Time', 'endtime': 'End Time',
'claim_size': 'Claim Size', 'status': 'Status'}
print(('{id:7s} {resource_name:24s} {starttime:19s} {endtime:19s} {claim_size:16s} {status:8s}'.format(**header)))
print('===================================================================================================')
claims.sort(key=lambda c: c['id']) # secondary sorting key; SQL could have done this better
claims.sort(key=lambda c: c['starttime']) # primary sorting key (stable sort)
for claim in claims:
if scaled_units and (claim['resource_type_name'] == 'storage' or claim['resource_type_name'] == 'bandwidth'):
unit_base = 1024 if claim['resource_type_name'] == 'storage' else 1000 # no unit name here, so check type_name
claim['claim_size'] = humanreadablesize(claim['claim_size'], '', unit_base)
size_conv = '>16s'
else:
size_conv = '16d'
print((('{id:7d} {resource_name:24s} {starttime} {endtime} {claim_size:' + size_conv +
'} {status:8s}').format(**claim)))
if not claims:
print('<no claims on specified resources and time range>')
def updateStorageClaimsEndTime(radb, resources, storage_resource_type_id, lower_bound=None, upper_bound=None):
""" Update storage claims on resources in the RADB that currently apply, but the task
they belong to has ended (+ a short while). Set end time of these claims to task endtime.
This is intended for user clusters (e.g. DRAGNET) that do not auto-terminate storage claims on
cleanup. If users manage clean up autonomously, then they manage all storage accounting themselves.
"""
status = 0
resource_ids = [res['id'] for res in resources]
now = datetime.utcnow()
if lower_bound is None:
lower_bound = now
if upper_bound is None:
upper_bound = now
claims = radb.getResourceClaims(lower_bound=lower_bound, upper_bound=upper_bound,
resource_ids=resource_ids,
resource_type=storage_resource_type_id)
# Get associated tasks for their end times. Update claims for tasks that ended.
task_ids = list(set({claim['task_id'] for claim in claims}))
tasks = radb.getTasks(task_ids=task_ids)
for task in tasks:
# Wait until task ended. Do not race with OTDBtoRATaskStatusPropagator that extends storage claim endtime.
# We effectively undo that extension here. Intended for clusters (e.g. DRAGNET) where end users manage storage.
new_endtime = task['endtime']
if now < new_endtime + timedelta(minutes=1):
continue
claim_ids = [claim['id'] for claim in claims if claim['task_id'] == task['id'] and \
claim['endtime'] > new_endtime]
print(("Updating RADB storage claims {} endtime to {}".format(claim_ids, new_endtime)))
updated_dict = radb.updateResourceClaims(where_resource_claim_ids=claim_ids, endtime=new_endtime)
if not updated_dict['updated']:
logger.error('failed to update RADB storage claims') # why is N/A here; check the RA logs
status = 1
return status
def updateResource(radb, resource):
""" Update the RADB using the resource dict. """
print(("Updating RADB with resource {}".format(resource)))
updated_dict = radb.updateResourceAvailability(resource_id=resource['id'], active=resource['active'],
available_capacity=resource['available_capacity'],
total_capacity=resource['total_capacity'])
if not updated_dict['updated']:
logger.error('failed to update RADB resource') # why is N/A here; check the RA logs
return 1
return 0
def getMountPoint(resource_name):
""" E.g. with resource_name 'CEP4_storage:/data' or 'drg15_bandwidth:/data2' or 'CS002bw0',
this function returns: '/data' or '/data2' or None.
"""
sep_idx = resource_name.find(':/') # mount point must be an abs path
if sep_idx == -1:
return None
return resource_name[sep_idx + 1 : ]
def updateAvailableStorageCapacities(radb, resources):
import os
status = 0
for res in resources:
# All storage resource names are supposedly mount points.
# But do not update with the wrong partition info (sys maintenance?).
# Log error and let admin figure it out. RADB resource defaults may need updating too.
mount_pt = getMountPoint(res['name'])
if mount_pt is None or not os.path.ismount(mount_pt):
logger.error("skipped updating available capacity of resource '{}': its path is not a mount point on this system".format(res['name']))
status = 1
continue
# Retrieve avail capacity from filesystem and do some checks.
try:
st = os.statvfs(mount_pt)
except OSError as e:
logger.error('statvfs: ' + str(e))
status = 1
continue
avail_cap = st.f_bavail * st.f_frsize
total_cap = st.f_blocks * st.f_frsize
if total_cap != res['total_capacity']:
hint_arg = res['name'] + '=' + str(avail_cap) + ',' + str(total_cap)
logger.warn("total capacity for resource '{}' is {}, which is not equal to {} as listed in the RADB. If the total capacity has changed permanently, please update the RADB, e.g. by running this program passing: {} (and by updating the software repo for RADB reinstalls).".format(res['name'], total_cap, res['total_capacity'], hint_arg))
if avail_cap > res['total_capacity']:
logger.error("the detected available capacity for resource '{}' cannot be written to the RADB, because it is greater than the total capacity listed in the RADB.")
status = 1
continue
# Only update available capacity in the RADB.
# Total and active indicate a config change (or maintenance in progress). Leave that for an admin.
res_update = {'id': res['id'], 'available_capacity': avail_cap,
'total_capacity': None, 'active': None}
status |= updateResource(radb, res_update)
return status
def updateSpecifiedCapacities(radb, resources, resource_updates):
status = 0
for res_update in resource_updates:
# Need resource id from name to apply the update. Also check avail <= total.
try:
res = next((res for res in resources if res['name'] == res_update['name']))
except StopIteration:
logger.error("skipped updating resource '{}': name unknown. Correct the name or (correct the) use (of) the -G/--resource-group-root option to widen the resource scope, e.g. -G CEP4|DRAGNET|LOFAR".format(res_update['name']))
status = 1
continue
if res_update['available_capacity'] is not None and \
res_update['total_capacity'] is None and \
res_update['available_capacity'] > res['total_capacity']:
logger.error("skipped updating resource '{}': specified available capacity cannot be greater than total capacity listed in the RADB. If the total capacity has changed permanently, please update the RADB using this program (and by updating the software repo for RADB reinstalls)".format(res_update['name']))
status = 1
continue
res_update['id'] = res['id']
status |= updateResource(radb, res_update)
return status
def getResourceGroupIdByName(db_rgp2rgp, name):
""" Returns group id of resource group named name, or None if name was not found.
The search happens breadth-first.
"""
# find root group(s): empty parent list
gids = [gid for gid, group in list(db_rgp2rgp.items()) if not group['parent_ids']]
i = 0
while i < len(gids): # careful iterating while modifying
res_group = db_rgp2rgp[gids[i]]
if res_group['resource_group_name'] == name:
return gids[i]
gids.extend(res_group['child_ids'])
i += 1
return None
def getSubtreeResourceIdList(db_rgp2rgp, root_gid):
""" Returns list of resource ids in resource group root_gid and its (grand)children."""
# Search breadth-first starting at root_gid.
gids = [root_gid]
resource_id_list = []
i = 0
while i < len(gids): # careful iterating while modifying
res_group = db_rgp2rgp[gids[i]]
resource_id_list.extend(res_group['resource_ids'])
gids.extend(res_group['child_ids'])
i += 1
return resource_id_list
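# Illustrative sketch only: the dict below mimics the db_rgp2rgp shape the two
# helpers above expect; the group ids, names and resource ids are invented.
def _example_resource_group_walk():
    db_rgp2rgp = {
        0: {'resource_group_name': 'LOFAR', 'parent_ids': [], 'child_ids': [1], 'resource_ids': []},
        1: {'resource_group_name': 'DRAGNET', 'parent_ids': [0], 'child_ids': [], 'resource_ids': [10, 11]},
    }
    gid = getResourceGroupIdByName(db_rgp2rgp, 'DRAGNET')  # -> 1
    return getSubtreeResourceIdList(db_rgp2rgp, gid)       # -> [10, 11]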
def parseResourceArg(arg):
""" Return dict parsed from arg str. Arg format: resource_name:/data=True,100,200
with any value optional after the '=' (but need at least one).
Any returned dict value but the resource name may be None.
On error ValueError is raised.
"""
eq_idx = arg.find('=')
if eq_idx == -1:
raise ValueError("could not find '=' in argument; need e.g. res_name=100 or resource_name=True,100,200")
resource_name = arg[ : eq_idx]
if not resource_name:
raise ValueError("invalid resource name in argument before '='; need e.g. res_name=100 or resource_name=True,100,200")
resource_val = arg[eq_idx + 1 : ]
vals = resource_val.split(',')
if not vals or len(vals) > 3:
raise ValueError("need 1-3 argument value(s) after '=', e.g. res_name=100 or resource_name=True,100,200")
active = None
avail_cap = None
total_cap = None
for val in vals:
if val == 'True' or val == 'False':
if active is not None:
raise ValueError("accepting at most 1 bool as resource active value in argument")
active = True if val == 'True' else False
continue
if total_cap is not None:
raise ValueError("accepting at most 2 ints as resource available and total capacities in argument")
v = int(val)
if v < 0:
raise ValueError("capacity value must be positive")
if avail_cap is None:
avail_cap = v
else:
if v < avail_cap:
raise ValueError("specified available capacity cannot be greater than specified total capacity")
total_cap = v
return {'name': resource_name, 'active': active,
'available_capacity': avail_cap, 'total_capacity': total_cap}
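# Illustrative examples of parsed arguments (the resource names are hypothetical):
#   parseResourceArg('node01_storage:/data=True,100,200')
#     -> {'name': 'node01_storage:/data', 'active': True,
#         'available_capacity': 100, 'total_capacity': 200}
#   parseResourceArg('node01_bandwidth=False')
#     -> {'name': 'node01_bandwidth', 'active': False,
#         'available_capacity': None, 'total_capacity': None}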
def parseTimestamps(datetime_fmt, timestamps):
""" Return list of None or datetime objects representing timestamps. Raise ValueError on parse error.
Use datetime_fmt as the strptime() format str. A timestamp value may also be 'now' (UTC) or 'None'.
"""
# Parsing datetime strings could be done by extending optparse's Option class, but this works well enough
rv = []
now = None
for ts in timestamps:
if ts is None or ts == 'now':
if now is None:
now = datetime.utcnow()
ts = now
elif ts == 'None':
ts = None
else:
ts = datetime.strptime(ts, datetime_fmt)
rv.append(ts)
return rv
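# Example (using the same format string that parseArgs() passes in):
#   parseTimestamps('%Y-%m-%d %H:%M:%S', ('2016-01-01 12:00:00', 'None'))
#     -> [datetime(2016, 1, 1, 12, 0), None]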
def parseArgs(args):
from socket import gethostname
hostname = gethostname()
from optparse import OptionParser
usage = 'Usage: %prog [OPTIONS] [resource_name=available_capacity]... or [resource_name=True|False[,avail_cap[,total_cap]]]...'
descr = 'List or update LOFAR RADB resource availability and/or available/total capacities'
parser = OptionParser(usage=usage, description=descr)
# already supported options: -h, --help, --
parser.add_option('-q', '--broker', dest='broker', default=DEFAULT_BROKER,
help='qpid broker hostname (default: %default).')
parser.add_option('--busname', dest='busname', default=DEFAULT_BUSNAME,
help='Name of the bus for all messaging operations (default: %default)')
parser.add_option('-G', '--resource-group-root', dest='resource_group_root', default=hostname,
help='Only consider resources under resource group root (default: this hostname: \'%default\' (all=LOFAR))')
parser.add_option('-t', '--resource-type', dest='resource_type', default=None,
help='Only consider resources of this type (e.g. storage, bandwidth, rsp, rcu, ...)')
parser.add_option('-E', '--end-past-tasks-storage-claims', dest='end_storage_claims', action='store_true', default=False,
help='WARNING: USE THIS OPTION ONLY FOR DRAGNET!. Set end time to task stoptime for storage claims under --resource-group-root for completed tasks. Implies -t storage. Can be limited to timerange given by -T and -S.')
parser.add_option('-U', '--update-available-storage-capacity', dest='update_avail', action='store_true', default=False,
help='Update the available capacity value in the RADB of storage resources under --resource-group-root. Implies -t storage. Not affected by -T and -S.')
datetime_fmt = '%Y-%m-%d %H:%M:%S'
parser.add_option('-T', '--timestart', dest='timestart',
help='lower bound UTC timestamp \'{}\' or \'now\' or \'None\' for resource claims (default: now)'.format(datetime_fmt))
parser.add_option('-S', '--timestop', dest='timestop',
help='upper bound UTC timestamp \'{}\' or \'now\' or \'None\' for resource claims (default: now)'.format(datetime_fmt))
parser.add_option('--no-scaled-units', dest='no_scaled_units', action='store_true', default=False,
help='Print raw instead of scaled units for some sizes, e.g. 1048576 instead of 1M')
options, left_over_args = parser.parse_args(args)
if options.update_avail and options.resource_group_root != hostname:
parser.error("combining the option -U with a non-default -G is rejected: it is too easy to mass-update the wrong resources")
if options.end_storage_claims or options.update_avail:
if options.resource_type is None:
options.resource_type = 'storage'
elif options.resource_type != 'storage':
parser.error("the options -E or -U cannot be combined with -t {}, because -E and -U are about storage only".format(options.resource_type))
try:
timestamps = parseTimestamps(datetime_fmt, (options.timestart, options.timestop))
except ValueError as exc:
parser.error("timestamp arguments: " + str(exc))
options.timestart = timestamps[0]
options.timestop = timestamps[1]
if options.timestart is not None and options.timestop is not None and options.timestart > options.timestop:
parser.error("-T/--timestart option value may not be after -S/--timestop option value")
resource_updates = []
for i, arg in enumerate(left_over_args):
try:
resource_updates.append(parseResourceArg(arg))
except ValueError as exc:
parser.error("failed to parse non-option argument '{}': {}".format(i, exc))
return options, resource_updates, parser.print_help
def main(args):
import os
os.environ['TZ'] = 'UTC' # LOFAR observatory software uses UTC
options, resource_updates, print_help_func = parseArgs(args)
status = 0
radb = None
try:
radb = RADBRPC.create(exchange=options.busname, broker=options.broker)
db_resource_list = radb.getResources(resource_types=options.resource_type, include_availability=True)
if options.timestart is None:
options.timestart = datetime(1970, 1, 1)
if options.timestop is None:
options.timestop = datetime(2100, 1, 1)
# Filter resource list via resource root group option
db_resource_group_mships = radb.getResourceGroupMemberships()
db_rgp2rgp = db_resource_group_mships['groups'] # resource-group-to-resource-group relations
group_id = getResourceGroupIdByName(db_rgp2rgp, options.resource_group_root)
if group_id is None:
print_help_func()
print("")
logger.error("could not find resource group '{}'. You may want to (correct the) use (of) the -G/--resource-group-root option to widen the resource scope, e.g. -G CEP4|DRAGNET|LOFAR".format(options.resource_group_root))
return 1
resource_id_list = getSubtreeResourceIdList(db_rgp2rgp, group_id)
if not resource_id_list:
print_help_func()
print("")
logger.error("no resources found under resource group '{}' and its (grand)children".format(options.resource_group_root))
return 1
resources = [res for res in db_resource_list if res['id'] in resource_id_list] # SQL could have done this better
if options.end_storage_claims:
try:
storage_resource_type_id = next((res['type_id'] for res in resources))
except StopIteration:
print_help_func()
print("")
logger.error("-E/--end-past-tasks-storage-claims used, but no storage resources found under resource group '{}' and its (grand)children".format(options.resource_group_root))
return 1
status |= updateStorageClaimsEndTime(radb, resources, storage_resource_type_id, lower_bound=options.timestart, upper_bound=options.timestop)
if options.update_avail:
status |= updateAvailableStorageCapacities(radb, resources)
if resource_updates:
status |= updateSpecifiedCapacities(radb, resources, resource_updates)
# If no specific action requested, print list of resources and claims
if not options.end_storage_claims and not options.update_avail and not resource_updates:
resource_ids = [res['id'] for res in resources]
claims = radb.getResourceClaims(lower_bound=options.timestart, upper_bound=options.timestop,
resource_ids=resource_ids, extended=True)
# A small downside of querying RADB again is that the claimable capacities might be inconsistent with claims just retrieved.
# We could derive it ourselves or stick it in a transaction, but this is good enough for the overview.
for res in resources:
res['claimable_capacity'] = radb.get_resource_claimable_capacity(resource_id=res['id'],
lower_bound=options.timestart,
upper_bound=options.timestop)
printResources(resources, not options.no_scaled_units)
print("")
printClaims(claims, not options.no_scaled_units)
#except Exception: # disabled: prefer default stacktrace on bug here
finally:
if radb is not None:
radb.close()
return status
if __name__ == '__main__':
from sys import argv, exit
exit(main(argv[1:]))<|fim▁end|>
| |
<|file_name|>ArraySave.py<|end_file_name|><|fim▁begin|>#/*##########################################################################
# Copyright (C) 2004-2012 European Synchrotron Radiation Facility
#
# This file is part of the PyMca X-ray Fluorescence Toolkit developed at
# the ESRF by the Software group.
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This file is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
#############################################################################*/
__author__ = "V.A. Sole - ESRF Data Analysis"
import os
import numpy
import time
try:
from PyMca import EdfFile
from PyMca import TiffIO
except ImportError:
print("ArraySave.py is importing EdfFile and TiffIO from local directory")
import EdfFile
import TiffIO
HDF5 = True
try:
import h5py
except ImportError:
HDF5 = False
DEBUG = 0
def getDate():
localtime = time.localtime()
gtime = time.gmtime()
#year, month, day, hour, minute, second,\
# week_day, year_day, delta = time.localtime()
year = localtime[0]
month = localtime[1]
day = localtime[2]
hour = localtime[3]
minute = localtime[4]
second = localtime[5]
#get the difference against Greenwich
delta = hour - gtime[3]
return "%4d-%02d-%02dT%02d:%02d:%02d%+02d:00" % (year, month, day, hour,
minute, second, delta)
def save2DArrayListAsASCII(datalist, filename,
labels=None, csv=False, csvseparator=";"):
if type(datalist) != type([]):
datalist = [datalist]
r, c = datalist[0].shape
ndata = len(datalist)
if os.path.exists(filename):
try:
os.remove(filename)
except OSError:
pass
if labels is None:
labels = []
for i in range(len(datalist)):
labels.append("Array_%d" % i)
if len(labels) != len(datalist):
raise ValueError("Incorrect number of labels")
if csv:
header = '"row"%s"column"' % csvseparator
for label in labels:
header += '%s"%s"' % (csvseparator, label)
else:
header = "row column"
for label in labels:
header += " %s" % label
filehandle = open(filename, 'w+')
filehandle.write('%s\n' % header)
fileline = ""
if csv:
for row in range(r):
for col in range(c):
fileline += "%d" % row
fileline += "%s%d" % (csvseparator, col)
for i in range(ndata):
fileline += "%s%g" % (csvseparator, datalist[i][row, col])
fileline += "\n"
filehandle.write("%s" % fileline)
fileline = ""
else:
for row in range(r):
for col in range(c):
fileline += "%d" % row
fileline += " %d" % col
for i in range(ndata):
fileline += " %g" % datalist[i][row, col]
fileline += "\n"
filehandle.write("%s" % fileline)
fileline = ""
filehandle.write("\n")
filehandle.close()
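# Usage sketch (output path and data are illustrative):
#   save2DArrayListAsASCII([numpy.ones((5, 4))], "/tmp/example.csv",
#                          labels=["ones"], csv=True, csvseparator=";")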
def save2DArrayListAsEDF(datalist, filename, labels=None, dtype=None):
if type(datalist) != type([]):
datalist = [datalist]
ndata = len(datalist)
if os.path.exists(filename):
try:
os.remove(filename)
except OSError:
pass
if labels is None:
labels = []
for i in range(ndata):
labels.append("Array_%d" % i)
if len(labels) != ndata:
raise ValueError("Incorrect number of labels")
edfout = EdfFile.EdfFile(filename, access="ab")
for i in range(ndata):
if dtype is None:
edfout.WriteImage({'Title': labels[i]},
datalist[i], Append=1)
else:
edfout.WriteImage({'Title': labels[i]},
datalist[i].astype(dtype),
Append=1)
del edfout # force file close
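# Usage sketch (output path and data are illustrative):
#   save2DArrayListAsEDF([numpy.zeros((10, 10))], "/tmp/example.edf",
#                        labels=["zeros"], dtype=numpy.float32)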
def save2DArrayListAsMonochromaticTiff(datalist, filename,
labels=None, dtype=None):
if type(datalist) != type([]):
datalist = [datalist]
ndata = len(datalist)
if dtype is None:
dtype = datalist[0].dtype
for i in range(len(datalist)):
dtypeI = datalist[i].dtype
if dtypeI in [numpy.float32, numpy.float64] or\
dtypeI.str[-2] == 'f':
dtype = numpy.float32
break
elif dtypeI != dtype:
dtype = numpy.float32
break
if os.path.exists(filename):
try:
os.remove(filename)
except OSError:
pass
if labels is None:
labels = []
for i in range(ndata):
labels.append("Array_%d" % i)
if len(labels) != ndata:
raise ValueError("Incorrect number of labels")
outfileInstance = TiffIO.TiffIO(filename, mode="wb+")
for i in range(ndata):
if i == 1:
outfileInstance = TiffIO.TiffIO(filename, mode="rb+")
if dtype is None:
data = datalist[i]
else:
data = datalist[i].astype(dtype)
outfileInstance.writeImage(data, info={'Title': labels[i]})
outfileInstance.close() # force file close
def openHDF5File(name, mode='a', **kwargs):
"""
Open an HDF5 file.
Valid modes (like Python's file() modes) are:
- r Readonly, file must exist
- r+ Read/write, file must exist
- w Create file, truncate if exists
- w- Create file, fail if exists
- a Read/write if exists, create otherwise (default)
"""
h5file = h5py.File(name, mode, **kwargs)
if h5file.mode != 'r' and len(h5file) == 0:
if 'file_name' not in h5file.attrs:
attr = 'file_name'
txt = "%s" % name
dtype = '<S%d' % len(txt)
h5file.attrs.create(attr, txt, dtype=dtype)
if 'file_time' not in h5file.attrs:
attr = 'file_time'
txt = "%s" % getDate()
dtype = '<S%d' % len(txt)
h5file.attrs.create(attr, txt, dtype=dtype)
if 'HDF5_version' not in h5file.attrs:
attr = 'HDF5_version'
txt = "%s" % h5py.version.hdf5_version
dtype = '<S%d' % len(txt)
h5file.attrs.create(attr, txt, dtype=dtype)
if 'HDF5_API_version' not in h5file.attrs:
attr = 'HDF5_API_version'
txt = "%s" % h5py.version.api_version
dtype = '<S%d' % len(txt)
h5file.attrs.create(attr, txt, dtype=dtype)
if 'h5py_version' not in h5file.attrs:
attr = 'h5py_version'
txt = "%s" % h5py.version.version
dtype = '<S%d' % len(txt)
h5file.attrs.create(attr, txt, dtype=dtype)
if 'creator' not in h5file.attrs:
attr = 'creator'
txt = "%s" % 'PyMca'
dtype = '<S%d' % len(txt)
h5file.attrs.create(attr, txt, dtype=dtype)
#if 'format_version' not in self.attrs and len(h5file) == 0:
# h5file.attrs['format_version'] = __format_version__
return h5file
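# Minimal usage sketch (file name is illustrative; requires h5py):
#   h5f = openHDF5File("/tmp/example.h5", mode="w")
#   h5f.create_dataset("spectrum", data=numpy.arange(10.))
#   h5f.close()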
def getHDF5FileInstanceAndBuffer(filename, shape,
buffername="data",
dtype=numpy.float32,
interpretation=None,
compression=None):
if not HDF5:
raise IOError('h5py does not seem to be installed in your system')
if os.path.exists(filename):
try:
os.remove(filename)
except:
raise IOError("Cannot overwrite existing file!")
hdf = openHDF5File(filename, 'a')
entryName = "data"
#entry
nxEntry = hdf.require_group(entryName)
if 'NX_class' not in nxEntry.attrs:
nxEntry.attrs['NX_class'] = 'NXentry'.encode('utf-8')
elif nxEntry.attrs['NX_class'] != 'NXentry'.encode('utf-8'):
#should I raise an error?
pass
nxEntry['title'] = "PyMca saved 3D Array".encode('utf-8')
nxEntry['start_time'] = getDate().encode('utf-8')
nxData = nxEntry.require_group('NXdata')
if 'NX_class' not in nxData.attrs:
nxData.attrs['NX_class'] = 'NXdata'.encode('utf-8')
    elif nxData.attrs['NX_class'] != 'NXdata'.encode('utf-8'):
#should I raise an error?
pass
if compression:
if DEBUG:
print("Saving compressed and chunked dataset")
chunk1 = int(shape[1] / 10)
if chunk1 == 0:
chunk1 = shape[1]
for i in [11, 10, 8, 7, 5, 4]:
if (shape[1] % i) == 0:
chunk1 = int(shape[1] / i)
break
chunk2 = int(shape[2] / 10)
if chunk2 == 0:
chunk2 = shape[2]
for i in [11, 10, 8, 7, 5, 4]:
if (shape[2] % i) == 0:
chunk2 = int(shape[2] / i)
break
data = nxData.require_dataset(buffername,
shape=shape,
dtype=dtype,
chunks=(1, chunk1, chunk2),
compression=compression)
else:
#no chunking
if DEBUG:
print("Saving not compressed and not chunked dataset")
data = nxData.require_dataset(buffername,
shape=shape,
dtype=dtype,
compression=None)
data.attrs['signal'] = numpy.int32(1)
if interpretation is not None:
data.attrs['interpretation'] = interpretation.encode('utf-8')
for i in range(len(shape)):
dim = numpy.arange(shape[i]).astype(numpy.float32)
        dset = nxData.require_dataset('dim_%d' % i,
                                      dim.shape,
                                      dim.dtype,
                                      data=dim,
                                      chunks=dim.shape)
dset.attrs['axis'] = numpy.int32(i + 1)
nxEntry['end_time'] = getDate().encode('utf-8')
return hdf, data
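# Usage sketch (shape and file name are illustrative; the caller owns and must close the file):
#   hdf, buffer = getHDF5FileInstanceAndBuffer("/tmp/stack.h5", (10, 64, 64))
#   buffer[0, :, :] = numpy.zeros((64, 64), numpy.float32)
#   hdf.flush()
#   hdf.close()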
def save3DArrayAsMonochromaticTiff(data, filename,
labels=None, dtype=None, mcaindex=-1):
ndata = data.shape[mcaindex]
if dtype is None:
dtype = numpy.float32
if os.path.exists(filename):
try:
os.remove(filename)
except OSError:
pass
if labels is None:
labels = []
for i in range(ndata):
labels.append("Array_%d" % i)
if len(labels) != ndata:
raise ValueError("Incorrect number of labels")
outfileInstance = TiffIO.TiffIO(filename, mode="wb+")
if mcaindex in [2, -1]:
for i in range(ndata):
if i == 1:
outfileInstance = TiffIO.TiffIO(filename, mode="rb+")
if dtype is None:
tmpData = data[:, :, i]
else:
tmpData = data[:, :, i].astype(dtype)
outfileInstance.writeImage(tmpData, info={'Title': labels[i]})<|fim▁hole|> if i == 1:
outfileInstance = TiffIO.TiffIO(filename, mode="rb+")
if dtype is None:
tmpData = data[:, i, :]
else:
tmpData = data[:, i, :].astype(dtype)
outfileInstance.writeImage(tmpData, info={'Title': labels[i]})
if (ndata > 10):
print("Saved image %d of %d" % (i + 1, ndata))
else:
for i in range(ndata):
if i == 1:
outfileInstance = TiffIO.TiffIO(filename, mode="rb+")
if dtype is None:
tmpData = data[i]
else:
tmpData = data[i].astype(dtype)
outfileInstance.writeImage(tmpData, info={'Title': labels[i]})
if (ndata > 10):
print("Saved image %d of %d" % (i + 1, ndata))
outfileInstance.close() # force file close
# A dataset-name argument should eventually be added; for the time being the dataset is simply named 'data'.
def save3DArrayAsHDF5(data, filename, axes=None, labels=None, dtype=None, mode='nexus',
mcaindex=-1, interpretation=None, compression=None):
if not HDF5:
raise IOError('h5py does not seem to be installed in your system')
if (mcaindex == 0) and (interpretation in ["spectrum", None]):
#stack of images to be saved as stack of spectra
modify = True
shape = [data.shape[1], data.shape[2], data.shape[0]]
elif (mcaindex != 0) and (interpretation in ["image"]):
#stack of spectra to be saved as stack of images
modify = True
shape = [data.shape[2], data.shape[0], data.shape[1]]
else:
modify = False
shape = data.shape
if dtype is None:
dtype = data.dtype
if mode.lower() in ['nexus', 'nexus+']:
#raise IOError, 'NeXus data saving not implemented yet'
if os.path.exists(filename):
try:
os.remove(filename)
except:
raise IOError("Cannot overwrite existing file!")
hdf = openHDF5File(filename, 'a')
entryName = "data"
#entry
nxEntry = hdf.require_group(entryName)
if 'NX_class' not in nxEntry.attrs:
nxEntry.attrs['NX_class'] = 'NXentry'.encode('utf-8')
elif nxEntry.attrs['NX_class'] != 'NXentry'.encode('utf-8'):
#should I raise an error?
pass
nxEntry['title'] = "PyMca saved 3D Array".encode('utf-8')
nxEntry['start_time'] = getDate().encode('utf-8')
nxData = nxEntry.require_group('NXdata')
if ('NX_class' not in nxData.attrs):
nxData.attrs['NX_class'] = 'NXdata'.encode('utf-8')
elif nxData.attrs['NX_class'] != 'NXdata'.encode('utf-8'):
#should I raise an error?
pass
if modify:
if interpretation in ["image", "image".encode('utf-8')]:
if compression:
if DEBUG:
print("Saving compressed and chunked dataset")
#risk of taking a 10 % more space in disk
chunk1 = int(shape[1] / 10)
if chunk1 == 0:
chunk1 = shape[1]
for i in [11, 10, 8, 7, 5, 4]:
if (shape[1] % i) == 0:
chunk1 = int(shape[1] / i)
break
chunk2 = int(shape[2] / 10)
for i in [11, 10, 8, 7, 5, 4]:
if (shape[2] % i) == 0:
chunk2 = int(shape[2] / i)
break
dset = nxData.require_dataset('data',
shape=shape,
dtype=dtype,
chunks=(1, chunk1, chunk2),
compression=compression)
else:
if DEBUG:
print("Saving not compressed and not chunked dataset")
#print not compressed -> Not chunked
dset = nxData.require_dataset('data',
shape=shape,
dtype=dtype,
compression=None)
for i in range(data.shape[-1]):
tmp = data[:, :, i:i + 1]
tmp.shape = 1, shape[1], shape[2]
dset[i, 0:shape[1], :] = tmp
print("Saved item %d of %d" % (i + 1, data.shape[-1]))
elif 0:
#if I do not match the input and output shapes it takes ages
#to save the images as spectra. However, it is much faster
#when performing spectra operations.
dset = nxData.require_dataset('data',
shape=shape,
dtype=dtype,
chunks=(1, shape[1], shape[2]))
for i in range(data.shape[1]): # shape[0]
chunk = numpy.zeros((1, data.shape[2], data.shape[0]),
dtype)
for k in range(data.shape[0]): # shape[2]
if 0:
tmpData = data[k:k + 1]
for j in range(data.shape[2]): # shape[1]
tmpData.shape = data.shape[1], data.shape[2]
chunk[0, j, k] = tmpData[i, j]
else:
tmpData = data[k:k + 1, i, :]
tmpData.shape = -1
chunk[0, :, k] = tmpData
print("Saving item %d of %d" % (i, data.shape[1]))
dset[i, :, :] = chunk
else:
#if I do not match the input and output shapes it takes ages
#to save the images as spectra. This is a very fast saving, but
#the performance is awful when reading.
if compression:
if DEBUG:
print("Saving compressed and chunked dataset")
dset = nxData.require_dataset('data',
shape=shape,
dtype=dtype,
chunks=(shape[0], shape[1], 1),
compression=compression)
else:
if DEBUG:
print("Saving not compressed and not chunked dataset")
dset = nxData.require_dataset('data',
shape=shape,
dtype=dtype,
compression=None)
for i in range(data.shape[0]):
tmp = data[i:i + 1, :, :]
tmp.shape = shape[0], shape[1], 1
dset[:, :, i:i + 1] = tmp
else:
if compression:
if DEBUG:
print("Saving compressed and chunked dataset")
chunk1 = int(shape[1] / 10)
if chunk1 == 0:
chunk1 = shape[1]
for i in [11, 10, 8, 7, 5, 4]:
if (shape[1] % i) == 0:
chunk1 = int(shape[1] / i)
break
chunk2 = int(shape[2] / 10)
if chunk2 == 0:
chunk2 = shape[2]
for i in [11, 10, 8, 7, 5, 4]:
if (shape[2] % i) == 0:
chunk2 = int(shape[2] / i)
break
if DEBUG:
print("Used chunk size = (1, %d, %d)" % (chunk1, chunk2))
dset = nxData.require_dataset('data',
shape=shape,
dtype=dtype,
chunks=(1, chunk1, chunk2),
compression=compression)
else:
if DEBUG:
print("Saving not compressed and notchunked dataset")
dset = nxData.require_dataset('data',
shape=shape,
dtype=dtype,
compression=None)
tmpData = numpy.zeros((1, data.shape[1], data.shape[2]),
data.dtype)
for i in range(data.shape[0]):
tmpData[0:1] = data[i:i + 1]
dset[i:i + 1] = tmpData[0:1]
print("Saved item %d of %d" % (i + 1, data.shape[0]))
dset.attrs['signal'] = "1".encode('utf-8')
if interpretation is not None:
dset.attrs['interpretation'] = interpretation.encode('utf-8')
axesAttribute = []
for i in range(len(shape)):
if axes is None:
dim = numpy.arange(shape[i]).astype(numpy.float32)
dimlabel = 'dim_%d' % i
elif axes[i] is not None:
dim = axes[i]
try:
dimlabel = "%s" % labels[i]
except:
dimlabel = 'dim_%d' % i
else:
dim = numpy.arange(shape[i]).astype(numpy.float32)
dimlabel = 'dim_%d' % i
axesAttribute.append(dimlabel)
adset = nxData.require_dataset(dimlabel,
dim.shape,
dim.dtype,
compression=None)
adset[:] = dim[:]
adset.attrs['axis'] = i + 1
dset.attrs['axes'] = (":".join(axesAttribute)).encode('utf-8')
nxEntry['end_time'] = getDate().encode('utf-8')
if mode.lower() == 'nexus+':
#create link
g = h5py.h5g.open(hdf.fid, '/'.encode('utf-8'))
g.link('/data/NXdata/data'.encode('utf-8'),
'/data/data'.encode('utf-8'),
h5py.h5g.LINK_HARD)
elif mode.lower() == 'simplest':
if os.path.exists(filename):
try:
os.remove(filename)
except:
raise IOError("Cannot overwrite existing file!")
hdf = h5py.File(filename, 'a')
if compression:
hdf.require_dataset('data',
shape=shape,
dtype=dtype,
data=data,
chunks=(1, shape[1], shape[2]),
compression=compression)
else:
hdf.require_dataset('data',
shape=shape,
data=data,
dtype=dtype,
compression=None)
else:
if os.path.exists(filename):
try:
os.remove(filename)
except:
raise IOError("Cannot overwrite existing file!")
shape = data.shape
dtype = data.dtype
hdf = h5py.File(filename, 'a')
dataGroup = hdf.require_group('data')
dataGroup.require_dataset('data',
shape=shape,
dtype=dtype,
data=data,
chunks=(1, shape[1], shape[2]))
hdf.flush()
hdf.close()
def main():
a = numpy.arange(1000000.)
a.shape = 20, 50, 1000
save3DArrayAsHDF5(a, '/test.h5', mode='nexus+', interpretation='image')
getHDF5FileInstanceAndBuffer('/test2.h5', (100, 100, 100))
print("Date String = ", getDate())
if __name__ == "__main__":
main()<|fim▁end|>
|
if (ndata > 10):
print("Saved image %d of %d" % (i + 1, ndata))
elif mcaindex == 1:
for i in range(ndata):
|
<|file_name|>qdbusconnectioninterface.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation ([email protected])
**
** This file is part of the QtDBus module of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial Usage
** Licensees holding valid Qt Commercial licenses may use this file in
** accordance with the Qt Commercial License Agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Nokia.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
** If you have questions regarding the use of this file, please contact
** Nokia at [email protected].
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qdbusconnectioninterface.h"
#include <QtCore/QByteArray>
#include <QtCore/QList>
#include <QtCore/QMap>
#include <QtCore/QString>
#include <QtCore/QStringList>
#include <QtCore/QVariant>
#include <QtCore/QDebug>
#include "qdbus_symbols_p.h" // for the DBUS_* constants
#ifndef QT_NO_DBUS
QT_BEGIN_NAMESPACE
/*
* Implementation of interface class QDBusConnectionInterface
*/
/*!
\class QDBusConnectionInterface
\inmodule QtDBus
\since 4.2
\brief The QDBusConnectionInterface class provides access to the D-Bus bus daemon service.
The D-Bus bus server daemon provides one special interface \c
org.freedesktop.DBus that allows clients to access certain
properties of the bus, such as the current list of clients
connected. The QDBusConnectionInterface class provides access to that
interface.
The most common uses of this class are to register and unregister
service names on the bus using the registerService() and
unregisterService() functions, query about existing names using
the isServiceRegistered(), registeredServiceNames() and
serviceOwner() functions, and to receive notification that a
client has registered or de-registered through the
serviceRegistered(), serviceUnregistered() and serviceOwnerChanged()
signals.
*/
/*!
\enum QDBusConnectionInterface::ServiceQueueOptions
Flags for determining how a service registration should behave, in
case the service name is already registered.
\value DontQueueService If an application requests a name that
is already owned, no queueing will be
                                performed. The registerService()
call will simply fail.
This is the default.
\value QueueService Attempts to register the requested
service, but do not try to replace it
if another application already has it
registered. Instead, simply put this
application in queue, until it is
given up. The serviceRegistered()
signal will be emitted when that
happens.
\value ReplaceExistingService If another application already has
the service name registered, attempt
to replace it.
\sa ServiceReplacementOptions
*/
/*!
\enum QDBusConnectionInterface::ServiceReplacementOptions
Flags for determining if the D-Bus server should allow another
application to replace a name that this application has registered
with the ReplaceExistingService option.
The possible values are:
\value DontAllowReplacement Do not allow another application to
replace us. The service must be
explicitly unregistered with
unregisterService() for another
application to acquire it.
This is the default.
\value AllowReplacement Allow other applications to replace us
with the ReplaceExistingService option
to registerService() without
intervention. If that happens, the
serviceUnregistered() signal will be
emitted.
\sa ServiceQueueOptions
*/
/*!
\enum QDBusConnectionInterface::RegisterServiceReply
The possible return values from registerService():
\value ServiceNotRegistered The call failed and the service name was not registered.
\value ServiceRegistered The caller is now the owner of the service name.
\value ServiceQueued The caller specified the QueueService flag and the
service was already registered, so we are in queue.
The serviceRegistered() signal will be emitted when the service is
acquired by this application.
*/
/*!
\internal
*/
const char *QDBusConnectionInterface::staticInterfaceName()
{ return "org.freedesktop.DBus"; }
/*!
\internal
*/
QDBusConnectionInterface::QDBusConnectionInterface(const QDBusConnection &connection,
QObject *parent)
: QDBusAbstractInterface(QLatin1String(DBUS_SERVICE_DBUS),
QLatin1String(DBUS_PATH_DBUS),
DBUS_INTERFACE_DBUS, connection, parent)
{
connect(this, SIGNAL(NameAcquired(QString)), this, SIGNAL(serviceRegistered(QString)));
connect(this, SIGNAL(NameLost(QString)), this, SIGNAL(serviceUnregistered(QString)));
connect(this, SIGNAL(NameOwnerChanged(QString,QString,QString)),
this, SIGNAL(serviceOwnerChanged(QString,QString,QString)));
}
<|fim▁hole|>{
}
/*!
Returns the unique connection name of the primary owner of the
name \a name. If the requested name doesn't have an owner, returns
a \c org.freedesktop.DBus.Error.NameHasNoOwner error.
*/
QDBusReply<QString> QDBusConnectionInterface::serviceOwner(const QString &name) const
{
return internalConstCall(QDBus::AutoDetect, QLatin1String("GetNameOwner"), QList<QVariant>() << name);
}
/*!
\property QDBusConnectionInterface::registeredServiceNames
\brief holds the registered service names
Lists all names currently registered on the bus.
*/
QDBusReply<QStringList> QDBusConnectionInterface::registeredServiceNames() const
{
return internalConstCall(QDBus::AutoDetect, QLatin1String("ListNames"));
}
/*!
    Returns true if the service name \a serviceName is currently
registered.
*/
QDBusReply<bool> QDBusConnectionInterface::isServiceRegistered(const QString &serviceName) const
{
return internalConstCall(QDBus::AutoDetect, QLatin1String("NameHasOwner"),
QList<QVariant>() << serviceName);
}
/*!
Returns the Unix Process ID (PID) for the process currently
holding the bus service \a serviceName.
*/
QDBusReply<uint> QDBusConnectionInterface::servicePid(const QString &serviceName) const
{
return internalConstCall(QDBus::AutoDetect, QLatin1String("GetConnectionUnixProcessID"),
QList<QVariant>() << serviceName);
}
/*!
Returns the Unix User ID (UID) for the process currently holding
the bus service \a serviceName.
*/
QDBusReply<uint> QDBusConnectionInterface::serviceUid(const QString &serviceName) const
{
return internalConstCall(QDBus::AutoDetect, QLatin1String("GetConnectionUnixUser"),
QList<QVariant>() << serviceName);
}
/*!
Requests that the bus start the service given by the name \a name.
*/
QDBusReply<void> QDBusConnectionInterface::startService(const QString &name)
{
return call(QLatin1String("StartServiceByName"), name, uint(0));
}
/*!
Requests to register the service name \a serviceName on the
bus. The \a qoption flag specifies how the D-Bus server should behave
if \a serviceName is already registered. The \a roption flag
specifies if the server should allow another application to
replace our registered name.
If the service registration succeeds, the serviceRegistered()
signal will be emitted. If we are placed in queue, the signal will
be emitted when we obtain the name. If \a roption is
AllowReplacement, the serviceUnregistered() signal will be emitted
if another application replaces this one.
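    For example (the requested service name here is purely illustrative):
    \code
        QDBusConnectionInterface *bus = QDBusConnection::sessionBus().interface();
        QDBusReply<QDBusConnectionInterface::RegisterServiceReply> reply =
            bus->registerService(QLatin1String("org.example.MyApp"),
                                 QDBusConnectionInterface::QueueService);
        if (reply.isValid() && reply.value() == QDBusConnectionInterface::ServiceRegistered) {
            // this application now owns the requested name
        }
    \endcode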
\sa unregisterService()
*/
QDBusReply<QDBusConnectionInterface::RegisterServiceReply>
QDBusConnectionInterface::registerService(const QString &serviceName,
ServiceQueueOptions qoption,
ServiceReplacementOptions roption)
{
// reconstruct the low-level flags
uint flags = 0;
switch (qoption) {
case DontQueueService:
flags = DBUS_NAME_FLAG_DO_NOT_QUEUE;
break;
case QueueService:
flags = 0;
break;
case ReplaceExistingService:
flags = DBUS_NAME_FLAG_DO_NOT_QUEUE | DBUS_NAME_FLAG_REPLACE_EXISTING;
break;
}
switch (roption) {
case DontAllowReplacement:
break;
case AllowReplacement:
flags |= DBUS_NAME_FLAG_ALLOW_REPLACEMENT;
break;
}
QDBusMessage reply = call(QLatin1String("RequestName"), serviceName, flags);
// qDebug() << "QDBusConnectionInterface::registerService" << serviceName << "Reply:" << reply;
// convert the low-level flags to something that we can use
if (reply.type() == QDBusMessage::ReplyMessage) {
uint code = 0;
switch (reply.arguments().at(0).toUInt()) {
case DBUS_REQUEST_NAME_REPLY_PRIMARY_OWNER:
case DBUS_REQUEST_NAME_REPLY_ALREADY_OWNER:
code = uint(ServiceRegistered);
break;
case DBUS_REQUEST_NAME_REPLY_EXISTS:
code = uint(ServiceNotRegistered);
break;
case DBUS_REQUEST_NAME_REPLY_IN_QUEUE:
code = uint(ServiceQueued);
break;
}
reply.setArguments(QVariantList() << code);
}
return reply;
}
/*!
Releases the claim on the bus service name \a serviceName, that
had been previously registered with registerService(). If this
application had ownership of the name, it will be released for
other applications to claim. If it only had the name queued, it
gives up its position in the queue.
*/
QDBusReply<bool>
QDBusConnectionInterface::unregisterService(const QString &serviceName)
{
QDBusMessage reply = call(QLatin1String("ReleaseName"), serviceName);
if (reply.type() == QDBusMessage::ReplyMessage) {
bool success = reply.arguments().at(0).toUInt() == DBUS_RELEASE_NAME_REPLY_RELEASED;
reply.setArguments(QVariantList() << success);
}
return reply;
}
/*!
\internal
*/
void QDBusConnectionInterface::connectNotify(const char *signalName)
{
// translate the signal names to what we really want
// this avoids setting hooks for signals that don't exist on the bus
if (qstrcmp(signalName, SIGNAL(serviceRegistered(QString))) == 0)
QDBusAbstractInterface::connectNotify(SIGNAL(NameAcquired(QString)));
else if (qstrcmp(signalName, SIGNAL(serviceUnregistered(QString))) == 0)
QDBusAbstractInterface::connectNotify(SIGNAL(NameLost(QString)));
else if (qstrcmp(signalName, SIGNAL(serviceOwnerChanged(QString,QString,QString))) == 0) {
static bool warningPrinted = false;
if (!warningPrinted) {
qWarning("Connecting to deprecated signal QDBusConnectionInterface::serviceOwnerChanged(QString,QString,QString)");
warningPrinted = true;
}
QDBusAbstractInterface::connectNotify(SIGNAL(NameOwnerChanged(QString,QString,QString)));
}
}
/*!
\internal
*/
void QDBusConnectionInterface::disconnectNotify(const char *signalName)
{
// translate the signal names to what we really want
// this avoids setting hooks for signals that don't exist on the bus
if (qstrcmp(signalName, SIGNAL(serviceRegistered(QString))) == 0)
QDBusAbstractInterface::disconnectNotify(SIGNAL(NameAcquired(QString)));
else if (qstrcmp(signalName, SIGNAL(serviceUnregistered(QString))) == 0)
QDBusAbstractInterface::disconnectNotify(SIGNAL(NameLost(QString)));
else if (qstrcmp(signalName, SIGNAL(serviceOwnerChanged(QString,QString,QString))) == 0)
QDBusAbstractInterface::disconnectNotify(SIGNAL(NameOwnerChanged(QString,QString,QString)));
}
// signals
/*!
\fn QDBusConnectionInterface::serviceRegistered(const QString &serviceName)
This signal is emitted by the D-Bus server when the bus service
name (unique connection name or well-known service name) given by
\a serviceName is acquired by this application.
Acquisition happens after this application has requested a name using
registerService().
*/
/*!
\fn QDBusConnectionInterface::serviceUnregistered(const QString &serviceName)
This signal is emitted by the D-Bus server when this application
loses ownership of the bus service name given by \a serviceName.
*/
/*!
\fn QDBusConnectionInterface::serviceOwnerChanged(const QString &name, const QString &oldOwner, const QString &newOwner)
This signal is emitted by the D-Bus server whenever a service
    ownership change happens in the bus, including the appearance and
    disappearance of names.
This signal means the application \a oldOwner lost ownership of
bus name \a name to application \a newOwner. If \a oldOwner is an
empty string, it means the name \a name has just been created; if
\a newOwner is empty, the name \a name has no current owner and is
no longer available.
\note connecting to this signal will make the application listen for and
receive every single service ownership change on the bus. Depending on
    how many services are running, this may cause the application to be woken up
    to receive more signals than it needs. To avoid this problem, use the
QDBusServiceWatcher class, which can listen for specific changes.
*/
/*!
\fn void QDBusConnectionInterface::callWithCallbackFailed(const QDBusError &error, const QDBusMessage &call)
This signal is emitted when there is an error during a
QDBusConnection::callWithCallback(). \a error specifies the error.
\a call is the message that couldn't be delivered.
\sa QDBusConnection::callWithCallback()
*/
QT_END_NAMESPACE
#endif // QT_NO_DBUS<|fim▁end|>
|
/*!
\internal
*/
QDBusConnectionInterface::~QDBusConnectionInterface()
|
<|file_name|>comment_result.py<|end_file_name|><|fim▁begin|>from pykintone.result import Result
from pykintone.comment import RecordComment, Mention
class CreateCommentResult(Result):
<|fim▁hole|> serialized = response.json()
if "id" in serialized:
self.comment_id = int(serialized["id"])
class SelectCommentResult(Result):
def __init__(self, response):
super(SelectCommentResult, self).__init__(response)
self.raw_comments = []
self.older = False
self.newer = False
if self.ok:
serialized = response.json()
if "comments" in serialized:
self.raw_comments = serialized["comments"]
self.older = serialized["older"]
self.newer = serialized["newer"]
def comments(self):
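        """Return the fetched comments as RecordComment objects, with their mentions deserialized into Mention objects."""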
cs = [RecordComment.deserialize(cd) for cd in self.raw_comments]
for c in cs:
c.mentions = [Mention.deserialize(m) for m in c.mentions]
return cs<|fim▁end|>
|
def __init__(self, response):
super(CreateCommentResult, self).__init__(response)
self.comment_id = -1
if self.ok:
|
<|file_name|>edit_instance_vmware.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
def command():
return "edit-instance-vmware"
<|fim▁hole|> parser.add_argument("--instance-type", required=True)
parser.add_argument("--key-name", required=True)
parser.add_argument("--compute-resource", required=True)
parser.add_argument("--is-static-ip", required=True)
parser.add_argument("--ip-address", required=False)
parser.add_argument("--subnet-mask", required=False)
parser.add_argument("--default-gateway", required=False)
parser.add_argument("--comment", required=False)
parser.add_argument("--root-size", required=False)
def execute(requester, args):
instance_no = args.instance_no
instance_type = args.instance_type
key_name = args.key_name
compute_resource = args.compute_resource
is_static_ip = args.is_static_ip
ip_address = args.ip_address
subnet_mask = args.subnet_mask
default_gateway = args.default_gateway
comment = args.comment
root_size = args.root_size
parameters = {}
parameters["InstanceNo"] = instance_no
parameters["InstanceType"] = instance_type
parameters["KeyName"] = key_name
parameters["ComputeResource"] = compute_resource
parameters["IsStaticIp"] = is_static_ip
    if ip_address is not None:
        parameters["IpAddress"] = ip_address
    if subnet_mask is not None:
        parameters["SubnetMask"] = subnet_mask
    if default_gateway is not None:
        parameters["DefaultGateway"] = default_gateway
    if comment is not None:
        parameters["Comment"] = comment
    if root_size is not None:
        parameters["RootSize"] = root_size
return requester.execute("/EditInstanceVmware", parameters)<|fim▁end|>
|
def init_argument(parser):
parser.add_argument("--instance-no", required=True)
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# ____________
# ___/ | \_____________ _ _ ___
# / ___/ | _ __ _ _| | ___ __ _ __| | \
# / \___/ ______/ | '_ \ || | |__/ _ \/ _` / _` | \
# \ ◯ | | .__/\_, |____\___/\__,_\__,_| /
# \_______\ /_______|_| |__/________________________/
# \ /
# \/
import _locale<|fim▁hole|>import logging
import locale
import os
import pkg_resources
import semver
import sys
import traceback
# Info
APPID = "pyload"
PKGNAME = "pyload-ng"
PKGDIR = pkg_resources.resource_filename(__name__, "")
USERHOMEDIR = os.path.expanduser("~")
os.chdir(USERHOMEDIR)
__version__ = pkg_resources.get_distribution(PKGNAME).parsed_version.base_version
__version_info__ = semver.parse_version_info(__version__)
# Locale
locale.setlocale(locale.LC_ALL, "")
if os.name == "nt":
_locale._getdefaultlocale = lambda *args: ["en_US", "utf_8_sig"]
# Exception logger
exc_logger = logging.getLogger("exception")
def excepthook(exc_type, exc_value, exc_traceback):
if issubclass(exc_type, KeyboardInterrupt):
sys.__excepthook__(exc_type, exc_value, exc_traceback)
return
msg_list = traceback.format_exception_only(exc_type, exc_value)
exc_info = (exc_type, exc_value, exc_traceback)
exc_logger.exception(msg_list[-1], exc_info=exc_info)
sys.excepthook = excepthook
del excepthook
# Cleanup
del _locale
del locale
del logging
del os
del pkg_resources
del semver
del sys<|fim▁end|>
| |
<|file_name|>clean.rs<|end_file_name|><|fim▁begin|>use std::collections::HashSet;
use std::fs::{read_dir, read_link, remove_file, metadata};
use std::io::{self, stdout, stderr};
use std::ffi::OsStr;
use std::path::Path;
use std::str::FromStr;
use std::time::{SystemTime, UNIX_EPOCH, Duration};
use std::os::unix::fs::MetadataExt;
use argparse::{ArgumentParser, PushConst, StoreTrue, StoreOption};
use scan_dir::ScanDir;
use humantime;
use crate::config::command::MainCommand::{Supervise, Command, CapsuleCommand};
use crate::config::volumes::Volume::Persistent;
use crate::container::util::clean_dir;
use crate::file_util::{read_visible_entries, Lock};
use crate::wrapper::build::get_version_hash;
use super::setup;
use super::Wrapper;
#[derive(Clone, Copy)]
enum Action {
Temporary,
Old,
Unused,
Everything,
Transient,
Volumes,
UnusedVolumes,
}
pub fn clean_cmd(wrapper: &Wrapper, cmdline: Vec<String>)
-> Result<i32, String>
{
let mut global = false;
let mut dry_run = false;
let mut actions = vec!();
let mut duration = None::<humantime::Duration>;
{
let mut ap = ArgumentParser::new();
ap.set_description("
Performs various cleanup tasks
");
ap.refer(&mut actions)
.add_option(&["--tmp", "--tmp-folders"],
PushConst(Action::Temporary),
"Clean temporary containers (failed builds)")
.add_option(&["--old", "--old-containers"], PushConst(Action::Old), "
Clean old versions of containers (those which doesn't have a \
symlink in .vagga)")
.add_option(&["--unused"], PushConst(Action::Unused), "
Clean unused containers, or versions thereof. (This is not \
`--old` for historical reasons, we will probably merge the \
commands later on)")
.add_option(&["--transient"], PushConst(Action::Transient),
"Clean unneeded transient folders (left from containers with
`write-mode` set to transient-something). The pid of process
is checked for liveness first.")
.add_option(&["--everything"], PushConst(Action::Everything),
"Clean whole `.vagga` folder. Useful when deleting a project.
With ``--global`` cleans whole storage-dir and cache-dir")
.add_option(&["--unused-volumes"], PushConst(Action::UnusedVolumes),
"Remove `!Persistent` volumes that are not used by any \
command or container of the current config")
.add_option(&["--volumes"], PushConst(Action::Volumes),
"Remove all `!Persistent` volumes. So they are reinitialized \
on the next start of the command")
.required();
ap.refer(&mut global)
.add_option(&["--global"], StoreTrue,
"Apply cleanup command to all the projects
in the `storage-dir`. Works only \
if `storage-dir` is configured in settings");
ap.refer(&mut dry_run)
.add_option(&["-n", "--dry-run"], StoreTrue,
"Dry run. Don't delete everything, just print");
ap.refer(&mut duration)
.add_option(&["--at-least"], StoreOption, "
Only in combination with `--unused`. Treat as unused \
containers that are unused for specified time, rather than \
the ones not used by current version of config");
match ap.parse(cmdline, &mut stdout(), &mut stderr()) {
Ok(()) => {}
Err(0) => return Ok(0),
Err(_) => {
return Ok(1);
}
}
}
if global && wrapper.ext_settings.storage_dir.is_none() {
error!("The --global flag is only meaningful if you configure \
storage-dir in settings");
return Ok(2);
}
let duration = duration.map(|x| x.into());
for action in actions.iter() {
let res = match *action {
Action::Temporary => clean_temporary(wrapper, global, dry_run),
Action::Old => clean_old(wrapper, global, dry_run),
Action::Unused => {
if global {
if let Some(duration) = duration {
global_clean_unused(wrapper, duration, dry_run)
} else {
panic!("no global cleanup without --at-least");
}
} else {
clean_unused(wrapper, duration, dry_run)
}
}
Action::Transient => clean_transient(wrapper, global, dry_run),
Action::Everything => clean_everything(wrapper, global, dry_run),
Action::UnusedVolumes => {
clean_volumes(wrapper, global, dry_run, false)
}
Action::Volumes => {
clean_volumes(wrapper, global, dry_run, true)
}
};
match res {
Ok(()) => {}
Err(err) => {
error!("Error cleaning up: {}", err);
return Ok(3);
}
}
}
return Ok(0);
}
fn clean_dir_wrapper(path: &Path,
remove_dir_itself: bool, dry_run: bool) -> Result<(), String> {
// TODO(tailhook) chroot to dir for removing
if dry_run {
println!("Would remove {:?}", path);
} else {
let mut n = path.to_path_buf().into_os_string();
n.push(".lock");
let lock_name = Path::new(&n);
let lock_guard = if lock_name.exists() {
match Lock::exclusive(&lock_name) {
Ok(x) => Some(x),
Err(e) => {
error!("Failed to lock {:?}: {}, skipping", lock_name, e);
return Ok(());
}
}
} else {
None
};
debug!("Removing {:?}", path);
clean_dir(path, remove_dir_itself)?;
if let Some(_lock) = lock_guard {
remove_file(lock_name)
.map_err(|e| format!("Error removing lock file {:?}: {}",
lock_name, e))?;
}
}
Ok(())
}
fn clean_everything(wrapper: &Wrapper, global: bool, dry_run: bool)
-> Result<(), String>
{
if global {
if let Some(ref cache_dir) = wrapper.ext_settings.cache_dir {
clean_dir_wrapper(&cache_dir, false, dry_run)?;
}
if let Some(ref storage_dir) = wrapper.ext_settings.storage_dir {
clean_dir_wrapper(&storage_dir, false, dry_run)?;
}
} else {
let base = match setup::get_vagga_base(
wrapper.project_root, wrapper.ext_settings)?
{
Some(base) => base,
None => {
warn!("No vagga directory exists");
return Ok(());
}
};
clean_dir_wrapper(&base, true, dry_run)?;
let inner = wrapper.project_root.join(".vagga");
if base != inner {
clean_dir_wrapper(&inner, true, dry_run)?;
}
}
return Ok(());
}
fn clean_temporary(wrapper: &Wrapper, global: bool, dry_run: bool)
-> Result<(), String>
{
if global {
panic!("Global cleanup is not implemented yet");
}
let base = match setup::get_vagga_base(
wrapper.project_root, wrapper.ext_settings)?
{
Some(base) => base,
None => {
warn!("No vagga directory exists");
return Ok(());
}
};
let roots = base.join(".roots");
for entry in try_msg!(read_dir(&roots),
"Can't read dir {r:?}: {err}", r=roots)
{
let entry = try_msg!(entry, "Can't read dir {r:?}: {err}", r=roots);
let typ = try_msg!(entry.file_type(),
"Can't stat {p:?}: {err}", p=entry.path());
if typ.is_dir() &&
entry.file_name()[..].to_str().map(|n| n.starts_with(".tmp"))
.unwrap_or(false)
{
clean_dir_wrapper(&entry.path(), true, dry_run)?;
}
}
return Ok(());
}
fn clean_old(wrapper: &Wrapper, global: bool, dry_run: bool)
-> Result<(), String>
{
if global {
panic!("Global cleanup is not implemented yet");
}
let base = match setup::get_vagga_base(
wrapper.project_root, wrapper.ext_settings)?
{
Some(base) => base,
None => {
warn!("No vagga directory exists");
return Ok(());
}
};
let useful: HashSet<String> = try_msg!(
read_visible_entries(&wrapper.project_root.join(".vagga")),
"Can't read vagga directory: {err}")
.into_iter()
.filter_map(|path| read_link(&path)
.map_err(|e| warn!("Can't readlink {:?}: {}", path, e))
.ok()
.and_then(|f| {
// The container name is next to the last component
f.iter().rev().nth(1)
.and_then(|x| x.to_str()).map(ToString::to_string)
}))
.collect();
info!("Useful images {:?}", useful);
clean_dirs_except(&base.join(".roots"), &useful, dry_run)?;
return Ok(());
}
fn clean_transient(wrapper: &Wrapper, global: bool, dry_run: bool)
-> Result<(), String>
{
if global {
panic!("Global cleanup is not implemented yet");
}
let base = match setup::get_vagga_base(
wrapper.project_root, wrapper.ext_settings)?
{
Some(base) => base,
None => {
warn!("No vagga directory exists");
return Ok(());
}
};
let procfs = Path::new("/proc");
for entry in try_msg!(read_dir(&base.join(".transient")),
"Can't read .vagga/.transient dir: {err}")
{
let entry = try_msg!(entry, "Error reading .vagga/transient: {err}");
if let Some(fname) = entry.file_name()[..].to_str() {
if let Some(idx) = fname.find('.') {
if u32::from_str(&fname[idx+1..]).is_ok() &&
procfs.join(&fname[idx+1..]).exists()
{
continue;
}
}
}
clean_dir_wrapper(&entry.path(), true, dry_run)?;
}
return Ok(());
}
fn clean_dirs_except<P: AsRef<Path>>(roots: P, useful: &HashSet<String>,
dry_run: bool)
-> Result<(), String>
{
let roots = roots.as_ref();
for entry in try_msg!(read_dir(&roots),
"Can't read dir {dir:?}: {err}", dir=roots)
{
let entry = try_msg!(entry,
"Can't read dir {dir:?}: {err}", dir=roots);
let path = entry.path();
let typ = try_msg!(entry.file_type(),
"Can't stat {p:?}: {err}", p=path);
if !typ.is_dir() {
if path.extension() == Some(OsStr::new("lock")) &&
path.with_extension("").is_dir()
{
debug!("Skipping lock file {:?}", path);
} else {
match remove_file(&path) {
Ok(()) => {}
// File is deleted while we were scanning
Err(ref e) if e.kind() == io::ErrorKind::NotFound => {}
Err(ref e) => {
return Err(
format!("Can't remove file {:?}: {}", path, e));
}
}
}
} else if !typ.is_dir() || entry.file_name()[..].to_str()
.map(|n| !useful.contains(&n.to_string()))
.unwrap_or(false)
{
clean_dir_wrapper(&entry.path(), true, dry_run)?;
}
}
Ok(())
}<|fim▁hole|> dry_run: bool)
-> Result<(), String>
{
let unixtime = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
let cut_off = (unixtime - duration).as_secs() as i64;
let is_cache_dir = |p: &Path| {
wrapper.ext_settings.cache_dir.as_ref()
.map(|x| x == p)
.unwrap_or(false)
};
let storage_dir = wrapper.ext_settings.storage_dir.as_ref().unwrap();
let mut proj_num = 0;
let mut to_remove = 0;
let mut to_keep = 0;
ScanDir::dirs().read(&storage_dir, |iter| {
for (entry, name) in iter {
let path = entry.path();
if is_cache_dir(&path) {
continue;
}
proj_num += 1;
info!("Scanning project {}", name);
let mut useful: HashSet<String> = HashSet::new();
let roots = path.join(".roots");
ScanDir::dirs().skip_hidden(false).read(&roots, |iter| {
for (entry, name) in iter {
let luse_path = entry.path().join("last_use");
match metadata(&luse_path) {
Ok(ref meta) if meta.mtime() > cut_off => {
useful.insert(name);
to_keep += 1;
}
Ok(_) => {
to_remove += 1;
}
Err(ref e) if e.kind() == io::ErrorKind::NotFound => {}
Err(e) => {
error!("Error trying to stat {:?}: {}",
luse_path, e);
}
}
}
}).map_err(|e| {
error!("Error reading {:?}: {}", roots, e);
}).ok();
info!("Useful images {:?}", useful);
clean_dirs_except(&roots, &useful, dry_run)
.map_err(|e| error!("Error cleaning {:?}: {}", roots, e))
.ok(); // TODO(tailhook) propagate the errorneous exit code?
}
}).map_err(|e| {
format!("Error reading storage dir {:?}: {}", storage_dir, e)
})?;
info!("Scanned {} projects, keeping {} images, removed {}",
proj_num, to_keep, to_remove);
Ok(())
}
fn clean_unused(wrapper: &Wrapper, duration: Option<Duration>,
dry_run: bool)
-> Result<(), String>
{
setup::setup_base_filesystem(
wrapper.project_root, wrapper.ext_settings)?;
let mut useful: HashSet<String> = HashSet::new();
if let Some(duration) = duration {
let unixtime = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
let cut_off = (unixtime - duration).as_secs() as i64;
ScanDir::dirs().skip_hidden(false).read("/vagga/base/.roots", |iter| {
for (entry, name) in iter {
let luse_path = entry.path().join("last_use");
match metadata(&luse_path) {
Ok(ref meta) if meta.mtime() > cut_off => {
useful.insert(name);
}
Ok(_) => {}
Err(ref e) if e.kind() == io::ErrorKind::NotFound => {}
Err(e) => {
error!("Error trying to stat {:?}: {}", luse_path, e);
}
}
}
}).map_err(|e| {
error!("Error reading `.vagga/.roots`: {}", e);
}).ok();
} else {
for (name, _) in &wrapper.config.containers {
if let Some(version) = get_version_hash(name, wrapper)? {
useful.insert(format!("{}.{}", name, &version[..8]));
}
}
}
info!("Useful images {:?}", useful);
clean_dirs_except("/vagga/base/.roots", &useful, dry_run)?;
return Ok(());
}
fn clean_volumes(wrapper: &Wrapper, global: bool, dry_run: bool, all: bool)
-> Result<(), String>
{
if global {
panic!("Global cleanup is not implemented yet");
}
let base = match setup::get_vagga_base(
wrapper.project_root, wrapper.ext_settings)?
{
Some(base) => base,
None => {
warn!("No vagga directory exists");
return Ok(());
}
};
let volume_dir = base.join(".volumes");
let mut useful = HashSet::new();
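    // Collect the persistent volume names still referenced by the current
    // config (both containers and every command type); anything else found
    // under the `.volumes` directory is removed by clean_dirs_except() below.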
if !all {
for (_, container) in &wrapper.config.containers {
for (_, vol) in &container.volumes {
if let Persistent(ref p) = *vol {
useful.insert(p.name.clone());
}
}
}
for (_, command) in &wrapper.config.commands {
match *command {
Command(ref cmd) => {
for (_, vol) in &cmd.volumes {
if let Persistent(ref p) = *vol {
useful.insert(p.name.clone());
}
}
}
CapsuleCommand(_) => {
                    // no volumes to collect here
},
Supervise(ref cmd) => {
for (_, child) in &cmd.children {
for (_, vol) in child.get_volumes() {
if let Persistent(ref p) = *vol {
useful.insert(p.name.clone());
}
}
}
}
}
}
}
info!("Useful volumes {:?}", useful);
clean_dirs_except(volume_dir, &useful, dry_run)?;
return Ok(());
}<|fim▁end|>
|
fn global_clean_unused(wrapper: &Wrapper, duration: Duration,
|
<|file_name|>VerticalTextAlign.java<|end_file_name|><|fim▁begin|>/*
* This program is free software; you can redistribute it and/or modify it under the
* terms of the GNU Lesser General Public License, version 2.1 as published by the Free Software
* Foundation.
*
* You should have received a copy of the GNU Lesser General Public License along with this
* program; if not, you can obtain a copy at http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html
* or from the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* Copyright (c) 2001 - 2013 Object Refinery Ltd, Pentaho Corporation and Contributors.. All rights reserved.
*/
package org.pentaho.reporting.engine.classic.core.style;
import org.pentaho.reporting.engine.classic.core.util.ObjectStreamResolveException;
import java.io.ObjectStreamException;
import java.io.Serializable;
/**
* Creation-Date: 24.11.2005, 17:08:01
*
* @author Thomas Morgner
*/
public class VerticalTextAlign implements Serializable {
public static final VerticalTextAlign USE_SCRIPT = new VerticalTextAlign( "use-script" );
public static final VerticalTextAlign BASELINE = new VerticalTextAlign( "baseline" );
public static final VerticalTextAlign SUB = new VerticalTextAlign( "sub" );
public static final VerticalTextAlign SUPER = new VerticalTextAlign( "super" );
public static final VerticalTextAlign TOP = new VerticalTextAlign( "top" );
public static final VerticalTextAlign TEXT_TOP = new VerticalTextAlign( "text-top" );
public static final VerticalTextAlign CENTRAL = new VerticalTextAlign( "central" );
public static final VerticalTextAlign MIDDLE = new VerticalTextAlign( "middle" );
public static final VerticalTextAlign BOTTOM = new VerticalTextAlign( "bottom" );
public static final VerticalTextAlign TEXT_BOTTOM = new VerticalTextAlign( "text-bottom" );
private String id;
private VerticalTextAlign( final String id ) {
this.id = id;
}
/**
* Replaces the automatically generated instance with one of the enumeration instances.
*
* @return the resolved element
* @throws java.io.ObjectStreamException
* if the element could not be resolved.
*/
protected Object readResolve() throws ObjectStreamException {
if ( this.id.equals( VerticalTextAlign.USE_SCRIPT.id ) ) {
return VerticalTextAlign.USE_SCRIPT;
}
if ( this.id.equals( VerticalTextAlign.BASELINE.id ) ) {
return VerticalTextAlign.BASELINE;
}
if ( this.id.equals( VerticalTextAlign.SUPER.id ) ) {
return VerticalTextAlign.SUPER;<|fim▁hole|> if ( this.id.equals( VerticalTextAlign.TOP.id ) ) {
return VerticalTextAlign.TOP;
}
if ( this.id.equals( VerticalTextAlign.TEXT_TOP.id ) ) {
return VerticalTextAlign.TEXT_TOP;
}
if ( this.id.equals( VerticalTextAlign.BOTTOM.id ) ) {
return VerticalTextAlign.BOTTOM;
}
if ( this.id.equals( VerticalTextAlign.TEXT_BOTTOM.id ) ) {
return VerticalTextAlign.TEXT_BOTTOM;
}
if ( this.id.equals( VerticalTextAlign.CENTRAL.id ) ) {
return VerticalTextAlign.CENTRAL;
}
if ( this.id.equals( VerticalTextAlign.MIDDLE.id ) ) {
return VerticalTextAlign.MIDDLE;
}
// unknown element alignment...
throw new ObjectStreamResolveException();
}
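  /**
   * Resolves a textual identifier (for example "middle" or "text-top") to the matching
   * singleton instance.
   *
   * @param id the identifier to resolve, may be null.
   * @return the matching VerticalTextAlign constant, or null if the id is null or unknown.
   */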
public static VerticalTextAlign valueOf( String id ) {
if ( id == null ) {
return null;
}
if ( id.equals( VerticalTextAlign.USE_SCRIPT.id ) ) {
return VerticalTextAlign.USE_SCRIPT;
}
if ( id.equals( VerticalTextAlign.BASELINE.id ) ) {
return VerticalTextAlign.BASELINE;
}
if ( id.equals( VerticalTextAlign.SUPER.id ) ) {
return VerticalTextAlign.SUPER;
}
if ( id.equals( VerticalTextAlign.SUB.id ) ) {
return VerticalTextAlign.SUB;
}
if ( id.equals( VerticalTextAlign.TOP.id ) ) {
return VerticalTextAlign.TOP;
}
if ( id.equals( VerticalTextAlign.TEXT_TOP.id ) ) {
return VerticalTextAlign.TEXT_TOP;
}
if ( id.equals( VerticalTextAlign.BOTTOM.id ) ) {
return VerticalTextAlign.BOTTOM;
}
if ( id.equals( VerticalTextAlign.TEXT_BOTTOM.id ) ) {
return VerticalTextAlign.TEXT_BOTTOM;
}
if ( id.equals( VerticalTextAlign.CENTRAL.id ) ) {
return VerticalTextAlign.CENTRAL;
}
if ( id.equals( VerticalTextAlign.MIDDLE.id ) ) {
return VerticalTextAlign.MIDDLE;
}
return null;
}
public boolean equals( final Object o ) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
final VerticalTextAlign that = (VerticalTextAlign) o;
if ( !id.equals( that.id ) ) {
return false;
}
return true;
}
public int hashCode() {
return id.hashCode();
}
/**
* Returns a string representation of the object. In general, the <code>toString</code> method returns a string that
* "textually represents" this object. The result should be a concise but informative representation that is easy for
* a person to read. It is recommended that all subclasses override this method.
* <p/>
* The <code>toString</code> method for class <code>Object</code> returns a string consisting of the name of the class
* of which the object is an instance, the at-sign character `<code>@</code>', and the unsigned hexadecimal
* representation of the hash code of the object. In other words, this method returns a string equal to the value of:
* <blockquote>
*
* <pre>
* getClass().getName() + '@' + Integer.toHexString( hashCode() )
* </pre>
*
* </blockquote>
*
* @return a string representation of the object.
*/
public String toString() {
return id;
}
}<|fim▁end|>
|
}
if ( this.id.equals( VerticalTextAlign.SUB.id ) ) {
return VerticalTextAlign.SUB;
}
|
<|file_name|>translation_simba.rs<|end_file_name|><|fim▁begin|>use simba::simd::SimdValue;
use crate::base::allocator::Allocator;
use crate::base::dimension::DimName;
use crate::base::{DefaultAllocator, VectorN};
use crate::Scalar;
use crate::geometry::Translation;
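// SimdValue is implemented for Translation by forwarding every per-lane operation
// (splat, extract, replace, select) to the underlying coordinate vector.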
impl<N: Scalar + SimdValue, D: DimName> SimdValue for Translation<N, D>
where
N::Element: Scalar,
DefaultAllocator: Allocator<N, D> + Allocator<N::Element, D>,
{
type Element = Translation<N::Element, D>;
type SimdBool = N::SimdBool;
#[inline]
fn lanes() -> usize {
N::lanes()
}
<|fim▁hole|> }
#[inline]
fn extract(&self, i: usize) -> Self::Element {
self.vector.extract(i).into()
}
#[inline]
unsafe fn extract_unchecked(&self, i: usize) -> Self::Element {
self.vector.extract_unchecked(i).into()
}
#[inline]
fn replace(&mut self, i: usize, val: Self::Element) {
self.vector.replace(i, val.vector)
}
#[inline]
unsafe fn replace_unchecked(&mut self, i: usize, val: Self::Element) {
self.vector.replace_unchecked(i, val.vector)
}
#[inline]
fn select(self, cond: Self::SimdBool, other: Self) -> Self {
self.vector.select(cond, other.vector).into()
}
}<|fim▁end|>
|
#[inline]
fn splat(val: Self::Element) -> Self {
VectorN::splat(val.vector).into()
|
<|file_name|>MissingAuthorizationDtoTest.java<|end_file_name|><|fim▁begin|>/*
* Camunda BPM REST API
* OpenApi Spec for Camunda BPM REST API.
*
* The version of the OpenAPI document: 7.13.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package com.camunda.consulting.openapi.client.model;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.IOException;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
/**
* Model tests for MissingAuthorizationDto
*/
public class MissingAuthorizationDtoTest {
private final MissingAuthorizationDto model = new MissingAuthorizationDto();
/**
* Model tests for MissingAuthorizationDto
*/
@Test
public void testMissingAuthorizationDto() {
// TODO: test MissingAuthorizationDto<|fim▁hole|> }
/**
* Test the property 'permissionName'
*/
@Test
public void permissionNameTest() {
// TODO: test permissionName
}
/**
* Test the property 'resourceName'
*/
@Test
public void resourceNameTest() {
// TODO: test resourceName
}
/**
* Test the property 'resourceId'
*/
@Test
public void resourceIdTest() {
// TODO: test resourceId
}
}<|fim▁end|>
| |
<|file_name|>test_forms_field.py<|end_file_name|><|fim▁begin|>#:coding=utf-8:
from django.test import TestCase as DjangoTestCase
from django.forms import Form
from beproud.django.commons.forms import EmailField
__all__ = (<|fim▁hole|>
class EmailTestForm(Form):
email = EmailField(label="email")
class EmailFieldTest(DjangoTestCase):
def test_basic_email(self):
form = EmailTestForm({"email": "[email protected]"})
self.assertTrue(form.is_valid())
def test_keitai_email(self):
form = EmailTestForm({"email": "[email protected]"})
self.assertTrue(form.is_valid())
form = EmailTestForm({"email": ".*&$.-spam..!!eggs!!-.*[email protected]"})
self.assertTrue(form.is_valid())
def test_plus_email(self):
form = EmailTestForm({"email": "[email protected]"})
self.assertTrue(form.is_valid())
def test_multi_email(self):
form = EmailTestForm({"email": "aaa [email protected] [email protected]"})
self.assertFalse(form.is_valid())
def test_longtld(self):
form = EmailTestForm({"email": "[email protected]"})
self.assertTrue(form.is_valid())
def test_punycode(self):
form = EmailTestForm({"email": "[email protected]"})
self.assertTrue(form.is_valid())<|fim▁end|>
|
'EmailFieldTest',
'JSONFormFieldTest',
'JSONWidgetTest',
)
|
<|file_name|>syscall_linux_arm.go<|end_file_name|><|fim▁begin|>// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build arm,linux
package unix
import (
"syscall"
"unsafe"
)
func setTimespec(sec, nsec int64) Timespec {
return Timespec{Sec: int32(sec), Nsec: int32(nsec)}
}
func setTimeval(sec, usec int64) Timeval {
return Timeval{Sec: int32(sec), Usec: int32(usec)}
}
func Pipe(p []int) (err error) {
if len(p) != 2 {
return EINVAL
}
var pp [2]_C_int
err = pipe2(&pp, 0)
p[0] = int(pp[0])
p[1] = int(pp[1])
return
}
//sysnb pipe2(p *[2]_C_int, flags int) (err error)
func Pipe2(p []int, flags int) (err error) {
if len(p) != 2 {
return EINVAL
}
var pp [2]_C_int
err = pipe2(&pp, flags)
p[0] = int(pp[0])
p[1] = int(pp[1])
return
}
// Underlying system call writes to newoffset via pointer.
// Implemented in assembly to avoid allocation.
func seek(fd int, offset int64, whence int) (newoffset int64, err syscall.Errno)
func Seek(fd int, offset int64, whence int) (newoffset int64, err error) {
newoffset, errno := seek(fd, offset, whence)
if errno != 0 {
return 0, errno
}
return newoffset, nil
}
//sys accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error)
//sys accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error)
//sys bind(s int, addr unsafe.Pointer, addrlen _Socklen) (err error)
//sys connect(s int, addr unsafe.Pointer, addrlen _Socklen) (err error)
//sysnb getgroups(n int, list *_Gid_t) (nn int, err error) = SYS_GETGROUPS32
//sysnb setgroups(n int, list *_Gid_t) (err error) = SYS_SETGROUPS32
//sys getsockopt(s int, level int, name int, val unsafe.Pointer, vallen *_Socklen) (err error)
//sys setsockopt(s int, level int, name int, val unsafe.Pointer, vallen uintptr) (err error)
//sysnb socket(domain int, typ int, proto int) (fd int, err error)
//sysnb getpeername(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error)
//sysnb getsockname(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error)
//sys recvfrom(fd int, p []byte, flags int, from *RawSockaddrAny, fromlen *_Socklen) (n int, err error)
//sys sendto(s int, buf []byte, flags int, to unsafe.Pointer, addrlen _Socklen) (err error)
//sysnb socketpair(domain int, typ int, flags int, fd *[2]int32) (err error)
//sys recvmsg(s int, msg *Msghdr, flags int) (n int, err error)
//sys sendmsg(s int, msg *Msghdr, flags int) (n int, err error)
// 64-bit file system and 32-bit uid calls
// (16-bit uid calls are not always supported in newer kernels)
//sys Dup2(oldfd int, newfd int) (err error)
//sysnb EpollCreate(size int) (fd int, err error)
//sys EpollWait(epfd int, events []EpollEvent, msec int) (n int, err error)
//sys Fchown(fd int, uid int, gid int) (err error) = SYS_FCHOWN32
//sys Fstat(fd int, stat *Stat_t) (err error) = SYS_FSTAT64
//sys Fstatat(dirfd int, path string, stat *Stat_t, flags int) (err error) = SYS_FSTATAT64
//sysnb Getegid() (egid int) = SYS_GETEGID32
//sysnb Geteuid() (euid int) = SYS_GETEUID32
//sysnb Getgid() (gid int) = SYS_GETGID32
//sysnb Getuid() (uid int) = SYS_GETUID32
//sysnb InotifyInit() (fd int, err error)
//sys Lchown(path string, uid int, gid int) (err error) = SYS_LCHOWN32
//sys Listen(s int, n int) (err error)
//sys Lstat(path string, stat *Stat_t) (err error) = SYS_LSTAT64
//sys Pause() (err error)
//sys Renameat(olddirfd int, oldpath string, newdirfd int, newpath string) (err error)
//sys sendfile(outfd int, infd int, offset *int64, count int) (written int, err error) = SYS_SENDFILE64
//sys Select(nfd int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (n int, err error) = SYS__NEWSELECT
//sys Setfsgid(gid int) (err error) = SYS_SETFSGID32
//sys Setfsuid(uid int) (err error) = SYS_SETFSUID32
//sysnb Setregid(rgid int, egid int) (err error) = SYS_SETREGID32
//sysnb Setresgid(rgid int, egid int, sgid int) (err error) = SYS_SETRESGID32
//sysnb Setresuid(ruid int, euid int, suid int) (err error) = SYS_SETRESUID32
//sysnb Setreuid(ruid int, euid int) (err error) = SYS_SETREUID32
//sys Shutdown(fd int, how int) (err error)
//sys Splice(rfd int, roff *int64, wfd int, woff *int64, len int, flags int) (n int, err error)
//sys Stat(path string, stat *Stat_t) (err error) = SYS_STAT64
//sys Ustat(dev int, ubuf *Ustat_t) (err error)
//sys futimesat(dirfd int, path string, times *[2]Timeval) (err error)
//sysnb Gettimeofday(tv *Timeval) (err error)
func Time(t *Time_t) (Time_t, error) {
var tv Timeval
err := Gettimeofday(&tv)
if err != nil {
return 0, err
}
if t != nil {
*t = Time_t(tv.Sec)
}
return Time_t(tv.Sec), nil
}
func Utime(path string, buf *Utimbuf) error {
tv := []Timeval{
{Sec: buf.Actime},
{Sec: buf.Modtime},
}
return Utimes(path, tv)
}
//sys utimes(path string, times *[2]Timeval) (err error)
//sys Pread(fd int, p []byte, offset int64) (n int, err error) = SYS_PREAD64
//sys Pwrite(fd int, p []byte, offset int64) (n int, err error) = SYS_PWRITE64
//sys Truncate(path string, length int64) (err error) = SYS_TRUNCATE64
//sys Ftruncate(fd int, length int64) (err error) = SYS_FTRUNCATE64
func Fadvise(fd int, offset int64, length int64, advice int) (err error) {
_, _, e1 := Syscall6(SYS_ARM_FADVISE64_64, uintptr(fd), uintptr(advice), uintptr(offset), uintptr(offset>>32), uintptr(length), uintptr(length>>32))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
//sys mmap2(addr uintptr, length uintptr, prot int, flags int, fd int, pageOffset uintptr) (xaddr uintptr, err error)
func Fstatfs(fd int, buf *Statfs_t) (err error) {
_, _, e := Syscall(SYS_FSTATFS64, uintptr(fd), unsafe.Sizeof(*buf), uintptr(unsafe.Pointer(buf)))
if e != 0 {
err = e
}
return
}
func Statfs(path string, buf *Statfs_t) (err error) {
pathp, err := BytePtrFromString(path)
if err != nil {
return err
}
_, _, e := Syscall(SYS_STATFS64, uintptr(unsafe.Pointer(pathp)), unsafe.Sizeof(*buf), uintptr(unsafe.Pointer(buf)))
if e != 0 {
err = e
}
return
}
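// mmap is layered on the ARM mmap2 syscall, which takes the file offset in units
// of 4096-byte pages; byte offsets that are not page-aligned fail with EINVAL.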
func mmap(addr uintptr, length uintptr, prot int, flags int, fd int, offset int64) (xaddr uintptr, err error) {
page := uintptr(offset / 4096)
if offset != int64(page)*4096 {
return 0, EINVAL
}
return mmap2(addr, length, prot, flags, fd, page)
}
type rlimit32 struct {
Cur uint32
Max uint32
}
//sysnb getrlimit(resource int, rlim *rlimit32) (err error) = SYS_UGETRLIMIT
const rlimInf32 = ^uint32(0)
const rlimInf64 = ^uint64(0)
func Getrlimit(resource int, rlim *Rlimit) (err error) {
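	// Prefer the prlimit syscall; fall back to the legacy 32-bit getrlimit
	// (SYS_UGETRLIMIT) and widen the values only when the kernel returns ENOSYS.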
err = prlimit(0, resource, nil, rlim)
if err != ENOSYS {
return err
}
rl := rlimit32{}
err = getrlimit(resource, &rl)
if err != nil {
return
}
if rl.Cur == rlimInf32 {
rlim.Cur = rlimInf64
} else {
rlim.Cur = uint64(rl.Cur)
}
if rl.Max == rlimInf32 {
rlim.Max = rlimInf64
} else {
rlim.Max = uint64(rl.Max)
}
return
}
//sysnb setrlimit(resource int, rlim *rlimit32) (err error) = SYS_SETRLIMIT
func Setrlimit(resource int, rlim *Rlimit) (err error) {
err = prlimit(0, resource, rlim, nil)
if err != ENOSYS {
return err
}
rl := rlimit32{}
if rlim.Cur == rlimInf64 {
rl.Cur = rlimInf32
} else if rlim.Cur < uint64(rlimInf32) {
rl.Cur = uint32(rlim.Cur)
} else {
return EINVAL
}
if rlim.Max == rlimInf64 {
rl.Max = rlimInf32
} else if rlim.Max < uint64(rlimInf32) {
rl.Max = uint32(rlim.Max)
} else {
return EINVAL
}
<|fim▁hole|>
func (r *PtraceRegs) SetPC(pc uint64) { r.Uregs[15] = uint32(pc) }
func (iov *Iovec) SetLen(length int) {
iov.Len = uint32(length)
}
func (msghdr *Msghdr) SetControllen(length int) {
msghdr.Controllen = uint32(length)
}
func (cmsg *Cmsghdr) SetLen(length int) {
cmsg.Len = uint32(length)
}
//sys poll(fds *PollFd, nfds int, timeout int) (n int, err error)
func Poll(fds []PollFd, timeout int) (n int, err error) {
if len(fds) == 0 {
return poll(nil, 0, timeout)
}
return poll(&fds[0], len(fds), timeout)
}
//sys armSyncFileRange(fd int, flags int, off int64, n int64) (err error) = SYS_ARM_SYNC_FILE_RANGE
func SyncFileRange(fd int, off int64, n int64, flags int) error {
// The sync_file_range and arm_sync_file_range syscalls differ only in the
// order of their arguments.
return armSyncFileRange(fd, flags, off, n)
}<|fim▁end|>
|
return setrlimit(resource, &rl)
}
func (r *PtraceRegs) PC() uint64 { return uint64(r.Uregs[15]) }
|
<|file_name|>clustering4.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# script finds clusters of small RNA reads in the genome
# version 3 - 24-12-2013 - evolution to multiprocessing
# Usage: clustering.py <bowtie input> <output> <bowtie index> <clustering_distance>
#        <minimum read number per cluster to be output> <minimum median read size> <folding> <output format>
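# Example invocation (hypothetical file names): cluster reads at most 500 nt apart,
# keep clusters with at least 10 reads and a median read size of 19 nt or more,
# enable folding, and write GFF3 output:
#   python clustering4.py bowtie_output.tab clusters.gff genome.fa 500 10 19 yes GFF3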
import sys, subprocess, time
from collections import defaultdict # required for some SmRNAwindow attributes (readDic)
#from numpy import mean, std # required for some SmRNAwindow methods
#from scipy import stats
from smRtools import *
import multiprocessing
def clustering (Instance):
def clustermining (cluster, Instance): # cluster argument is a list
if Instance.readDict[-cluster[0]]: # test whether the first position in the cluster was reverse reads
shift = max(Instance.readDict[-cluster[0]])
upstream_coord = cluster[0] - shift + 1
else:
upstream_coord = cluster[0]
if Instance.readDict[cluster[-1]]: # test whether the last position in the cluster was forward reads
shift = max(Instance.readDict[cluster[-1]])
downstream_coord = cluster[-1] + shift -1
else:<|fim▁hole|> mean_size, median_size, stdv_size = Instance.statsizes(upstream_coord=upstream_coord, downstream_coord=downstream_coord)
if readcount >= minimum_reads and median_size >= min_median_size:
location = [Instance.gene.split()[0], upstream_coord, downstream_coord]
if output_format == "intervals":
return "%s\t%s\t%s\t%s" % (location[0], location[1], location[2], readcount)
cluster_size = downstream_coord - upstream_coord + 1
if folding == "yes" and cluster_size < 151:
foldEnergy = Instance.foldEnergy(upstream_coord=upstream_coord, downstream_coord=downstream_coord) ## be careful, test !
else:
foldEnergy = "."
forwardReadcount = Instance.forwardreadcount(upstream_coord=upstream_coord, downstream_coord=downstream_coord) #
reverseReadcount = Instance.reversereadcount(upstream_coord=upstream_coord, downstream_coord=downstream_coord) #
density = readcount / float(cluster_size) #
if output_format == "GFF3":
if forwardReadcount >= reverseReadcount:
GFFstrand = "+"
else:
GFFstrand = "-"
Attributes = "ID=RC %s : FR %s : RR %s : Dens %s : Med %s : FE %s" % (readcount, forwardReadcount, reverseReadcount, density, median_size, foldEnergy)
return "%s\tGalaxy\tRead_Cluster\t%s\t%s\t%s\t%s\t.\t%s" % (location[0], location[1], location[2], readcount, GFFstrand, Attributes)
else:
Forward_Barycenter, Reverse_Barycenter = Instance.barycenter(upstream_coord=upstream_coord, downstream_coord=downstream_coord)
Zsignature = Instance.signature(24,29,24,29,range(1,27), zscore="yes", upstream_coord=upstream_coord, downstream_coord=downstream_coord)[10] #
Hsignature = Instance.hannon_signature(24,29,24,29, range(1,27), upstream_coord=upstream_coord, downstream_coord=downstream_coord )[10] * 100
UpiFreq = Instance.Ufreq(range(24,29), upstream_coord=upstream_coord, downstream_coord=downstream_coord)
UsiFreq = Instance.Ufreq(range(20,22), upstream_coord=upstream_coord, downstream_coord=downstream_coord)
return "%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s" % (location[0], location[1], location[2], cluster_size, readcount, forwardReadcount, reverseReadcount, density, median_size, foldEnergy, Forward_Barycenter, Reverse_Barycenter, Zsignature, Hsignature, UpiFreq, UsiFreq)
return False
l = Instance.readDict.keys()
l=[abs(i) for i in l]
l=list(set(l))
l.sort()
upstream = 0
cluster_list = []
for i, element in enumerate (l[1:]):
if abs(element-l[i]) > dist or i+2==len(l): # the 2nd part of the logical test is to capture the last cluster if it overlaps the end of the list
cluster = l[upstream:i+1]
upstream = i+1
cluster_list.append(cluster)
result_list = []
for i in cluster_list:
totestresult = clustermining (i, Instance)
if totestresult: result_list.append(totestresult)
del Instance #
return result_list
def logtask (results):
global number_of_clusters
if results:
number_of_clusters += len(results)
LOG.append(results)
return
if __name__ == '__main__':
start_time = time.time()
fasta_dic = get_fasta (sys.argv[3])
objDic = {}
number_of_reads = 0
F = open (sys.argv[1], "r") # F is the bowtie output taken as input
for line in F:
number_of_reads += 1
fields = line.split()
polarity = fields[1]
gene = fields[2]
offset = int(fields[3])
size = len (fields[4])
try:
objDic[gene].addread (polarity, offset, size)
except KeyError:
objDic[gene] = SmRNAwindow(gene, fasta_dic[gene])
objDic[gene].addread (polarity, offset, size)
F.close()
OUT = open (sys.argv[2], "w")
output_format=sys.argv[8]
if output_format == "intervals":
print >> OUT, "#chrom\tStart\tEnd\tReadCount"
elif output_format == "GFF3":
print >> OUT, "##gff-version 3"
else:
print >> OUT, "#ID\t#chrom\tStart\tEnd\tLength\tReadCount\tForwardReads\tReverseReads\tDensity\tMedian\tFoldEnergy\tForBar\tRevBar\tz-score_signature\tHannon_signature\tUfreq_in_24-28RNAs\tUfreq_in_20-21RNs"
dist = int(sys.argv[4])
min_median_size = int(sys.argv[6])
minimum_reads = int(sys.argv[5])
number_of_clusters = 0
Instance_ID = 0
folding=sys.argv[7]
pool = multiprocessing.Pool(4)
LOG = []
instance_list = []
for instance in objDic.keys():
instance_list.append(objDic[instance])
del objDic
pool.map_async(clustering, instance_list, callback=logtask)
pool.close()
pool.join()
for lines in LOG:
for line in lines:
print >> OUT, line
OUT.close()
elapsed_time = time.time() - start_time
print "number of reads: %s\nnumber of clusters: %s\ntime: %s" % (number_of_reads, number_of_clusters, elapsed_time)<|fim▁end|>
|
downstream_coord = cluster[-1]
readcount = Instance.readcount(upstream_coord=upstream_coord, downstream_coord=downstream_coord)
|
<|file_name|>test_strports.py<|end_file_name|><|fim▁begin|># Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.application.strports}.
"""
from twisted.trial.unittest import TestCase
from twisted.application import strports
from twisted.application import internet
from twisted.internet.test.test_endpoints import ParserTestCase
from twisted.internet.protocol import Factory
from twisted.internet.endpoints import TCP4ServerEndpoint, UNIXServerEndpoint
class DeprecatedParseTestCase(ParserTestCase):
"""
L{strports.parse} is deprecated. It's an alias for a method that is now
private in L{twisted.internet.endpoints}.
"""
def parse(self, *a, **kw):
result = strports.parse(*a, **kw)
warnings = self.flushWarnings([self.parse])
self.assertEquals(len(warnings), 1)
self.assertEquals(
warnings[0]['message'],
"twisted.application.strports.parse was deprecated "
"in Twisted 10.2.0: in favor of twisted.internet.endpoints.serverFromString")
return result
def test_simpleNumeric(self):
"""
Base numeric ports should be parsed as TCP.
"""
self.assertEquals(self.parse('80', self.f),
('TCP', (80, self.f), {'interface':'', 'backlog':50}))
def test_allKeywords(self):
"""
A collection of keyword arguments with no prefixed type, like 'port=80',
will be parsed as keyword arguments to 'tcp'.
"""
self.assertEquals(self.parse('port=80', self.f),
('TCP', (80, self.f), {'interface':'', 'backlog':50}))
class ServiceTestCase(TestCase):
"""
Tests for L{strports.service}.
"""
def test_service(self):
"""
L{strports.service} returns a L{StreamServerEndpointService}
constructed with an endpoint produced from
L{endpoint.serverFromString}, using the same syntax.
"""
reactor = object() # the cake is a lie
aFactory = Factory()
aGoodPort = 1337
svc = strports.service(
'tcp:'+str(aGoodPort), aFactory, reactor=reactor)
self.assertIsInstance(svc, internet.StreamServerEndpointService)
# See twisted.application.test.test_internet.TestEndpointService.
# test_synchronousRaiseRaisesSynchronously
self.assertEquals(svc._raiseSynchronously, True)
self.assertIsInstance(svc.endpoint, TCP4ServerEndpoint)
# Maybe we should implement equality for endpoints.
self.assertEquals(svc.endpoint._port, aGoodPort)
self.assertIdentical(svc.factory, aFactory)
self.assertIdentical(svc.endpoint._reactor, reactor)
def test_serviceDefaultReactor(self):
"""
L{strports.service} will use the default reactor when none is provided
as an argument.
"""
from twisted.internet import reactor as globalReactor
aService = strports.service("tcp:80", None)
self.assertIdentical(aService.endpoint._reactor, globalReactor)
def test_serviceDeprecatedDefault(self):
"""
L{strports.service} still accepts a 'default' argument, which will
affect the parsing of 'default' (i.e. 'not containing a colon')
endpoint descriptions, but this behavior is deprecated.
"""
svc = strports.service("8080", None, "unix")
self.assertIsInstance(svc.endpoint, UNIXServerEndpoint)
warnings = self.flushWarnings([self.test_serviceDeprecatedDefault])
self.assertEquals(warnings[0]['category'], DeprecationWarning)
self.assertEquals(
warnings[0]['message'],
"The 'default' parameter was deprecated in Twisted 10.2.0. "
"Use qualified endpoint descriptions; for example, 'tcp:8080'.")
self.assertEquals(len(warnings), 1)
# Almost the same case, but slightly tricky - explicitly passing the old
# default value, None, also must trigger a deprecation warning.
svc = strports.service("tcp:8080", None, None)
self.assertIsInstance(svc.endpoint, TCP4ServerEndpoint)
warnings = self.flushWarnings([self.test_serviceDeprecatedDefault])
self.assertEquals(warnings[0]['category'], DeprecationWarning)
self.assertEquals(
warnings[0]['message'],
"The 'default' parameter was deprecated in Twisted 10.2.0.")
self.assertEquals(len(warnings), 1)
def test_serviceDeprecatedUnqualified(self):
"""
Unqualified strport descriptions, i.e. "8080", are deprecated.
"""
svc = strports.service("8080", None)
self.assertIsInstance(svc.endpoint, TCP4ServerEndpoint)
warnings = self.flushWarnings(<|fim▁hole|> "Unqualified strport description passed to 'service'."
"Use qualified endpoint descriptions; for example, 'tcp:8080'.")
self.assertEquals(len(warnings), 1)<|fim▁end|>
|
[self.test_serviceDeprecatedUnqualified])
self.assertEquals(warnings[0]['category'], DeprecationWarning)
self.assertEquals(
warnings[0]['message'],
|
<|file_name|>NimManager.py<|end_file_name|><|fim▁begin|>from Tools.HardwareInfo import HardwareInfo
from Tools.BoundFunction import boundFunction
from config import config, ConfigSubsection, ConfigSelection, ConfigFloat, \
ConfigSatlist, ConfigYesNo, ConfigInteger, ConfigSubList, ConfigNothing, \
ConfigSubDict, ConfigOnOff, ConfigDateTime
from enigma import eDVBSatelliteEquipmentControl as secClass, \
eDVBSatelliteLNBParameters as lnbParam, \
eDVBSatelliteDiseqcParameters as diseqcParam, \
eDVBSatelliteSwitchParameters as switchParam, \
eDVBSatelliteRotorParameters as rotorParam, \
eDVBResourceManager, eDVBDB, eEnv
from time import localtime, mktime
from datetime import datetime
from Tools.BoundFunction import boundFunction
from Tools import Directories
import xml.etree.cElementTree
def getConfigSatlist(orbpos, satlist):
default_orbpos = None
for x in satlist:
if x[0] == orbpos:
default_orbpos = orbpos
break
return ConfigSatlist(satlist, default_orbpos)
class SecConfigure:
def getConfiguredSats(self):
return self.configuredSatellites
def addSatellite(self, sec, orbpos):
sec.addSatellite(orbpos)
self.configuredSatellites.add(orbpos)
def addLNBSimple(self, sec, slotid, diseqcmode, toneburstmode = diseqcParam.NO, diseqcpos = diseqcParam.SENDNO, orbpos = 0, longitude = 0, latitude = 0, loDirection = 0, laDirection = 0, turningSpeed = rotorParam.FAST, useInputPower=True, inputPowerDelta=50, fastDiSEqC = False, setVoltageTone = True, diseqc13V = False):
if orbpos is None or orbpos == 3600 or orbpos == 3601:
return
#simple defaults
sec.addLNB()
tunermask = 1 << slotid
if self.equal.has_key(slotid):
for slot in self.equal[slotid]:
tunermask |= (1 << slot)
if self.linked.has_key(slotid):
for slot in self.linked[slotid]:
tunermask |= (1 << slot)
sec.setLNBSatCR(-1)
sec.setLNBNum(1)
sec.setLNBLOFL(9750000)
sec.setLNBLOFH(10600000)
sec.setLNBThreshold(11700000)
sec.setLNBIncreasedVoltage(lnbParam.OFF)
sec.setRepeats(0)
sec.setFastDiSEqC(fastDiSEqC)
sec.setSeqRepeat(0)
sec.setCommandOrder(0)
#user values
sec.setDiSEqCMode(diseqcmode)
sec.setToneburst(toneburstmode)
sec.setCommittedCommand(diseqcpos)
sec.setUncommittedCommand(0) # SENDNO
#print "set orbpos to:" + str(orbpos)
if 0 <= diseqcmode < 3:
self.addSatellite(sec, orbpos)
if setVoltageTone:
if diseqc13V:
sec.setVoltageMode(switchParam.HV_13)
else:
sec.setVoltageMode(switchParam.HV)
sec.setToneMode(switchParam.HILO)
else:
sec.setVoltageMode(switchParam._14V)
sec.setToneMode(switchParam.OFF)
elif (diseqcmode == 3): # diseqc 1.2
if self.satposdepends.has_key(slotid):
for slot in self.satposdepends[slotid]:
tunermask |= (1 << slot)
sec.setLatitude(latitude)
sec.setLaDirection(laDirection)
sec.setLongitude(longitude)
sec.setLoDirection(loDirection)
sec.setUseInputpower(useInputPower)
sec.setInputpowerDelta(inputPowerDelta)
sec.setRotorTurningSpeed(turningSpeed)
for x in self.NimManager.satList:
print "Add sat " + str(x[0])
self.addSatellite(sec, int(x[0]))
if diseqc13V:
sec.setVoltageMode(switchParam.HV_13)
else:
sec.setVoltageMode(switchParam.HV)
sec.setToneMode(switchParam.HILO)
sec.setRotorPosNum(0) # USALS
sec.setLNBSlotMask(tunermask)
def setSatposDepends(self, sec, nim1, nim2):
print "tuner", nim1, "depends on satpos of", nim2
sec.setTunerDepends(nim1, nim2)
def linkInternally(self, slotid):
nim = self.NimManager.getNim(slotid)
if nim.internallyConnectableTo is not None:
nim.setInternalLink()
def linkNIMs(self, sec, nim1, nim2):
print "link tuner", nim1, "to tuner", nim2
if nim2 == (nim1 - 1):
self.linkInternally(nim1)
sec.setTunerLinked(nim1, nim2)
def getRoot(self, slotid, connto):
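		# Follow "loopthrough"/"equal"/"satposdepends" links until a directly configured
		# tuner is reached; the visited list guards against circular references.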
visited = []
while (self.NimManager.getNimConfig(connto).configMode.value in ("satposdepends", "equal", "loopthrough")):
connto = int(self.NimManager.getNimConfig(connto).connectedTo.value)
if connto in visited: # prevent endless loop
return slotid
visited.append(connto)
return connto
def update(self):
sec = secClass.getInstance()
self.configuredSatellites = set()
for slotid in self.NimManager.getNimListOfType("DVB-S"):
if self.NimManager.nimInternallyConnectableTo(slotid) is not None:
self.NimManager.nimRemoveInternalLink(slotid)
sec.clear() ## this do unlinking NIMs too !!
print "sec config cleared"
self.linked = { }
self.satposdepends = { }
self.equal = { }
nim_slots = self.NimManager.nim_slots
used_nim_slots = [ ]
for slot in nim_slots:
if slot.type is not None:
used_nim_slots.append((slot.slot, slot.description, slot.config.configMode.value != "nothing" and True or False, slot.isCompatible("DVB-S2"), slot.frontend_id is None and -1 or slot.frontend_id))
eDVBResourceManager.getInstance().setFrontendSlotInformations(used_nim_slots)
for slot in nim_slots:
if slot.frontend_id is not None:
types = [type for type in ["DVB-T", "DVB-C", "DVB-S", "ATSC"] if eDVBResourceManager.getInstance().frontendIsCompatible(slot.frontend_id, type)]
if len(types) > 1:
slot.multi_type = {}
for type in types:
slot.multi_type[str(types.index(type))] = type
for slot in nim_slots:
x = slot.slot
nim = slot.config
if slot.isCompatible("DVB-S"):
# save what nim we link to/are equal to/satposdepends to.
# this is stored in the *value* (not index!) of the config list
if nim.configMode.value == "equal":
connto = self.getRoot(x, int(nim.connectedTo.value))
if not self.equal.has_key(connto):
self.equal[connto] = []
self.equal[connto].append(x)
elif nim.configMode.value == "loopthrough":
self.linkNIMs(sec, x, int(nim.connectedTo.value))
connto = self.getRoot(x, int(nim.connectedTo.value))
if not self.linked.has_key(connto):
self.linked[connto] = []
self.linked[connto].append(x)
elif nim.configMode.value == "satposdepends":
self.setSatposDepends(sec, x, int(nim.connectedTo.value))
connto = self.getRoot(x, int(nim.connectedTo.value))
if not self.satposdepends.has_key(connto):
self.satposdepends[connto] = []
self.satposdepends[connto].append(x)
for slot in nim_slots:
x = slot.slot
nim = slot.config
hw = HardwareInfo()
if slot.isCompatible("DVB-S"):
print "slot: " + str(x) + " configmode: " + str(nim.configMode.value)
if nim.configMode.value in ( "loopthrough", "satposdepends", "nothing" ):
pass
else:
sec.setSlotNotLinked(x)
if nim.configMode.value == "equal":
pass
elif nim.configMode.value == "simple": #simple config
print "diseqcmode: ", nim.diseqcMode.value
if nim.diseqcMode.value == "single": #single
if nim.simpleSingleSendDiSEqC.value:
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AA, diseqc13V = nim.diseqc13V.value)
else:
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.NONE, diseqcpos = diseqcParam.SENDNO, diseqc13V = nim.diseqc13V.value)
elif nim.diseqcMode.value == "toneburst_a_b": #Toneburst A/B
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.A, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.SENDNO, diseqc13V = nim.diseqc13V.value)
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcB.orbital_position, toneburstmode = diseqcParam.B, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.SENDNO, diseqc13V = nim.diseqc13V.value)
elif nim.diseqcMode.value == "diseqc_a_b": #DiSEqC A/B
fastDiSEqC = nim.simpleDiSEqCOnlyOnSatChange.value
setVoltageTone = nim.simpleDiSEqCSetVoltageTone.value
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AA, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value)
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcB.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AB, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value)
elif nim.diseqcMode.value == "diseqc_a_b_c_d": #DiSEqC A/B/C/D
fastDiSEqC = nim.simpleDiSEqCOnlyOnSatChange.value
setVoltageTone = nim.simpleDiSEqCSetVoltageTone.value
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AA, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value)
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcB.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AB, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value)
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcC.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.BA, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value)
self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcD.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.BB, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value)
elif nim.diseqcMode.value == "positioner": #Positioner
if nim.latitudeOrientation.value == "north":
laValue = rotorParam.NORTH
else:
laValue = rotorParam.SOUTH
if nim.longitudeOrientation.value == "east":
loValue = rotorParam.EAST
else:
loValue = rotorParam.WEST
inputPowerDelta=nim.powerThreshold.value
useInputPower=False
turning_speed=0
if nim.powerMeasurement.value:
useInputPower=True
turn_speed_dict = { "fast": rotorParam.FAST, "slow": rotorParam.SLOW }
if turn_speed_dict.has_key(nim.turningSpeed.value):
turning_speed = turn_speed_dict[nim.turningSpeed.value]
else:
beg_time = localtime(nim.fastTurningBegin.value)
end_time = localtime(nim.fastTurningEnd.value)
turning_speed = ((beg_time.tm_hour+1) * 60 + beg_time.tm_min + 1) << 16
turning_speed |= (end_time.tm_hour+1) * 60 + end_time.tm_min + 1
self.addLNBSimple(sec, slotid = x, diseqcmode = 3,
longitude = nim.longitude.float,
loDirection = loValue,
latitude = nim.latitude.float,
laDirection = laValue,
turningSpeed = turning_speed,
useInputPower = useInputPower,
inputPowerDelta = inputPowerDelta,
diseqc13V = nim.diseqc13V.value)
elif nim.configMode.value == "advanced": #advanced config
self.updateAdvanced(sec, x)
print "sec config completed"
def updateAdvanced(self, sec, slotid):
try:
if config.Nims[slotid].advanced.unicableconnected is not None:
if config.Nims[slotid].advanced.unicableconnected.value == True:
config.Nims[slotid].advanced.unicableconnectedTo.save_forced = True
self.linkNIMs(sec, slotid, int(config.Nims[slotid].advanced.unicableconnectedTo.value))
connto = self.getRoot(slotid, int(config.Nims[slotid].advanced.unicableconnectedTo.value))
if not self.linked.has_key(connto):
self.linked[connto] = []
self.linked[connto].append(slotid)
else:
config.Nims[slotid].advanced.unicableconnectedTo.save_forced = False
except:
pass
lnbSat = {}
for x in range(1,37):
lnbSat[x] = []
#wildcard for all satellites ( for rotor )
for x in range(3601, 3605):
lnb = int(config.Nims[slotid].advanced.sat[x].lnb.value)
if lnb != 0:
for x in self.NimManager.satList:
print "add", x[0], "to", lnb
lnbSat[lnb].append(x[0])
for x in self.NimManager.satList:
lnb = int(config.Nims[slotid].advanced.sat[x[0]].lnb.value)
if lnb != 0:
print "add", x[0], "to", lnb
lnbSat[lnb].append(x[0])
for x in range(1,37):
if len(lnbSat[x]) > 0:
currLnb = config.Nims[slotid].advanced.lnb[x]
sec.addLNB()
if x < 33:
sec.setLNBNum(x)
tunermask = 1 << slotid
if self.equal.has_key(slotid):
for slot in self.equal[slotid]:
tunermask |= (1 << slot)
if self.linked.has_key(slotid):
for slot in self.linked[slotid]:
tunermask |= (1 << slot)
if currLnb.lof.value != "unicable":
sec.setLNBSatCR(-1)
if currLnb.lof.value == "universal_lnb":
sec.setLNBLOFL(9750000)
sec.setLNBLOFH(10600000)
sec.setLNBThreshold(11700000)
elif currLnb.lof.value == "unicable":
def setupUnicable(configManufacturer, ProductDict):
manufacturer_name = configManufacturer.value
manufacturer = ProductDict[manufacturer_name]
product_name = manufacturer.product.value
sec.setLNBSatCR(manufacturer.scr[product_name].index)
sec.setLNBSatCRvco(manufacturer.vco[product_name][manufacturer.scr[product_name].index].value*1000)
sec.setLNBSatCRpositions(manufacturer.positions[product_name][0].value)
sec.setLNBLOFL(manufacturer.lofl[product_name][0].value * 1000)
sec.setLNBLOFH(manufacturer.lofh[product_name][0].value * 1000)
sec.setLNBThreshold(manufacturer.loft[product_name][0].value * 1000)
configManufacturer.save_forced = True
manufacturer.product.save_forced = True
manufacturer.vco[product_name][manufacturer.scr[product_name].index].save_forced = True
if currLnb.unicable.value == "unicable_user":
#TODO satpositions for satcruser
sec.setLNBLOFL(currLnb.lofl.value * 1000)
sec.setLNBLOFH(currLnb.lofh.value * 1000)
sec.setLNBThreshold(currLnb.threshold.value * 1000)
sec.setLNBSatCR(currLnb.satcruser.index)
sec.setLNBSatCRvco(currLnb.satcrvcouser[currLnb.satcruser.index].value*1000)
sec.setLNBSatCRpositions(1) #HACK
elif currLnb.unicable.value == "unicable_matrix":
setupUnicable(currLnb.unicableMatrixManufacturer, currLnb.unicableMatrix)
elif currLnb.unicable.value == "unicable_lnb":
setupUnicable(currLnb.unicableLnbManufacturer, currLnb.unicableLnb)
elif currLnb.lof.value == "c_band":
sec.setLNBLOFL(5150000)
sec.setLNBLOFH(5150000)
sec.setLNBThreshold(5150000)
elif currLnb.lof.value == "user_defined":
sec.setLNBLOFL(currLnb.lofl.value * 1000)
sec.setLNBLOFH(currLnb.lofh.value * 1000)
sec.setLNBThreshold(currLnb.threshold.value * 1000)
# if currLnb.output_12v.value == "0V":
# pass # nyi in drivers
# elif currLnb.output_12v.value == "12V":
# pass # nyi in drivers
if currLnb.increased_voltage.value:
sec.setLNBIncreasedVoltage(lnbParam.ON)
else:
sec.setLNBIncreasedVoltage(lnbParam.OFF)
dm = currLnb.diseqcMode.value
if dm == "none":
sec.setDiSEqCMode(diseqcParam.NONE)
elif dm == "1_0":
sec.setDiSEqCMode(diseqcParam.V1_0)
elif dm == "1_1":
sec.setDiSEqCMode(diseqcParam.V1_1)
elif dm == "1_2":
sec.setDiSEqCMode(diseqcParam.V1_2)
if self.satposdepends.has_key(slotid):
for slot in self.satposdepends[slotid]:
tunermask |= (1 << slot)
if dm != "none":
if currLnb.toneburst.value == "none":
sec.setToneburst(diseqcParam.NO)
elif currLnb.toneburst.value == "A":
sec.setToneburst(diseqcParam.A)
elif currLnb.toneburst.value == "B":
sec.setToneburst(diseqcParam.B)
# Committed Diseqc Command
cdc = currLnb.commitedDiseqcCommand.value
c = { "none": diseqcParam.SENDNO,
"AA": diseqcParam.AA,
"AB": diseqcParam.AB,
"BA": diseqcParam.BA,
"BB": diseqcParam.BB }
if c.has_key(cdc):
sec.setCommittedCommand(c[cdc])
else:
sec.setCommittedCommand(long(cdc))
sec.setFastDiSEqC(currLnb.fastDiseqc.value)
sec.setSeqRepeat(currLnb.sequenceRepeat.value)
if currLnb.diseqcMode.value == "1_0":
currCO = currLnb.commandOrder1_0.value
sec.setRepeats(0)
else:
currCO = currLnb.commandOrder.value
<|fim▁hole|> sec.setUncommittedCommand(0xF0|(udc-1))
else:
sec.setUncommittedCommand(0) # SENDNO
sec.setRepeats({"none": 0, "one": 1, "two": 2, "three": 3}[currLnb.diseqcRepeats.value])
setCommandOrder = False
# 0 "committed, toneburst",
# 1 "toneburst, committed",
# 2 "committed, uncommitted, toneburst",
# 3 "toneburst, committed, uncommitted",
# 4 "uncommitted, committed, toneburst"
# 5 "toneburst, uncommitted, commmitted"
order_map = {"ct": 0, "tc": 1, "cut": 2, "tcu": 3, "uct": 4, "tuc": 5}
sec.setCommandOrder(order_map[currCO])
if dm == "1_2":
latitude = currLnb.latitude.float
sec.setLatitude(latitude)
longitude = currLnb.longitude.float
sec.setLongitude(longitude)
if currLnb.latitudeOrientation.value == "north":
sec.setLaDirection(rotorParam.NORTH)
else:
sec.setLaDirection(rotorParam.SOUTH)
if currLnb.longitudeOrientation.value == "east":
sec.setLoDirection(rotorParam.EAST)
else:
sec.setLoDirection(rotorParam.WEST)
if currLnb.powerMeasurement.value:
sec.setUseInputpower(True)
sec.setInputpowerDelta(currLnb.powerThreshold.value)
turn_speed_dict = { "fast": rotorParam.FAST, "slow": rotorParam.SLOW }
if turn_speed_dict.has_key(currLnb.turningSpeed.value):
turning_speed = turn_speed_dict[currLnb.turningSpeed.value]
else:
beg_time = localtime(currLnb.fastTurningBegin.value)
end_time = localtime(currLnb.fastTurningEnd.value)
turning_speed = ((beg_time.tm_hour + 1) * 60 + beg_time.tm_min + 1) << 16
turning_speed |= (end_time.tm_hour + 1) * 60 + end_time.tm_min + 1
sec.setRotorTurningSpeed(turning_speed)
else:
sec.setUseInputpower(False)
sec.setLNBSlotMask(tunermask)
sec.setLNBPrio(int(currLnb.prio.value))
# finally add the orbital positions
for y in lnbSat[x]:
self.addSatellite(sec, y)
if x > 32:
satpos = x > 32 and (3604-(36 - x)) or y
else:
satpos = y
currSat = config.Nims[slotid].advanced.sat[satpos]
if currSat.voltage.value == "polarization":
if config.Nims[slotid].diseqc13V.value:
sec.setVoltageMode(switchParam.HV_13)
else:
sec.setVoltageMode(switchParam.HV)
elif currSat.voltage.value == "13V":
sec.setVoltageMode(switchParam._14V)
elif currSat.voltage.value == "18V":
sec.setVoltageMode(switchParam._18V)
if currSat.tonemode.value == "band":
sec.setToneMode(switchParam.HILO)
elif currSat.tonemode.value == "on":
sec.setToneMode(switchParam.ON)
elif currSat.tonemode.value == "off":
sec.setToneMode(switchParam.OFF)
if not currSat.usals.value and x < 34:
sec.setRotorPosNum(currSat.rotorposition.value)
else:
sec.setRotorPosNum(0) #USALS
def __init__(self, nimmgr):
self.NimManager = nimmgr
self.configuredSatellites = set()
self.update()
class NIM(object):
def __init__(self, slot, type, description, has_outputs = True, internally_connectable = None, multi_type = {}, frontend_id = None, i2c = None, is_empty = False):
self.slot = slot
if type not in ("DVB-S", "DVB-C", "DVB-T", "DVB-S2", "DVB-T2", "DVB-C2", "ATSC", None):
print "warning: unknown NIM type %s, not using." % type
type = None
self.type = type
self.description = description
self.has_outputs = has_outputs
self.internally_connectable = internally_connectable
self.multi_type = multi_type
self.i2c = i2c
self.frontend_id = frontend_id
self.__is_empty = is_empty
self.compatible = {
None: (None,),
"DVB-S": ("DVB-S", None),
"DVB-C": ("DVB-C", None),
"DVB-T": ("DVB-T", None),
"DVB-S2": ("DVB-S", "DVB-S2", None),
"DVB-C2": ("DVB-C", "DVB-C2", None),
"DVB-T2": ("DVB-T", "DVB-T2", None),
"ATSC": ("ATSC", None),
}
def isCompatible(self, what):
if not self.isSupported():
return False
return what in self.compatible[self.getType()]
def canBeCompatible(self, what):
if not self.isSupported():
return False
if self.isCompatible(what):
return True
for type in self.multi_type.values():
if what in self.compatible[type]:
return True
return False
def getType(self):
try:
if self.isMultiType():
return self.multi_type[self.config.multiType.value]
except:
pass
return self.type
def connectableTo(self):
connectable = {
"DVB-S": ("DVB-S", "DVB-S2"),
"DVB-C": ("DVB-C", "DVB-C2"),
"DVB-T": ("DVB-T","DVB-T2"),
"DVB-S2": ("DVB-S", "DVB-S2"),
"DVB-C2": ("DVB-C", "DVB-C2"),
"DVB-T2": ("DVB-T", "DVB-T2"),
"ATSC": ("ATSC"),
}
return connectable[self.getType()]
def getSlotName(self):
# get a friendly description for a slot name.
# we name them "Tuner A/B/C/...", because that's what's usually written on the back
# of the device.
return _("Tuner") + " " + chr(ord('A') + self.slot)
slot_name = property(getSlotName)
def getSlotID(self):
return chr(ord('A') + self.slot)
def getI2C(self):
return self.i2c
def hasOutputs(self):
return self.has_outputs
def internallyConnectableTo(self):
return self.internally_connectable
def setInternalLink(self):
if self.internally_connectable is not None:
print "setting internal link on frontend id", self.frontend_id
open("/proc/stb/frontend/%d/rf_switch" % self.frontend_id, "w").write("internal")
def removeInternalLink(self):
if self.internally_connectable is not None:
print "removing internal link on frontend id", self.frontend_id
open("/proc/stb/frontend/%d/rf_switch" % self.frontend_id, "w").write("external")
def isMultiType(self):
return (len(self.multi_type) > 0)
def isEmpty(self):
return self.__is_empty
# empty tuners are supported!
def isSupported(self):
return (self.frontend_id is not None) or self.__is_empty
# returns dict {<slotid>: <type>}
def getMultiTypeList(self):
return self.multi_type
slot_id = property(getSlotID)
def getFriendlyType(self):
return {
"DVB-S": "DVB-S",
"DVB-T": "DVB-T",
"DVB-C": "DVB-C",
"DVB-S2": "DVB-S2",
"DVB-T2": "DVB-T2",
"DVB-C2": "DVB-C2",
"ATSC": "ATSC",
None: _("empty")
}[self.getType()]
friendly_type = property(getFriendlyType)
def getFriendlyFullDescription(self):
nim_text = self.slot_name + ": "
if self.empty:
nim_text += _("(empty)")
elif not self.isSupported():
nim_text += self.description + " (" + _("not supported") + ")"
else:
nim_text += self.description + " (" + self.friendly_type + ")"
return nim_text
friendly_full_description = property(getFriendlyFullDescription)
config_mode = property(lambda self: config.Nims[self.slot].configMode.value)
config = property(lambda self: config.Nims[self.slot])
empty = property(lambda self: self.getType is None)
class NimManager:
def getConfiguredSats(self):
return self.sec.getConfiguredSats()
def getTransponders(self, pos):
if self.transponders.has_key(pos):
return self.transponders[pos]
else:
return []
def getTranspondersCable(self, nim):
nimConfig = config.Nims[nim]
if nimConfig.configMode.value != "nothing" and nimConfig.cable.scan_type.value == "provider":
return self.transponderscable[self.cablesList[nimConfig.cable.scan_provider.index][0]]
return [ ]
def getTranspondersTerrestrial(self, region):
return self.transpondersterrestrial[region]
def getCableDescription(self, nim):
return self.cablesList[config.Nims[nim].scan_provider.index][0]
def getCableFlags(self, nim):
return self.cablesList[config.Nims[nim].scan_provider.index][1]
def getTerrestrialDescription(self, nim):
return self.terrestrialsList[config.Nims[nim].terrestrial.index][0]
def getTerrestrialFlags(self, nim):
return self.terrestrialsList[config.Nims[nim].terrestrial.index][1]
def getSatDescription(self, pos):
return self.satellites[pos]
def sortFunc(self, x):
orbpos = x[0]
if orbpos > 1800:
return orbpos - 3600
else:
return orbpos + 1800
def readTransponders(self):
		# read initial networks from file. we only read files which we are interested in,
		# which means only those for which a compatible tuner exists.
self.satellites = { }
self.transponders = { }
self.transponderscable = { }
self.transpondersterrestrial = { }
self.transpondersatsc = { }
db = eDVBDB.getInstance()
if self.hasNimType("DVB-S"):
print "Reading satellites.xml"
db.readSatellites(self.satList, self.satellites, self.transponders)
self.satList.sort() # sort by orbpos
#print "SATLIST", self.satList
#print "SATS", self.satellites
#print "TRANSPONDERS", self.transponders
if self.hasNimType("DVB-C"):
print "Reading cables.xml"
db.readCables(self.cablesList, self.transponderscable)
# print "CABLIST", self.cablesList
# print "TRANSPONDERS", self.transponders
if self.hasNimType("DVB-T"):
print "Reading terrestrial.xml"
db.readTerrestrials(self.terrestrialsList, self.transpondersterrestrial)
# print "TERLIST", self.terrestrialsList
# print "TRANSPONDERS", self.transpondersterrestrial
if self.hasNimType("ATSC"):
print "Reading atsc.xml"
#db.readATSC(self.atscList, self.transpondersatsc)
def enumerateNIMs(self):
# enum available NIMs. This is currently very dreambox-centric and uses the /proc/bus/nim_sockets interface.
# the result will be stored into nim_slots.
# the content of /proc/bus/nim_sockets looks like:
# NIM Socket 0:
# Type: DVB-S
# Name: BCM4501 DVB-S2 NIM (internal)
# NIM Socket 1:
# Type: DVB-S
# Name: BCM4501 DVB-S2 NIM (internal)
# NIM Socket 2:
# Type: DVB-T
# Name: Philips TU1216
# NIM Socket 3:
# Type: DVB-S
# Name: Alps BSBE1 702A
#
# Type will be either "DVB-S", "DVB-S2", "DVB-T", "DVB-C" or None.
# nim_slots is an array which has exactly one entry for each slot, even for empty ones.
self.nim_slots = [ ]
try:
nimfile = open("/proc/bus/nim_sockets")
except IOError:
return
current_slot = None
entries = {}
for line in nimfile:
if not line:
break
line = line.strip()
if line.startswith("NIM Socket"):
parts = line.split(" ")
current_slot = int(parts[2][:-1])
entries[current_slot] = {}
elif line.startswith("Type:"):
entries[current_slot]["type"] = str(line[6:])
entries[current_slot]["isempty"] = False
elif line.startswith("Name:"):
entries[current_slot]["name"] = str(line[6:])
entries[current_slot]["isempty"] = False
elif line.startswith("Has_Outputs:"):
input = str(line[len("Has_Outputs:") + 1:])
entries[current_slot]["has_outputs"] = (input == "yes")
elif line.startswith("Internally_Connectable:"):
input = int(line[len("Internally_Connectable:") + 1:])
entries[current_slot]["internally_connectable"] = input
elif line.startswith("Frontend_Device:"):
input = int(line[len("Frontend_Device:") + 1:])
entries[current_slot]["frontend_device"] = input
elif line.startswith("Mode"):
# "Mode 0: DVB-T" -> ["Mode 0", "DVB-T"]
split = line.split(": ")
if len(split) > 1 and split[1]:
# "Mode 0" -> ["Mode", "0"]
split2 = split[0].split(" ")
modes = entries[current_slot].get("multi_type", {})
modes[split2[1]] = split[1]
entries[current_slot]["multi_type"] = modes
elif line.startswith("I2C_Device:"):
input = int(line[len("I2C_Device:") + 1:])
entries[current_slot]["i2c"] = input
elif line.startswith("empty"):
entries[current_slot]["type"] = None
entries[current_slot]["name"] = _("N/A")
entries[current_slot]["isempty"] = True
nimfile.close()
from os import path
for id, entry in entries.items():
if not (entry.has_key("name") and entry.has_key("type")):
entry["name"] = _("N/A")
entry["type"] = None
if not (entry.has_key("i2c")):
entry["i2c"] = None
if not (entry.has_key("has_outputs")):
entry["has_outputs"] = True
if entry.has_key("frontend_device"): # check if internally connectable
if path.exists("/proc/stb/frontend/%d/rf_switch" % entry["frontend_device"]):
entry["internally_connectable"] = entry["frontend_device"] - 1
else:
entry["internally_connectable"] = None
else:
entry["frontend_device"] = entry["internally_connectable"] = None
if not (entry.has_key("multi_type")):
entry["multi_type"] = {}
self.nim_slots.append(NIM(slot = id, description = entry["name"], type = entry["type"], has_outputs = entry["has_outputs"], internally_connectable = entry["internally_connectable"], multi_type = entry["multi_type"], frontend_id = entry["frontend_device"], i2c = entry["i2c"], is_empty = entry["isempty"]))
def hasNimType(self, chktype):
for slot in self.nim_slots:
if slot.isCompatible(chktype):
return True
for type in slot.getMultiTypeList().values():
if chktype == type:
return True
return False
def getNimType(self, slotid):
return self.nim_slots[slotid].type
def getNimDescription(self, slotid):
return self.nim_slots[slotid].friendly_full_description
def getNimName(self, slotid):
return self.nim_slots[slotid].description
def getNim(self, slotid):
return self.nim_slots[slotid]
def getI2CDevice(self, slotid):
return self.nim_slots[slotid].getI2C()
def getNimListOfType(self, type, exception = -1):
# returns a list of indexes for NIMs compatible to the given type, except for 'exception'
list = []
for x in self.nim_slots:
if x.isCompatible(type) and x.slot != exception:
list.append(x.slot)
return list
def __init__(self):
self.satList = [ ]
self.cablesList = []
self.terrestrialsList = []
self.atscList = []
self.enumerateNIMs()
self.readTransponders()
InitNimManager(self) #init config stuff
# get a list with the friendly full description
def nimList(self):
list = [ ]
for slot in self.nim_slots:
list.append(slot.friendly_full_description)
return list
def getSlotCount(self):
return len(self.nim_slots)
def hasOutputs(self, slotid):
return self.nim_slots[slotid].hasOutputs()
def nimInternallyConnectableTo(self, slotid):
return self.nim_slots[slotid].internallyConnectableTo()
def nimRemoveInternalLink(self, slotid):
self.nim_slots[slotid].removeInternalLink()
def canConnectTo(self, slotid):
slots = []
if self.nim_slots[slotid].internallyConnectableTo() is not None:
slots.append(self.nim_slots[slotid].internallyConnectableTo())
for type in self.nim_slots[slotid].connectableTo():
for slot in self.getNimListOfType(type, exception = slotid):
if self.hasOutputs(slot):
slots.append(slot)
		# remove nims that have a connectedTo reference pointing at them
for testnim in slots[:]:
for nim in self.getNimListOfType("DVB-S", slotid):
nimConfig = self.getNimConfig(nim)
if nimConfig.content.items.has_key("configMode") and nimConfig.configMode.value == "loopthrough" and int(nimConfig.connectedTo.value) == testnim:
slots.remove(testnim)
break
slots.sort()
return slots
def canEqualTo(self, slotid):
type = self.getNimType(slotid)
type = type[:5] # DVB-S2 --> DVB-S, DVB-T2 --> DVB-T, DVB-C2 --> DVB-C
nimList = self.getNimListOfType(type, slotid)
for nim in nimList[:]:
mode = self.getNimConfig(nim)
if mode.configMode.value == "loopthrough" or mode.configMode.value == "satposdepends":
nimList.remove(nim)
return nimList
def canDependOn(self, slotid):
type = self.getNimType(slotid)
type = type[:5] # DVB-S2 --> DVB-S, DVB-T2 --> DVB-T, DVB-C2 --> DVB-C
nimList = self.getNimListOfType(type, slotid)
positionerList = []
for nim in nimList[:]:
mode = self.getNimConfig(nim)
nimHaveRotor = mode.configMode.value == "simple" and mode.diseqcMode.value == "positioner"
if not nimHaveRotor and mode.configMode.value == "advanced":
for x in range(3601, 3605):
lnb = int(mode.advanced.sat[x].lnb.value)
if lnb != 0:
nimHaveRotor = True
break
if not nimHaveRotor:
for sat in mode.advanced.sat.values():
lnb_num = int(sat.lnb.value)
diseqcmode = lnb_num and mode.advanced.lnb[lnb_num].diseqcMode.value or ""
if diseqcmode == "1_2":
nimHaveRotor = True
break
if nimHaveRotor:
alreadyConnected = False
for testnim in nimList:
testmode = self.getNimConfig(testnim)
if testmode.configMode.value == "satposdepends" and int(testmode.connectedTo.value) == int(nim):
alreadyConnected = True
break
if not alreadyConnected:
positionerList.append(nim)
return positionerList
def getNimConfig(self, slotid):
return config.Nims[slotid]
def getSatName(self, pos):
for sat in self.satList:
if sat[0] == pos:
return sat[1]
return _("N/A")
def getSatList(self):
return self.satList
# returns True if something is configured to be connected to this nim
# if slotid == -1, returns if something is connected to ANY nim
def somethingConnected(self, slotid = -1):
if (slotid == -1):
connected = False
for id in range(self.getSlotCount()):
if self.somethingConnected(id):
connected = True
return connected
else:
nim = config.Nims[slotid]
configMode = nim.configMode.value
if self.nim_slots[slotid].isCompatible("DVB-S") or self.nim_slots[slotid].isCompatible("DVB-T") or self.nim_slots[slotid].isCompatible("DVB-C"):
return not (configMode == "nothing")
def getSatListForNim(self, slotid):
list = []
if self.nim_slots[slotid].isCompatible("DVB-S"):
nim = config.Nims[slotid]
#print "slotid:", slotid
#print "self.satellites:", self.satList[config.Nims[slotid].diseqcA.index]
#print "diseqcA:", config.Nims[slotid].diseqcA.value
configMode = nim.configMode.value
if configMode == "equal":
slotid = int(nim.connectedTo.value)
nim = config.Nims[slotid]
configMode = nim.configMode.value
elif configMode == "loopthrough":
slotid = self.sec.getRoot(slotid, int(nim.connectedTo.value))
nim = config.Nims[slotid]
configMode = nim.configMode.value
if configMode == "simple":
dm = nim.diseqcMode.value
if dm in ("single", "toneburst_a_b", "diseqc_a_b", "diseqc_a_b_c_d"):
if nim.diseqcA.orbital_position < 3600:
list.append(self.satList[nim.diseqcA.index - 2])
if dm in ("toneburst_a_b", "diseqc_a_b", "diseqc_a_b_c_d"):
if nim.diseqcB.orbital_position < 3600:
list.append(self.satList[nim.diseqcB.index - 2])
if dm == "diseqc_a_b_c_d":
if nim.diseqcC.orbital_position < 3600:
list.append(self.satList[nim.diseqcC.index - 2])
if nim.diseqcD.orbital_position < 3600:
list.append(self.satList[nim.diseqcD.index - 2])
if dm == "positioner":
for x in self.satList:
list.append(x)
elif configMode == "advanced":
for x in range(3601, 3605):
if int(nim.advanced.sat[x].lnb.value) != 0:
for x in self.satList:
list.append(x)
if not list:
for x in self.satList:
if int(nim.advanced.sat[x[0]].lnb.value) != 0:
list.append(x)
return list
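# like getSatListForNim, but only returns satellites reachable via a rotor (positioner / DiSEqC 1.2)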
def getRotorSatListForNim(self, slotid):
list = []
if self.nim_slots[slotid].isCompatible("DVB-S"):
#print "slotid:", slotid
#print "self.satellites:", self.satList[config.Nims[slotid].diseqcA.value]
#print "diseqcA:", config.Nims[slotid].diseqcA.value
configMode = config.Nims[slotid].configMode.value
if configMode == "simple":
if config.Nims[slotid].diseqcMode.value == "positioner":
for x in self.satList:
list.append(x)
elif configMode == "advanced":
nim = config.Nims[slotid]
for x in range(3601, 3605):
if int(nim.advanced.sat[x].lnb.value) != 0:
for x in self.satList:
list.append(x)
if not list:
for x in self.satList:
lnbnum = int(nim.advanced.sat[x[0]].lnb.value)
if lnbnum != 0:
lnb = nim.advanced.lnb[lnbnum]
if lnb.diseqcMode.value == "1_2":
list.append(x)
return list
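# creates config.sec and registers a notifier per timing parameter, so every change is
# pushed straight into the SEC driver via secClass.setParam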
def InitSecParams():
config.sec = ConfigSubsection()
x = ConfigInteger(default=25, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_CONT_TONE_DISABLE_BEFORE_DISEQC, configElement.value))
config.sec.delay_after_continuous_tone_disable_before_diseqc = x
x = ConfigInteger(default=10, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_FINAL_CONT_TONE_CHANGE, configElement.value))
config.sec.delay_after_final_continuous_tone_change = x
x = ConfigInteger(default=10, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_FINAL_VOLTAGE_CHANGE, configElement.value))
config.sec.delay_after_final_voltage_change = x
x = ConfigInteger(default=120, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_BETWEEN_DISEQC_REPEATS, configElement.value))
config.sec.delay_between_diseqc_repeats = x
x = ConfigInteger(default=50, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_LAST_DISEQC_CMD, configElement.value))
config.sec.delay_after_last_diseqc_command = x
x = ConfigInteger(default=50, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_TONEBURST, configElement.value))
config.sec.delay_after_toneburst = x
x = ConfigInteger(default=20, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_VOLTAGE_CHANGE_BEFORE_SWITCH_CMDS, configElement.value))
config.sec.delay_after_change_voltage_before_switch_command = x
x = ConfigInteger(default=200, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_ENABLE_VOLTAGE_BEFORE_SWITCH_CMDS, configElement.value))
config.sec.delay_after_enable_voltage_before_switch_command = x
x = ConfigInteger(default=700, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_BETWEEN_SWITCH_AND_MOTOR_CMD, configElement.value))
config.sec.delay_between_switch_and_motor_command = x
x = ConfigInteger(default=500, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_VOLTAGE_CHANGE_BEFORE_MEASURE_IDLE_INPUTPOWER, configElement.value))
config.sec.delay_after_voltage_change_before_measure_idle_inputpower = x
x = ConfigInteger(default=900, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_ENABLE_VOLTAGE_BEFORE_MOTOR_CMD, configElement.value))
config.sec.delay_after_enable_voltage_before_motor_command = x
x = ConfigInteger(default=500, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_MOTOR_STOP_CMD, configElement.value))
config.sec.delay_after_motor_stop_command = x
x = ConfigInteger(default=500, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_VOLTAGE_CHANGE_BEFORE_MOTOR_CMD, configElement.value))
config.sec.delay_after_voltage_change_before_motor_command = x
x = ConfigInteger(default=70, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_BEFORE_SEQUENCE_REPEAT, configElement.value))
config.sec.delay_before_sequence_repeat = x
x = ConfigInteger(default=360, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.MOTOR_RUNNING_TIMEOUT, configElement.value))
config.sec.motor_running_timeout = x
x = ConfigInteger(default=1, limits = (0, 5))
x.addNotifier(lambda configElement: secClass.setParam(secClass.MOTOR_COMMAND_RETRIES, configElement.value))
config.sec.motor_command_retries = x
x = ConfigInteger(default=50, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_DISEQC_RESET_CMD, configElement.value))
config.sec.delay_after_diseqc_reset_cmd = x
x = ConfigInteger(default=150, limits = (0, 9999))
x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_DISEQC_PERIPHERIAL_POWERON_CMD, configElement.value))
config.sec.delay_after_diseqc_peripherial_poweron_cmd = x
# TODO add support for satpos depending nims to advanced nim configuration
# so a second/third/fourth cable from a motorized LNB can be used behind a
# DiSEqC 1.0 / DiSEqC 1.1 / toneburst switch
# the C(++) part should be able to handle this
# the configElement should only be visible when DiSEqC 1.2 is disabled
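# builds the per-slot configuration tree (config.Nims) for all detected tuners and wires up
# the notifiers that keep the frontend/SEC drivers in sync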
def InitNimManager(nimmgr):
hw = HardwareInfo()
addNimConfig = False
try:
config.Nims
except:
addNimConfig = True
if addNimConfig:
InitSecParams()
config.Nims = ConfigSubList()
for x in range(len(nimmgr.nim_slots)):
config.Nims.append(ConfigSubsection())
lnb_choices = {
"universal_lnb": _("Universal LNB"),
"unicable": _("Unicable"),
"c_band": _("C-Band"),
"user_defined": _("User defined")}
lnb_choices_default = "universal_lnb"
unicablelnbproducts = {}
unicablematrixproducts = {}
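# parse unicable.xml: for every manufacturer/product collect the SCR frequencies plus a
# trailing (positions, lofl, lofh, threshold) tuple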
doc = xml.etree.cElementTree.parse(eEnv.resolve("${datadir}/enigma2/unicable.xml"))
root = doc.getroot()
entry = root.find("lnb")
for manufacturer in entry.getchildren():
m={}
for product in manufacturer.getchildren():
scr=[]
lscr=("scr1","scr2","scr3","scr4","scr5","scr6","scr7","scr8")
for i in range(len(lscr)):
scr.append(product.get(lscr[i],"0"))
for i in range(len(lscr)):
if scr[len(lscr)-i-1] == "0":
scr.pop()
else:
break;
lof=[]
lof.append(int(product.get("positions",1)))
lof.append(int(product.get("lofl",9750)))
lof.append(int(product.get("lofh",10600)))
lof.append(int(product.get("threshold",11700)))
scr.append(tuple(lof))
m.update({product.get("name"):tuple(scr)})
unicablelnbproducts.update({manufacturer.get("name"):m})
entry = root.find("matrix")
for manufacturer in entry.getchildren():
m={}
for product in manufacturer.getchildren():
scr=[]
lscr=("scr1","scr2","scr3","scr4","scr5","scr6","scr7","scr8")
for i in range(len(lscr)):
scr.append(product.get(lscr[i],"0"))
for i in range(len(lscr)):
if scr[len(lscr)-i-1] == "0":
scr.pop()
else:
break;
lof=[]
lof.append(int(product.get("positions",1)))
lof.append(int(product.get("lofl",9750)))
lof.append(int(product.get("lofh",10600)))
lof.append(int(product.get("threshold",11700)))
scr.append(tuple(lof))
m.update({product.get("name"):tuple(scr)})
unicablematrixproducts.update({manufacturer.get("name"):m})
UnicableLnbManufacturers = unicablelnbproducts.keys()
UnicableLnbManufacturers.sort()
UnicableMatrixManufacturers = unicablematrixproducts.keys()
UnicableMatrixManufacturers.sort()
unicable_choices = {
"unicable_lnb": _("Unicable LNB"),
"unicable_matrix": _("Unicable Martix"),
"unicable_user": "Unicable "+_("User defined")}
unicable_choices_default = "unicable_lnb"
advanced_lnb_satcruser_choices = [ ("1", "SatCR 1"), ("2", "SatCR 2"), ("3", "SatCR 3"), ("4", "SatCR 4"),
("5", "SatCR 5"), ("6", "SatCR 6"), ("7", "SatCR 7"), ("8", "SatCR 8")]
prio_list = [ ("-1", _("Auto")) ]
prio_list += [(str(prio), str(prio)) for prio in range(65)+range(14000,14065)+range(19000,19065)]
advanced_lnb_csw_choices = [("none", _("None")), ("AA", _("AA")), ("AB", _("AB")), ("BA", _("BA")), ("BB", _("BB"))]
advanced_lnb_csw_choices += [(str(0xF0|y), "Input " + str(y+1)) for y in range(0, 16)]
advanced_lnb_ucsw_choices = [("0", _("None"))] + [(str(y), "Input " + str(y)) for y in range(1, 17)]
diseqc_mode_choices = [
("single", _("Single")), ("toneburst_a_b", _("Toneburst A/B")),
("diseqc_a_b", "DiSEqC A/B"), ("diseqc_a_b_c_d", "DiSEqC A/B/C/D"),
("positioner", _("Positioner"))]
positioner_mode_choices = [("usals", _("USALS")), ("manual", _("manual"))]
diseqc_satlist_choices = [(3600, _('automatic'), 1), (3601, _('nothing connected'), 1)] + nimmgr.satList
longitude_orientation_choices = [("east", _("East")), ("west", _("West"))]
latitude_orientation_choices = [("north", _("North")), ("south", _("South"))]
turning_speed_choices = [("fast", _("Fast")), ("slow", _("Slow")), ("fast epoch", _("Fast epoch"))]
advanced_satlist_choices = nimmgr.satList + [
(3601, _('All satellites')+' 1', 1), (3602, _('All satellites')+' 2', 1),
(3603, _('All satellites')+' 3', 1), (3604, _('All satellites')+' 4', 1)]
advanced_lnb_choices = [("0", "not available")] + [(str(y), "LNB " + str(y)) for y in range(1, 33)]
advanced_voltage_choices = [("polarization", _("Polarization")), ("13V", _("13 V")), ("18V", _("18 V"))]
advanced_tonemode_choices = [("band", _("Band")), ("on", _("On")), ("off", _("Off"))]
advanced_lnb_toneburst_choices = [("none", _("None")), ("A", _("A")), ("B", _("B"))]
advanced_lnb_allsat_diseqcmode_choices = [("1_2", _("1.2"))]
advanced_lnb_diseqcmode_choices = [("none", _("None")), ("1_0", _("1.0")), ("1_1", _("1.1")), ("1_2", _("1.2"))]
advanced_lnb_commandOrder1_0_choices = [("ct", "committed, toneburst"), ("tc", "toneburst, committed")]
advanced_lnb_commandOrder_choices = [
("ct", "committed, toneburst"), ("tc", "toneburst, committed"),
("cut", "committed, uncommitted, toneburst"), ("tcu", "toneburst, committed, uncommitted"),
("uct", "uncommitted, committed, toneburst"), ("tuc", "toneburst, uncommitted, commmitted")]
advanced_lnb_diseqc_repeat_choices = [("none", _("None")), ("one", _("One")), ("two", _("Two")), ("three", _("Three"))]
advanced_lnb_fast_turning_btime = mktime(datetime(1970, 1, 1, 7, 0).timetuple());
advanced_lnb_fast_turning_etime = mktime(datetime(1970, 1, 1, 19, 0).timetuple());
def configLOFChanged(configElement):
if configElement.value == "unicable":
x = configElement.slot_id
lnb = configElement.lnb_id
nim = config.Nims[x]
lnbs = nim.advanced.lnb
section = lnbs[lnb]
if isinstance(section.unicable, ConfigNothing):
if lnb == 1:
section.unicable = ConfigSelection(unicable_choices, unicable_choices_default)
elif lnb == 2:
section.unicable = ConfigSelection(choices = {"unicable_matrix": _("Unicable Martix"),"unicable_user": "Unicable "+_("User defined")}, default = "unicable_matrix")
else:
section.unicable = ConfigSelection(choices = {"unicable_user": _("User defined")}, default = "unicable_user")
def fillUnicableConf(sectionDict, unicableproducts, vco_null_check):
for y in unicableproducts:
products = unicableproducts[y].keys()
products.sort()
tmp = ConfigSubsection()
tmp.product = ConfigSelection(choices = products, default = products[0])
tmp.scr = ConfigSubDict()
tmp.vco = ConfigSubDict()
tmp.lofl = ConfigSubDict()
tmp.lofh = ConfigSubDict()
tmp.loft = ConfigSubDict()
tmp.positions = ConfigSubDict()
for z in products:
scrlist = []
vcolist = unicableproducts[y][z]
tmp.vco[z] = ConfigSubList()
for cnt in range(1,1+len(vcolist)-1):
vcofreq = int(vcolist[cnt-1])
if vcofreq == 0 and vco_null_check:
scrlist.append(("%d" %cnt,"SCR %d " %cnt +_("not used")))
else:
scrlist.append(("%d" %cnt,"SCR %d" %cnt))
tmp.vco[z].append(ConfigInteger(default=vcofreq, limits = (vcofreq, vcofreq)))
tmp.scr[z] = ConfigSelection(choices = scrlist, default = scrlist[0][0])
positions = int(vcolist[len(vcolist)-1][0])
tmp.positions[z] = ConfigSubList()
tmp.positions[z].append(ConfigInteger(default=positions, limits = (positions, positions)))
lofl = vcolist[len(vcolist)-1][1]
tmp.lofl[z] = ConfigSubList()
tmp.lofl[z].append(ConfigInteger(default=lofl, limits = (lofl, lofl)))
lofh = int(vcolist[len(vcolist)-1][2])
tmp.lofh[z] = ConfigSubList()
tmp.lofh[z].append(ConfigInteger(default=lofh, limits = (lofh, lofh)))
loft = int(vcolist[len(vcolist)-1][3])
tmp.loft[z] = ConfigSubList()
tmp.loft[z].append(ConfigInteger(default=loft, limits = (loft, loft)))
sectionDict[y] = tmp
if lnb < 3:
print "MATRIX"
section.unicableMatrix = ConfigSubDict()
section.unicableMatrixManufacturer = ConfigSelection(UnicableMatrixManufacturers, UnicableMatrixManufacturers[0])
fillUnicableConf(section.unicableMatrix, unicablematrixproducts, True)
if lnb < 2:
print "LNB"
section.unicableLnb = ConfigSubDict()
section.unicableLnbManufacturer = ConfigSelection(UnicableLnbManufacturers, UnicableLnbManufacturers[0])
fillUnicableConf(section.unicableLnb, unicablelnbproducts, False)
#TODO satpositions for satcruser
section.satcruser = ConfigSelection(advanced_lnb_satcruser_choices, default="1")
tmp = ConfigSubList()
tmp.append(ConfigInteger(default=1284, limits = (950, 2150)))
tmp.append(ConfigInteger(default=1400, limits = (950, 2150)))
tmp.append(ConfigInteger(default=1516, limits = (950, 2150)))
tmp.append(ConfigInteger(default=1632, limits = (950, 2150)))
tmp.append(ConfigInteger(default=1748, limits = (950, 2150)))
tmp.append(ConfigInteger(default=1864, limits = (950, 2150)))
tmp.append(ConfigInteger(default=1980, limits = (950, 2150)))
tmp.append(ConfigInteger(default=2096, limits = (950, 2150)))
section.satcrvcouser = tmp
nim.advanced.unicableconnected = ConfigYesNo(default=False)
nim.advanced.unicableconnectedTo = ConfigSelection([(str(id), nimmgr.getNimDescription(id)) for id in nimmgr.getNimListOfType("DVB-S") if id != x])
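# adds the rotor options (site longitude/latitude, tuning step, turning speed, power
# measurement) the first time a section is switched to DiSEqC 1.2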
def configDiSEqCModeChanged(configElement):
section = configElement.section
if configElement.value == "1_2" and isinstance(section.longitude, ConfigNothing):
#section.longitude = ConfigFloat(default = [5,100], limits = [(0,359),(0,999)])
section.longitude = ConfigFloat(default = [0,000], limits = [(0,359),(0,999)]) # [iq]
section.longitudeOrientation = ConfigSelection(longitude_orientation_choices, "east")
#section.latitude = ConfigFloat(default = [50,767], limits = [(0,359),(0,999)])
section.latitude = ConfigFloat(default = [51,500], limits = [(0,359),(0,999)]) # [iq]
section.latitudeOrientation = ConfigSelection(latitude_orientation_choices, "north")
section.tuningstepsize = ConfigFloat(default = [0,360], limits = [(0,9),(0,999)])
section.turningspeedH = ConfigFloat(default = [2,3], limits = [(0,9),(0,9)])
section.turningspeedV = ConfigFloat(default = [1,7], limits = [(0,9),(0,9)])
section.powerMeasurement = ConfigYesNo(default=True)
section.powerThreshold = ConfigInteger(default=hw.get_device_name() == "dm7025" and 50 or 15, limits=(0, 100))
section.turningSpeed = ConfigSelection(turning_speed_choices, "fast")
section.fastTurningBegin = ConfigDateTime(default=advanced_lnb_fast_turning_btime, formatstring = _("%H:%M"), increment = 600)
section.fastTurningEnd = ConfigDateTime(default=advanced_lnb_fast_turning_etime, formatstring = _("%H:%M"), increment = 600)
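# creates the per-LNB subsection (LOF values, DiSEqC command options, unicable settings)
# the first time an LNB number is selected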
def configLNBChanged(configElement):
x = configElement.slot_id
nim = config.Nims[x]
if isinstance(configElement.value, tuple):
lnb = int(configElement.value[0])
else:
lnb = int(configElement.value)
lnbs = nim.advanced.lnb
if lnb and lnb not in lnbs:
section = lnbs[lnb] = ConfigSubsection()
section.lofl = ConfigInteger(default=9750, limits = (0, 99999))
section.lofh = ConfigInteger(default=10600, limits = (0, 99999))
section.threshold = ConfigInteger(default=11700, limits = (0, 99999))
# section.output_12v = ConfigSelection(choices = [("0V", _("0 V")), ("12V", _("12 V"))], default="0V")
section.increased_voltage = ConfigYesNo(False)
section.toneburst = ConfigSelection(advanced_lnb_toneburst_choices, "none")
section.longitude = ConfigNothing()
if lnb > 32:
tmp = ConfigSelection(advanced_lnb_allsat_diseqcmode_choices, "1_2")
tmp.section = section
configDiSEqCModeChanged(tmp)
else:
tmp = ConfigSelection(advanced_lnb_diseqcmode_choices, "none")
tmp.section = section
tmp.addNotifier(configDiSEqCModeChanged)
section.diseqcMode = tmp
section.commitedDiseqcCommand = ConfigSelection(advanced_lnb_csw_choices)
section.fastDiseqc = ConfigYesNo(False)
section.sequenceRepeat = ConfigYesNo(False)
section.commandOrder1_0 = ConfigSelection(advanced_lnb_commandOrder1_0_choices, "ct")
section.commandOrder = ConfigSelection(advanced_lnb_commandOrder_choices, "ct")
section.uncommittedDiseqcCommand = ConfigSelection(advanced_lnb_ucsw_choices)
section.diseqcRepeats = ConfigSelection(advanced_lnb_diseqc_repeat_choices, "none")
section.prio = ConfigSelection(prio_list, "-1")
section.unicable = ConfigNothing()
tmp = ConfigSelection(lnb_choices, lnb_choices_default)
tmp.slot_id = x
tmp.lnb_id = lnb
tmp.addNotifier(configLOFChanged, initial_call = False)
section.lof = tmp
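# lazily builds the "advanced" configuration tree (per-satellite and per-LNB settings)
# when a DVB-S slot is switched to advanced mode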
def configModeChanged(configMode):
slot_id = configMode.slot_id
nim = config.Nims[slot_id]
if configMode.value == "advanced" and isinstance(nim.advanced, ConfigNothing):
# advanced config:
nim.advanced = ConfigSubsection()
nim.advanced.sat = ConfigSubDict()
nim.advanced.sats = getConfigSatlist(192, advanced_satlist_choices)
nim.advanced.lnb = ConfigSubDict()
nim.advanced.lnb[0] = ConfigNothing()
for x in nimmgr.satList:
tmp = ConfigSubsection()
tmp.voltage = ConfigSelection(advanced_voltage_choices, "polarization")
tmp.tonemode = ConfigSelection(advanced_tonemode_choices, "band")
tmp.usals = ConfigYesNo(True)
tmp.rotorposition = ConfigInteger(default=1, limits=(1, 255))
lnb = ConfigSelection(advanced_lnb_choices, "0")
lnb.slot_id = slot_id
lnb.addNotifier(configLNBChanged, initial_call = False)
tmp.lnb = lnb
nim.advanced.sat[x[0]] = tmp
for x in range(3601, 3605):
tmp = ConfigSubsection()
tmp.voltage = ConfigSelection(advanced_voltage_choices, "polarization")
tmp.tonemode = ConfigSelection(advanced_tonemode_choices, "band")
tmp.usals = ConfigYesNo(default=True)
tmp.rotorposition = ConfigInteger(default=1, limits=(1, 255))
lnbnum = 33+x-3601
lnb = ConfigSelection([("0", "not available"), (str(lnbnum), "LNB %d"%(lnbnum))], "0")
lnb.slot_id = slot_id
lnb.addNotifier(configLNBChanged, initial_call = False)
tmp.lnb = lnb
nim.advanced.sat[x] = tmp
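# writes the selected tone amplitude to the frontend's proc entry (only done for the Alps BSBE2)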
def toneAmplitudeChanged(configElement):
fe_id = configElement.fe_id
slot_id = configElement.slot_id
if nimmgr.nim_slots[slot_id].description == 'Alps BSBE2':
open("/proc/stb/frontend/%d/tone_amplitude" %(fe_id), "w").write(configElement.value)
def createSatConfig(nim, x, empty_slots):
try:
nim.toneAmplitude
except:
nim.toneAmplitude = ConfigSelection([("11", "340mV"), ("10", "360mV"), ("9", "600mV"), ("8", "700mV"), ("7", "800mV"), ("6", "900mV"), ("5", "1100mV")], "7")
nim.toneAmplitude.fe_id = x - empty_slots
nim.toneAmplitude.slot_id = x
nim.toneAmplitude.addNotifier(toneAmplitudeChanged)
nim.diseqc13V = ConfigYesNo(False)
nim.diseqcMode = ConfigSelection(diseqc_mode_choices, "diseqc_a_b")
nim.connectedTo = ConfigSelection([(str(id), nimmgr.getNimDescription(id)) for id in nimmgr.getNimListOfType("DVB-S") if id != x])
nim.simpleSingleSendDiSEqC = ConfigYesNo(False)
nim.simpleDiSEqCSetVoltageTone = ConfigYesNo(True)
nim.simpleDiSEqCOnlyOnSatChange = ConfigYesNo(False)
nim.diseqcA = ConfigSatlist(list = diseqc_satlist_choices)
nim.diseqcB = ConfigSatlist(list = diseqc_satlist_choices)
nim.diseqcC = ConfigSatlist(list = diseqc_satlist_choices)
nim.diseqcD = ConfigSatlist(list = diseqc_satlist_choices)
nim.positionerMode = ConfigSelection(positioner_mode_choices, "usals")
#nim.longitude = ConfigFloat(default=[5,100], limits=[(0,359),(0,999)])
nim.longitude = ConfigFloat(default=[0,000], limits=[(0,359),(0,999)]) # [iq]
nim.longitudeOrientation = ConfigSelection(longitude_orientation_choices, "east")
#nim.latitude = ConfigFloat(default=[50,767], limits=[(0,359),(0,999)])
nim.latitude = ConfigFloat(default=[51,500], limits=[(0,359),(0,999)]) # [iq]
nim.latitudeOrientation = ConfigSelection(latitude_orientation_choices, "north")
nim.tuningstepsize = ConfigFloat(default = [0,360], limits = [(0,9),(0,999)])
nim.turningspeedH = ConfigFloat(default = [2,3], limits = [(0,9),(0,9)])
nim.turningspeedV = ConfigFloat(default = [1,7], limits = [(0,9),(0,9)])
nim.powerMeasurement = ConfigYesNo(True)
nim.powerThreshold = ConfigInteger(default=hw.get_device_name() == "dm8000" and 15 or 50, limits=(0, 100))
nim.turningSpeed = ConfigSelection(turning_speed_choices, "fast")
btime = datetime(1970, 1, 1, 7, 0);
nim.fastTurningBegin = ConfigDateTime(default = mktime(btime.timetuple()), formatstring = _("%H:%M"), increment = 900)
etime = datetime(1970, 1, 1, 19, 0);
nim.fastTurningEnd = ConfigDateTime(default = mktime(etime.timetuple()), formatstring = _("%H:%M"), increment = 900)
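# fills in the DVB-C scan options (provider list, frequency bands/steps, modulations,
# symbol rates) for one slot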
def createCableConfig(nim, x):
try:
nim.cable
except:
list = [ ]
n = 0
for x in nimmgr.cablesList:
list.append((str(n), x[0]))
n += 1
nim.cable = ConfigSubsection()
nim.cable.scan_networkid = ConfigInteger(default = 0, limits = (0, 9999))
possible_scan_types = [("bands", _("Frequency bands")), ("steps", _("Frequency steps"))]
if n:
possible_scan_types.append(("provider", _("Provider")))
nim.cable.scan_provider = ConfigSelection(default = "0", choices = list)
nim.cable.scan_type = ConfigSelection(default = "bands", choices = possible_scan_types)
nim.cable.scan_band_EU_VHF_I = ConfigYesNo(default = True)
nim.cable.scan_band_EU_MID = ConfigYesNo(default = True)
nim.cable.scan_band_EU_VHF_III = ConfigYesNo(default = True)
nim.cable.scan_band_EU_UHF_IV = ConfigYesNo(default = True)
nim.cable.scan_band_EU_UHF_V = ConfigYesNo(default = True)
nim.cable.scan_band_EU_SUPER = ConfigYesNo(default = True)
nim.cable.scan_band_EU_HYPER = ConfigYesNo(default = True)
nim.cable.scan_band_US_LOW = ConfigYesNo(default = False)
nim.cable.scan_band_US_MID = ConfigYesNo(default = False)
nim.cable.scan_band_US_HIGH = ConfigYesNo(default = False)
nim.cable.scan_band_US_SUPER = ConfigYesNo(default = False)
nim.cable.scan_band_US_HYPER = ConfigYesNo(default = False)
nim.cable.scan_frequency_steps = ConfigInteger(default = 1000, limits = (1000, 10000))
nim.cable.scan_mod_qam16 = ConfigYesNo(default = False)
nim.cable.scan_mod_qam32 = ConfigYesNo(default = False)
nim.cable.scan_mod_qam64 = ConfigYesNo(default = True)
nim.cable.scan_mod_qam128 = ConfigYesNo(default = False)
nim.cable.scan_mod_qam256 = ConfigYesNo(default = True)
nim.cable.scan_sr_6900 = ConfigYesNo(default = True)
nim.cable.scan_sr_6875 = ConfigYesNo(default = True)
nim.cable.scan_sr_ext1 = ConfigInteger(default = 0, limits = (0, 7230))
nim.cable.scan_sr_ext2 = ConfigInteger(default = 0, limits = (0, 7230))
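# fills in the DVB-T options (region list and 5V antenna supply) for one slot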
def createTerrestrialConfig(nim, x):
try:
nim.terrestrial
except:
list = []
n = 0
for x in nimmgr.terrestrialsList:
list.append((str(n), x[0]))
n += 1
nim.terrestrial = ConfigSelection(choices = list)
nim.terrestrial_5V = ConfigOnOff()
empty_slots = 0
for slot in nimmgr.nim_slots:
x = slot.slot
nim = config.Nims[x]
if slot.isCompatible("DVB-S"):
createSatConfig(nim, x, empty_slots)
config_mode_choices = [("nothing", _("nothing connected")),
("simple", _("simple")), ("advanced", _("advanced"))]
if len(nimmgr.getNimListOfType(slot.type, exception = x)) > 0:
config_mode_choices.append(("equal", _("equal to")))
config_mode_choices.append(("satposdepends", _("second cable of motorized LNB")))
if len(nimmgr.canConnectTo(x)) > 0:
config_mode_choices.append(("loopthrough", _("loopthrough to")))
nim.advanced = ConfigNothing()
tmp = ConfigSelection(config_mode_choices, "simple")
tmp.slot_id = x
tmp.addNotifier(configModeChanged, initial_call = False)
nim.configMode = tmp
elif slot.isCompatible("DVB-C"):
nim.configMode = ConfigSelection(
choices = {
"enabled": _("enabled"),
"nothing": _("nothing connected"),
},
default = "enabled")
createCableConfig(nim, x)
elif slot.isCompatible("DVB-T"):
nim.configMode = ConfigSelection(
choices = {
"enabled": _("enabled"),
"nothing": _("nothing connected"),
},
default = "enabled")
createTerrestrialConfig(nim, x)
else:
empty_slots += 1
nim.configMode = ConfigSelection(choices = { "nothing": _("disabled") }, default="nothing");
if slot.type is not None:
print "pls add support for this frontend type!", slot.type
# assert False
nimmgr.sec = SecConfigure(nimmgr)
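# for multi-type tuners: switches the kernel frontend mode via /proc/stb/frontend/<n>/mode,
# temporarily disabling the dvb_core shutdown timeout and closing/reopening the frontend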
def tunerTypeChanged(nimmgr, configElement):
fe_id = configElement.fe_id
eDVBResourceManager.getInstance().setFrontendType(nimmgr.nim_slots[fe_id].frontend_id, nimmgr.nim_slots[fe_id].getType())
import os
if os.path.exists("/proc/stb/frontend/%d/mode" % fe_id):
cur_type = int(open("/proc/stb/frontend/%d/mode" % (fe_id), "r").read())
if cur_type != int(configElement.value):
print "tunerTypeChanged feid %d from %d to mode %d" % (fe_id, cur_type, int(configElement.value))
try:
oldvalue = open("/sys/module/dvb_core/parameters/dvb_shutdown_timeout", "r").readline()
open("/sys/module/dvb_core/parameters/dvb_shutdown_timeout", "w").write("0")
except:
print "[info] no /sys/module/dvb_core/parameters/dvb_shutdown_timeout available"
frontend = eDVBResourceManager.getInstance().allocateRawChannel(fe_id).getFrontend()
frontend.closeFrontend()
open("/proc/stb/frontend/%d/mode" % (fe_id), "w").write(configElement.value)
frontend.reopenFrontend()
try:
open("/sys/module/dvb_core/parameters/dvb_shutdown_timeout", "w").write(oldvalue)
except:
print "[info] no /sys/module/dvb_core/parameters/dvb_shutdown_timeout available"
nimmgr.enumerateNIMs()
else:
print "tuner type is already already %d" %cur_type
empty_slots = 0
for slot in nimmgr.nim_slots:
x = slot.slot
nim = config.Nims[x]
addMultiType = False
try:
nim.multiType
except:
addMultiType = True
if slot.isMultiType() and addMultiType:
typeList = []
for id in slot.getMultiTypeList().keys():
type = slot.getMultiTypeList()[id]
typeList.append((id, type))
nim.multiType = ConfigSelection(typeList, "0")
nim.multiType.fe_id = x - empty_slots
nim.multiType.addNotifier(boundFunction(tunerTypeChanged, nimmgr))
empty_slots = 0
for slot in nimmgr.nim_slots:
x = slot.slot
nim = config.Nims[x]
empty = True
if slot.canBeCompatible("DVB-S"):
createSatConfig(nim, x, empty_slots)
empty = False
if slot.canBeCompatible("DVB-C"):
createCableConfig(nim, x)
empty = False
if slot.canBeCompatible("DVB-T"):
createTerrestrialConfig(nim, x)
empty = False
if empty:
empty_slots += 1
nimmanager = NimManager()<|fim▁end|>
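# module-level instance; other Enigma2 components are expected to import this rather than
# construct their own. Minimal usage sketch (assuming the usual import path Components.NimManager):
#   from Components.NimManager import nimmanager
#   for slot in nimmanager.nim_slots:
#       print slot.slot, nimmanager.getNimDescription(slot.slot), nimmanager.somethingConnected(slot.slot)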
|