prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>projectstatus_test.go<|end_file_name|><|fim▁begin|>package describe
import (
"strings"
"testing"
"time"
kapi "github.com/GoogleCloudPlatform/kubernetes/pkg/api"
"github.com/GoogleCloudPlatform/kubernetes/pkg/api/errors"
ktestclient "github.com/GoogleCloudPlatform/kubernetes/pkg/client/testclient"
"github.com/GoogleCloudPlatform/kubernetes/pkg/runtime"
"github.com/openshift/origin/pkg/client/testclient"
projectapi "github.com/openshift/origin/pkg/project/api"
)
func mustParseTime(t string) time.Time {
out, err := time.Parse(time.RFC3339, t)
if err != nil {
panic(err)
}
return out
}
func TestProjectStatus(t *testing.T) {
testCases := map[string]struct {
Path string
Extra []runtime.Object
ErrFn func(error) bool
Contains []string
Time time.Time
}{
"missing project": {
ErrFn: func(err error) bool { return errors.IsNotFound(err) },
},
"empty project with display name": {
Extra: []runtime.Object{
&projectapi.Project{
ObjectMeta: kapi.ObjectMeta{
Name: "example",
Namespace: "",
Annotations: map[string]string{
"displayName": "Test",
},
},
},
},
ErrFn: func(err error) bool { return err == nil },
Contains: []string{
"In project Test (example)\n",
"You have no Services, DeploymentConfigs, or BuildConfigs.",
},
},
"empty service": {
Path: "../../../../test/fixtures/app-scenarios/k8s-service-with-nothing.json",
Extra: []runtime.Object{
&projectapi.Project{
ObjectMeta: kapi.ObjectMeta{Name: "example", Namespace: ""},
},
},
ErrFn: func(err error) bool { return err == nil },
Contains: []string{
"In project example\n",
"service empty-service",
"(<initializing>:5432", "To see more information",
},
},
"unstarted build": {
Path: "../../../../test/fixtures/app-scenarios/new-project-no-build.yaml",
Extra: []runtime.Object{
&projectapi.Project{
ObjectMeta: kapi.ObjectMeta{Name: "example", Namespace: ""},
},
},
ErrFn: func(err error) bool { return err == nil },
Contains: []string{
"In project example\n",
"service sinatra-example-2 (172.30.17.48:8080)",
"builds git://github.com",
"with docker.io/openshift/ruby-20-centos7:latest",
"not built yet",
"#1 deployment waiting on image or update",
"To see more information",
},
},
"running build": {
Path: "../../../../test/fixtures/app-scenarios/new-project-one-build.yaml",
Extra: []runtime.Object{
&projectapi.Project{
ObjectMeta: kapi.ObjectMeta{Name: "example", Namespace: ""},
},
},
ErrFn: func(err error) bool { return err == nil },
Contains: []string{
"In project example\n",
"service sinatra-example-1 (172.30.17.47:8080)",
"builds git://github.com",
"with docker.io/openshift/ruby-20-centos7:latest",
"build 1 running for about a minute",
"#1 deployment waiting on image or update",
"To see more information",
},
Time: mustParseTime("2015-04-06T21:20:03Z"),
},
"a/b test DeploymentConfig": {
Path: "../../../../test/fixtures/app-scenarios/new-project-two-deployment-configs.yaml",
Extra: []runtime.Object{<|fim▁hole|> ObjectMeta: kapi.ObjectMeta{Name: "example", Namespace: ""},
},
},
ErrFn: func(err error) bool { return err == nil },
Contains: []string{
"In project example\n",
"service sinatra-app-example (172.30.17.49:8080)",
"sinatra-app-example-a deploys",
"sinatra-app-example-b deploys",
"with docker.io/openshift/ruby-20-centos7:latest",
"build 1 running for about a minute",
"- 7a4f354: Prepare v1beta3 Template types (Roy Programmer <[email protected]>)",
"To see more information",
},
Time: mustParseTime("2015-04-06T21:20:03Z"),
},
"with real deployments": {
Path: "../../../../test/fixtures/app-scenarios/new-project-deployed-app.yaml",
Extra: []runtime.Object{
&projectapi.Project{
ObjectMeta: kapi.ObjectMeta{Name: "example", Namespace: ""},
},
},
ErrFn: func(err error) bool { return err == nil },
Contains: []string{
"In project example\n",
"service database (172.30.17.240:5434 -> 3306)",
"service frontend (172.30.17.154:5432 -> 8080)",
"database deploys",
"frontend deploys",
"with docker.io/openshift/ruby-20-centos7:latest",
"#2 deployment failed less than a second ago: unable to contact server - 0/1 pods",
"#2 deployment running for 7 seconds - 2/1 pods",
"#1 deployed 8 seconds ago",
"#1 deployed less than a second ago",
"To see more information",
},
Time: mustParseTime("2015-04-07T04:12:25Z"),
},
}
oldTimeFn := timeNowFn
defer func() { timeNowFn = oldTimeFn }()
for k, test := range testCases {
timeNowFn = func() time.Time {
if !test.Time.IsZero() {
return test.Time
}
return time.Now()
}
o := ktestclient.NewObjects(kapi.Scheme, kapi.Scheme)
if len(test.Path) > 0 {
if err := ktestclient.AddObjectsFromPath(test.Path, o, kapi.Scheme); err != nil {
t.Fatal(err)
}
}
for _, obj := range test.Extra {
o.Add(obj)
}
osc, kc := testclient.NewFixtureClients(o)
d := ProjectStatusDescriber{C: osc, K: kc}
out, err := d.Describe("example", "")
if !test.ErrFn(err) {
t.Errorf("%s: unexpected error: %v", k, err)
}
if err != nil {
continue
}
for _, s := range test.Contains {
if !strings.Contains(out, s) {
t.Errorf("%s: did not have %q:\n%s\n---", k, s, out)
}
}
t.Logf("\n%s", out)
}
}<|fim▁end|>
|
&projectapi.Project{
|
<|file_name|>mpEvidenceQry.py<|end_file_name|><|fim▁begin|># Copyright 2016-2017 University of Pittsburgh
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http:www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys, uuid, datetime
from sets import Set
from model.micropublication import Annotation, DataMaterialRow, DMItem, DataRatioItem, MaterialDoseItem, MaterialParticipants, MaterialPhenotypeItem, DataReviewer, DataDips
######################### QUERY MP Annotation ##########################
# query all mp annotations
# return annotations with claim, data and material
def queryAllMpAnnotation(conn):
mpAnnotations = []
claimAnns = queryAllMpClaim(conn)
<|fim▁hole|>
mpAnnotations.append(claimDataMatAnno)
return mpAnnotations
# query all mp annotations
# return annotations with claim, data and material
def queryMpAnnotationByUrn(conn, annotationUrn):
claimAnn = queryMpClaimByUrn(conn, annotationUrn)
claimDataAnn = queryMpData(conn, claimAnn, claimAnn.claimid)
claimDataMatAnn = queryMpMaterial(conn, claimDataAnn, claimAnn.claimid)
return claimDataMatAnn
######################### QUERY MP Claim ##########################
## query all claim annotation by document URL
## return {{key: id-1, value: Ann-1"}, {key: id-2, value: Ann-2"}, ...}
def queryAllMpClaim(conn):
annotations = {} # key: id, value obj Annotation
cur = conn.cursor()
qry = """
select cann.id, t.has_source, cann.creator, cann.date_created, s.exact, s.prefix, s.suffix, cbody.label, qualifierrole(q.subject, q.predicate, q.object) as qtype, qvalue, cann.rejected_statement, cann.rejected_statement_reason, cann.rejected_statement_comment, met.entered_value, cann.negation, q.enantiomer, q.metabolite
from mp_claim_annotation cann join oa_claim_body cbody on cann.has_body = cbody.id
join qualifier q on cbody.id = q.claim_body_id
join method met on cann.id = met.mp_claim_id
join oa_target t on cann.has_target = t.id
join oa_selector s on t.has_selector = s.id;
"""
cur.execute(qry)
for row in cur.fetchall():
id = row[0]
if id not in annotations: ## Using existing annotation if it's available
annotation = Annotation()
annotations[id] = annotation
else:
annotation = annotations[id]
drugPC = "" ## define parent compound string
if row[15] and not row[16]:
drugPC = "enantiomer|"
elif row[16] and not row[15]:
drugPC = "|metabolite"
elif row[15] and row[16]:
drugPC = "enantiomer|metabolite"
## claim qualifiers
if row[8] == "subject":
annotation.csubject = row[9]
annotation.setSubjectPC(drugPC) # parent compound for subject
elif row[8] == "predicate":
annotation.cpredicate = row[9]
elif row[8] == "object":
annotation.cobject = row[9]
annotation.setObjectPC(drugPC) # parent compound for object
elif row[8] == "qualifer":
annotation.qualifier = row[9]
annotation.setQualifierPC(drugPC) # parent compound for qualifier
else:
print "[ERROR] qualifier role unidentified qvalue: %s (claimid %s)" % (row[8], id)
## claim source and label
if annotation.source == None:
annotation.source = row[1]
if annotation.label == None:
annotation.label = row[7]
## claim text selector
if annotation.exact == None:
annotation.setOaSelector(row[5], row[4], row[6])
## user entered method
if annotation.method == None:
annotation.method = row[13]
## rejected reason
if annotation.rejected == None and row[10] == True:
annotation.rejected = row[11] + "|" + row[12]
## assertion negation
if annotation.negation == None and row[14] != None:
annotation.negation = row[14]
return annotations
def queryMpClaimByUrn(conn, urn):
"""
query claim annotation by annotationId
return Annotation
"""
cur = conn.cursor()
qry = """
select cann.id, t.has_source, cann.creator, cann.date_created, s.exact, s.prefix, s.suffix, cbody.label, qualifierrole(q.subject, q.predicate, q.object) as qtype, qvalue, cann.rejected_statement, cann.rejected_statement_reason, cann.rejected_statement_comment, met.entered_value, cann.negation, q.enantiomer, q.metabolite
from mp_claim_annotation cann join oa_claim_body cbody on cann.has_body = cbody.id
join qualifier q on cbody.id = q.claim_body_id
join method met on cann.id = met.mp_claim_id
join oa_target t on cann.has_target = t.id
join oa_selector s on t.has_selector = s.id
where cann.urn = '%s'; """ % (urn)
cur.execute(qry)
annotation = Annotation()
for row in cur.fetchall():
annotation.claimid = row[0]
annotation.urn = urn
drugPC = "" ## define parent compound string
if row[15] and not row[16]:
drugPC = "enantiomer|"
elif row[16] and not row[15]:
drugPC = "|metabolite"
elif row[15] and row[16]:
drugPC = "enantiomer|metabolite"
## claim qualifiers
if row[8] == "subject":
annotation.csubject = row[9]
annotation.setSubjectPC(drugPC) # parent compound for subject
elif row[8] == "predicate":
annotation.cpredicate = row[9]
elif row[8] == "object":
annotation.cobject = row[9]
annotation.setObjectPC(drugPC) # parent compound for object
elif row[8] == "qualifer":
annotation.qualifier = row[9]
annotation.setQualifierPC(drugPC) # parent compound for qualifier
else:
print "[ERROR] qualifier role unidentified qvalue: %s (claimid %s)" % (row[8], annotation.claimid)
## claim source and label
if annotation.source == None:
annotation.source = row[1]
if annotation.label == None:
annotation.label = row[7]
## claim text selector
if annotation.exact == None:
annotation.setOaSelector(row[5], row[4], row[6])
## rejected reason
if annotation.rejected == None and row[10] == True:
annotation.rejected = row[11] + "|" + row[12]
## user entered method
if annotation.method == None:
annotation.method = row[13]
## assertion negation
if annotation.negation == None and row[14] != None:
annotation.negation = row[14]
return annotation
######################### QUERY MP Data ##########################
# query data items for claim annotation
# return list of annotation with data items attached
def queryMpData(conn, annotation, claimid):
qry = """
select dann.type, df.data_field_type, df.value_as_string, df.value_as_number, s.exact, s.prefix, s.suffix, dann.mp_data_index, dann.ev_supports, dann.rejected, dann.rejected_reason, dann.rejected_comment, met.entered_value, met.inferred_value, eq.question, eq.value_as_string
from mp_data_annotation dann
join oa_data_body dbody on dann.has_body = dbody.id
join data_field df on df.data_body_id = dbody.id
left join oa_target t on dann.has_target = t.id
left join oa_selector s on t.has_selector = s.id
join method met on dann.mp_claim_id = met.mp_claim_id and met.mp_data_index = dann.mp_data_index
left join evidence_question eq on met.id = eq.method_id
where dann.mp_claim_id = %s
""" % (claimid)
cur = conn.cursor()
cur.execute(qry)
for row in cur.fetchall():
dType = row[0] # data type
dfType = row[1] # data field
exact = row[4]; value = str(row[2] or row[3]) # value as string or number
index = row[7] # data index
evRelationship = row[8] # EV supports or refutes
dmRow = None
if annotation.getSpecificDataMaterial(index) == None:
dmRow = DataMaterialRow() # create new row of data & material
annotation.setSpecificDataMaterial(dmRow, index)
else: # current row of data & material exists
dmRow = annotation.getSpecificDataMaterial(index)
if dType in ["auc", "cmax" , "clearance", "halflife"]:
if dmRow.getDataRatioItemInRow(dType): # DataRatioItem exists
dataRatioItem = dmRow.getDataRatioItemInRow(dType)
else: # create new dataRatioItem
dataRatioItem = DataRatioItem(dType)
dataRatioItem.setSelector("", exact, "")
dataRatioItem.setAttribute(dfType, value) # add value
dmRow.setDataRatioItem(dataRatioItem)
if dType == "reviewer":
if dmRow.getDataReviewer(): # DataReviewer exists
dataReviewer = dmRow.getDataReviewer()
else:
dataReviewer = DataReviewer()
dataReviewer.setAttribute(dfType, value)
dmRow.setDataReviewer(dataReviewer)
if dType == "dipsquestion": # DataDips exists
if dmRow.getDataDips():
dips = dmRow.getDataDips()
else:
dips = DataDips()
dips.setQuestion(dfType, value)
dmRow.setDataDips(dips)
if not dmRow.getEvRelationship(): # add evidence relationship to dmRow
if evRelationship is True:
dmRow.setEvRelationship("supports")
elif evRelationship is False:
dmRow.setEvRelationship("refutes")
evqs = row[14]; evqsVal = row[15] # add evidence type questions
if evqs and evqsVal:
if evqs == "grouprandom" and not dmRow.getGroupRandom():
dmRow.setGroupRandom(evqsVal)
elif evqs == "parallelgroup" and not dmRow.getParallelGroup():
dmRow.setParallelGroup(evqsVal)
return annotation
######################### QUERY MP Material ##########################
# query material items for claim annotation
# return list of MaterialItems
def queryMpMaterial(conn, annotation, claimid):
qry = """
select mann.type, mf.material_field_type, mf.value_as_string, mf.value_as_number, s.exact, s.prefix, s.suffix, mann.mp_data_index, mann.ev_supports
from mp_material_annotation mann join oa_material_body mbody on mann.has_body = mbody.id
join material_field mf on mf.material_body_id = mbody.id
left join oa_target t on mann.has_target = t.id
left join oa_selector s on t.has_selector = s.id
where mann.mp_claim_id = %s
""" % (claimid)
results = []
cur = conn.cursor()
cur.execute(qry)
for row in cur.fetchall():
mType = row[0] # material type
mfType = row[1] # material field
exact = row[4]; value = str(row[2] or row[3]) # value as string or number
index = row[7] # data & material index
evRelationship = row[8] # EV supports or refutes
if annotation.getSpecificDataMaterial(index) == None:
dmRow = DataMaterialRow() # create new row of data & material
if evRelationship:
dmRow.setEvRelationship("supports")
else:
dmRow.setEvRelationship("refutes")
if mType in ["object_dose","subject_dose"]: # dose
doseItem = MaterialDoseItem(mType)
doseItem.setAttribute(mfType, value)
doseItem.setSelector("", exact, "")
dmRow.setMaterialDoseItem(doseItem)
elif mType == "participants":
partItem = MaterialParticipants(value)
partItem.setSelector("", exact, "")
dmRow.setParticipants(partItem)
elif mType == "phenotype":
phenoItem = MaterialPhenotypeItem()
phenoItem.setAttribute(mfType, value)
dmRow.setPhenotype(phenoItem)
annotation.setSpecificDataMaterial(dmRow, index)
else: # current row of material & material exists
dmRow = annotation.getSpecificDataMaterial(index)
if dmRow.getEvRelationship() == None and evRelationship is True:
dmRow.setEvRelationship("supports")
elif dmRow.getEvRelationship() == None and evRelationship is False:
dmRow.setEvRelationship("refutes")
if mType in ["object_dose","subject_dose"]:
if dmRow.getMaterialDoseInRow(mType): # current MaterialItem exists
doseItem = dmRow.getMaterialDoseInRow(mType)
else:
doseItem = MaterialDoseItem(mType)
doseItem.setAttribute(mfType, value)
doseItem.setSelector("", exact, "")
dmRow.setMaterialDoseItem(doseItem)
elif mType == "participants":
if dmRow.getParticipantsInRow(): # participants exists
partItem = dmRow.getParticipantsInRow()
partItem.setValue(value)
else:
partItem = MaterialParticipants(value)
dmRow.setParticipants(partItem)
partItem.setSelector("", exact, "")
elif mType == "phenotype":
if dmRow.getPhenotype():
phenoItem = dmRow.getPhenotype()
else:
phenoItem = MaterialPhenotypeItem()
phenoItem.setAttribute(mfType, value)
dmRow.setPhenotype(phenoItem)
return annotation
######################### QUERY Highlight Annotaiton ##########################
# query all highlight annotation
# return dict for drug set in document dict {"doc url": "drug set"}
def queryHighlightAnns(conn):
highlightD = {}
qry = """SELECT h.id, t.has_source, s.exact
FROM highlight_annotation h, oa_target t, oa_selector s
WHERE h.has_target = t.id
AND t.has_selector = s.id;"""
cur = conn.cursor()
cur.execute(qry)
for row in cur.fetchall():
source = row[1]; drugname = row[2]
if source in highlightD:
highlightD[source].add(drugname)
else:
highlightD[source] = Set([drugname])
return highlightD<|fim▁end|>
|
for claimId,claimAnn in claimAnns.items():
claimDataAnno = queryMpData(conn, claimAnn, claimId)
claimDataMatAnno = queryMpMaterial(conn, claimDataAnno, claimId)
|
<|file_name|>hive_prep.py<|end_file_name|><|fim▁begin|>from mrjob.job import MRJob
from mrjob.step import MRStep
def get_id_from_line(line):
if line.find('.","Message-ID: <') > 0:
start = line.find("Message-ID")+13
i=0
for char in line[start:]:
i=i+1
if (not (char.isdigit() or (char == '.'))):
stop = i+start-2
break
return line[start:stop]
class MRMultilineInput(MRJob):
def steps(self):
return [
MRStep(mapper_init=self.mapper_init_count,
mapper=self.mapper_count),
MRStep(mapper=self.mapper_child)
# STEP 1
def mapper_init_count(self):
self.message_id = ''
self.in_body = False
self.body = []
self.after_key = False
self.beginning = False
self.key = False
def mapper_count(self, _, line):
line = line.strip()
if (line.find('.","Message-ID: <') > 0) and self.in_body and not self.beginning:
yield self.message_id, self.body
self.message_id = ''
self.body = []
self.in_body = False
self.after_key = False
self.beginning = False
self.key = False
if self.in_body and not self.after_key:
self.beginning = False
self.body.append(line)
if line.find('.","Message-ID: <') > 0 and not self.key:
if not self.in_body:
self.in_body = True
self.beginning = True
self.after_key = True
self.key = True
start = line.find("Message-ID")+13
i=0
for char in line[start:]:
i=i+1
if (not (char.isdigit() or (char == '.'))):
stop = i+start-2
break
self.message_id = line[start:stop]
self.after_key = False<|fim▁hole|> clean_body = ''
clean_date = ''
clean_from = ''
clean_to = ''
clean_values = []
start = 0
for idx, line in enumerate(values):
if "Date:" in line:
clean_date = line[5:].strip()
if line.find("From:") == 0:
clean_from = line[5:].strip()
if line.find("To:") == 0:
clean_to = line[3:].strip()
if "X-FileName:" in line:
start = idx+1
break
for i in range(start,len(values)):
if "-Original Message-" in values[i]:
break
clean_body=clean_body + values[i] + " "
clean_values.append(clean_date)
clean_values.append(clean_from)
#clean_values.append(clean_to)
#clean_values.append(clean_body.strip())
clean_values.append("TEST BODY")
newval = values
for element in values:
if "subject:" in element.lower():
subject = element
break
if "re:" in subject.lower():
newval.append("child")
elif "fw:" not in subject.lower():
newval.append("parent")
for element in newval:
if "Subject:" in element:
subject = element
break
relation = values[-1]
i = 0
colon = 0
if "<" not in subject:
for char in subject:
i=i+1
if char == ":":
colon = i
sub = subject[colon+1:].strip()
sub_relation = []
sub_relation.append(sub)
sub_relation.append(relation)
yield sub_relation, (message_id,clean_values)
if __name__ == '__main__':
MRMultilineInput.run()<|fim▁end|>
|
# STEP 2
def mapper_child(self, message_id, values):
|
<|file_name|>router_simple.go<|end_file_name|><|fim▁begin|>// +build goji_router_simple
package goji
import "net/http"
/*
This is the simplest correct router implementation for Goji.
*/
type router []route
type route struct {
Pattern
http.Handler
}
func (rt *router) add(p Pattern, h http.Handler) {<|fim▁hole|>}
func (rt *router) route(r *http.Request) *http.Request {
for _, route := range *rt {
if r2 := route.Match(r); r2 != nil {
return r2.WithContext(&match{
Context: r2.Context(),
p: route.Pattern,
h: route.Handler,
})
}
}
return r.WithContext(&match{Context: r.Context()})
}<|fim▁end|>
|
*rt = append(*rt, route{p, h})
|
<|file_name|>MetricsItem.tsx<|end_file_name|><|fim▁begin|>import { Box } from '@rocket.chat/fuselage';
import React, { ComponentProps, FC } from 'react';
type MetricsItemProps = ComponentProps<typeof Box>;
const MetricsItem: FC<MetricsItemProps> = (props) => (
<Box
display='flex'
justifyContent='center'
alignItems='center'
fontScale='micro'
color='info'
mi='x4'
{...props}
/>
);
<|fim▁hole|>export default MetricsItem;<|fim▁end|>
| |
<|file_name|>TscTelnetLib.py<|end_file_name|><|fim▁begin|># File: TscTelnetLib.py ; This file is part of Twister.
# version: 2.002
#
# Copyright (C) 2012 , Luxoft
#
# Authors:
# Adrian Toader <[email protected]>
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
This module contains Telnet connection functions.
"""
from telnetlib import Telnet
from time import sleep
#from time import time as epochtime
from thread import start_new_thread
#from os import remove, rename
#from os.path import dirname, exists, abspath, join, getsize
#Efrom json import load, dump
#__dir__ = dirname(abspath(__file__))
__all__ = ['TelnetManager', 'TelnetConnection']
#
class TelnetManager(object):
""" Twister Telnet connections manager """
def __init__(self):
""" init """
# connections are TelnetConnection instances
self.connections = {}
# active connection name; is used for all commands as default
# if no name is specified
self.activeConnection = None
def open_connection(self, name, host, port=23, user=None, password=None,
userExpect=None, passwordExpect=None, keepalive=True):
""" open a new TelnetConnection instance and add it to manager list """
if not self.connections.has_key(name):
connection = TelnetConnection(name, host, port, user, password,
userExpect, passwordExpect, keepalive)
self.connections.update([(name, connection), ])
return True
else:
print('telnet open connection error: connection name already in use')
return False
def login(self, name, user=None, password=None,
userExpect=None, passwordExpect=None):
""" login on telnet connection """
try:
return self.connections[name].login(user, password,
userExpect, passwordExpect)
except Exception, e:
print('telnet manager login error: {er}'.format(er=e))
return False
def write(self, command, name=None):
""" write command to telnet connection """
if ((not name and not self.activeConnection) or
(name and not self.connections.has_key(name))):
print 'connection not found'
return False
if name:
return self.connections[name].write(command)
elif self.activeConnection:
return self.connections[self.activeConnection].write(command)
return False
def read(self, name=None):
""" read from telnet connection """
if ((not name and not self.activeConnection) or
(name and not self.connections.has_key(name))):
print 'connection not found'
return False
if name:
return self.connections[name].read()
elif self.activeConnection:
return self.connections[self.activeConnection].read()
return False
def read_until(self, expected, name=None):
""" read from telnet connection until expected """
if ((not name and not self.activeConnection) or
(name and not self.connections.has_key(name))):
print 'connection not found'
return False
if name:
return self.connections[name].read_until(expected)
elif self.activeConnection:
return self.connections[self.activeConnection].read_until(expected)
return False
def set_newline(self, newline, name=None):
""" set the new line char for telnet connection """
if ((not name and not self.activeConnection) or
(name and not self.connections.has_key(name))):
print 'connection not found'
return False
if name:
return self.connections[name].set_newline(newline)
elif self.activeConnection:
return self.connections[self.activeConnection].set_newline(newline)
return False
def set_timeout(self, timeout, name=None):
""" set timeout for operations on telnet connection """
if ((not name and not self.activeConnection) or
(name and not self.connections.has_key(name))):
print 'connection not found'
return False
if name:
return self.connections[name].set_timeout(timeout)
elif self.activeConnection:
return self.connections[self.activeConnection].set_timeout(timeout)
return False
def get_connection(self, name=None):
""" get the TelnetConnection instance """
if ((not name and not self.activeConnection) or
(name and not self.connections.has_key(name))):
print 'connection not found'
return False
if name:
return self.connections[name]
elif self.activeConnection:
return self.connections[self.activeConnection]
return False
def set_active_connection(self, name):
""" set the active connection """
if not self.connections.has_key(name):
print 'connection not found'
return False
self.activeConnection = name
return True
def list_connections(self):
""" list all connections """
return [name for name in self.connections.iterkeys()]
def close_connection(self, name=None):
""" close connection """
if ((not name and not self.activeConnection) or
(name and not self.connections.has_key(name))):
print 'connection not found'
return False
if not name and self.activeConnection:
del(self.connections[self.activeConnection])
self.activeConnection = None
return True
try:
del(self.connections[name])
if name == self.activeConnection:
self.activeConnection = None
except Exception, e:
print('telnet manager error while closing connection: {er}'.format(er=e))
return False
return True
def close_all_connections(self):
""" close all connections """
del(self.connections)
self.connections = {}
self.activeConnection = None
print('all connections closed')
return True
class TelnetConnection:
""" tsc telnet connection """
def __init__(self, name, host, port=23, user=None, password=None,
userExpect=None, passwordExpect=None, keepalive=True):
""" init """
self.connection = None
self.host = host
self.port = port
self.loginAccount = {
'user': user,
'password': password
}
self.name = name
self.newline = '\n'
self.timeout = 4
self.keepAliveRetries = 0
self.keepAliveThread = None
self.keepAlive = keepalive
self.loginDriver = {
'userExpect': userExpect,
'passwordExpect': passwordExpect
}
"""
self.loginDrivers = None
self.loginDriversPath = join(__dir__, 'logindrivers.list')
self.loginDriversLockPath = join(__dir__, 'logindrivers.lock')
self.loadLoginDrivers()
"""
try:
self.connection = Telnet(self.host, self.port, self.timeout)
print('telnet connection created!')
self.login()
if self.keepAlive:
self.keepAliveThread = start_new_thread(self.keep_alive, ())
else:
self.keepAliveThread = None
except Exception, e:
self.connection = None
self.keepAliveThread = None
print('telnet connection failed: {er}'.format(er=e))
def __del__(self):
""" delete """
if self.connection:
self.connection.close()
sleep(2)
del(self)
def keep_alive(self):
""" keep connection alive """
timeout = (0.2, self.timeout)[self.timeout>2]
while not self.connection.eof:
self.connection.write('')
sleep(timeout)
def alive(self):
""" check if connection is alive """
if self.connection and not self.connection.eof:
return True
try:
self.connection = Telnet(self.host, self.port)
print('telnet connection created!')
self.login()
if self.keepAlive:
self.keepAliveThread = start_new_thread(self.keep_alive, ())
else:
self.keepAliveThread = None
except Exception, e:
self.connection = None
self.keepAliveThread = None
self.keepAliveRetries += 1
if self.keepAliveRetries > 4:
print('telnet connection restore retry failed!')
return False
print('telnet connection restore failed: {er}'\
'retry: {n}!'.format(er=e, n=self.keepAliveRetries))
self.alive()
return True
<|fim▁hole|> if isinstance(newline, str):
self.newline = newline
return True
return False
def set_timeout(self, timeout):
""" set timeout for operations on telnet connection """
if isinstance(timeout, int):
self.timeout = [2, timeout][timeout > 2]
return True
return False
def read(self):
""" read from telnet connection """
if not self.alive():
return False
try:
response = self.connection.read_very_eager()
if response:
return response
except Exception, e:
print('read command error: {er}'.format(er=e))
return False
return False
def read_until(self, expected):
""" read from telnet connection until expected """
if not self.alive():
return False
try:
response = self.connection.read_until(expected, self.timeout)
if response:
print(response)
return True
except Exception, e:
print('read until command error: {er}'.format(er=e))
return False
return False
def write(self, command, result=True, display=True):
""" write command to telnet connection """
if not self.alive():
return False
try:
self.connection.write( str(command) + self.newline )
sleep(2)
if display: print('command: {c}'.format(c=command))
if result:
return self.connection.read_very_eager()
else:
return True
except Exception, e:
print('send command error: {er}'.format(er=e))
return False
def expect(self, expected, command=None, result=True, display=True):
""" write command to telnet connection on expected prompt """
if not self.alive():
return False
try:
response = self.connection.read_until(expected, self.timeout)
print(response)
if response:
if command:
self.connection.write( str(command) + self.newline)
sleep(2)
if display: print('command: {c}'.format(c=command))
if result:
return self.connection.read_very_eager()
else:
return True
return False
except Exception, e:
print('expect send command error: {er}'.format(er=e))
return False
def login(self, user=None, password=None,
userExpect=None, passwordExpect=None):
""" login on telnet connection """
if not self.alive():
return False
self.loginAccount['user'] = (user,
self.loginAccount['user'])[user is None]
self.loginAccount['password'] = (password,
self.loginAccount['password'])[password is None]
self.loginDriver['userExpect'] = (userExpect,
self.loginDriver['userExpect'])[userExpect is None]
self.loginDriver['passwordExpect'] = (passwordExpect,
self.loginDriver['passwordExpect'])[passwordExpect is None]
print('login ..')
if None in [self.loginAccount['user'], self.loginAccount['password']]:
print('no login data!')
return False
if None in [self.loginDriver['userExpect'],
self.loginDriver['passwordExpect']]:
print('no login expected data!')
return False #return self.autologin()
response = self.expect(self.loginDriver['userExpect'],
self.loginAccount['user'], False)
if response:
response = self.expect(self.loginDriver['passwordExpect'],
self.loginAccount['password'],
True, False)
if response:
print(response)
"""
if ((self.loginDriver['userExpect'] not in
self.loginDrivers['userExpect'] or
self.loginDriver['passwordExpect'] not in
self.loginDrivers['passwordExpect'])
and not None in self.loginDriver.itervalues()):
self.saveLoginDrivers(self.loginDriver['userExpect'],
self.loginDriver['passwordExpect'])
"""
return True
print('fail')
return False
"""
def autologin(self):
# autologin on telnet connection
print('tring autologin ..')
response = self.connection.expect(self.loginDrivers['userExpect'],
self.timeout)
if not None in response:
print(response)
self.write(self.loginAccount['user'], False)
response = self.connection.expect(
self.loginDrivers['passwordExpect'],
self.timeout)
if not None in response:
print(response)
print self.write(self.loginAccount['password'], True, False)
return True
print('fail')
return False
def loadLoginDrivers(self):
# load the known login drivers
retries = 0
while exists(self.loginDriversLockPath) and retries <= self.timeout * 2:
retries += 1
sleep(0.4)
with open(self.loginDriversLockPath, 'wb+') as loginDriversLockFile:
loginDriversLockFile.write('lock\n')
if not exists(self.loginDriversPath):
with open(self.loginDriversPath, 'wb+') as loginDriversFile:
self.loginDrivers = {}
self.loginDrivers['userExpect'] = []
self.loginDrivers['passwordExpect'] = []
dump(self.loginDrivers, loginDriversFile)
if getsize(self.loginDriversPath) > 524288L:
rename(self.loginDriversPath,
self.loginDriversPath + '.bck' + str(epochtime()))
with open(self.loginDriversPath, 'rb') as loginDriversFile:
self.loginDrivers = load(loginDriversFile)
remove(self.loginDriversLockPath)
def saveLoginDrivers(self, userExpect, passwordExpect):
# save new login driver
retries = 0
while exists(self.loginDriversLockPath) and retries <= self.timeout * 2:
retries += 1
sleep(0.4)
with open(self.loginDriversLockPath, 'wb+') as loginDriversLockFile:
loginDriversLockFile.write('lock\n')
with open(self.loginDriversPath, 'rb') as loginDriversFile:
self.loginDrivers = load(loginDriversFile)
self.loginDrivers['userExpect'].append(userExpect)
self.loginDrivers['passwordExpect'].append(passwordExpect)
with open(self.loginDriversPath, 'wb+') as loginDriversFile:
dump(self.loginDrivers, loginDriversFile)
remove(self.loginDriversLockPath)
"""<|fim▁end|>
|
def set_newline(self, newline):
""" set the new line char for telnet connection """
|
<|file_name|>Layout.js<|end_file_name|><|fim▁begin|>// Generated by CoffeeScript 1.9.3
var Block, Layout, SpecialString, fn, i, len, object, prop, ref, terminalWidth;
Block = require('./layout/Block');
object = require('utila').object;
SpecialString = require('./layout/SpecialString');
terminalWidth = require('./tools').getCols();
module.exports = Layout = (function() {
var self;
self = Layout;
Layout._rootBlockDefaultConfig = {
linePrependor: {
options: {
amount: 0
}
},
lineAppendor: {
options: {
amount: 0
}
},
blockPrependor: {
options: {
amount: 0
}
},
blockAppendor: {
options: {
amount: 0
}
}
};
Layout._defaultConfig = {
terminalWidth: terminalWidth
};
function Layout(config, rootBlockConfig) {
var rootConfig;
if (config == null) {
config = {};
}
if (rootBlockConfig == null) {
rootBlockConfig = {};
}
this._written = [];
this._activeBlock = null;
this._config = object.append(self._defaultConfig, config);
rootConfig = object.append(self._rootBlockDefaultConfig, rootBlockConfig);
this._root = new Block(this, null, rootConfig, '__root');
this._root._open();
}
Layout.prototype.getRootBlock = function() {
return this._root;
};
Layout.prototype._append = function(text) {
return this._written.push(text);
};
Layout.prototype._appendLine = function(text) {
var s;
this._append(text);
s = SpecialString(text);
if (s.length < this._config.terminalWidth) {
this._append('<none>\n</none>');
}
return this;
};
Layout.prototype.get = function() {
this._ensureClosed();
if (this._written[this._written.length - 1] === '<none>\n</none>') {
this._written.pop();
}
return this._written.join("");
};
Layout.prototype._ensureClosed = function() {
if (this._activeBlock !== this._root) {
throw Error("Not all the blocks have been closed. Please call block.close() on all open blocks.");
}
if (this._root.isOpen()) {
this._root.close();
}
};
return Layout;
})();
<|fim▁hole|> method = prop;
return Layout.prototype[method] = function() {
return this._root[method].apply(this._root, arguments);
};
};
for (i = 0, len = ref.length; i < len; i++) {
prop = ref[i];
fn();
}<|fim▁end|>
|
ref = ['openBlock', 'write'];
fn = function() {
var method;
|
<|file_name|>search.test.js<|end_file_name|><|fim▁begin|>const assert = require('assert')
const crypto = require('crypto')
const { createRequest } = require("../util/util")<|fim▁hole|> const keywords = "海阔天空"
const type = 1
const limit = 30
const data = 's=' + keywords + '&limit=' + limit + '&type=' + type + '&offset=0'
createRequest('/api/search/pc/', 'POST', data)
.then(result => {
console.log(JSON.parse(result).result.songs[0].mp3Url)
assert(JSON.parse(result).result.songs[0].name === '海阔天空')
done()
})
.catch(err => {
done(err)
})
})
})<|fim▁end|>
|
describe('测试搜索是否正常', () => {
it('获取到的数据的 name 应该和搜索关键词一致', done => {
|
<|file_name|>calc.py<|end_file_name|><|fim▁begin|>from functools import reduce
# Reduction state for multGF2(), configured by setGF2():
#   mask1   - the single bit at position `degree` (the overflow bit)
#   mask2   - mask keeping the low `degree` bits of a field element
#   polyred - g(x) with its leading term stripped, used for reduction
mask1 = mask2 = polyred = None

def setGF2(degree, irPoly):
    """Define parameters of binary finite field GF(2^m)/g(x).

    - degree: extension degree m of the binary field
    - irPoly: coefficients of the irreducible polynomial g(x), encoded
      as an integer bit mask (bit i = coefficient of x^i)
    """
    global mask1, mask2, polyred
    mask1 = 1 << degree
    mask2 = mask1 - 1
    # g(x) minus its leading term: clearing the top bit of irPoly is
    # equivalent to the original list-based reconstruction of the
    # remaining coefficients, without the intermediate bit list.
    polyred = irPoly ^ (1 << (irPoly.bit_length() - 1))

def multGF2(p1, p2):
    """Multiply two polynomials in GF(2^m)/g(x).

    Carry-less shift-and-add: each set bit of p2 XORs the correspondingly
    shifted p1 into the product, reducing by g(x) on degree overflow.
    Requires a prior call to setGF2().

    Note: because polyred lacks the leading term of g(x), reduced values
    of p1 keep stale bits at positions >= degree; those bits never reach
    position `degree` at check time (the check runs right after a shift),
    and the final `& mask2` discards them from the product.
    """
    p = 0
    while p2:
        if p2 & 1:        # current bit of p2 set -> add (XOR) shifted p1
            p ^= p1
        p1 <<= 1
        if p1 & mask1:    # degree overflow -> reduce modulo g(x)
            p1 ^= polyred
        p2 >>= 1
    return p & mask2
if __name__ == "__main__":<|fim▁hole|> # Evaluate the product (x^2 + x + 1)(x^2 + 1)
print("{:02x}".format(multGF2(0x3f7e0000000000000000000000000000L, 0x3f7e00000000000000000000L)))<|fim▁end|>
|
# Define binary field GF(2^3)/x^3 + x + 1
setGF2(127, 2**127 + 2**63 + 1)
|
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>"""WSGI application."""
import os
from sys import argv
from werkzeug.serving import run_simple
from werkzeug.wsgi import DispatcherMiddleware
from tweetTrack.app import app
application = DispatcherMiddleware(app)
if __name__ == '__main__':
if len(argv) < 2 or argv[1] == 'Dev':
os.environ['FLASK_CONFIG'] = 'Dev'
run_simple(
'localhost',
5000,
application,
__debug__<|fim▁hole|> os.environ['FLASK_CONFIG'] = argv[1].title()
print(os.environ['FLASK_CONFIG'])
run_simple(
'localhost',
5000,
application,
)<|fim▁end|>
|
)
else:
|
<|file_name|>animation_node.cpp<|end_file_name|><|fim▁begin|>// Copyright mogemimi. Distributed under the MIT license.
#include "pomdog/experimental/skeletal2d/blendtrees/animation_node.h"
namespace pomdog::skeletal2d {
AnimationNode::~AnimationNode() = default;
<|fim▁hole|><|fim▁end|>
|
} // namespace pomdog::skeletal2d
|
<|file_name|>primitive.rs<|end_file_name|><|fim▁begin|>//! Allows to setup a scene with scenes in pyramidal layout, along with traits
//! to help shooting rays to check for intersections
use super::vec::{Vector, RFloat};
use std::default::Default;
use std::f32;
/// A ray: origin `pos` plus direction `dir`.
/// NOTE(review): the intersection code appears to assume `dir` is unit
/// length — confirm callers normalize it.
#[derive(Default, PartialEq, Clone, Copy, Debug)]
pub struct Ray {
    pub pos: Vector,
    pub dir: Vector,
}
/// Result of an intersection query. `distance == f32::INFINITY` encodes
/// "no hit". `pos` is filled by `Sphere::intersect` with the unit surface
/// normal at the hit point (despite the field name).
#[derive(Clone, Copy)]
pub struct Hit {
    pub distance: RFloat,
    pub pos: Vector,
}
impl Hit {
pub fn missed() -> Hit {
Hit {
distance: f32::INFINITY,
pos: Default::default(),
}
}
pub fn has_missed(&self) -> bool {
self.distance == f32::INFINITY
}
pub fn set_missed(&mut self) {
self.distance = f32::INFINITY;
}
}
/// A sphere described by its center point and radius.
#[derive(Clone, Copy)]
pub struct Sphere {
    pub center: Vector,
    pub radius: RFloat,
}
impl Default for Sphere {
fn default() -> Sphere {
Sphere {
center: Default::default(),
radius: 1.0,<|fim▁hole|>
impl DistanceMeasure for Sphere {
    /// Distance along `r` to the nearest intersection with this sphere,
    /// or `f32::INFINITY` when the ray misses.
    ///
    /// Standard ray/sphere quadratic; the leading coefficient is omitted,
    /// which assumes `r.dir` is unit length — TODO confirm callers
    /// normalize the direction.
    #[inline(always)]
    fn distance_from_ray(&self, r: &Ray) -> RFloat {
        // Vector from the ray origin to the sphere center.
        let v = self.center - r.pos;
        // Projection of that vector onto the ray direction.
        let b = v.dot(&r.dir);
        // Discriminant of the quadratic; negative -> no real roots -> miss.
        let disc = b * b - v.dot(&v) + self.radius * self.radius;
        if disc < 0.0 {
            return f32::INFINITY;
        }
        let d = disc.sqrt();
        // Far root: if even this lies behind the origin, the whole
        // sphere is behind the ray.
        let t2 = b + d;
        if t2 < 0.0 {
            return f32::INFINITY;
        }
        // Prefer the near root when it is in front of the origin;
        // otherwise (origin inside the sphere) fall back to the far root.
        let t1 = b - d;
        if t1 > 0.0 { t1 } else { t2 }
    }
}
impl Intersectable for Sphere {
    /// Record the intersection of `ray` with this sphere in `hit`, but
    /// only when it is closer than the intersection already stored.
    #[inline(always)]
    fn intersect(&self, hit: &mut Hit, ray: &Ray) {
        let distance = self.distance_from_ray(ray);
        // Keep the existing record when this sphere is farther away
        // (or when `distance` is INFINITY, i.e. a miss).
        if distance >= hit.distance {
            return;
        }
        hit.distance = distance;
        // NOTE(review): despite the name, `pos` receives the unit surface
        // normal: `ray.pos + dir*distance` is the hit point, and
        // subtracting the center then normalizing yields the normal.
        hit.pos = (ray.pos + (ray.dir.mulfed(distance) - self.center)).normalized();
    }
}
/// Something a ray can be tested against, accumulating the nearest hit.
pub trait Intersectable {
    /// Return intersection point of ray with item (relative to the Ray !!)
    fn intersect(&self, &mut Hit, ray: &Ray);
}

/// Cheap distance-only query, without filling in a full `Hit` record.
pub trait DistanceMeasure {
    fn distance_from_ray(&self, r: &Ray) -> RFloat;
}
#[cfg(test)]
mod primitive_tests {
use super::Ray;
use std::default::Default;
#[test]
fn ray_defaults() {
let r1: Ray = Ray {
pos: Default::default(),
dir: Default::default(),
};
let r2: Ray = Default::default();
assert_eq!(r1, r2);
}
}
#[cfg(test)]
mod sphere {
extern crate test;
use super::*;
use std::default::Default;
use super::super::vec::Vector;
use std::f32;
fn setup_scene() -> (Ray, Ray, Sphere) {
let s = Sphere {
center: Default::default(),
radius: 1.0,
};
let mut dir: Vector = Default::default();
dir.x = -1.0;
let r1 = Ray {
pos: Vector {
x: 2.0,
y: 0.0,
z: 0.0,
},
dir: dir,
};
let mut r2 = r1;
r2.dir.x = -r2.dir.x; // invert direction
(r1, r2, s)
}
#[test]
fn intersect() {
let (r1, r2, s) = setup_scene();
{
let dfr = s.distance_from_ray(&r1);
assert_eq!(dfr, 1.0);
let dfr = s.distance_from_ray(&r2);
assert_eq!(dfr, f32::INFINITY);
}
{
let mut h = Hit {
distance: 2.0,
pos: Default::default(),
};
s.intersect(&mut h, &r1);
assert_eq!(h.distance, 1.0);
assert_eq!(h.pos.x, 1.0);
h.distance = 0.5;
s.intersect(&mut h, &r1);
assert!(h.distance == 0.5, "Max Distance too short");
h.distance = 10.0;
s.intersect(&mut h, &r2);
assert!(h.distance == 10.0, "r2 is shot the wrong way");
}
}
#[test]
fn defaultdefault() {
let s: Sphere = Default::default();
assert!(s.radius != 0.0);
}
const NUM_ITERATIONS: usize = 10000;
#[bench]
fn bench_ray_sphere(b: &mut test::Bencher) {
let (r1, r2, s) = setup_scene();
b.iter(|| {
for _ in 0..NUM_ITERATIONS {
test::black_box(s.distance_from_ray(&r1));
test::black_box(s.distance_from_ray(&r2));
}
});
b.bytes = (NUM_ITERATIONS * 2) as u64;
}
#[bench]
fn bench_intersect(b: &mut test::Bencher) {
let (r1, r2, s) = setup_scene();
let mut h = Hit::missed();
b.iter(|| {
for _ in 0..NUM_ITERATIONS {
h.set_missed();
test::black_box(s.intersect(&mut h, &r1));
h.set_missed();
test::black_box(s.intersect(&mut h, &r2));
}
});
b.bytes = (NUM_ITERATIONS * 2) as u64;
}
}<|fim▁end|>
|
}
}
}
|
<|file_name|>linkedlist.rs<|end_file_name|><|fim▁begin|>enum List< T > {
Empty,
Cons{
head : T,
tail : Box< List< T > >
},
}
impl<T> List<T> {
    /// The empty list.
    fn mk_empty() -> Self {
        List::Empty
    }

    /// Prepend `value` onto `list`, boxing the tail.
    fn cons(value: T, list: Self) -> Self {
        List::Cons {
            head: value,
            tail: Box::new(list),
        }
    }

    /// A one-element list holding `value`.
    fn mk_one(value: T) -> Self {
        List::cons(value, List::mk_empty())
    }
}
struct ListIterator< 'a, T : 'a > {
cur : &'a List< T >,
}
/// Iterating a `&List<T>` yields `&T` references, so `for x in &list`
/// works without consuming the list.
impl< 'a, T > IntoIterator for &'a List< T > {
    type Item = &'a T;
    type IntoIter = ListIterator< 'a, T >;
    fn into_iter( self ) -> Self::IntoIter {
        // Start the walk at the head node (the list itself).
        ListIterator{ cur : self }
    }
}
impl < 'a, T > Iterator for ListIterator< 'a, T > {
    type Item = &'a T;
    /// Yield a reference to the current head and step the cursor to the
    /// boxed tail; `None` once the `Empty` node is reached.
    fn next( &mut self ) -> Option< &'a T > {
        match self.cur {
            &List::Empty => None,
            &List::Cons{ ref head, ref tail } => {
                // Advance before handing out the borrowed head.
                self.cur = tail;
                Some( head )
            },
        }
    }
}
// see also: "Entirely Too Many Linked Lists" for alternatives
<|fim▁hole|> println!( "{}", x );
}
}<|fim▁end|>
|
fn main() {
let list = List::cons( 42, List::cons( 1337, List::mk_one( -1 ) ) );
for x in &list {
|
<|file_name|>_outsidetextfont.py<|end_file_name|><|fim▁begin|>from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Outsidetextfont(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "bar"
_path_str = "bar.outsidetextfont"
_valid_props = {"color", "colorsrc", "family", "familysrc", "size", "sizesrc"}
# color
# -----
@property
def color(self):
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# colorsrc
# --------
@property
def colorsrc(self):
"""
Sets the source reference on Chart Studio Cloud for color .
The 'colorsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["colorsrc"]
@colorsrc.setter
def colorsrc(self, val):
self["colorsrc"] = val
# family
# ------
@property
def family(self):
"""
HTML font family - the typeface that will be applied by the web
browser. The web browser will only be able to apply a font if
it is available on the system which it operates. Provide
multiple font families, separated by commas, to indicate the
preference in which to apply fonts if they aren't available on
the system. The Chart Studio Cloud (at https://chart-
studio.plotly.com or on-premise) generates images on a server,
where only a select number of fonts are installed and
supported. These include "Arial", "Balto", "Courier New",
"Droid Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
The 'family' property is a string and must be specified as:
- A non-empty string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["family"]
@family.setter
def family(self, val):
self["family"] = val
# familysrc
# ---------
@property
def familysrc(self):
"""
Sets the source reference on Chart Studio Cloud for family .
The 'familysrc' property must be specified as a string or
as a plotly.grid_objs.Column object<|fim▁hole|> """
return self["familysrc"]
@familysrc.setter
def familysrc(self, val):
self["familysrc"] = val
# size
# ----
@property
def size(self):
"""
The 'size' property is a number and may be specified as:
- An int or float in the interval [1, inf]
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
int|float|numpy.ndarray
"""
return self["size"]
@size.setter
def size(self, val):
self["size"] = val
# sizesrc
# -------
@property
def sizesrc(self):
"""
Sets the source reference on Chart Studio Cloud for size .
The 'sizesrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["sizesrc"]
@sizesrc.setter
def sizesrc(self, val):
self["sizesrc"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
colorsrc
Sets the source reference on Chart Studio Cloud for
color .
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud for
family .
size
sizesrc
Sets the source reference on Chart Studio Cloud for
size .
"""
def __init__(
self,
arg=None,
color=None,
colorsrc=None,
family=None,
familysrc=None,
size=None,
sizesrc=None,
**kwargs
):
"""
Construct a new Outsidetextfont object
Sets the font used for `text` lying outside the bar.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of
:class:`plotly.graph_objs.bar.Outsidetextfont`
color
colorsrc
Sets the source reference on Chart Studio Cloud for
color .
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud for
family .
size
sizesrc
Sets the source reference on Chart Studio Cloud for
size .
Returns
-------
Outsidetextfont
"""
super(Outsidetextfont, self).__init__("outsidetextfont")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.bar.Outsidetextfont
constructor must be a dict or
an instance of :class:`plotly.graph_objs.bar.Outsidetextfont`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("color", None)
_v = color if color is not None else _v
if _v is not None:
self["color"] = _v
_v = arg.pop("colorsrc", None)
_v = colorsrc if colorsrc is not None else _v
if _v is not None:
self["colorsrc"] = _v
_v = arg.pop("family", None)
_v = family if family is not None else _v
if _v is not None:
self["family"] = _v
_v = arg.pop("familysrc", None)
_v = familysrc if familysrc is not None else _v
if _v is not None:
self["familysrc"] = _v
_v = arg.pop("size", None)
_v = size if size is not None else _v
if _v is not None:
self["size"] = _v
_v = arg.pop("sizesrc", None)
_v = sizesrc if sizesrc is not None else _v
if _v is not None:
self["sizesrc"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False<|fim▁end|>
|
Returns
-------
str
|
<|file_name|>bump-jetbrains-pkgs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import os
import traceback
import glob
from multiprocessing import Pool, Process, Lock
import subprocess
import requests
import portage
from terminaltables import AsciiTable
DEBUG=0
if 'DEBUG' in os.environ:
DEBUG = os.environ['DEBUG']
PORTDIR_OVERLAY=os.path.realpath(os.path.dirname(os.path.realpath(__file__)) + "/../")
os.chdir(PORTDIR_OVERLAY)
os.environ["PORTDIR_OVERLAY"] = PORTDIR_OVERLAY
# retrieves the latest versions for specified product codes
def get_version( codes ):
    """Query the JetBrains releases API for the given product codes.

    :param codes: iterable of JetBrains product codes (e.g. 'CL', 'WS')
    :return: dict shaped {code: {'latest_slot': major, major: version, ...}}
             with one entry per major-version "slot" plus the key
             'latest_slot' naming the newest slot.
    """
    payload = {
        'code': ','.join(codes),
        'latest': 'false',
        'type': 'release'
    }
    r = requests.get('https://data.services.jetbrains.com/products/releases', params=payload)
    json=r.json()
    # result is indexed as [code][slot]
    versions = {}
    for c in codes:
        versions[c] = {}
        all_v_data = json[c]
        # latest version is always the first one
        versions[c]['latest_slot'] = all_v_data[0]['majorVersion']
        # loop over all data and pick the first version from each slot,
        # because the data are already sorted (newest first)
        for v_data in all_v_data:
            slot = v_data['majorVersion']
            if not slot in versions[c]:
                v = v_data['version']
                versions[c][slot] = v
    return versions
# format: `package_name: product_code`
codes={
'clion': 'CL',
'datagrip': 'DG',
'idea': 'IIU',
'idea-community': 'IIC',
'phpstorm': 'PS',
'pycharm': 'PCP',
'pycharm-community': 'PCC',
'rider': 'RD',
'rubymine': 'RM',
'webstorm': 'WS',
}
remote_versions = get_version(codes.values())
update_table = [dict() for x in range(0)]
pdb = portage.db[portage.root]["porttree"].dbapi
for pn, code in sorted(codes.items()):
new_updates = [dict() for x in range(0)]
# find category by globbing in this repo
cat = glob.glob(f"*/{pn}/{pn}*.ebuild")[0].split("/")[0]
# find the newest version for each slot
loc_slots = {}
local_versions = pdb.xmatch('match-visible', f"{cat}/{pn}::rindeal")
for v in local_versions:
slot = pdb.aux_get(v, ["SLOT"])[0]
# add if not yet present
if not slot in loc_slots:
loc_slots[slot] = v
continue
# update slot if newer version was found
if portage.vercmp(loc_slots[slot], v) < 0:
loc_slots[slot] = v
# now compare current and server versions for each slot
for slot in loc_slots:
pkg = loc_slots[slot]
loc_ver = portage.pkgsplit(pkg)[1]
rem_ver = remote_versions[code][slot]
if portage.vercmp(loc_ver, rem_ver) < 0:
new_updates.append({
'cat': cat,<|fim▁hole|> 'loc_ver': loc_ver,
'rem_slot': slot,
'rem_ver': rem_ver
})
# now look for the newest version outside of any known slots
latest_loc_pkg = pdb.xmatch('bestmatch-visible', f"{cat}/{pn}::rindeal")
latest_loc_ver = portage.pkgsplit(latest_loc_pkg)[1]
latest_loc_slot = pdb.aux_get(latest_loc_pkg, ["SLOT"])[0]
latest_rem_slot = remote_versions[code]['latest_slot']
latest_rem_ver = remote_versions[code][latest_rem_slot]
if portage.vercmp(latest_loc_ver, latest_rem_ver) < 0:
# check for duplicates
is_dup = 0
for update in new_updates:
if update['loc_slot'] == latest_rem_slot:
is_dup = 1
break
if not is_dup:
new_updates.append({
'cat': cat,
'pn': pn,
'loc_slot': latest_loc_slot,
'loc_ver': latest_loc_ver,
'rem_slot': latest_rem_slot,
'rem_ver': latest_rem_ver
})
update_table += new_updates
# create a pretty table
pretty_table = [ [ 'Category', 'Package', 'Slot', 'Version' ] ]
for u in update_table:
slot = u['loc_slot']
if slot != u['rem_slot']:
slot += ' -> ' + u['rem_slot']
pretty_table.append([ u['cat'], u['pn'], slot, u['loc_ver'] + ' -> ' + u['rem_ver'] ])
# now print the table
print(AsciiTable(pretty_table).table)
# and prompt the user for an action
y = input("Press 'y' to proceed with the update\n")
if y != "y":
print(f"You pressed '{y}', bailing...")
exit(0)
def run_cmd(cmd):
    """Echo `cmd` (colorized, prefixed with the current directory's
    basename), run it through the shell, report a non-zero exit status,
    and return that status."""
    here = os.path.basename(os.getcwd())
    print(f"> \033[94m{here}\033[0m: `\033[93m{cmd}\033[0m`")
    status = os.system(cmd)
    if status:
        print(f"{here}: command '{cmd}' failed with code {status}")
    return status
def update_pkg(cat, pn, loc_slot, loc_ver, rem_slot, rem_ver):
    """Bump one ebuild to `rem_ver` and commit the result.

    Runs in a pool worker process. The per-package lock serializes work
    on the same package; GIT_LOCK serializes all git invocations (only
    one git command may run at a time).

    :return: 1 when `repoman manifest` fails (staged changes are rolled
             back), None on success.
    """
    global GIT_LOCK, PKG_LOCKS, PORTDIR_OVERLAY
    cat_pn = f"{cat}/{pn}"
    os.chdir(f"{PORTDIR_OVERLAY}/{cat_pn}")
    PKG_LOCKS[cat_pn].acquire()
    new_slot = False if loc_slot == rem_slot else True
    if new_slot: # bump into a new slot: keep the old ebuild, copy it
        run_cmd(f"cp -v {pn}-{loc_slot}*.ebuild {pn}-{rem_ver}.ebuild")
    else: # bump inside a slot: rename the old ebuild in git
        GIT_LOCK.acquire()
        run_cmd(f"git mv -v {pn}-{loc_ver}*.ebuild {pn}-{rem_ver}.ebuild")
        GIT_LOCK.release()
    if run_cmd(f"repoman manifest") != 0:
        # Manifest generation failed: undo everything staged so far.
        GIT_LOCK.acquire()
        run_cmd('git reset -- .')
        run_cmd('git checkout -- .')
        GIT_LOCK.release()
        PKG_LOCKS[cat_pn].release()
        return 1
    GIT_LOCK.acquire()
    run_cmd(f"git add {pn}-{rem_ver}.ebuild")
    if new_slot:
        run_cmd(f"git commit -m '{cat}/{pn}: new version v{rem_ver}' .")
    else: # bump inside a slot
        run_cmd(f"git commit -m '{cat}/{pn}: bump to v{rem_ver}' .")
    GIT_LOCK.release()
    PKG_LOCKS[cat_pn].release()
# only one git command may run concurrently
GIT_LOCK = Lock()
PKG_LOCKS = {}
for update in update_table:
cat_pn = update['cat'] + "/" + update['pn']
if not cat_pn in PKG_LOCKS:
PKG_LOCKS[cat_pn] = Lock()
# DEBUG
#update_pkg(update_table[0])
# https://stackoverflow.com/a/25558333/2566213
def pool_init(l):
    """Pool worker initializer: install the shared lock map as a global.

    multiprocessing locks are inherited via the Pool initializer rather
    than passed per task (see https://stackoverflow.com/a/25558333/2566213).
    """
    global PKG_LOCKS
    PKG_LOCKS = l
pool = Pool(processes=8, initializer=pool_init, initargs=(PKG_LOCKS, ))
for update in update_table:
pool.apply_async(func=update_pkg, kwds=update)
pool.close()
pool.join()<|fim▁end|>
|
'pn': pn,
'loc_slot': slot,
|
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Multi Store',
'version': '8.0.1.0.0',
'category': 'Accounting',<|fim▁hole|> 'sequence': 14,
'summary': '',
'description': """
Multi Store
===========
The main purpose of this module is to restrict journals access for users on different stores.
This module add a new concept "stores" in some point similar to multicompany.
Similar to multicompany:
* User can have multiple stores available (store_ids)
* User can be active only in one store (store_id) which can be set up in his own preferences
* There is a group "multi store" that gives users the availability to see multi store fields
This module also adds a store_id field on journal:
* If store_id = False then journal can be seen by everyone
* If store_id is set, then journal can be seen by users on that store and parent stores
It also restrict edition, creation and unlink on: account.move, account.invoice and account.voucher.
It is done with the same logic to journal. We do not limitate the "read" of this models because user should need to access those documents, for example, to see partner due.
""",
'author': 'ADHOC SA',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'images': [
],
'depends': [
'account_voucher',
],
'data': [
'views/res_store_view.xml',
'views/res_users_view.xml',
'views/account_view.xml',
'security/multi_store_security.xml',
'security/ir.model.access.csv',
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|>
| |
<|file_name|>Alert.js<|end_file_name|><|fim▁begin|>window.hideAlert = function () {
$('#alertMessage').addClass("hidden");
$('#alertMessage').text("");
};
window.showAlert = function (msg) {
    // Show `msg` as an error: red alert styling plus a double blink.
    var $alert = $('#alertMessage');
    $alert.text(msg);
    $alert.addClass("alert-danger");
    $alert.removeClass("hidden");
    $alert.fadeOut(100).fadeIn(100).fadeOut(100).fadeIn(100);
};
window.showInfo = function (msg) {
    // Show `msg` as a neutral notice: drop error styling, then blink.
    var $alert = $('#alertMessage');
    $alert.text(msg);
    $alert.removeClass("alert-danger");
    $alert.removeClass("hidden");
    $alert.fadeOut(100).fadeIn(100).fadeOut(100).fadeIn(100);
};
window.dataErrorAlert = function (data) {
    // Map well-known server error ids to localized resource keys;
    // anything unrecognized is shown verbatim.
    var resourceKey = {
        "InvalidFile": "InvalidFile",
        "InvalidReg": "WrongRegExpMessage",
        "NotFound": "NoSearchResultsMessage",
        "InvalidPassword": "UnlockInvalidPassword"
    };
    // hasOwnProperty guard so inherited Object properties never match.
    if (Object.prototype.hasOwnProperty.call(resourceKey, data.idError)) {
        showAlert(Resources[resourceKey[data.idError]]);
    } else {
        showAlert(data.idError);
    }
};
window.handleError = function (xhr, exception) {
hideLoader();
$('#workButton').removeClass("hidden");
var msg = '';
if (xhr.status === 0) {
msg = 'Not connect.\n Verify Network.';
} else if (xhr.status == 404) {
msg = 'Requested page not found. [404]';
} else if (xhr.status == 500) {
msg = 'Internal Server Error [500].';
} else if (exception === 'parsererror') {<|fim▁hole|> msg = 'Requested JSON parse failed.';
} else if (exception === 'timeout') {
msg = 'Time out error.';
} else if (exception === 'abort') {
msg = 'Ajax request aborted.';
} else {
msg = 'Uncaught Error.\n' + xhr.responseText;
}
showAlert(msg);
};<|fim▁end|>
| |
<|file_name|>main.cc<|end_file_name|><|fim▁begin|>#include <iomanip>
#include <iostream>
using namespace std;
// Read a count n followed by n integers; print their arithmetic mean
// with 12 digits after the decimal point.
int main() {
    ios_base::sync_with_stdio(false);
    cin.tie(nullptr);

    int n;
    // A missing or non-positive count means there is nothing to
    // average; bail out instead of dividing by zero.
    if (!(cin >> n) || n <= 0) {
        return 0;
    }

    double total = 0.0;
    // int loop index: avoids the signed/unsigned comparison the
    // original size_t counter caused against the int count.
    for (int i = 0; i < n; i++) {
        int p_i;
        cin >> p_i;
        total += p_i;
    }

    cout << setprecision(12) << fixed << (total / n) << endl;
    return 0;
}
|
<|file_name|>simple-https-server.py<|end_file_name|><|fim▁begin|># taken from http://www.piware.de/2011/01/creating-an-https-server-in-python/
# generate server.xml with the following command:
# openssl req -new -x509 -keyout server.pem -out server.pem -days 365 -nodes
# run as follows:
# python simple-https-server.py
# then in your browser, visit:
# https://localhost:4443
import BaseHTTPServer, SimpleHTTPServer
import ssl
httpd = BaseHTTPServer.HTTPServer(('localhost', 4443), SimpleHTTPServer.SimpleHTTPRequestHandler)
httpd.socket = ssl.wrap_socket (httpd.socket, certfile='./server.pem', server_side=True)<|fim▁hole|><|fim▁end|>
|
httpd.serve_forever()
|
<|file_name|>update-tuple.spec.tsx<|end_file_name|><|fim▁begin|>import app, { Component, Update } from '../src/apprun';
describe('Component', () => {
it('should support non-event-typed update tuple', () => {
class Test extends Component {
state = 0;
update = [
['+1', state => ++state, { once: true }],
['+1a', state => ++state],
];
}
const t = new Test().start() as any;
t.run('+1');
t.run('+1');
t.run('+1a');
expect(t.state).toEqual(2);
})
it('should support state-typed update tuple and event alias', () => {
class Test extends Component {
state = 0;
update: Update<number> = [
['method1, method2', state => ++state]
];
}
const t = new Test().start() as any;
t.run('method1');
t.run('method2');<|fim▁hole|>
type Events = '+1-once' | '+1';
class Test extends Component<number, Events> {
state = 0;
update: Update<number, Events> = [
['+1-once', state => ++state, { once: true }],
['+1', state => ++state],
];
}
const t = new Test().start() as any;
t.run('+1-once');
t.run('+1-once');
t.run('+1');
expect(t.state).toEqual(2);
})
})<|fim▁end|>
|
expect(t.state).toEqual(2);
})
it('should support event-typed update tuple', () => {
|
<|file_name|>inflating_cryptocurrency.rs<|end_file_name|><|fim▁begin|>// Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use exonum::{
crypto::PublicKey,
helpers::Height,
runtime::{ExecutionContext, ExecutionError, InstanceId},
};
use exonum_derive::{
exonum_interface, BinaryValue, ExecutionFail, FromAccess, ObjectHash, ServiceDispatcher,
ServiceFactory,
};
use exonum_merkledb::{
access::{Access, FromAccess},
MapIndex,
};
use exonum_rust_runtime::{
api::{self, ServiceApiBuilder, ServiceApiState},
DefaultInstance, Service,
};
use serde_derive::{Deserialize, Serialize};
// // // // // // // // // // CONSTANTS // // // // // // // // // //
/// Numeric identifier of the deployed service instance.
pub const SERVICE_ID: InstanceId = 55;
/// Default name of the service instance.
pub const SERVICE_NAME: &str = "cryptocurrency";
/// Initial balance of a newly created wallet.
pub const INIT_BALANCE: u64 = 0;
// // // // // // // // // // PERSISTENT DATA // // // // // // // // // //
/// Persistent record of a single wallet.
///
/// `balance` is a snapshot taken at `last_update_height`; the effective
/// balance grows by one coin per elapsed block (see `actual_balance`).
#[derive(Clone, Debug)]
#[derive(Serialize, Deserialize)]
#[derive(BinaryValue, ObjectHash)]
#[binary_value(codec = "bincode")]
pub struct Wallet {
    /// Public key identifying the wallet owner.
    pub pub_key: PublicKey,
    /// Human-readable name supplied when the wallet was created.
    pub name: String,
    /// Balance snapshot as of `last_update_height` (excludes accrued coins).
    pub balance: u64,
    /// Blockchain height at which `balance` was last recalculated.
    pub last_update_height: u64,
}
impl Wallet {
pub fn new(&pub_key: &PublicKey, name: &str, balance: u64, last_update_height: u64) -> Self {
Self {
pub_key,
name: name.to_owned(),
balance,
last_update_height,
}
}
pub fn actual_balance(&self, height: Height) -> u64 {
assert!(height.0 >= self.last_update_height);
self.balance + height.0 - self.last_update_height
}
pub fn increase(self, amount: u64, height: Height) -> Self {
let balance = self.actual_balance(height) + amount;
Self::new(&self.pub_key, &self.name, balance, height.0)
}
pub fn decrease(self, amount: u64, height: Height) -> Self {
let balance = self.actual_balance(height) - amount;
Self::new(&self.pub_key, &self.name, balance, height.0)
}
}
// // // // // // // // // // DATA LAYOUT // // // // // // // // // //
/// Database layout of the service: a single map from owner key to wallet.
#[derive(FromAccess)]
pub(crate) struct CurrencySchema<T: Access> {
    /// All wallets, keyed by the owner's public key.
    pub wallets: MapIndex<T::Base, PublicKey, Wallet>,
}
impl<T: Access> CurrencySchema<T> {
    /// Constructs the schema rooted at `access`.
    ///
    /// Panics if the schema cannot be restored from the access object
    /// (the `from_root` result is unwrapped).
    pub fn new(access: T) -> Self {
        Self::from_root(access).unwrap()
    }
    /// Gets a specific wallet from the storage.
    pub fn wallet(&self, pub_key: &PublicKey) -> Option<Wallet> {
        self.wallets.get(pub_key)
    }
}
// // // // // // // // // // TRANSACTIONS // // // // // // // // // //
/// Transaction payload: create a new wallet for the transaction author.
#[derive(Clone, Debug)]
#[derive(Serialize, Deserialize)]
#[derive(BinaryValue, ObjectHash)]
#[binary_value(codec = "bincode")]
pub struct CreateWallet {
    /// Name of the wallet to create.
    pub name: String,
}
impl CreateWallet {
    /// Builds the payload from any string-like wallet name.
    pub fn new(name: impl Into<String>) -> Self {
        let name = name.into();
        Self { name }
    }
}
/// Transaction payload: transfer coins between the wallets.
#[derive(Clone, Debug)]
#[derive(Serialize, Deserialize)]
#[derive(BinaryValue, ObjectHash)]
#[binary_value(codec = "bincode")]
pub struct Transfer {
    /// Public key of the receiving wallet.
    pub to: PublicKey,
    /// Number of coins to transfer.
    pub amount: u64,
    /// Arbitrary value making otherwise identical transfers distinct —
    /// presumably to give repeated transfers unique transaction hashes;
    /// confirm against the framework's deduplication rules.
    pub seed: u64,
}
// // // // // // // // // // CONTRACTS // // // // // // // // // //
/// Errors that transaction execution can report to the caller.
#[derive(Debug, ExecutionFail)]
pub enum Error {
    /// Sender and receiver of the transfer are the same.
    SenderSameAsReceiver = 0,
}
#[exonum_interface(auto_ids)]
pub trait CurrencyInterface<Ctx> {<|fim▁hole|> /// Retrieve two wallets to apply the transfer. Check the sender's
/// balance and apply changes to the balances of the wallets.
fn transfer(&self, ctx: Ctx, arg: Transfer) -> Self::Output;
}
impl CurrencyInterface<ExecutionContext<'_>> for CurrencyService {
    type Output = Result<(), ExecutionError>;
    /// Creates a wallet for the transaction author with `INIT_BALANCE`.
    /// Idempotent: an already existing wallet is left untouched.
    fn create_wallet(&self, ctx: ExecutionContext<'_>, arg: CreateWallet) -> Self::Output {
        // NOTE(review): `unwrap` panics when the caller has no author
        // (e.g. a non-transaction call) — confirm the runtime guarantees
        // an author for this interface.
        let author = ctx.caller().author().unwrap();
        let height = ctx.data().for_core().height();
        let mut schema = CurrencySchema::new(ctx.service_data());
        if schema.wallet(&author).is_none() {
            let wallet = Wallet::new(&author, &arg.name, INIT_BALANCE, height.0);
            schema.wallets.put(&author, wallet);
        }
        Ok(())
    }
    /// Moves `arg.amount` coins from the author's wallet to `arg.to`.
    fn transfer(&self, ctx: ExecutionContext<'_>, arg: Transfer) -> Self::Output {
        let author = ctx.caller().author().unwrap();
        // Self-transfers are rejected explicitly.
        if author == arg.to {
            return Err(Error::SenderSameAsReceiver.into());
        }
        let height = ctx.data().for_core().height();
        let mut schema = CurrencySchema::new(ctx.service_data());
        let sender = schema.wallet(&author);
        let receiver = schema.wallet(&arg.to);
        // NOTE(review): a missing wallet or insufficient balance makes the
        // transaction succeed without moving coins — no error is reported.
        if let (Some(sender), Some(receiver)) = (sender, receiver) {
            let amount = arg.amount;
            if sender.actual_balance(height) >= amount {
                let sender = sender.decrease(amount, height);
                let receiver = receiver.increase(amount, height);
                schema.wallets.put(&author, sender);
                schema.wallets.put(&arg.to, receiver);
            }
        }
        Ok(())
    }
}
// // // // // // // // // // REST API // // // // // // // // // //
/// Namespace for the service's HTTP API handlers.
struct CryptocurrencyApi;
/// Query parameters of the balance endpoint.
#[derive(Debug, Serialize, Deserialize)]
struct BalanceQuery {
    /// Public key of the wallet to look up.
    pub_key: PublicKey,
}
/// Shortcut to get data on wallets.
impl CryptocurrencyApi {
    /// Endpoint for retrieving a single wallet.
    ///
    /// Returns the wallet's effective balance at the current blockchain
    /// height, or a not-found error when no such wallet exists.
    async fn balance(state: ServiceApiState, query: BalanceQuery) -> api::Result<u64> {
        let snapshot = state.data();
        let schema = CurrencySchema::new(snapshot.for_executing_service());
        schema
            .wallet(&query.pub_key)
            .map(|wallet| {
                // Inflate the stored snapshot up to the current height.
                let height = snapshot.for_core().height();
                wallet.actual_balance(height)
            })
            .ok_or_else(|| api::Error::not_found().title("Wallet not found"))
    }
    /// Registers the handlers on the public API scope.
    fn wire(builder: &mut ServiceApiBuilder) {
        builder.public_scope().endpoint("v1/balance", Self::balance);
    }
}
// // // // // // // // // // SERVICE DECLARATION // // // // // // // // // //
/// The cryptocurrency service: dispatches `CurrencyInterface` transactions
/// and exposes the wallet-balance HTTP API.
#[derive(Debug, ServiceDispatcher, ServiceFactory)]
#[service_factory(artifact_name = "cryptocurrency", artifact_version = "1.0.0")]
#[service_dispatcher(implements("CurrencyInterface"))]
pub struct CurrencyService;
/// Implement a `Service` trait for the service.
impl Service for CurrencyService {
    /// Hooks the HTTP API handlers into the service lifecycle.
    fn wire_api(&self, builder: &mut ServiceApiBuilder) {
        CryptocurrencyApi::wire(builder)
    }
}
/// Default deployment parameters used when the service is started without
/// explicit configuration.
impl DefaultInstance for CurrencyService {
    const INSTANCE_ID: u32 = SERVICE_ID;
    const INSTANCE_NAME: &'static str = SERVICE_NAME;
}
|
type Output;
/// Apply logic to the storage when executing the transaction.
fn create_wallet(&self, ctx: Ctx, arg: CreateWallet) -> Self::Output;
|
<|file_name|>__init___flymake.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 David Holm <[email protected]>
# This file is part of SimpleGUITk - https://github.com/dholm/simpleguitk<|fim▁hole|>
from .plot import plot_lines<|fim▁end|>
|
# See the file 'COPYING' for copying permission.
|
<|file_name|>0025_auto__add_topshops.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the migration: create the ``catalog_topshops`` table."""
        # Adding model 'TopShops'
        db.create_table(u'catalog_topshops', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('shop', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['catalog.Shop'])),
            ('score', self.gf('django.db.models.fields.IntegerField')()),
            ('time', self.gf('django.db.models.fields.DateTimeField')()),
        ))
        # Let South emit the post-create signal for the new model.
        db.send_create_signal(u'catalog', ['TopShops'])
    def backwards(self, orm):
        """Revert the migration: drop the ``catalog_topshops`` table."""
        # Deleting model 'TopShops'
        db.delete_table(u'catalog_topshops')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'catalog.comment': {
'Meta': {'object_name': 'Comment'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'body': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
u'catalog.documentation': {
'Meta': {'object_name': 'Documentation'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '1000'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'null': 'True', 'blank': 'True'})
},
u'catalog.emailcollect': {
'Meta': {'object_name': 'EmailCollect'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '30'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'catalog.image': {
'Meta': {'object_name': 'Image'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'large_url': ('django.db.models.fields.URLField', [], {'max_length': '1000'}),
'small_url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'images'", 'null': 'True', 'to': u"orm['django_facebook.FacebookCustomUser']"})
},
u'catalog.likemakey': {
'Meta': {'object_name': 'LikeMakey'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'makey': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Makey']"}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
u'catalog.likeproduct': {
'Meta': {'object_name': 'LikeProduct'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Product']"}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
u'catalog.likeproductdescription': {
'Meta': {'object_name': 'LikeProductDescription'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product_description': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.ProductDescription']"}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
u'catalog.likeproductimage': {
'Meta': {'object_name': 'LikeProductImage'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.ProductImage']"}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Product']"}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
u'catalog.likeshop': {
'Meta': {'object_name': 'LikeShop'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Shop']"}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
u'catalog.liketutorial': {
'Meta': {'object_name': 'LikeTutorial'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Product']"}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'tutorial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Tutorial']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
u'catalog.list': {
'Meta': {'object_name': 'List'},
'access': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'access'", 'symmetrical': 'False', 'to': u"orm['django_facebook.FacebookCustomUser']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'items': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalog.ListItem']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'owner'", 'to': u"orm['django_facebook.FacebookCustomUser']"})
},
u'catalog.listgroup': {
'Meta': {'object_name': 'ListGroup'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lists': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalog.List']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'catalog.listitem': {
'Meta': {'object_name': 'ListItem'},
'createdby': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Product']"})
},
u'catalog.location': {
'Meta': {'object_name': 'Location'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'catalog.logidenticalproduct': {<|fim▁hole|> u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product1': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'product1'", 'to': u"orm['catalog.Product']"}),
'product2': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'product2'", 'to': u"orm['catalog.Product']"}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
u'catalog.makey': {
'Meta': {'object_name': 'Makey'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'collaborators': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'collaborators'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['django_facebook.FacebookCustomUser']"}),
'comments': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeycomments'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['catalog.Comment']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'disabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'documentations': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeydocumentations'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['catalog.Documentation']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeyimages'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['catalog.Image']"}),
'likes': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'makeylikes'", 'to': u"orm['django_facebook.FacebookCustomUser']", 'through': u"orm['catalog.LikeMakey']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeynotes'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['catalog.Note']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'null': 'True', 'blank': 'True'})
},
u'catalog.note': {
'Meta': {'object_name': 'Note'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'body': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
u'catalog.product': {
'Meta': {'object_name': 'Product'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'disabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identicalto': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Product']", 'null': 'True', 'blank': 'True'}),
'makeys': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'partsused'", 'blank': 'True', 'to': u"orm['catalog.Makey']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'sku': ('django.db.models.fields.IntegerField', [], {}),
'tutorials': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['catalog.Tutorial']", 'symmetrical': 'False', 'blank': 'True'})
},
u'catalog.productdescription': {
'Meta': {'object_name': 'ProductDescription'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '100000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productdescriptions'", 'to': u"orm['catalog.Product']"}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Shop']", 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'blank': 'True'}),
'user_or_shop': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'catalog.productimage': {
'Meta': {'object_name': 'ProductImage'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productimages'", 'to': u"orm['catalog.Product']"}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Shop']", 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'null': 'True', 'blank': 'True'})
},
u'catalog.productshopurl': {
'Meta': {'object_name': 'ProductShopUrl'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productshopurls'", 'to': u"orm['catalog.Product']"}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Shop']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
u'catalog.searchlog': {
'Meta': {'object_name': 'SearchLog'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'term': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'null': 'True', 'blank': 'True'})
},
u'catalog.shop': {
'Meta': {'object_name': 'Shop'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'disabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'shopimages'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['catalog.Image']"}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Location']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
u'catalog.toindexstore': {
'Meta': {'object_name': 'ToIndexStore'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Location']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
u'catalog.topmakeys': {
'Meta': {'object_name': 'TopMakeys'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'makey': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Makey']"}),
'score': ('django.db.models.fields.IntegerField', [], {}),
'time': ('django.db.models.fields.DateTimeField', [], {})
},
u'catalog.topproducts': {
'Meta': {'object_name': 'TopProducts'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {}),
'time': ('django.db.models.fields.DateTimeField', [], {})
},
u'catalog.topshops': {
'Meta': {'object_name': 'TopShops'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Shop']"}),
'time': ('django.db.models.fields.DateTimeField', [], {})
},
u'catalog.toptutorials': {
'Meta': {'object_name': 'TopTutorials'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'tutorial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Tutorial']"})
},
u'catalog.topusers': {
'Meta': {'object_name': 'TopUsers'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
u'catalog.tutorial': {
'Meta': {'object_name': 'Tutorial'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'disabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'tutorialimages'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['catalog.Image']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'null': 'True', 'blank': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'django_facebook.facebookcustomuser': {
'Meta': {'object_name': 'FacebookCustomUser'},
'about_me': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'access_token': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'blog_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'facebook_id': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True', 'null': 'True', 'blank': 'True'}),
'facebook_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'facebook_open_graph': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'facebook_profile_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'new_token_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'raw_data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['catalog']<|fim▁end|>
|
'Meta': {'object_name': 'LogIdenticalProduct'},
|
<|file_name|>stdin.rs<|end_file_name|><|fim▁begin|>use std::io::{Read, Result, Error};
use libc::{self, c_void, size_t};
use sync::NotThreadSafe;
use buf::CopyingBufReader;
lazy_static! {
static ref STDIN: NotThreadSafe<CopyingBufReader<Stdin>> = {
NotThreadSafe::new(CopyingBufReader::new(Stdin::new()))
};
}
/// Returns a mutable reference to the process-wide buffered stdin reader.
///
/// NOTE(review): every call hands out a `&'static mut` to the same global,
/// so two live calls alias mutable state, which is undefined behavior —
/// verify this is only used single-threaded and non-reentrantly (the
/// `NotThreadSafe` wrapper suggests that is the assumption).
pub fn stdin() -> &'static mut CopyingBufReader<Stdin> {
    unsafe { STDIN.get().as_mut().unwrap() }
}
/// Zero-sized handle for reading from the process's standard input.
pub struct Stdin;
impl Stdin {
    /// Creates the stdin handle (no state to initialize).
    fn new() -> Stdin {
        Stdin {}
    }
}
impl Read for Stdin {
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
let ret = unsafe {<|fim▁hole|> buf.as_mut_ptr() as *mut c_void,
buf.len() as size_t
)
};
if ret == -1 {
Err(Error::last_os_error())
} else {
Ok(ret as usize)
}
}
}<|fim▁end|>
|
libc::read(
libc::STDIN_FILENO,
|
<|file_name|>csrf.js<|end_file_name|><|fim▁begin|>var crypto = require('crypto');
var scmp = require('scmp');
var utils = require('keystone-utils');
// The DISABLE_CSRF environment variable is available to automatically pass
// CSRF validation. This is useful in development scenarios where you want to
// restart the node process and aren't using a persistent session store, but
// should NEVER be set in production environments!!
// When true, all CSRF validation passes automatically (development only).
var DISABLE_CSRF = process.env.DISABLE_CSRF === 'true';
// Body/query field name the token is read from.
exports.TOKEN_KEY = '_csrf';
// res.locals key holding the name of the token field.
exports.LOCAL_KEY = 'csrf_token_key';
// res.locals key holding the token value itself.
exports.LOCAL_VALUE = 'csrf_token_value';
// Session key the per-session secret is stored under.
exports.SECRET_KEY = exports.TOKEN_KEY + '_secret';
// Length used for the secret (bytes of entropy) and the token salt (characters).
exports.SECRET_LENGTH = 10;
// Request headers the token may alternatively be supplied in.
exports.CSRF_HEADER_KEY = 'x-csrf-token';
exports.XSRF_HEADER_KEY = 'x-xsrf-token';
// Cookie used to expose the token to client-side code (Angular-style XSRF).
exports.XSRF_COOKIE_KEY = 'XSRF-TOKEN';
exports.createSecret = function () {
return crypto.pseudoRandomBytes(exports.SECRET_LENGTH).toString('base64');
};
/**
 * Returns the session's CSRF secret, creating and storing one on first use.
 */
exports.getSecret = function (req) {
	return req.session[exports.SECRET_KEY] || (req.session[exports.SECRET_KEY] = exports.createSecret());
};
/**
 * Creates a new token: a random salt combined with the session secret via
 * `tokenize` (salt + hash of salt + secret).
 */
exports.createToken = function (req) {
	return tokenize(utils.randomString(exports.SECRET_LENGTH), exports.getSecret(req));
};
/**
 * Returns the token for this response, creating it on first call, and
 * mirrors it into the XSRF cookie so client-side code can echo it back.
 * The cookie is deliberately readable by JavaScript (no httpOnly flag).
 */
exports.getToken = function (req, res) {
	res.locals[exports.LOCAL_VALUE] = res.locals[exports.LOCAL_VALUE] || exports.createToken(req);
	res.cookie(exports.XSRF_COOKIE_KEY, res.locals[exports.LOCAL_VALUE]);
	return res.locals[exports.LOCAL_VALUE];
};
exports.requestToken = function (req) {
if (req.body && req.body[exports.TOKEN_KEY]) {
return req.body[exports.TOKEN_KEY];
} else if (req.query && req.query[exports.TOKEN_KEY]) {
return req.query[exports.TOKEN_KEY];
} else if (req.headers && req.headers[exports.XSRF_HEADER_KEY]) {
return req.headers[exports.XSRF_HEADER_KEY];
} else if (req.headers && req.headers[exports.CSRF_HEADER_KEY]) {
return req.headers[exports.CSRF_HEADER_KEY];
}
return '';
};
/**
 * Validates a CSRF token against the session secret.
 *
 * The expected value is recomputed from the token's salt prefix (first
 * SECRET_LENGTH characters) and the session secret, then compared with
 * `scmp` for a constant-time match.
 *
 * @param {Object} req - request carrying the session secret
 * @param {String} [token] - token to check; extracted from the request
 *   via `requestToken` when omitted
 * @returns {Boolean} true when the token is valid (or CSRF is disabled)
 */
exports.validate = function (req, token) {
	// Allow environment variable to disable check
	if (DISABLE_CSRF) return true;
	if (arguments.length === 1) {
		token = exports.requestToken(req);
	}
	if (typeof token !== 'string') {
		return false;
	}
	return scmp(token, tokenize(token.slice(0, exports.SECRET_LENGTH), req.session[exports.SECRET_KEY]));
};
/**
 * Express middleware helpers for wiring CSRF protection into an app.
 */
exports.middleware = {
	// Exposes the token key/value to templates and sets the XSRF cookie.
	init: function (req, res, next) {
		res.locals[exports.LOCAL_KEY] = exports.LOCAL_VALUE;
		exports.getToken(req, res);
		next();
	},
	// Rejects state-changing requests that carry a missing or invalid token.
	validate: function (req, res, next) {
		// Allow environment variable to disable check
		if (DISABLE_CSRF) return next();
		// Bail on safe methods
		if (req.method === 'GET' || req.method === 'HEAD' || req.method === 'OPTIONS') {
			return next();
		}
		// Validate token
		if (exports.validate(req)) {
			next();
		} else {
			res.statusCode = 403;
			next(new Error('CSRF token mismatch'));
		}
	},
};
|
function tokenize (salt, secret) {
return salt + crypto.createHash('sha1').update(salt + secret).digest('hex');
}
|
<|file_name|>hashing.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
<|fim▁hole|> import hashlib
return hashlib.md5(str(value).encode("utf-8")).hexdigest()[:hash_length]<|fim▁end|>
|
def hash_value(value, hash_length: int = 12) -> str:
|
<|file_name|>test-warns-dead-code.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed<|fim▁hole|>#![deny(dead_code)]
fn dead() {} //~ error: function is never used: `dead`
fn main() {}<|fim▁end|>
|
// except according to those terms.
// compile-flags: --test
|
<|file_name|>poisonBush.py<|end_file_name|><|fim▁begin|>import src
class PoisonBush(src.items.Item):
"""
a hard to remove poison plant
"""
type = "PoisonBush"
name = "poison brush"
description = ""
usageInfo = """
You can use it to loose 100 satiation.
"""
walkable = False
charges = 0
    def __init__(self):
        """
        set up internal state

        Registers the bush's display character and makes the ``charges``
        counter part of the item's persisted attributes.
        """
        super().__init__(display=src.canvas.displayChars.poisonBush)
        # persist the number of uses across save/load cycles
        self.attributesToStore.extend(["charges"])
    def apply(self, character):
        """
        handle a character trying to use this item
        by draining up to 100 satiation from the character
        (the previous docstring said "killing"; this method only drains
        satiation — any starvation death would happen elsewhere)

        Each use adds one poison charge; once more than 10 charges have
        accumulated the bush replaces itself with an EncrustedPoisonBush.

        Parameters:
            character: the character trying to use this item
        """
        self.charges += 1
        # drain 100 satiation, clamping at 0
        if 100 > character.satiation:
            character.satiation = 0
        else:
            character.satiation -= 100
        # enough blood collected: transform in place
        if self.charges > 10:
            new = src.items.itemMap["EncrustedPoisonBush"]()
            self.container.addItem(new,self.getPosition())
            self.container.removeItem(self)
        character.addMessage("you give your blood to the poison bush")
def spawn(self, distance=1):
"""
spawn a new poison bloom
Parameters:
distance: the spawning distance
"""
if not (self.xPosition and self.yPosition):
return
direction = (
2 * self.xPosition + 3 * self.yPosition + src.gamestate.gamestate.tick
) % 4
direction = (random.randint(1, distance + 1), random.randint(1, distance + 1))
newPos = (self.xPosition + direction[0] - 5, self.yPosition + direction[1] - 5, self.zPosition)
if (
newPos[0] < 1
or newPos[1] < 1
or newPos[0] > 15 * 15 - 2
or newPos[1] > 15 * 15 - 2
):
return
if not (
newPos in self.container.itemByCoordinates
and len(self.container.itemByCoordinates[newPos])
):
new = itemMap["PoisonBloom"]()
self.container.addItem(new,newPos)
def getLongInfo(self):
"""
returns a longer than normal description text<|fim▁hole|>
text = super().getLongInfo()
text += "poison charges: %s" % (self.charges)
return text
def destroy(self, generateScrap=True):
"""
destroy the item and leave a exploding thing
Parameters:
generateScrap: flag to toggle leaving residue
"""
new = src.items.itemMap["FireCrystals"]()
self.container.addItem(new,self.getPosition())
character = characters.Exploder()
character.solvers = [
"NaiveActivateQuest",
"ActivateQuestMeta",
"NaiveExamineQuest",
"ExamineQuestMeta",
"NaivePickupQuest",
"NaiveMurderQuest",
"DrinkQuest",
"NaiveExamineQuest",
"ExamineQuestMeta",
]
character.faction = "monster"
command = ""
if src.gamestate.gamestate.tick % 4 == 0:
command += "A"
if src.gamestate.gamestate.tick % 4 == 1:
command += "W"
if src.gamestate.gamestate.tick % 4 == 2:
command += "S"
if src.gamestate.gamestate.tick % 4 == 3:
command += "D"
if self.xPosition % 4 == 0:
command += "A"
if self.xPosition % 4 == 1:
command += "W"
if self.xPosition % 4 == 2:
command += "S"
if self.xPosition % 4 == 3:
command += "D"
if self.yPosition % 4 == 0:
command += "A"
if self.yPosition % 4 == 1:
command += "W"
if self.yPosition % 4 == 2:
command += "S"
if self.yPosition % 4 == 3:
command += "D"
character.macroState["macros"]["m"] = list(command + "_m")
character.runCommandString("_m",clear=True)
character.satiation = 100
self.container.addCharacter(character, self.xPosition, self.yPosition)
super().destroy(generateScrap=False)
src.items.addType(PoisonBush)<|fim▁end|>
|
Returns:
the description text
"""
|
<|file_name|>demo3.py<|end_file_name|><|fim▁begin|>import zmq
ctx = zmq.Context.instance()
server = ctx.socket(zmq.PUSH)
server.bind('inproc://foo')
clients = [ctx.socket(zmq.PULL) for i in range(10)]
poller = zmq.Poller()
for client in clients:<|fim▁hole|>
for client in clients:
server.send(b'DATA')
for sock, flags in poller.poll(0):
print(sock, repr(sock.recv()))<|fim▁end|>
|
client.connect('inproc://foo')
poller.register(client, zmq.POLLIN)
|
<|file_name|>canvas.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python2.4
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
#
# $Id$
#
# Copyright (C) 1999-2006 Keith Dart <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
from gtk import *
from gnome.ui import *
win = GtkWindow()
win.connect('destroy', mainquit)
win.set_title('Canvas test')
canvas = GnomeCanvas()
canvas.set_size(300, 300)
win.add(canvas)
canvas.show()
canvas.root().add('line', points=(10,10, 90,10, 90,90, 10,90),
width_pixels=10, fill_color='blue')
<|fim▁hole|>
mainloop()<|fim▁end|>
|
win.show()
|
<|file_name|>SourcePositionTest.py<|end_file_name|><|fim▁begin|>import unittest
from os.path import relpath
from coalib.results.SourcePosition import SourcePosition
from coala_utils.ContextManagers import prepare_file
class SourcePositionTest(unittest.TestCase):
def test_initialization(self):
with self.assertRaises(TypeError):
SourcePosition(None, 0)
with self.assertRaises(ValueError):
SourcePosition('file', None, 1)
# However these should work:<|fim▁hole|> SourcePosition('file', 4, None)
SourcePosition('file', 4, 5)
def test_string_conversion(self):
uut = SourcePosition('filename', 1)
self.assertRegex(
repr(uut),
"<SourcePosition object\\(file='.*filename', line=1, "
'column=None\\) at 0x[0-9a-fA-F]+>')
self.assertEqual(str(uut), 'filename:1')
uut = SourcePosition('None', None)
self.assertRegex(
repr(uut),
"<SourcePosition object\\(file='.*None', line=None, column=None\\) "
'at 0x[0-9a-fA-F]+>')
self.assertEqual(str(uut), 'None')
uut = SourcePosition('filename', 3, 2)
self.assertEqual(str(uut), 'filename:3:2')
def test_json(self):
with prepare_file([''], None) as (_, filename):
uut = SourcePosition(filename, 1)
self.assertEqual(uut.__json__(use_relpath=True)
['file'], relpath(filename))
    def assert_equal(self, first, second):
        # Helper: assert equality through all three ordering operators
        # (>=, ==, <=) so every rich-comparison method of SourcePosition
        # is exercised, not just __eq__.
        self.assertGreaterEqual(first, second)
        self.assertEqual(first, second)
        self.assertLessEqual(first, second)
    def assert_ordering(self, greater, lesser):
        # Helper: assert a strict ordering from both directions (>, >=, !=,
        # <=, <) so the full comparison protocol of SourcePosition is
        # checked consistently.
        self.assertGreater(greater, lesser)
        self.assertGreaterEqual(greater, lesser)
        self.assertNotEqual(greater, lesser)
        self.assertLessEqual(lesser, greater)
        self.assertLess(lesser, greater)
|
SourcePosition('file', None, None)
|
<|file_name|>sync_test.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sync
import (
"context"
"fmt"
"net"
"reflect"
"testing"
"time"
"github.com/golang/glog"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/kubernetes/pkg/controller/node/ipam/cidrset"
"k8s.io/kubernetes/pkg/controller/node/ipam/test"
"k8s.io/api/core/v1"
)
var (
_, clusterCIDRRange, _ = net.ParseCIDR("10.1.0.0/16")
)
// fakeEvent captures one node warning event emitted through
// EmitNodeWarningEvent during a test.
type fakeEvent struct {
	nodeName string // node the event was attached to
	reason string // machine-readable event reason
}
// fakeAPIs is a configurable test double for the cloud, kubernetes and
// controller interfaces consumed by the sync loop. Outputs are injected
// through the fields below; every invocation is recorded in calls so a
// test can dump and inspect the exact call trace.
type fakeAPIs struct {
	aliasRange *net.IPNet // returned by Alias
	aliasErr error // error returned by Alias
	addAliasErr error // error returned by AddAlias
	nodeRet *v1.Node // returned by Node
	nodeErr error // error returned by Node
	updateNodeErr error // error returned by UpdateNodePodCIDR
	resyncTimeout time.Duration // ResyncTimeout override; zero selects the long default
	reportChan chan struct{} // when non-nil, ReportResult signals here after recording
	updateNodeNetworkUnavailableErr error // error returned by UpdateNodeNetworkUnavailable
	calls []string // human-readable trace of API invocations
	events []fakeEvent // warning events captured by EmitNodeWarningEvent
	results []error // errors passed to ReportResult
}
func (f *fakeAPIs) Alias(ctx context.Context, nodeName string) (*net.IPNet, error) {
f.calls = append(f.calls, fmt.Sprintf("alias %v", nodeName))
return f.aliasRange, f.aliasErr
}
func (f *fakeAPIs) AddAlias(ctx context.Context, nodeName string, cidrRange *net.IPNet) error {
f.calls = append(f.calls, fmt.Sprintf("addAlias %v %v", nodeName, cidrRange))
return f.addAliasErr
}
func (f *fakeAPIs) Node(ctx context.Context, name string) (*v1.Node, error) {
f.calls = append(f.calls, fmt.Sprintf("node %v", name))
return f.nodeRet, f.nodeErr
}
func (f *fakeAPIs) UpdateNodePodCIDR(ctx context.Context, node *v1.Node, cidrRange *net.IPNet) error {
f.calls = append(f.calls, fmt.Sprintf("updateNode %v", node))
return f.updateNodeErr
}
func (f *fakeAPIs) UpdateNodeNetworkUnavailable(nodeName string, unavailable bool) error {
f.calls = append(f.calls, fmt.Sprintf("updateNodeNetworkUnavailable %v %v", nodeName, unavailable))
return f.updateNodeNetworkUnavailableErr
}
func (f *fakeAPIs) EmitNodeWarningEvent(nodeName, reason, fmtStr string, args ...interface{}) {
f.events = append(f.events, fakeEvent{nodeName, reason})
}
func (f *fakeAPIs) ReportResult(err error) {
glog.V(2).Infof("ReportResult %v", err)
f.results = append(f.results, err)
if f.reportChan != nil {
f.reportChan <- struct{}{}
}
}
// ResyncTimeout returns the injected resync interval; when the test did
// not set one it falls back to 10000s, effectively disabling resync for
// the duration of a test.
func (f *fakeAPIs) ResyncTimeout() time.Duration {
	if f.resyncTimeout != 0 {
		return f.resyncTimeout
	}
	return time.Second * 10000
}
func (f *fakeAPIs) dumpTrace() {
for i, x := range f.calls {
glog.Infof("trace %v: %v", i, x)
}
}
var nodeWithoutCIDRRange = &v1.Node{
ObjectMeta: metav1.ObjectMeta{Name: "node1"},
}
var nodeWithCIDRRange = &v1.Node{
ObjectMeta: metav1.ObjectMeta{Name: "node1"},
Spec: v1.NodeSpec{PodCIDR: "10.1.1.0/24"},
}
// TestNodeSyncUpdate drives a single Update through the sync loop for a
// table of node/cloud states and checks both the warning events emitted
// on the node and whether ReportResult observed an error.
func TestNodeSyncUpdate(t *testing.T) {
	t.Parallel()
	for _, tc := range []struct {
		desc string
		mode NodeSyncMode // sync direction under test (cloud->cluster or cluster->cloud)
		node *v1.Node // node passed to Update; nil forces a fetch via the Node API
		fake fakeAPIs // injected API behavior for this case
		events []fakeEvent // expected node warning events
		wantError bool // whether any reported result should be an error
	}{
		{
			desc: "validate range ==",
			mode: SyncFromCloud,
			node: nodeWithCIDRRange,
			fake: fakeAPIs{
				aliasRange: test.MustParseCIDR(nodeWithCIDRRange.Spec.PodCIDR),
			},
		},
		{
			// cloud alias disagrees with the node's PodCIDR: expect a
			// mismatch warning but no hard error
			desc: "validate range !=",
			mode: SyncFromCloud,
			node: nodeWithCIDRRange,
			fake: fakeAPIs{aliasRange: test.MustParseCIDR("192.168.0.0/24")},
			events: []fakeEvent{{"node1", "CloudCIDRAllocatorMismatch"}},
		},
		{
			desc: "update alias from node",
			mode: SyncFromCloud,
			node: nodeWithCIDRRange,
			events: []fakeEvent{{"node1", "CloudCIDRAllocatorInvalidMode"}},
			wantError: true,
		},
		{
			desc: "update alias from node",
			mode: SyncFromCluster,
			node: nodeWithCIDRRange,
			// XXX/bowei -- validation
		},
		{
			desc: "update node from alias",
			mode: SyncFromCloud,
			node: nodeWithoutCIDRRange,
			fake: fakeAPIs{aliasRange: test.MustParseCIDR("10.1.2.3/16")},
			// XXX/bowei -- validation
		},
		{
			desc: "update node from alias",
			mode: SyncFromCluster,
			node: nodeWithoutCIDRRange,
			fake: fakeAPIs{aliasRange: test.MustParseCIDR("10.1.2.3/16")},
			events: []fakeEvent{{"node1", "CloudCIDRAllocatorInvalidMode"}},
			wantError: true,
		},
		{
			desc: "allocate range",
			mode: SyncFromCloud,
			node: nodeWithoutCIDRRange,
			events: []fakeEvent{{"node1", "CloudCIDRAllocatorInvalidMode"}},
			wantError: true,
		},
		{
			desc: "allocate range",
			mode: SyncFromCluster,
			node: nodeWithoutCIDRRange,
		},
		{
			// nil node: the syncer must fetch the node itself via Node()
			desc: "update with node==nil",
			mode: SyncFromCluster,
			node: nil,
			fake: fakeAPIs{
				nodeRet: nodeWithCIDRRange,
			},
			wantError: false,
		},
	} {
		sync := New(&tc.fake, &tc.fake, &tc.fake, tc.mode, "node1", cidrset.NewCIDRSet(clusterCIDRRange, 24))
		doneChan := make(chan struct{})
		// Do a single step of the loop.
		go sync.Loop(doneChan)
		sync.Update(tc.node)
		// Closing opChan makes Loop exit after processing the one update;
		// doneChan then signals the loop has fully terminated.
		close(sync.opChan)
		<-doneChan
		tc.fake.dumpTrace()
		if !reflect.DeepEqual(tc.fake.events, tc.events) {
			t.Errorf("%v, %v; fake.events = %#v, want %#v", tc.desc, tc.mode, tc.fake.events, tc.events)
		}
		// An error anywhere in the reported results counts as failure.
		var hasError bool
		for _, r := range tc.fake.results {
			hasError = hasError || (r != nil)
		}
		if hasError != tc.wantError {
			t.Errorf("%v, %v; hasError = %t, errors = %v, want %t",
				tc.desc, tc.mode, hasError, tc.fake.events, tc.wantError)
		}
	}
}
func TestNodeSyncResync(t *testing.T) {
fake := &fakeAPIs{
nodeRet: nodeWithCIDRRange,
resyncTimeout: time.Millisecond,
reportChan: make(chan struct{}),
}
sync := New(fake, fake, fake, SyncFromCluster, "node1", cidrset.NewCIDRSet(clusterCIDRRange, 24))
doneChan := make(chan struct{})
<|fim▁hole|> // Unblock loop().
go func() {
<-fake.reportChan
}()
<-doneChan
fake.dumpTrace()
}
func TestNodeSyncDelete(t *testing.T) {
t.Parallel()
for _, tc := range []struct {
desc string
mode NodeSyncMode
node *v1.Node
fake fakeAPIs
}{
{
desc: "delete",
mode: SyncFromCluster,
node: nodeWithCIDRRange,
},
{
desc: "delete without CIDR range",
mode: SyncFromCluster,
node: nodeWithoutCIDRRange,
},
{
desc: "delete with invalid CIDR range",
mode: SyncFromCluster,
node: &v1.Node{
ObjectMeta: metav1.ObjectMeta{Name: "node1"},
Spec: v1.NodeSpec{PodCIDR: "invalid"},
},
},
} {
sync := New(&tc.fake, &tc.fake, &tc.fake, tc.mode, "node1", cidrset.NewCIDRSet(clusterCIDRRange, 24))
doneChan := make(chan struct{})
// Do a single step of the loop.
go sync.Loop(doneChan)
sync.Delete(tc.node)
<-doneChan
tc.fake.dumpTrace()
/*
if !reflect.DeepEqual(tc.fake.events, tc.events) {
t.Errorf("%v, %v; fake.events = %#v, want %#v", tc.desc, tc.mode, tc.fake.events, tc.events)
}
var hasError bool
for _, r := range tc.fake.results {
hasError = hasError || (r != nil)
}
if hasError != tc.wantError {
t.Errorf("%v, %v; hasError = %t, errors = %v, want %t",
tc.desc, tc.mode, hasError, tc.fake.events, tc.wantError)
}
*/
}
}<|fim▁end|>
|
go sync.Loop(doneChan)
<-fake.reportChan
close(sync.opChan)
|
<|file_name|>test_account_payment_partner.py<|end_file_name|><|fim▁begin|># Copyright 2017 ForgeFlow S.L.
# Copyright 2021 Tecnativa - Víctor Martínez
# License AGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import _, fields
from odoo.exceptions import UserError, ValidationError
from odoo.fields import Date
from odoo.tests.common import Form, SavepointCase
class TestAccountPaymentPartner(SavepointCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.res_users_model = cls.env["res.users"]
cls.move_model = cls.env["account.move"]
cls.journal_model = cls.env["account.journal"]
cls.payment_mode_model = cls.env["account.payment.mode"]
cls.partner_bank_model = cls.env["res.partner.bank"]
# Refs
cls.company = cls.env.ref("base.main_company")
cls.acct_type_payable = cls.env.ref("account.data_account_type_payable")
cls.acct_type_receivable = cls.env.ref("account.data_account_type_receivable")
cls.acct_type_expenses = cls.env.ref("account.data_account_type_expenses")
cls.company_2 = cls.env["res.company"].create({"name": "Company 2"})
charts = cls.env["account.chart.template"].search([])
if charts:
cls.chart = charts[0]
else:
raise ValidationError(_("No Chart of Account Template has been defined !"))
old_company = cls.env.user.company_id
cls.env.user.company_id = cls.company_2.id
cls.chart.try_loading()
cls.env.user.company_id = old_company.id
# refs
cls.manual_out = cls.env.ref("account.account_payment_method_manual_out")
cls.manual_out.bank_account_required = True
cls.manual_in = cls.env.ref("account.account_payment_method_manual_in")
cls.journal_sale = cls.env["account.journal"].create(
{
"name": "Test Sales Journal",
"code": "tSAL",
"type": "sale",
"company_id": cls.company.id,
}
)
cls.journal_purchase = cls.env["account.journal"].create(
{
"name": "Test Purchases Journal",
"code": "tPUR",
"type": "purchase",
"company_id": cls.company.id,
}
)
cls.journal_c1 = cls.journal_model.create(
{
"name": "J1",
"code": "J1",
"type": "bank",
"company_id": cls.company.id,
"bank_acc_number": "123456",
}
)
cls.journal_c2 = cls.journal_model.create(
{
"name": "J2",
"code": "J2",
"type": "bank",
"company_id": cls.company_2.id,
"bank_acc_number": "552344",
}
)
cls.supplier_payment_mode = cls.payment_mode_model.create(
{
"name": "Suppliers Bank 1",
"bank_account_link": "variable",
"payment_method_id": cls.manual_out.id,
"show_bank_account_from_journal": True,
"company_id": cls.company.id,
"fixed_journal_id": cls.journal_c1.id,
"variable_journal_ids": [(6, 0, [cls.journal_c1.id])],
}
)
cls.supplier_payment_mode_c2 = cls.payment_mode_model.create(
{
"name": "Suppliers Bank 2",
"bank_account_link": "variable",
"payment_method_id": cls.manual_out.id,
"company_id": cls.company_2.id,
"fixed_journal_id": cls.journal_c2.id,
"variable_journal_ids": [(6, 0, [cls.journal_c2.id])],
}
)
cls.customer_payment_mode = cls.payment_mode_model.create(
{
"name": "Customers to Bank 1",
"bank_account_link": "fixed",
"payment_method_id": cls.manual_in.id,
"company_id": cls.company.id,
"fixed_journal_id": cls.journal_c1.id,
"refund_payment_mode_id": cls.supplier_payment_mode.id,
"variable_journal_ids": [(6, 0, [cls.journal_c1.id])],
}
)
cls.supplier_payment_mode.write(
{"refund_payment_mode_id": cls.customer_payment_mode.id}
)
cls.customer = (
cls.env["res.partner"]
.with_company(cls.company.id)
.create(
{
"name": "Test customer",
"customer_payment_mode_id": cls.customer_payment_mode,
}
)
)
cls.supplier = (
cls.env["res.partner"]
.with_company(cls.company.id)
.create(
{
"name": "Test supplier",
"supplier_payment_mode_id": cls.supplier_payment_mode,
}
)
)
cls.supplier_bank = cls.env["res.partner.bank"].create(
{
"acc_number": "5345345",
"partner_id": cls.supplier.id,
"company_id": cls.company.id,
}
)
cls.supplier_bank_2 = cls.env["res.partner.bank"].create(
{
"acc_number": "3452342",
"partner_id": cls.supplier.id,
"company_id": cls.company_2.id,
}
)
cls.supplier.with_company(
cls.company_2.id
).supplier_payment_mode_id = cls.supplier_payment_mode_c2
cls.invoice_account = cls.env["account.account"].search(
[
("user_type_id", "=", cls.acct_type_payable.id),
("company_id", "=", cls.company.id),
],
limit=1,
)
cls.invoice_line_account = cls.env["account.account"].search(
[
("user_type_id", "=", cls.acct_type_expenses.id),
("company_id", "=", cls.company.id),
],
limit=1,
)
cls.journal_bank = cls.env["res.partner.bank"].create(
{
"acc_number": "GB95LOYD87430237296288",
"partner_id": cls.env.user.company_id.partner_id.id,
}
)
cls.journal = cls.env["account.journal"].create(
{
"name": "BANK TEST",
"code": "TEST",
"type": "bank",
"bank_account_id": cls.journal_bank.id,
}
)
cls.supplier_invoice = cls.move_model.create(
{
"partner_id": cls.supplier.id,
"invoice_date": fields.Date.today(),
"move_type": "in_invoice",
"journal_id": cls.journal_purchase.id,
}
)
    def _create_invoice(self, default_move_type, partner):
        """Create and return a draft invoice/refund of the given type.

        The move gets today's date and a single 100.0 line booked on the
        expense account prepared in ``setUpClass``.

        :param default_move_type: account.move type, e.g. ``in_invoice``
            or ``out_refund``.
        :param partner: ``res.partner`` record used as invoice partner.
        :return: the saved draft ``account.move`` record.
        """
        move_form = Form(
            self.env["account.move"].with_context(default_move_type=default_move_type)
        )
        move_form.partner_id = partner
        move_form.invoice_date = Date.today()
        with move_form.invoice_line_ids.new() as line_form:
            line_form.product_id = self.env.ref("product.product_product_4")
            line_form.name = "product that cost 100"
            line_form.quantity = 1.0
            line_form.price_unit = 100.0
            line_form.account_id = self.invoice_line_account
        return move_form.save()
def test_create_partner(self):
customer = (
self.env["res.partner"]
.with_company(self.company.id)
.create(
{
"name": "Test customer",
"customer_payment_mode_id": self.customer_payment_mode,
}
)
)
self.assertEqual(
customer.with_company(self.company.id).customer_payment_mode_id,
self.customer_payment_mode,
)
self.assertEqual(
customer.with_company(self.company_2.id).customer_payment_mode_id,
self.payment_mode_model,
)
def test_partner_id_changes_compute_partner_bank(self):
# Test _compute_partner_bank is executed when partner_id changes
move_form = Form(
self.env["account.move"].with_context(default_move_type="out_invoice")
)
self.assertFalse(move_form.partner_bank_id)
move_form.partner_id = self.customer
self.assertEqual(move_form.payment_mode_id, self.customer_payment_mode)
self.assertFalse(move_form.partner_bank_id)
def test_out_invoice_onchange(self):
# Test the onchange methods in invoice
invoice = self.move_model.new(
{
"partner_id": self.customer.id,
"move_type": "out_invoice",
"company_id": self.company.id,
}
)
self.assertEqual(invoice.payment_mode_id, self.customer_payment_mode)
invoice.company_id = self.company_2
self.assertEqual(invoice.payment_mode_id, self.payment_mode_model)
invoice.payment_mode_id = False
self.assertFalse(invoice.partner_bank_id)
def test_in_invoice_onchange(self):
# Test the onchange methods in invoice
self.manual_out.bank_account_required = True
invoice = self.move_model.new(
{
"partner_id": self.supplier.id,
"move_type": "in_invoice",
"invoice_date": fields.Date.today(),
"company_id": self.company.id,
}
)
self.assertEqual(invoice.payment_mode_id, self.supplier_payment_mode)
self.assertEqual(invoice.partner_bank_id, self.supplier_bank)
invoice.company_id = self.company_2
self.assertEqual(invoice.payment_mode_id, self.supplier_payment_mode_c2)
self.assertEqual(invoice.partner_bank_id, self.supplier_bank_2)
invoice.payment_mode_id = self.supplier_payment_mode
self.assertTrue(invoice.partner_bank_id)
self.manual_out.bank_account_required = False
invoice.payment_mode_id = self.supplier_payment_mode_c2
self.assertFalse(invoice.partner_bank_id)
invoice.partner_id = False
self.assertEqual(invoice.payment_mode_id, self.supplier_payment_mode_c2)
self.assertEqual(invoice.partner_bank_id, self.partner_bank_model)
def test_invoice_create_in_invoice(self):
invoice = self._create_invoice(
default_move_type="in_invoice", partner=self.supplier
)
invoice.action_post()
aml = invoice.line_ids.filtered(
lambda l: l.account_id.user_type_id == self.acct_type_payable
)
self.assertEqual(invoice.payment_mode_id, aml[0].payment_mode_id)
def test_invoice_create_out_invoice(self):
invoice = self._create_invoice(
default_move_type="out_invoice", partner=self.customer
)
invoice.action_post()
aml = invoice.line_ids.filtered(
lambda l: l.account_id.user_type_id == self.acct_type_receivable
)
self.assertEqual(invoice.payment_mode_id, aml[0].payment_mode_id)
def test_invoice_create_out_refund(self):
self.manual_out.bank_account_required = False
invoice = self._create_invoice(
default_move_type="out_refund", partner=self.customer
)
invoice.action_post()
self.assertEqual(
invoice.payment_mode_id,
self.customer.customer_payment_mode_id.refund_payment_mode_id,
)
def test_invoice_create_in_refund(self):
self.manual_in.bank_account_required = False
invoice = self._create_invoice(
default_move_type="in_refund", partner=self.supplier
)
invoice.action_post()
self.assertEqual(
invoice.payment_mode_id,
self.supplier.supplier_payment_mode_id.refund_payment_mode_id,
)
    def test_invoice_constrains(self):
        """Creating an invoice whose payment mode belongs to a different
        company than the invoice itself must be rejected."""
        with self.assertRaises(UserError):
            self.move_model.create(
                {
                    "partner_id": self.supplier.id,
                    "move_type": "in_invoice",
                    "invoice_date": fields.Date.today(),
                    "company_id": self.company.id,
                    # payment mode of company_2 on a company_1 invoice
                    "payment_mode_id": self.supplier_payment_mode_c2.id,
                }
            )
def test_payment_mode_constrains_01(self):
self.move_model.create(
{
"partner_id": self.supplier.id,
"move_type": "in_invoice",
"invoice_date": fields.Date.today(),
"company_id": self.company.id,
}
)
with self.assertRaises(UserError):
self.supplier_payment_mode.company_id = self.company_2
def test_payment_mode_constrains_02(self):
self.move_model.create(
{
"date": fields.Date.today(),
"journal_id": self.journal_sale.id,
"name": "/",
"ref": "reference",
"state": "draft",
"invoice_line_ids": [
(
0,
0,
{
"account_id": self.invoice_account.id,
"credit": 1000,
"debit": 0,
"name": "Test",
"ref": "reference",
},
),
(
0,
0,
{
"account_id": self.invoice_line_account.id,
"credit": 0,
"debit": 1000,
"name": "Test",
"ref": "reference",
},
),
],
}
)
with self.assertRaises(UserError):
self.supplier_payment_mode.company_id = self.company_2
def test_invoice_in_refund(self):
invoice = self._create_invoice(
default_move_type="in_invoice", partner=self.supplier
)
invoice.partner_bank_id = False
invoice.action_post()
# Lets create a refund invoice for invoice_1.
# I refund the invoice Using Refund Button.
refund_invoice_wizard = (
self.env["account.move.reversal"]
.with_context(
{
"active_ids": [invoice.id],
"active_id": invoice.id,
"active_model": "account.move",
}
)
.create({"refund_method": "refund", "reason": "reason test create"})
)
refund_invoice = self.move_model.browse(
refund_invoice_wizard.reverse_moves()["res_id"]
)
self.assertEqual(
refund_invoice.payment_mode_id,
invoice.payment_mode_id.refund_payment_mode_id,
)
self.assertEqual(refund_invoice.partner_bank_id, invoice.partner_bank_id)
def test_invoice_out_refund(self):
invoice = self._create_invoice(
default_move_type="out_invoice", partner=self.customer
)
invoice.partner_bank_id = False
invoice.action_post()
# Lets create a refund invoice for invoice_1.
# I refund the invoice Using Refund Button.
refund_invoice_wizard = (
self.env["account.move.reversal"]
.with_context(
{<|fim▁hole|> "active_model": "account.move",
}
)
.create({"refund_method": "refund", "reason": "reason test create"})
)
refund_invoice = self.move_model.browse(
refund_invoice_wizard.reverse_moves()["res_id"]
)
self.assertEqual(
refund_invoice.payment_mode_id,
invoice.payment_mode_id.refund_payment_mode_id,
)
self.assertEqual(refund_invoice.partner_bank_id, invoice.partner_bank_id)
def test_partner(self):
self.customer.write({"customer_payment_mode_id": self.customer_payment_mode.id})
self.assertEqual(
self.customer.customer_payment_mode_id, self.customer_payment_mode
)
def test_partner_onchange(self):
customer_invoice = self.move_model.create(
{"partner_id": self.customer.id, "move_type": "out_invoice"}
)
self.assertEqual(customer_invoice.payment_mode_id, self.customer_payment_mode)
self.assertEqual(self.supplier_invoice.partner_bank_id, self.supplier_bank)
vals = {"partner_id": self.customer.id, "move_type": "out_refund"}
invoice = self.move_model.new(vals)
self.assertEqual(invoice.payment_mode_id, self.supplier_payment_mode)
vals = {"partner_id": self.supplier.id, "move_type": "in_refund"}
invoice = self.move_model.new(vals)
self.assertEqual(invoice.payment_mode_id, self.customer_payment_mode)
vals = {"partner_id": False, "move_type": "out_invoice"}
invoice = self.move_model.new(vals)
self.assertFalse(invoice.payment_mode_id)
vals = {"partner_id": False, "move_type": "out_refund"}
invoice = self.move_model.new(vals)
self.assertFalse(invoice.partner_bank_id)
vals = {"partner_id": False, "move_type": "in_invoice"}
invoice = self.move_model.new(vals)
self.assertFalse(invoice.partner_bank_id)
vals = {"partner_id": False, "move_type": "in_refund"}
invoice = self.move_model.new(vals)
self.assertFalse(invoice.partner_bank_id)
def test_onchange_payment_mode_id(self):
mode = self.supplier_payment_mode
mode.payment_method_id.bank_account_required = True
self.supplier_invoice.partner_bank_id = self.supplier_bank.id
self.supplier_invoice.payment_mode_id = mode.id
self.assertEqual(self.supplier_invoice.partner_bank_id, self.supplier_bank)
mode.payment_method_id.bank_account_required = False
self.assertEqual(self.supplier_invoice.partner_bank_id, self.supplier_bank)
self.supplier_invoice.payment_mode_id = False
self.assertFalse(self.supplier_invoice.partner_bank_id)
def test_print_report(self):
self.supplier_invoice.partner_bank_id = self.supplier_bank.id
report = self.env.ref("account.account_invoices")
res = str(report._render_qweb_html(self.supplier_invoice.ids)[0])
self.assertIn(self.supplier_bank.acc_number, res)
payment_mode = self.supplier_payment_mode
payment_mode.show_bank_account_from_journal = True
self.supplier_invoice.payment_mode_id = payment_mode.id
self.supplier_invoice.partner_bank_id = False
res = str(report._render_qweb_html(self.supplier_invoice.ids)[0])
self.assertIn(self.journal_c1.bank_acc_number, res)
payment_mode.bank_account_link = "variable"
payment_mode.variable_journal_ids = [(6, 0, self.journal.ids)]
res = str(report._render_qweb_html(self.supplier_invoice.ids)[0])
self.assertIn(self.journal_bank.acc_number, res)
def test_filter_type_domain(self):
in_invoice = self.move_model.create(
{
"partner_id": self.supplier.id,
"move_type": "in_invoice",
"invoice_date": fields.Date.today(),
"journal_id": self.journal_purchase.id,
}
)
self.assertEqual(in_invoice.payment_mode_filter_type_domain, "outbound")
self.assertEqual(
in_invoice.partner_bank_filter_type_domain, in_invoice.commercial_partner_id
)
out_refund = self.move_model.create(
{
"partner_id": self.customer.id,
"move_type": "out_refund",
"journal_id": self.journal_sale.id,
}
)
self.assertEqual(out_refund.payment_mode_filter_type_domain, "outbound")
self.assertEqual(
out_refund.partner_bank_filter_type_domain, out_refund.commercial_partner_id
)
in_refund = self.move_model.create(
{
"partner_id": self.supplier.id,
"move_type": "in_refund",
"journal_id": self.journal_purchase.id,
}
)
self.assertEqual(in_refund.payment_mode_filter_type_domain, "inbound")
self.assertEqual(
in_refund.partner_bank_filter_type_domain, in_refund.bank_partner_id
)
out_invoice = self.move_model.create(
{
"partner_id": self.customer.id,
"move_type": "out_invoice",
"journal_id": self.journal_sale.id,
}
)
self.assertEqual(out_invoice.payment_mode_filter_type_domain, "inbound")
self.assertEqual(
out_invoice.partner_bank_filter_type_domain, out_invoice.bank_partner_id
)
def test_account_move_payment_mode_id_default(self):
payment_mode = self.env.ref("account_payment_mode.payment_mode_inbound_dd1")
field = self.env["ir.model.fields"].search(
[
("model_id.model", "=", self.move_model._name),
("name", "=", "payment_mode_id"),
]
)
move_form = Form(self.move_model.with_context(default_type="out_invoice"))
self.assertFalse(move_form.payment_mode_id)
self.env["ir.default"].create(
{"field_id": field.id, "json_value": payment_mode.id}
)
move_form = Form(self.move_model.with_context(default_type="out_invoice"))
self.assertEqual(move_form.payment_mode_id, payment_mode)<|fim▁end|>
|
"active_ids": [invoice.id],
"active_id": invoice.id,
|
<|file_name|>_tickfont.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
class TickfontValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(
self, plotly_name="tickfont", parent_name="layout.ternary.aaxis", **kwargs
):
super(TickfontValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Tickfont"),
data_docs=kwargs.pop(
"data_docs",
"""
color
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at<|fim▁hole|> https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
size
""",
),
**kwargs
)<|fim▁end|>
| |
<|file_name|>UIService.ts<|end_file_name|><|fim▁begin|>import _ from '../utils/underscore';
type axis = string | boolean;
type state = {
clientRect?: ClientRect,
documentWidth?: number,
documentHeight?: number,
startStyleLeft?: number,
startStyleTop?: number,
startStyleRight?: number,
startStyleBottom?: number,
startClientY?: number,
startClientX?: number
};
type position = {
left?: number,
right?: number,
top?: number,
bottom?: number
};
type borders = {
left?: number,
right?: number,
top?: number,
bottom?: number
};
let onStart: EventListener;
export default class UIService {
static __getDefaultsSetProperties(axis: axis): Array<string> {
return !axis ?
['left', 'top'] :
(axis === 'x' ? ['left'] :
(axis === 'y' ?
['top'] :
(console.warn('Unexpected value of axis'), ['left', 'top'])));
}
static draggable(
{ el, start, stop, drag, axis = false, borders = {}, setProperties = this.__getDefaultsSetProperties(axis) }:
{ el: HTMLElement, start: Function, stop: Function, drag: Function, axis: axis, borders: borders, setProperties: Array<string> }) {
el.ondragstart = () => false;
let state: state = {};
const eventOutOfClient = function (e: MouseEvent) {
return e.clientX > (borders.right || state.documentWidth || 0)
|| e.clientY > (borders.top || state.documentHeight || 0)
|| e.clientX < (borders.left || 0)
|| e.clientY < (borders.bottom || 0);
};
const onMove = function (e: MouseEvent) {
e.preventDefault();
if (eventOutOfClient(e)) {
return;
}
const position: position = {};
const diffX = e.clientX - (state.startClientX || 0);
if (axis !== 'y') {
position.left = (state.startStyleLeft || 0) + diffX;
position.right = (state.startStyleRight || 0) - diffX;
if (setProperties.includes('left')) {
el.style.left = `${position.left}px`;
} else if (setProperties.includes('right')) {
el.style.right = `${position.right}px`;
}
}
const diffY = e.clientY - (state.startClientY || 0);
if (axis !== 'x') {
position.top = (state.startStyleTop || 0) + diffY;
position.bottom = (state.startStyleBottom || 0) - diffY;
if (setProperties.includes('top')) {
el.style.top = `${position.top}px`;
} else if (setProperties.includes('bottom')) {
el.style.bottom = `${position.bottom}px`;
}
}
typeof drag === 'function' && drag(
event,
{
offset: position,
originalPosition: state.clientRect,
position,
translation: {
x: diffX,
y: diffY
}
}
);
};
const onStop = (event: MouseEvent) => {
document.removeEventListener('pointerup', onStop, true);
document.removeEventListener('pointermove', onMove, true);
typeof stop === 'function' && stop(event, el);
};
onStart = (event: MouseEvent) => {
const clientRect = el.getBoundingClientRect();
const documentWidth = document.body.offsetWidth;
const documentHeight = document.body.offsetHeight;
const computedStyle = getComputedStyle(el);
state = {
clientRect,
documentWidth,
documentHeight: document.body.offsetHeight,
startStyleLeft: parseInt(computedStyle.left || '0') || clientRect.left,
startStyleTop: parseInt(computedStyle.top || '0') || clientRect.top,
startStyleRight: parseInt(computedStyle.right || '0') || (documentWidth - clientRect.right),
startStyleBottom: parseInt(computedStyle.bottom || '0') || (documentHeight - clientRect.bottom),
startClientX: event.clientX,
startClientY: event.clientY
};<|fim▁hole|> document.addEventListener('pointerup', onStop, true);
document.addEventListener('pointermove', onMove, true);
typeof start === 'function' && start(event, el);
};
el.addEventListener('pointerdown', onStart);
}
static undraggable({ el }: { el: HTMLElement }) {
if (onStart) {
el.removeEventListener('pointerdown', onStart);
}
}
static getTransitionDurationMilliseconds(el: HTMLElement): number {
const transitionDuration = getComputedStyle(el).transitionDuration;
const isSeconds = /^(\d)+\.*(\d)*s$/.test(transitionDuration);
const isMilliseconds = /^(\d)+ms$/.test(transitionDuration);
if (isSeconds && !isMilliseconds) {
return parseFloat(transitionDuration) * 1000;
} else if (isMilliseconds && !isSeconds) {
return parseFloat(transitionDuration);
}
Core.InterfaceError.logError(`Unexpected transition duration "${transitionDuration}"`);
return 0;
}
static createElementsFromHTML(htmlString: string, context?: Object): Array<Element> {
let innerHTML = htmlString.trim();
if (context) {
innerHTML = Handlebars.compile(innerHTML)(context);
}
const div = document.createElement('div');
div.innerHTML = innerHTML;
return Array.from(div.children);
}
static createElementFromHTML(htmlString: string, context?: Object): Element {
return this.createElementsFromHTML(htmlString, context)[0];
}
}<|fim▁end|>
| |
<|file_name|>uptime.py<|end_file_name|><|fim▁begin|># Copyright (C) 2013-2015 Samuel Damashek, Peter Foley, James Forcier, Srijay Kasturi, Reed Koser, Christopher Reffett, and Fox Wilson
#
# This program is free software; you can redistribute it and/or<|fim▁hole|>#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from time import time
from datetime import timedelta
from helpers.command import Command
@Command('uptime', ['handler'])
def cmd(send, _, args):
"""Shows the bot's uptime.
Syntax: {command}
"""
curr = time()
uptime = args['handler'].uptime
starttime = curr - uptime['start']
reloaded = curr - uptime['reloaded']
send("Time since start: %s" % timedelta(seconds=starttime))
send("Time since reload: %s" % timedelta(seconds=reloaded))<|fim▁end|>
|
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
|
<|file_name|>request.rs<|end_file_name|><|fim▁begin|>//! jsonrpc request
use super::{Id, Params, Version};
/// Represents jsonrpc request which is a method call.
#[derive(Debug, PartialEq, Deserialize, Serialize)]
#[serde(deny_unknown_fields)]
pub struct MethodCall {
/// A String specifying the version of the JSON-RPC protocol.
pub jsonrpc: Option<Version>,
/// A String containing the name of the method to be invoked.
pub method: String,
/// A Structured value that holds the parameter values to be used
/// during the invocation of the method. This member MAY be omitted.
#[serde(default = "default_params")]
pub params: Params,
/// An identifier established by the Client that MUST contain a String,
/// Number, or NULL value if included. If it is not included it is assumed
/// to be a notification.
pub id: Id,
}
/// Represents jsonrpc request which is a notification.
#[derive(Debug, PartialEq, Deserialize, Serialize)]
#[serde(deny_unknown_fields)]
pub struct Notification {
/// A String specifying the version of the JSON-RPC protocol.
pub jsonrpc: Option<Version>,
/// A String containing the name of the method to be invoked.
pub method: String,
/// A Structured value that holds the parameter values to be used
/// during the invocation of the method. This member MAY be omitted.
#[serde(default = "default_params")]
pub params: Params,
}
/// Represents single jsonrpc call.
#[derive(Debug, PartialEq, Deserialize, Serialize)]
#[serde(untagged)]
pub enum Call {
/// Call method
MethodCall(MethodCall),
/// Fire notification
Notification(Notification),
/// Invalid call
Invalid {
/// Call id (if known)
#[serde(default = "default_id")]
id: Id,
},
}
fn default_params() -> Params {
Params::None
}
fn default_id() -> Id {
Id::Null
}
impl From<MethodCall> for Call {
fn from(mc: MethodCall) -> Self {
Call::MethodCall(mc)
}
}
impl From<Notification> for Call {
fn from(n: Notification) -> Self {
Call::Notification(n)
}
}
/// Represents jsonrpc request.
#[derive(Debug, PartialEq, Deserialize, Serialize)]
#[serde(untagged)]
pub enum Request {
/// Single request (call)
Single(Call),
/// Batch of requests (calls)
Batch(Vec<Call>)
}
#[cfg(test)]
mod tests {
use super::*;
use serde_json::Value;
#[test]
fn method_call_serialize() {
use serde_json;
use serde_json::Value;
let m = MethodCall {
jsonrpc: Some(Version::V2),
method: "update".to_owned(),
params: Params::Array(vec![Value::from(1), Value::from(2)]),
id: Id::Num(1)
};
let serialized = serde_json::to_string(&m).unwrap();
assert_eq!(serialized, r#"{"jsonrpc":"2.0","method":"update","params":[1,2],"id":1}"#);
}
#[test]
fn notification_serialize() {
use serde_json;
use serde_json::Value;
let n = Notification {
jsonrpc: Some(Version::V2),
method: "update".to_owned(),
params: Params::Array(vec![Value::from(1), Value::from(2)])
};
let serialized = serde_json::to_string(&n).unwrap();
assert_eq!(serialized, r#"{"jsonrpc":"2.0","method":"update","params":[1,2]}"#);
}
#[test]
fn call_serialize() {<|fim▁hole|> use serde_json;
use serde_json::Value;
let n = Call::Notification(Notification {
jsonrpc: Some(Version::V2),
method: "update".to_owned(),
params: Params::Array(vec![Value::from(1)])
});
let serialized = serde_json::to_string(&n).unwrap();
assert_eq!(serialized, r#"{"jsonrpc":"2.0","method":"update","params":[1]}"#);
}
#[test]
fn request_serialize_batch() {
use serde_json;
let batch = Request::Batch(vec![
Call::MethodCall(MethodCall {
jsonrpc: Some(Version::V2),
method: "update".to_owned(),
params: Params::Array(vec![Value::from(1), Value::from(2)]),
id: Id::Num(1)
}),
Call::Notification(Notification {
jsonrpc: Some(Version::V2),
method: "update".to_owned(),
params: Params::Array(vec![Value::from(1)])
})
]);
let serialized = serde_json::to_string(&batch).unwrap();
assert_eq!(serialized, r#"[{"jsonrpc":"2.0","method":"update","params":[1,2],"id":1},{"jsonrpc":"2.0","method":"update","params":[1]}]"#);
}
#[test]
fn notification_deserialize() {
use serde_json;
use serde_json::Value;
let s = r#"{"jsonrpc": "2.0", "method": "update", "params": [1,2]}"#;
let deserialized: Notification = serde_json::from_str(s).unwrap();
assert_eq!(deserialized, Notification {
jsonrpc: Some(Version::V2),
method: "update".to_owned(),
params: Params::Array(vec![Value::from(1), Value::from(2)])
});
let s = r#"{"jsonrpc": "2.0", "method": "foobar"}"#;
let deserialized: Notification = serde_json::from_str(s).unwrap();
assert_eq!(deserialized, Notification {
jsonrpc: Some(Version::V2),
method: "foobar".to_owned(),
params: Params::None,
});
let s = r#"{"jsonrpc": "2.0", "method": "update", "params": [1,2], "id": 1}"#;
let deserialized: Result<Notification, _> = serde_json::from_str(s);
assert!(deserialized.is_err())
}
#[test]
fn call_deserialize() {
use serde_json;
let s = r#"{"jsonrpc": "2.0", "method": "update", "params": [1]}"#;
let deserialized: Call = serde_json::from_str(s).unwrap();
assert_eq!(deserialized, Call::Notification(Notification {
jsonrpc: Some(Version::V2),
method: "update".to_owned(),
params: Params::Array(vec![Value::from(1)])
}));
let s = r#"{"jsonrpc": "2.0", "method": "update", "params": [1], "id": 1}"#;
let deserialized: Call = serde_json::from_str(s).unwrap();
assert_eq!(deserialized, Call::MethodCall(MethodCall {
jsonrpc: Some(Version::V2),
method: "update".to_owned(),
params: Params::Array(vec![Value::from(1)]),
id: Id::Num(1)
}));
let s = r#"{"jsonrpc": "2.0", "method": "update", "params": [], "id": 1}"#;
let deserialized: Call = serde_json::from_str(s).unwrap();
assert_eq!(deserialized, Call::MethodCall(MethodCall {
jsonrpc: Some(Version::V2),
method: "update".to_owned(),
params: Params::Array(vec![]),
id: Id::Num(1)
}));
let s = r#"{"jsonrpc": "2.0", "method": "update", "params": null, "id": 1}"#;
let deserialized: Call = serde_json::from_str(s).unwrap();
assert_eq!(deserialized, Call::MethodCall(MethodCall {
jsonrpc: Some(Version::V2),
method: "update".to_owned(),
params: Params::None,
id: Id::Num(1)
}));
let s = r#"{"jsonrpc": "2.0", "method": "update", "id": 1}"#;
let deserialized: Call = serde_json::from_str(s).unwrap();
assert_eq!(deserialized, Call::MethodCall(MethodCall {
jsonrpc: Some(Version::V2),
method: "update".to_owned(),
params: Params::None,
id: Id::Num(1)
}));
}
#[test]
fn request_deserialize_batch() {
use serde_json;
let s = r#"[{}, {"jsonrpc": "2.0", "method": "update", "params": [1,2], "id": 1},{"jsonrpc": "2.0", "method": "update", "params": [1]}]"#;
let deserialized: Request = serde_json::from_str(s).unwrap();
assert_eq!(deserialized, Request::Batch(vec![
Call::Invalid { id: Id::Null },
Call::MethodCall(MethodCall {
jsonrpc: Some(Version::V2),
method: "update".to_owned(),
params: Params::Array(vec![Value::from(1), Value::from(2)]),
id: Id::Num(1)
}),
Call::Notification(Notification {
jsonrpc: Some(Version::V2),
method: "update".to_owned(),
params: Params::Array(vec![Value::from(1)])
})
]))
}
#[test]
fn request_invalid_returns_id() {
use serde_json;
let s = r#"{"id":120,"method":"my_method","params":["foo", "bar"],"extra_field":[]}"#;
let deserialized: Request = serde_json::from_str(s).unwrap();
match deserialized {
Request::Single(Call::Invalid { id: Id::Num(120) }) => {},
_ => panic!("Request wrongly deserialized: {:?}", deserialized),
}
}
}<|fim▁end|>
| |
<|file_name|>config.rs<|end_file_name|><|fim▁begin|>//! Configuration module, read and write config file.
use APP_INFO;
use error::*;
use json::JsonValue;
use std::fs::File;
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use vars::VarsHandler;
/// Configuration structure to read and write config.
#[derive(Clone, Debug)]
pub struct Config {
/// Path to the metadata
pub metadata_path: PathBuf,
/// Interval between each autobackup
pub autobackup_interval: u64,
/// Path to vars
pub vars_path: PathBuf,
}
impl Config {
/// Default constructor
pub fn new<P: AsRef<Path>, Q: AsRef<Path>>(
metadata_path: P,
autobackup_interval: u64,
vars_path: Q,
) -> Config {
Config {
metadata_path: metadata_path.as_ref().to_path_buf(),
autobackup_interval: autobackup_interval,
vars_path: vars_path.as_ref().to_path_buf(),
}
}
/// Path to the config
pub fn config_path() -> Result<PathBuf> {
Ok(
::app_dirs::app_root(::app_dirs::AppDataType::UserConfig, &APP_INFO)?
.join("config"),
)
}
/// Read config from readable stream
pub fn from_json_stream<R: Read>(stream: &mut R) -> Result<Config> {
let mut buf = String::new();
stream.read_to_string(&mut buf)?;
let json = ::json::parse(&buf)?;
let metadata_path = match json["metadata_path"].as_str() {
Some(s) => s,
None => bail!(ErrorKind::MissingMetadataPath),
};
let autobackup_interval = json["autobackup_interval"].as_u64().unwrap_or(3600);
let vars_path = match json["vars_path"].as_str() {
Some(s) => s.to_string(),
None => VarsHandler::get_default_path()?.display().to_string(),
};
Ok(Config::new(metadata_path, autobackup_interval, vars_path))
}
/// Read config from user config
pub fn read_config() -> Result<Config> {
let path = Config::config_path()?;
Config::read_config_from_path(&path)
}
/// Read config from path
pub fn read_config_from_path<P: AsRef<Path>>(path: P) -> Result<Config> {
let mut f = File::open(path)?;
Config::from_json_stream(&mut f)
}
/// Save config
pub fn save_config(&self) -> Result<()> {
let path = Config::config_path()?;
let mut f = File::create(&path)?;
let json: JsonValue = self.clone().into();
let json_str = ::json::stringify_pretty(json, 2);<|fim▁hole|> }
}
/// Convert Config into Json
impl Into<JsonValue> for Config {
fn into(self) -> JsonValue {
object!{
"metadata_path" => self.metadata_path.display().to_string(),
"autobackup_interval" => self.autobackup_interval,
"vars_path" => self.vars_path.display().to_string()
}
}
}
/// Default config, may be panic
impl Default for Config {
fn default() -> Config {
let metadata_path = ::app_dirs::app_root(::app_dirs::AppDataType::UserData, &APP_INFO)
.unwrap()
.join("metadata.json");
let vars_path = VarsHandler::get_default_path().unwrap();
Config::new(metadata_path, 3600, vars_path)
}
}
#[cfg(test)]
mod unit_tests {
use std::io::Cursor;
use super::*;
#[test]
fn read_config_from_json() {
let json_str = "{\"metadata_path\":\"/\", \"autobackup_interval\":260}";
let mut cursor = Cursor::new(json_str);
let config = Config::from_json_stream(&mut cursor).unwrap();
assert_eq!(config.metadata_path, Path::new("/"));
assert_eq!(config.autobackup_interval, 260);
}
}<|fim▁end|>
|
f.write_all(json_str.as_bytes())?;
Ok(())
|
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-11 21:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
<|fim▁hole|> fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
],
),
]<|fim▁end|>
|
operations = [
migrations.CreateModel(
name='Control',
|
<|file_name|>RunSegmenter.cpp<|end_file_name|><|fim▁begin|>// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "platform/fonts/shaping/RunSegmenter.h"
#include "platform/fonts/ScriptRunIterator.h"
#include "platform/fonts/SmallCapsIterator.h"
#include "platform/fonts/SymbolsIterator.h"
#include "platform/fonts/UTF16TextIterator.h"
#include "platform/text/Character.h"
#include "wtf/Assertions.h"
namespace blink {
RunSegmenter::RunSegmenter(const UChar* buffer, unsigned bufferSize, FontOrientation runOrientation, FontVariant variant)
: m_bufferSize(bufferSize)
, m_candidateRange({ 0, 0, USCRIPT_INVALID_CODE, OrientationIterator::OrientationKeep, SmallCapsIterator::SmallCapsSameCase })
, m_scriptRunIterator(adoptPtr(new ScriptRunIterator(buffer, bufferSize)))
, m_orientationIterator(runOrientation == FontOrientation::VerticalMixed ? adoptPtr(new OrientationIterator(buffer, bufferSize, runOrientation)) : nullptr)
, m_smallCapsIterator(variant == FontVariantSmallCaps ? adoptPtr(new SmallCapsIterator(buffer, bufferSize)) : nullptr)
, m_symbolsIterator(adoptPtr(new SymbolsIterator(buffer, bufferSize)))
, m_lastSplit(0)
, m_scriptRunIteratorPosition(0)
, m_orientationIteratorPosition(runOrientation == FontOrientation::VerticalMixed ? 0 : m_bufferSize)
, m_smallCapsIteratorPosition(variant == FontVariantSmallCaps ? 0 : m_bufferSize)
, m_symbolsIteratorPosition(0)
, m_atEnd(false)<|fim▁hole|>}
void RunSegmenter::consumeScriptIteratorPastLastSplit()
{
ASSERT(m_scriptRunIterator);
if (m_scriptRunIteratorPosition <= m_lastSplit && m_scriptRunIteratorPosition < m_bufferSize) {
while (m_scriptRunIterator->consume(m_scriptRunIteratorPosition, m_candidateRange.script)) {
if (m_scriptRunIteratorPosition > m_lastSplit)
return;
}
}
}
void RunSegmenter::consumeOrientationIteratorPastLastSplit()
{
if (m_orientationIterator && m_orientationIteratorPosition <= m_lastSplit && m_orientationIteratorPosition < m_bufferSize) {
while (m_orientationIterator->consume(&m_orientationIteratorPosition, &m_candidateRange.renderOrientation)) {
if (m_orientationIteratorPosition > m_lastSplit)
return;
}
}
}
void RunSegmenter::consumeSmallCapsIteratorPastLastSplit()
{
if (m_smallCapsIterator && m_smallCapsIteratorPosition <= m_lastSplit && m_smallCapsIteratorPosition < m_bufferSize) {
while (m_smallCapsIterator->consume(&m_smallCapsIteratorPosition, &m_candidateRange.smallCapsBehavior)) {
if (m_smallCapsIteratorPosition > m_lastSplit)
return;
}
}
}
void RunSegmenter::consumeSymbolsIteratorPastLastSplit()
{
ASSERT(m_symbolsIterator);
if (m_symbolsIteratorPosition <= m_lastSplit && m_symbolsIteratorPosition < m_bufferSize) {
while (m_symbolsIterator->consume(&m_symbolsIteratorPosition, &m_candidateRange.fontFallbackPriority)) {
if (m_symbolsIteratorPosition > m_lastSplit)
return;
}
}
}
bool RunSegmenter::consume(RunSegmenterRange* nextRange)
{
if (m_atEnd || !m_bufferSize)
return false;
consumeScriptIteratorPastLastSplit();
consumeOrientationIteratorPastLastSplit();
consumeSmallCapsIteratorPastLastSplit();
consumeSymbolsIteratorPastLastSplit();
if (m_scriptRunIteratorPosition <= m_orientationIteratorPosition
&& m_scriptRunIteratorPosition <= m_smallCapsIteratorPosition
&& m_scriptRunIteratorPosition <= m_symbolsIteratorPosition) {
m_lastSplit = m_scriptRunIteratorPosition;
}
if (m_orientationIteratorPosition <= m_scriptRunIteratorPosition
&& m_orientationIteratorPosition <= m_smallCapsIteratorPosition
&& m_orientationIteratorPosition <= m_symbolsIteratorPosition) {
m_lastSplit = m_orientationIteratorPosition;
}
if (m_smallCapsIteratorPosition <= m_scriptRunIteratorPosition
&& m_smallCapsIteratorPosition <= m_orientationIteratorPosition
&& m_smallCapsIteratorPosition <= m_symbolsIteratorPosition) {
m_lastSplit = m_smallCapsIteratorPosition;
}
if (m_symbolsIteratorPosition <= m_scriptRunIteratorPosition
&& m_symbolsIteratorPosition <= m_orientationIteratorPosition
&& m_symbolsIteratorPosition <= m_smallCapsIteratorPosition) {
m_lastSplit = m_symbolsIteratorPosition;
}
m_candidateRange.start = m_candidateRange.end;
m_candidateRange.end = m_lastSplit;
*nextRange = m_candidateRange;
m_atEnd = m_lastSplit == m_bufferSize;
return true;
}
} // namespace blink<|fim▁end|>
|
{
|
<|file_name|>linq_principles.js<|end_file_name|><|fim▁begin|>"use strict";
describe('The two basic principles of LINQ', function () {
it('uses lazy evaluation', function () {<|fim▁hole|> // Arrange
var enumerable = Enumerable.range(0, 10);
// Act
enumerable
.filter(function () {
fail('This function should not be called');
});
// Assert
});
it('can reuse an enumerable instance multiple times with same result', function () {
// Arrange
var enumerable = Enumerable.range(0, 10);
// Act
var result1 = enumerable.first();
var result2 = enumerable.last();
// Assert
expect(result1).toBe(0);
expect(result2).toBe(10);
});
});<|fim▁end|>
| |
<|file_name|>GraphReporter.java<|end_file_name|><|fim▁begin|>package com.github.approval.sesame;
/*
* #%L
* approval-sesame
* %%
* Copyright (C) 2014 Nikolavp
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.github.approval.Reporter;
import com.github.approval.reporters.ExecutableDifferenceReporter;
import com.github.approval.reporters.Reporters;
import com.github.approval.reporters.SwingInteractiveReporter;
import java.io.File;
/**
* <p>
* A reporter that can be used to verify dot files. It will compile the file to an image and open it through
* {@link com.github.approval.reporters.Reporters#fileLauncher()}.
* </p>
* <p>
* Note that this reporter cannot be used for anything else and will give you an error beceause it will<|fim▁hole|> * </p>
*/
public final class GraphReporter extends ExecutableDifferenceReporter {
/**
* Get an instance of the reporter.
* @return a graph reporter
*/
public static Reporter getInstance() {
return SwingInteractiveReporter.wrap(new GraphReporter());
}
/**
* Main constructor for the executable reporter.
*/
private GraphReporter() {
super("dot -T png -O ", "dot -T png -O ", "dot");
}
private static File addPng(File file) {
return new File(file.getAbsoluteFile().getAbsolutePath() + ".png");
}
@Override
public void approveNew(byte[] value, File approvalDestination, File fileForVerification) {
super.approveNew(value, approvalDestination, fileForVerification);
Reporters.fileLauncherWithoutInteraction().approveNew(value, addPng(approvalDestination), addPng(fileForVerification));
}
@Override
protected String[] buildApproveNewCommand(File approvalDestination, File fileForVerification) {
return new String[]{getApprovalCommand(), approvalDestination.getAbsolutePath()};
}
@Override
protected String[] buildNotTheSameCommand(File fileForVerification, File fileForApproval) {
return new String[]{getDiffCommand(), fileForApproval.getAbsolutePath()};
}
@Override
public void notTheSame(byte[] oldValue, File fileForVerification, byte[] newValue, File fileForApproval) {
super.notTheSame(oldValue, fileForVerification, newValue, fileForApproval);
Reporters.fileLauncherWithoutInteraction().notTheSame(oldValue, addPng(fileForVerification), newValue, addPng(fileForApproval));
}
}<|fim▁end|>
|
* try to compile the verification file with the "dot" command.
|
<|file_name|>CalendarGuiDay.py<|end_file_name|><|fim▁begin|>import datetime
import time
from pandac.PandaModules import TextNode, Vec3, Vec4, PlaneNode, Plane, Point3
from toontown.pgui.DirectGui import DirectFrame, DirectLabel, DirectButton, DirectScrolledList, DGG
from direct.directnotify import DirectNotifyGlobal
from toontown.pgui import DirectGuiGlobals
from toontown.toonbase import TTLocalizer
from toontown.toonbase import ToontownGlobals
from toontown.parties.PartyInfo import PartyInfo
from toontown.parties import PartyGlobals
from toontown.ai.NewsManager import NewsManager
def myStrftime(myTime):
result = ''
result = myTime.strftime('%I')
if result[0] == '0':
result = result[1:]
result += myTime.strftime(':%M %p')
return result
class CalendarGuiDay(DirectFrame):
notify = directNotify.newCategory('CalendarGuiDay')
ScrollListTextSize = 0.03
def __init__(self, parent, myDate, startDate, dayClickCallback = None, onlyFutureDaysClickable = False):
self.origParent = parent
self.startDate = startDate
self.myDate = myDate
self.dayClickCallback = dayClickCallback
self.onlyFutureDaysClickable = onlyFutureDaysClickable
DirectFrame.__init__(self, parent=parent)
self.timedEvents = []
self.partiesInvitedToToday = []
self.hostedPartiesToday = []
self.yearlyHolidaysToday = []
self.showMarkers = config.GetBool('show-calendar-markers', 0)
self.filter = ToontownGlobals.CalendarFilterShowAll
self.load()
self.createGuiObjects()
self.update()
def createDummyLocators(self):
self.dayButtonLocator = self.attachNewNode('dayButtonLocator')
self.dayButtonLocator.setX(0.1)
self.dayButtonLocator.setZ(-0.05)
self.numberLocator = self.attachNewNode('numberLocator')
self.numberLocator.setX(0.09)
self.scrollLocator = self.attachNewNode('scrollLocator')
self.selectedLocator = self.attachNewNode('selectedLocator')
self.selectedLocator.setX(0.11)
self.selectedLocator.setZ(-0.06)
def load(self):
dayAsset = loader.loadModel('phase_4/models/parties/tt_m_gui_sbk_calendar_box')
dayAsset.reparentTo(self)
self.dayButtonLocator = self.find('**/loc_origin')
self.numberLocator = self.find('**/loc_number')
self.scrollLocator = self.find('**/loc_topLeftList')
self.selectedLocator = self.find('**/loc_origin')
self.todayBox = self.find('**/boxToday')
self.todayBox.hide()<|fim▁hole|> self.selectedFrame = self.find('**/boxHover')
self.selectedFrame.hide()
self.defaultBox = self.find('**/boxBlank')
self.scrollBottomRightLocator = self.find('**/loc_bottomRightList')
self.scrollDownLocator = self.find('**/loc_scrollDown')
self.attachMarker(self.scrollDownLocator)
self.scrollUpLocator = self.find('**/loc_scrollUp')
self.attachMarker(self.scrollUpLocator)
def attachMarker(self, parent, scale = 0.005, color = (1, 0, 0)):
if self.showMarkers:
marker = loader.loadModel('phase_3/models/misc/sphere')
marker.reparentTo(parent)
marker.setScale(scale)
marker.setColor(*color)
def createGuiObjects(self):
self.dayButton = DirectButton(parent=self.dayButtonLocator, image=self.selectedFrame, relief=None, command=self.__clickedOnDay, pressEffect=1, rolloverSound=None, clickSound=None)
self.numberWidget = DirectLabel(parent=self.numberLocator, relief=None, text=str(self.myDate.day), text_scale=0.04, text_align=TextNode.ACenter, text_font=ToontownGlobals.getInterfaceFont(), text_fg=Vec4(110 / 255.0, 126 / 255.0, 255 / 255.0, 1))
self.attachMarker(self.numberLocator)
self.listXorigin = 0
self.listFrameSizeX = self.scrollBottomRightLocator.getX() - self.scrollLocator.getX()
self.scrollHeight = self.scrollLocator.getZ() - self.scrollBottomRightLocator.getZ()
self.listZorigin = self.scrollBottomRightLocator.getZ()
self.listFrameSizeZ = self.scrollLocator.getZ() - self.scrollBottomRightLocator.getZ()
self.arrowButtonXScale = 1
self.arrowButtonZScale = 1
self.itemFrameXorigin = 0
self.itemFrameZorigin = 0
self.buttonXstart = self.itemFrameXorigin + 0.21
self.gui = loader.loadModel('phase_3.5/models/gui/friendslist_gui')
buttonOffSet = -0.01
incButtonPos = (0.0, 0, 0)
decButtonPos = (0.0, 0, 0)
itemFrameMinZ = self.listZorigin
itemFrameMaxZ = self.listZorigin + self.listFrameSizeZ
arrowUp = self.find('**/downScroll_up')
arrowDown = self.find('**/downScroll_down')
arrowHover = self.find('**/downScroll_hover')
self.scrollList = DirectScrolledList(parent=self.scrollLocator, relief=None, pos=(0, 0, 0), incButton_image=(arrowUp,
arrowDown,
arrowHover,
arrowUp), incButton_relief=None, incButton_scale=(self.arrowButtonXScale, 1, self.arrowButtonZScale), incButton_pos=incButtonPos, incButton_image3_color=Vec4(1, 1, 1, 0.2), decButton_image=(arrowUp,
arrowDown,
arrowHover,
arrowUp), decButton_relief=None, decButton_scale=(self.arrowButtonXScale, 1, -self.arrowButtonZScale), decButton_pos=decButtonPos, decButton_image3_color=Vec4(1, 1, 1, 0.2), itemFrame_pos=(self.itemFrameXorigin, 0, -0.03), numItemsVisible=4, incButtonCallback=self.scrollButtonPressed, decButtonCallback=self.scrollButtonPressed)
itemFrameParent = self.scrollList.itemFrame.getParent()
self.scrollList.incButton.reparentTo(self.scrollDownLocator)
self.scrollList.decButton.reparentTo(self.scrollUpLocator)
arrowUp.removeNode()
arrowDown.removeNode()
arrowHover.removeNode()
clipper = PlaneNode('clipper')
clipper.setPlane(Plane(Vec3(-1, 0, 0), Point3(0.23, 0, 0)))
clipNP = self.scrollList.component('itemFrame').attachNewNode(clipper)
self.scrollList.component('itemFrame').setClipPlane(clipNP)
return
def scrollButtonPressed(self):
self.__clickedOnDay()
def adjustForMonth(self):
curServerDate = base.cr.toontownTimeManager.getCurServerDateTime()
if self.onlyFutureDaysClickable:
if self.myDate.year < curServerDate.year or self.myDate.year == curServerDate.year and self.myDate.month < curServerDate.month or self.myDate.year == curServerDate.year and self.myDate.month == curServerDate.month and self.myDate.day < curServerDate.day:
self.numberWidget.setColorScale(0.5, 0.5, 0.5, 0.5)
self.numberWidget['state'] = DirectGuiGlobals.DISABLED
else:
self.numberWidget.setColorScale(1, 1, 1, 1)
if self.myDate.month != self.startDate.month:
self.setColorScale(0.75, 0.75, 0.75, 1.0)
if self.dayClickCallback is not None:
self.numberWidget['state'] = DirectGuiGlobals.DISABLED
else:
self.setColorScale(1, 1, 1, 1)
if self.myDate.date() == curServerDate.date():
self.defaultBox.hide()
self.todayBox.show()
else:
self.defaultBox.show()
self.todayBox.hide()
return
def destroy(self):
if self.dayClickCallback is not None:
self.numberWidget.destroy()
self.dayClickCallback = None
self.notify.debug('desroying %s' % self.myDate)
try:
for item in self.scrollList['items']:
if hasattr(item, 'description') and item.description and hasattr(item.description, 'destroy'):
self.notify.debug('desroying description of item %s' % item)
item.unbind(DGG.ENTER)
item.unbind(DGG.EXIT)
item.description.destroy()
except e:
self.notify.debug('pass %s' % self.myDate)
self.scrollList.removeAndDestroyAllItems()
self.scrollList.destroy()
self.dayButton.destroy()
DirectFrame.destroy(self)
return
def addWeeklyHolidays(self):
if not self.filter == ToontownGlobals.CalendarFilterShowAll and not self.filter == ToontownGlobals.CalendarFilterShowOnlyHolidays:
return
if base.cr.newsManager:
holidays = base.cr.newsManager.getHolidaysForWeekday(self.myDate.weekday())
holidayName = ''
holidayDesc = ''
for holidayId in holidays:
if holidayId in TTLocalizer.HolidayNamesInCalendar:
holidayName = TTLocalizer.HolidayNamesInCalendar[holidayId][0]
holidayDesc = TTLocalizer.HolidayNamesInCalendar[holidayId][1]
else:
holidayName = TTLocalizer.UnknownHoliday % holidayId
self.addTitleAndDescToScrollList(holidayName, holidayDesc)
self.scrollList.refresh()
if config.GetBool('calendar-test-items', 0):
if self.myDate.date() + datetime.timedelta(days=-1) == base.cr.toontownTimeManager.getCurServerDateTime().date():
testItems = ('1:00 AM Party', '2:00 AM CEO', '11:15 AM Party', '5:30 PM CJ', '11:00 PM Party', 'Really Really Long String')
for text in testItems:
newItem = DirectLabel(relief=None, text=text, text_scale=self.ScrollListTextSize, text_align=TextNode.ALeft)
self.scrollList.addItem(newItem)
if self.myDate.date() + datetime.timedelta(days=-2) == base.cr.toontownTimeManager.getCurServerDateTime().date():
testItems = ('1:00 AM Party', '3:00 AM CFO', '11:00 AM Party')
textSize = self.ScrollListTextSize
for text in testItems:
newItem = DirectLabel(relief=None, text=text, text_scale=textSize, text_align=TextNode.ALeft)
self.scrollList.addItem(newItem)
def updateArrowButtons(self):
    """Show or hide the scroll list's inc/dec arrow buttons.

    The arrows are shown only when there are more items than fit in the
    visible area; otherwise they are hidden.
    """
    numItems = 0
    try:
        numItems = len(self.scrollList['items'])
    except Exception:
        # Bug fix: the original `except e:` referenced an undefined name
        # and would raise NameError instead of swallowing the error.
        # The scroll list may not be populated yet; treat as empty.
        numItems = 0
    if numItems <= self.scrollList.numItemsVisible:
        self.scrollList.incButton.hide()
        self.scrollList.decButton.hide()
    else:
        self.scrollList.incButton.show()
        self.scrollList.decButton.show()
def collectTimedEvents(self):
    """Collect today's parties and holidays into self.timedEvents and add
    them to the scroll list in chronological order.

    Entries are (datetime.time, payload) pairs; payload is a PartyInfo
    or a holiday tuple whose first element is the holiday type.
    """
    self.timedEvents = []
    if self.filter == ToontownGlobals.CalendarFilterShowAll or self.filter == ToontownGlobals.CalendarFilterShowOnlyParties:
        for party in localAvatar.partiesInvitedTo:
            if party.startTime.date() == self.myDate.date():
                self.partiesInvitedToToday.append(party)
                self.timedEvents.append((party.startTime.time(), party))
        for party in localAvatar.hostedParties:
            if party.startTime.date() == self.myDate.date():
                self.hostedPartiesToday.append(party)
                self.timedEvents.append((party.startTime.time(), party))
    if base.cr.newsManager and (self.filter == ToontownGlobals.CalendarFilterShowAll or self.filter == ToontownGlobals.CalendarFilterShowOnlyHolidays):
        # Holiday tuples are (type, id, start, end).  Use the start time
        # when the holiday starts today, otherwise the end time.
        yearlyHolidays = base.cr.newsManager.getYearlyHolidaysForDate(self.myDate)
        for holiday in yearlyHolidays:
            holidayId = holiday[1]
            holidayStart = holiday[2]
            holidayEnd = holiday[3]
            holidayType = holiday[0]
            if holidayStart[0] == self.myDate.month and holidayStart[1] == self.myDate.day:
                myTime = datetime.time(holidayStart[2], holidayStart[3])
            elif holidayEnd[0] == self.myDate.month and holidayEnd[1] == self.myDate.day:
                myTime = datetime.time(holidayEnd[2], holidayEnd[3])
            else:
                self.notify.error('holiday is not today %s' % holiday)
            self.timedEvents.append((myTime, holiday))
        # Oncely holidays carry a year: (year, month, day, hour, minute).
        oncelyHolidays = base.cr.newsManager.getOncelyHolidaysForDate(self.myDate)
        for holiday in oncelyHolidays:
            holidayId = holiday[1]
            holidayStart = holiday[2]
            holidayEnd = holiday[3]
            holidayType = holiday[0]
            if holidayStart[0] == self.myDate.year and holidayStart[1] == self.myDate.month and holidayStart[2] == self.myDate.day:
                myTime = datetime.time(holidayStart[3], holidayStart[4])
            elif holidayEnd[0] == self.myDate.year and holidayEnd[1] == self.myDate.month and holidayEnd[2] == self.myDate.day:
                myTime = datetime.time(holidayEnd[3], holidayEnd[4])
            else:
                self.notify.error('holiday is not today %s' % holiday)
            self.timedEvents.append((myTime, holiday))
        multipleStartHolidays = base.cr.newsManager.getMultipleStartHolidaysForDate(self.myDate)
        for holiday in multipleStartHolidays:
            holidayId = holiday[1]
            holidayStart = holiday[2]
            holidayEnd = holiday[3]
            holidayType = holiday[0]
            if holidayStart[0] == self.myDate.year and holidayStart[1] == self.myDate.month and holidayStart[2] == self.myDate.day:
                myTime = datetime.time(holidayStart[3], holidayStart[4])
            elif holidayEnd[0] == self.myDate.year and holidayEnd[1] == self.myDate.month and holidayEnd[2] == self.myDate.day:
                myTime = datetime.time(holidayEnd[3], holidayEnd[4])
            else:
                self.notify.error('holiday is not today %s' % holiday)
            self.timedEvents.append((myTime, holiday))
        relativelyHolidays = base.cr.newsManager.getRelativelyHolidaysForDate(self.myDate)
        for holiday in relativelyHolidays:
            holidayId = holiday[1]
            holidayStart = holiday[2]
            holidayEnd = holiday[3]
            holidayType = holiday[0]
            if holidayStart[0] == self.myDate.month and holidayStart[1] == self.myDate.day:
                myTime = datetime.time(holidayStart[2], holidayStart[3])
            elif holidayEnd[0] == self.myDate.month and holidayEnd[1] == self.myDate.day:
                myTime = datetime.time(holidayEnd[2], holidayEnd[3])
            else:
                self.notify.error('holiday is not today %s' % holiday)
            self.timedEvents.append((myTime, holiday))

    def timedEventCompare(te1, te2):
        # Python 2 cmp-style comparator: order events by their time.
        if te1[0] < te2[0]:
            return -1
        elif te1[0] == te2[0]:
            return 0
        else:
            return 1

    self.timedEvents.sort(cmp=timedEventCompare)
    # Dispatch each payload to the matching scroll-list adder.
    for timedEvent in self.timedEvents:
        if isinstance(timedEvent[1], PartyInfo):
            self.addPartyToScrollList(timedEvent[1])
        elif isinstance(timedEvent[1], tuple) and timedEvent[1][0] == NewsManager.YearlyHolidayType:
            self.addYearlyHolidayToScrollList(timedEvent[1])
        elif isinstance(timedEvent[1], tuple) and timedEvent[1][0] == NewsManager.OncelyHolidayType:
            self.addOncelyHolidayToScrollList(timedEvent[1])
        elif isinstance(timedEvent[1], tuple) and timedEvent[1][0] == NewsManager.OncelyMultipleStartHolidayType:
            self.addOncelyMultipleStartHolidayToScrollList(timedEvent[1])
        elif isinstance(timedEvent[1], tuple) and timedEvent[1][0] == NewsManager.RelativelyHolidayType:
            self.addRelativelyHolidayToScrollList(timedEvent[1])
def addYearlyHolidayToScrollList(self, holiday):
    """Add a yearly holiday entry to the scroll list.

    holiday is (type, id, start, end) with start/end being
    (month, day, hour, minute) tuples.
    """
    holidayId = holiday[1]
    holidayStart = holiday[2]
    holidayEnd = holiday[3]
    holidayType = holiday[0]
    holidayText = ''
    startTime = datetime.time(holidayStart[2], holidayStart[3])
    endTime = datetime.time(holidayEnd[2], holidayEnd[3])
    startDate = datetime.date(self.myDate.year, holidayStart[0], holidayStart[1])
    endDate = datetime.date(self.myDate.year, holidayEnd[0], holidayEnd[1])
    if endDate < startDate:
        # Holiday wraps past New Year; push the end into the next year.
        endDate = datetime.date(endDate.year + 1, endDate.month, endDate.day)
    if holidayId in TTLocalizer.HolidayNamesInCalendar:
        holidayName = TTLocalizer.HolidayNamesInCalendar[holidayId][0]
        holidayDesc = TTLocalizer.HolidayNamesInCalendar[holidayId][1]
    else:
        holidayName = TTLocalizer.UnknownHoliday % holidayId
        holidayDesc = TTLocalizer.UnknownHoliday % holidayId
    if holidayStart[0] == holidayEnd[0] and holidayStart[1] == holidayEnd[1]:
        # Starts and ends on the same calendar day.
        holidayText = myStrftime(startTime)
        holidayText += ' ' + holidayName
        holidayDesc += ' ' + TTLocalizer.CalendarEndsAt + myStrftime(endTime)
    elif self.myDate.month == holidayStart[0] and self.myDate.day == holidayStart[1]:
        # Today is the first day of a multi-day holiday.
        holidayText = myStrftime(startTime)
        holidayText += ' ' + holidayName
        holidayDesc = holidayName + '. ' + holidayDesc
        holidayDesc += ' ' + TTLocalizer.CalendarEndsAt + endDate.strftime(TTLocalizer.HolidayFormat) + myStrftime(endTime)
    elif self.myDate.month == holidayEnd[0] and self.myDate.day == holidayEnd[1]:
        # Today is the last day of a multi-day holiday.
        holidayText = myStrftime(endTime)
        holidayText += ' ' + TTLocalizer.CalendarEndDash + holidayName
        holidayDesc = TTLocalizer.CalendarEndOf + holidayName
        holidayDesc += '. ' + TTLocalizer.CalendarStartedOn + startDate.strftime(TTLocalizer.HolidayFormat) + myStrftime(startTime)
    else:
        self.notify.error('unhandled case')
    self.addTitleAndDescToScrollList(holidayText, holidayDesc)
def addOncelyHolidayToScrollList(self, holiday):
    """Add a one-time holiday entry to the scroll list.

    holiday is (type, id, start, end) with start/end being
    (year, month, day, hour, minute) tuples.
    """
    holidayId = holiday[1]
    holidayStart = holiday[2]
    holidayEnd = holiday[3]
    holidayType = holiday[0]
    holidayText = ''
    startTime = datetime.time(holidayStart[3], holidayStart[4])
    endTime = datetime.time(holidayEnd[3], holidayEnd[4])
    startDate = datetime.date(holidayStart[0], holidayStart[1], holidayStart[2])
    # NOTE(review): the end year comes from holidayStart[0]; the rollover
    # below bumps it when the end wraps past New Year -- confirm intended.
    endDate = datetime.date(holidayStart[0], holidayEnd[1], holidayEnd[2])
    if endDate < startDate:
        endDate = datetime.date(endDate.year + 1, endDate.month, endDate.day)
    if holidayId in TTLocalizer.HolidayNamesInCalendar:
        holidayName = TTLocalizer.HolidayNamesInCalendar[holidayId][0]
        holidayDesc = TTLocalizer.HolidayNamesInCalendar[holidayId][1]
    else:
        holidayName = TTLocalizer.UnknownHoliday % holidayId
        holidayDesc = ''
    if holidayStart[1] == holidayEnd[1] and holidayStart[2] == holidayEnd[2]:
        # Starts and ends on the same calendar day.
        holidayText = myStrftime(startTime)
        holidayText += ' ' + holidayName
        holidayDesc = holidayName + '. ' + holidayDesc
        holidayDesc += ' ' + TTLocalizer.CalendarEndsAt + myStrftime(endTime)
    elif self.myDate.year == holidayStart[0] and self.myDate.month == holidayStart[1] and self.myDate.day == holidayStart[2]:
        # Today is the holiday's first day.
        holidayText = myStrftime(startTime)
        holidayText += ' ' + holidayName
        holidayDesc = holidayName + '. ' + holidayDesc
        holidayDesc += ' ' + TTLocalizer.CalendarEndsAt + endDate.strftime(TTLocalizer.HolidayFormat) + myStrftime(endTime)
    elif self.myDate.year == holidayEnd[0] and self.myDate.month == holidayEnd[1] and self.myDate.day == holidayEnd[2]:
        # Today is the holiday's last day.
        holidayText = myStrftime(endTime)
        holidayText += ' ' + TTLocalizer.CalendarEndDash + holidayName
        holidayDesc = TTLocalizer.CalendarEndOf + holidayName
        holidayDesc += '. ' + TTLocalizer.CalendarStartedOn + startDate.strftime(TTLocalizer.HolidayFormat) + myStrftime(startTime)
    else:
        self.notify.error('unhandled case')
    self.addTitleAndDescToScrollList(holidayText, holidayDesc)
def addOncelyMultipleStartHolidayToScrollList(self, holiday):
    """Multiple-start oncely holidays render identically to oncely ones."""
    self.addOncelyHolidayToScrollList(holiday)
def addRelativelyHolidayToScrollList(self, holiday):
    """Add a relatively-scheduled holiday entry to the scroll list.

    holiday is (type, id, start, end) with start/end being
    (month, day, hour, minute) tuples.
    """
    holidayId = holiday[1]
    holidayStart = holiday[2]
    holidayEnd = holiday[3]
    holidayType = holiday[0]
    holidayText = ''
    startTime = datetime.time(holidayStart[2], holidayStart[3])
    endTime = datetime.time(holidayEnd[2], holidayEnd[3])
    startDate = datetime.date(self.myDate.year, holidayStart[0], holidayStart[1])
    endDate = datetime.date(self.myDate.year, holidayEnd[0], holidayEnd[1])
    if endDate < startDate:
        # Bug fix: datetime.date is immutable, so the original
        # `endDate.year += 1` raised AttributeError.  Build a new date one
        # year later, matching addYearlyHolidayToScrollList.
        endDate = datetime.date(endDate.year + 1, endDate.month, endDate.day)
    if holidayId in TTLocalizer.HolidayNamesInCalendar:
        holidayName = TTLocalizer.HolidayNamesInCalendar[holidayId][0]
        holidayDesc = TTLocalizer.HolidayNamesInCalendar[holidayId][1]
    else:
        holidayName = TTLocalizer.UnknownHoliday % holidayId
        holidayDesc = ''
    if holidayStart[0] == holidayEnd[0] and holidayStart[1] == holidayEnd[1]:
        # Starts and ends on the same calendar day.
        holidayText = myStrftime(startTime)
        holidayText += ' ' + holidayName
        holidayDesc += ' ' + TTLocalizer.CalendarEndsAt + myStrftime(endTime)
    elif self.myDate.month == holidayStart[0] and self.myDate.day == holidayStart[1]:
        # Today is the first day of a multi-day holiday.
        holidayText = myStrftime(startTime)
        holidayText += ' ' + holidayName
        holidayDesc = holidayName + '. ' + holidayDesc
        holidayDesc += ' ' + TTLocalizer.CalendarEndsAt + endDate.strftime(TTLocalizer.HolidayFormat) + myStrftime(endTime)
    elif self.myDate.month == holidayEnd[0] and self.myDate.day == holidayEnd[1]:
        # Today is the last day of a multi-day holiday.
        holidayText = myStrftime(endTime)
        holidayText += ' ' + TTLocalizer.CalendarEndDash + holidayName
        holidayDesc = TTLocalizer.CalendarEndOf + holidayName
        holidayDesc += '. ' + TTLocalizer.CalendarStartedOn + startDate.strftime(TTLocalizer.HolidayFormat) + myStrftime(startTime)
    else:
        self.notify.error('unhandled case')
    self.addTitleAndDescToScrollList(holidayText, holidayDesc)
def addTitleAndDescToScrollList(self, title, desc):
    """Add a clickable title item with a lazily-built hover description."""
    textSize = self.ScrollListTextSize
    descTextSize = 0.05
    newItem = DirectButton(relief=None, text=title, text_scale=textSize, text_align=TextNode.ALeft, rolloverSound=None, clickSound=None, pressEffect=0, command=self.__clickedOnScrollItem)
    scrollItemHeight = newItem.getHeight()
    # Offset that places the description just below the item, clamped to
    # a minimum, then negated to move downward in Z.
    descUnderItemZAdjust = scrollItemHeight * descTextSize / textSize
    descUnderItemZAdjust = max(0.0534, descUnderItemZAdjust)
    descUnderItemZAdjust = -descUnderItemZAdjust
    descZAdjust = descUnderItemZAdjust
    newItem.description = DirectLabel(parent=newItem, pos=(0.115, 0, descZAdjust), text='', text_wordwrap=15, pad=(0.02, 0.02), text_scale=descTextSize, text_align=TextNode.ACenter, textMayChange=0)
    # checkedHeight defers text/geometry sizing until the first hover.
    newItem.description.checkedHeight = False
    newItem.description.setBin('gui-popup', 0)
    newItem.description.hide()
    newItem.bind(DGG.ENTER, self.enteredTextItem, extraArgs=[newItem, desc, descUnderItemZAdjust])
    newItem.bind(DGG.EXIT, self.exitedTextItem, extraArgs=[newItem])
    self.scrollList.addItem(newItem)
    return
def exitedTextItem(self, newItem, mousepos):
    """Mouse left a scroll item: hide its hover description popup."""
    newItem.description.hide()
def enteredTextItem(self, newItem, descText, descUnderItemZAdjust, mousePos):
    """Mouse entered a scroll item: size its description once, then show it.

    descText is the string to display for plain items; party items pass a
    MiniInviteVisual whose checkedHeight is already True, skipping the
    sizing pass entirely.
    """
    if not newItem.description.checkedHeight:
        # First hover: fill in the text, measure it, and build a backdrop.
        newItem.description.checkedHeight = True
        newItem.description['text'] = descText
        bounds = newItem.description.getBounds()
        descHeight = newItem.description.getHeight()
        scrollItemHeight = newItem.getHeight()
        descOverItemZAdjust = descHeight - scrollItemHeight / 2.0
        descZPos = self.getPos(aspect2d)[2] + descUnderItemZAdjust - descHeight
        if descZPos < -1.0:
            # Popup would run off the bottom of the screen; flip it above.
            newItem.description.setZ(descOverItemZAdjust)
        descWidth = newItem.description.getWidth()
        brightFrame = loader.loadModel('phase_4/models/parties/tt_m_gui_sbk_calendar_popUp_bg')
        newItem.description['geom'] = brightFrame
        newItem.description['geom_scale'] = (descWidth, 1, descHeight)
        # Center the backdrop geometry vertically on the text bounds.
        descGeomZ = (bounds[2] - bounds[3]) / 2.0
        descGeomZ += bounds[3]
        newItem.description['geom_pos'] = (0, 0, descGeomZ)
    newItem.description.show()
def addPartyToScrollList(self, party):
    """Add one party entry (start time + 'Party' label) to the scroll list.

    A MiniInviteVisual is attached as the item's hover description and
    wired to the shared enter/exit handlers.  (Cleanup: removed duplicate
    textSize/descTextSize assignments and the unused descZAdjust local.)
    """
    textSize = self.ScrollListTextSize
    descTextSize = 0.05
    partyTitle = myStrftime(party.startTime)
    partyTitle = partyTitle + ' ' + TTLocalizer.EventsPageCalendarTabParty
    newItem = DirectButton(relief=None, text=partyTitle, text_scale=textSize, text_align=TextNode.ALeft, rolloverSound=None, clickSound=None, pressEffect=0, command=self.__clickedOnScrollItem)
    scrollItemHeight = newItem.getHeight()
    # Offset that places the popup just below the item, clamped to a
    # minimum, then negated to move downward in Z.
    descUnderItemZAdjust = scrollItemHeight * descTextSize / textSize
    descUnderItemZAdjust = max(0.0534, descUnderItemZAdjust)
    descUnderItemZAdjust = -descUnderItemZAdjust
    self.scrollList.addItem(newItem)
    newItem.description = MiniInviteVisual(newItem, party)
    newItem.description.setBin('gui-popup', 0)
    newItem.description.hide()
    newItem.bind(DGG.ENTER, self.enteredTextItem, extraArgs=[newItem, newItem.description, descUnderItemZAdjust])
    newItem.bind(DGG.EXIT, self.exitedTextItem, extraArgs=[newItem])
    return
def __clickedOnScrollItem(self):
    """A scroll-list entry was clicked; treat it like clicking the day."""
    self.__clickedOnDay()
def __clickedOnDay(self):
    """Handle a click on this day cell.

    Ignores clicks on past days when onlyFutureDaysClickable is set;
    otherwise fires the optional callback and broadcasts 'clickedOnDay'.
    """
    if self.onlyFutureDaysClickable:
        today = base.cr.toontownTimeManager.getCurServerDateTime().date()
        if self.myDate.date() < today:
            return
    if self.dayClickCallback:
        self.dayClickCallback(self)
    self.notify.debug('we got clicked on %s' % self.myDate.date())
    messenger.send('clickedOnDay', [self.myDate.date()])
def updateSelected(self, selected):
    """Visually mark (or unmark) this day cell as the selected one."""
    if not selected:
        self.selectedFrame.hide()
        self.setScale(1.0)
        self.setPos(0, 0, 0)
        return
    # Enlarge slightly and nudge the cell so the selection pops out.
    self.selectedFrame.show()
    self.setScale(1.1)
    self.setPos(-0.01, 0, 0.01)
    # Re-append our original parent under its own parent so this cell
    # renders after (on top of) its siblings.
    grandParent = self.origParent.getParent()
    self.origParent.reparentTo(grandParent)
def changeDate(self, startDate, myDate):
    """Point this day cell at a new date and rebuild its contents."""
    self.startDate, self.myDate = startDate, myDate
    self.scrollList.removeAndDestroyAllItems()
    self.update()
def update(self):
    """Refresh every visual element of this day cell from self.myDate."""
    self.numberWidget['text'] = str(self.myDate.day)
    self.adjustForMonth()
    self.addWeeklyHolidays()
    self.collectTimedEvents()
    self.updateArrowButtons()
def changeFilter(self, filter):
    """Switch the event filter; rebuild the list only if it changed."""
    changed = filter != self.filter
    self.filter = filter
    if changed:
        self.scrollList.removeAndDestroyAllItems()
        self.update()
class MiniInviteVisual(DirectFrame):
    """Small party-invite popup shown when hovering a party calendar entry.

    Lazily fills in the host name, start time and status labels the first
    time it is shown.
    """

    def __init__(self, parent, partyInfo):
        DirectFrame.__init__(self, parent, pos=(0.1, 0, -0.018))
        # Already sized; tells enteredTextItem to skip its measuring pass.
        self.checkedHeight = True
        self.partyInfo = partyInfo
        self._parent = parent
        self.inviteBackgrounds = loader.loadModel('phase_4/models/parties/partyStickerbook')
        # Background art nodes, indexed by the party's invite theme.
        backgrounds = ['calendar_popup_birthday',
         'calendar_popup_fun',
         'calendar_popup_cupcake',
         'tt_t_gui_sbk_calendar_popup_racing',
         'tt_t_gui_sbk_calendar_popup_valentine1',
         'tt_t_gui_sbk_calendar_popup_victoryParty',
         'tt_t_gui_sbk_calendar_popup_winter1']
        self.background = DirectFrame(parent=self, relief=None, geom=self.inviteBackgrounds.find('**/%s' % backgrounds[self.partyInfo.inviteTheme]), scale=(0.7, 1.0, 0.23), pos=(0.0, 0.0, -0.1))
        # Labels start as a single space so show() knows they are unfilled.
        self.whosePartyLabel = DirectLabel(parent=self, relief=None, pos=(0.07, 0.0, -0.04), text=' ', text_scale=0.04, text_wordwrap=8, textMayChange=True)
        self.whenTextLabel = DirectLabel(parent=self, relief=None, text=' ', pos=(0.07, 0.0, -0.13), text_scale=0.04, textMayChange=True)
        self.partyStatusLabel = DirectLabel(parent=self, relief=None, text=' ', pos=(0.07, 0.0, -0.175), text_scale=0.04, textMayChange=True)
        return

    def show(self):
        """Reparent above the scroll list and lazily populate the labels."""
        self.reparentTo(self._parent)
        self.setPos(0.1, 0, -0.018)
        # Climb two levels so the popup draws above the scroll list.
        newParent = self._parent.getParent().getParent()
        self.wrtReparentTo(newParent)
        if self.whosePartyLabel['text'] == ' ':
            host = base.cr.identifyAvatar(self.partyInfo.hostId)
            if host:
                name = host.getName()
                self.whosePartyLabel['text'] = name
        if self.whenTextLabel['text'] == ' ':
            time = myStrftime(self.partyInfo.startTime)
            self.whenTextLabel['text'] = time
        if self.partyStatusLabel['text'] == ' ':
            # Map the party status enum to its localized status string.
            if self.partyInfo.status == PartyGlobals.PartyStatus.Cancelled:
                self.partyStatusLabel['text'] = TTLocalizer.CalendarPartyCancelled
            elif self.partyInfo.status == PartyGlobals.PartyStatus.Finished:
                self.partyStatusLabel['text'] = TTLocalizer.CalendarPartyFinished
            elif self.partyInfo.status == PartyGlobals.PartyStatus.Started:
                self.partyStatusLabel['text'] = TTLocalizer.CalendarPartyGo
            elif self.partyInfo.status == PartyGlobals.PartyStatus.NeverStarted:
                self.partyStatusLabel['text'] = TTLocalizer.CalendarPartyNeverStarted
            else:
                self.partyStatusLabel['text'] = TTLocalizer.CalendarPartyGetReady
        DirectFrame.show(self)

    def destroy(self):
        """Drop references to GUI children, then destroy the frame."""
        del self.checkedHeight
        del self.partyInfo
        del self._parent
        del self.background
        del self.whosePartyLabel
        del self.whenTextLabel
        del self.partyStatusLabel
        DirectFrame.destroy(self)
| |
<|file_name|>admin.go<|end_file_name|><|fim▁begin|>// Copyright 2015 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package admin
import (
"context"
"encoding/json"
"math"
"sort"
"strings"
"github.com/pingcap/errors"
"github.com/pingcap/tidb/errno"
"github.com/pingcap/tidb/expression"
"github.com/pingcap/tidb/kv"
"github.com/pingcap/tidb/meta"
"github.com/pingcap/tidb/parser/model"
"github.com/pingcap/tidb/parser/mysql"
"github.com/pingcap/tidb/sessionctx"
"github.com/pingcap/tidb/table"
"github.com/pingcap/tidb/tablecodec"
"github.com/pingcap/tidb/types"
"github.com/pingcap/tidb/util"
"github.com/pingcap/tidb/util/dbterror"
"github.com/pingcap/tidb/util/logutil"
"github.com/pingcap/tidb/util/logutil/consistency"
decoder "github.com/pingcap/tidb/util/rowDecoder"
"github.com/pingcap/tidb/util/sqlexec"
"go.uber.org/zap"
)
// DDLInfo is for DDL information.
// It bundles the current schema version, the reorg progress key of a
// running add-index job (if any), and the currently queued jobs.
type DDLInfo struct {
	SchemaVer   int64
	ReorgHandle kv.Key       // It's only used for DDL information.
	Jobs        []*model.Job // It's the currently running jobs.
}
// GetDDLInfo returns DDL information.
// It collects the head job of both the general and the add-index queue,
// the current schema version and, when an add-index job is present, its
// reorganization handle.
func GetDDLInfo(txn kv.Transaction) (*DDLInfo, error) {
	m := meta.NewMeta(txn)
	info := &DDLInfo{Jobs: make([]*model.Job, 0, 2)}

	generalJob, err := m.GetDDLJobByIdx(0)
	if err != nil {
		return nil, errors.Trace(err)
	}
	if generalJob != nil {
		info.Jobs = append(info.Jobs, generalJob)
	}

	addIdxJob, err := m.GetDDLJobByIdx(0, meta.AddIndexJobListKey)
	if err != nil {
		return nil, errors.Trace(err)
	}
	if addIdxJob != nil {
		info.Jobs = append(info.Jobs, addIdxJob)
	}

	if info.SchemaVer, err = m.GetSchemaVersion(); err != nil {
		return nil, errors.Trace(err)
	}
	if addIdxJob == nil {
		return info, nil
	}

	if _, info.ReorgHandle, _, _, err = m.GetDDLReorgHandle(addIdxJob); err != nil {
		// A missing reorg element simply means reorg has not started yet.
		if meta.ErrDDLReorgElementNotExist.Equal(err) {
			return info, nil
		}
		return nil, errors.Trace(err)
	}
	return info, nil
}
// IsJobRollbackable checks whether the job can be rollback.
// Only specific (job type, schema state) combinations are refused; any
// job type not listed below is considered rollbackable.
func IsJobRollbackable(job *model.Job) bool {
	switch job.Type {
	case model.ActionDropIndex, model.ActionDropPrimaryKey, model.ActionDropIndexes:
		// We can't cancel if index current state is in StateDeleteOnly or StateDeleteReorganization or StateWriteOnly, otherwise there will be an inconsistent issue between record and index.
		// In WriteOnly state, we can rollback for normal index but can't rollback for expression index(need to drop hidden column). Since we can't
		// know the type of index here, we consider all indices except primary index as non-rollbackable.
		// TODO: distinguish normal index and expression index so that we can rollback `DropIndex` for normal index in WriteOnly state.
		// TODO: make DropPrimaryKey rollbackable in WriteOnly, it need to deal with some tests.
		if job.SchemaState == model.StateDeleteOnly ||
			job.SchemaState == model.StateDeleteReorganization ||
			job.SchemaState == model.StateWriteOnly {
			return false
		}
	case model.ActionDropSchema, model.ActionDropTable, model.ActionDropSequence:
		// To simplify the rollback logic, cannot be canceled in the following states.
		if job.SchemaState == model.StateWriteOnly ||
			job.SchemaState == model.StateDeleteOnly {
			return false
		}
	case model.ActionAddTablePartition:
		// Rollbackable only before the new partition becomes visible.
		return job.SchemaState == model.StateNone || job.SchemaState == model.StateReplicaOnly
	case model.ActionDropColumn, model.ActionDropColumns, model.ActionDropTablePartition,
		model.ActionRebaseAutoID, model.ActionShardRowID,
		model.ActionTruncateTable, model.ActionAddForeignKey,
		model.ActionDropForeignKey, model.ActionRenameTable,
		model.ActionModifyTableCharsetAndCollate, model.ActionTruncateTablePartition,
		model.ActionModifySchemaCharsetAndCollate, model.ActionRepairTable,
		model.ActionModifyTableAutoIdCache, model.ActionModifySchemaDefaultPlacement:
		// These actions are only rollbackable while nothing has happened yet.
		return job.SchemaState == model.StateNone
	}
	return true
}
// CancelJobs cancels the DDL jobs.
// It returns one error slot per requested id (nil on success) plus an
// overall error for failures reading the job queues.
func CancelJobs(txn kv.Transaction, ids []int64) ([]error, error) {
	if len(ids) == 0 {
		return nil, nil
	}
	errs := make([]error, len(ids))
	t := meta.NewMeta(txn)
	generalJobs, err := getDDLJobsInQueue(t, meta.DefaultJobListKey)
	if err != nil {
		return nil, errors.Trace(err)
	}
	addIdxJobs, err := getDDLJobsInQueue(t, meta.AddIndexJobListKey)
	if err != nil {
		return nil, errors.Trace(err)
	}
	jobs := append(generalJobs, addIdxJobs...)
	// Map requested job id -> index into ids, so errs can be filled per id.
	jobsMap := make(map[int64]int)
	for i, id := range ids {
		jobsMap[id] = i
	}
	for j, job := range jobs {
		i, ok := jobsMap[job.ID]
		if !ok {
			logutil.BgLogger().Debug("the job that needs to be canceled isn't equal to current job",<|fim▁hole|>
		}
		delete(jobsMap, job.ID)
		// These states can't be cancelled.
		if job.IsDone() || job.IsSynced() {
			errs[i] = ErrCancelFinishedDDLJob.GenWithStackByArgs(job.ID)
			continue
		}
		// If the state is rolling back, it means the work is cleaning the data after cancelling the job.
		if job.IsCancelled() || job.IsRollingback() || job.IsRollbackDone() {
			continue
		}
		if !IsJobRollbackable(job) {
			errs[i] = ErrCannotCancelDDLJob.GenWithStackByArgs(job.ID)
			continue
		}
		job.State = model.JobStateCancelling
		// Make sure RawArgs isn't overwritten.
		err := json.Unmarshal(job.RawArgs, &job.Args)
		if err != nil {
			errs[i] = errors.Trace(err)
			continue
		}
		// Jobs past the general-queue prefix live in the add-index queue;
		// their update offset is relative to that queue.
		if j >= len(generalJobs) {
			offset := int64(j - len(generalJobs))
			err = t.UpdateDDLJob(offset, job, true, meta.AddIndexJobListKey)
		} else {
			err = t.UpdateDDLJob(int64(j), job, true)
		}
		if err != nil {
			errs[i] = errors.Trace(err)
		}
	}
	// Any ids left unmatched refer to jobs no longer in the queues.
	for id, i := range jobsMap {
		errs[i] = ErrDDLJobNotFound.GenWithStackByArgs(id)
	}
	return errs, nil
}
// getDDLJobsInQueue reads every job currently sitting in the given DDL
// job queue, preserving queue order.
func getDDLJobsInQueue(t *meta.Meta, jobListKey meta.JobListKeyType) ([]*model.Job, error) {
	length, err := t.DDLJobQueueLen(jobListKey)
	if err != nil {
		return nil, errors.Trace(err)
	}
	jobs := make([]*model.Job, length)
	for i := int64(0); i < length; i++ {
		if jobs[i], err = t.GetDDLJobByIdx(i, jobListKey); err != nil {
			return nil, errors.Trace(err)
		}
	}
	return jobs, nil
}
// GetDDLJobs returns every job from both the general and the add-index
// DDL queue, sorted by ascending job ID.
func GetDDLJobs(txn kv.Transaction) ([]*model.Job, error) {
	m := meta.NewMeta(txn)
	general, err := getDDLJobsInQueue(m, meta.DefaultJobListKey)
	if err != nil {
		return nil, errors.Trace(err)
	}
	addIdx, err := getDDLJobsInQueue(m, meta.AddIndexJobListKey)
	if err != nil {
		return nil, errors.Trace(err)
	}
	all := append(general, addIdx...)
	sort.Sort(jobArray(all))
	return all, nil
}
// jobArray implements sort.Interface over DDL jobs, ordering by ascending job ID.
type jobArray []*model.Job

// Len returns the number of jobs.
func (v jobArray) Len() int {
	return len(v)
}

// Less orders jobs by ascending ID.
func (v jobArray) Less(i, j int) bool {
	return v[i].ID < v[j].ID
}

// Swap exchanges the jobs at positions i and j.
func (v jobArray) Swap(i, j int) {
	v[i], v[j] = v[j], v[i]
}
// MaxHistoryJobs is exported for testing.
const MaxHistoryJobs = 10

// DefNumHistoryJobs is default value of the default number of history job,
// used as the batch size when iterating history DDL jobs.
const DefNumHistoryJobs = 10
// GetHistoryDDLJobs returns the DDL history jobs and an error.
// At most maxNumJobs of the most recent history jobs are returned.
func GetHistoryDDLJobs(txn kv.Transaction, maxNumJobs int) ([]*model.Job, error) {
	jobs, err := meta.NewMeta(txn).GetLastNHistoryDDLJobs(maxNumJobs)
	if err != nil {
		return nil, errors.Trace(err)
	}
	return jobs, nil
}
// IterHistoryDDLJobs iterates history DDL jobs until the `finishFn` return true or error.
// Jobs are fetched in batches of DefNumHistoryJobs, most recent first.
func IterHistoryDDLJobs(txn kv.Transaction, finishFn func([]*model.Job) (bool, error)) error {
	txnMeta := meta.NewMeta(txn)
	iter, err := txnMeta.GetLastHistoryDDLJobsIterator()
	if err != nil {
		return err
	}
	// cacheJobs is reused across batches to avoid reallocating each time;
	// GetLastJobs refills it in place.
	cacheJobs := make([]*model.Job, 0, DefNumHistoryJobs)
	for {
		cacheJobs, err = iter.GetLastJobs(DefNumHistoryJobs, cacheJobs)
		if err != nil || len(cacheJobs) == 0 {
			// Error, or the iterator is exhausted.
			return err
		}
		finish, err := finishFn(cacheJobs)
		if err != nil || finish {
			return err
		}
	}
}
// IterAllDDLJobs feeds the currently running DDL jobs to finishFn first
// and, unless finishFn stops the iteration (by returning true or an
// error), continues with the history DDL jobs.
func IterAllDDLJobs(txn kv.Transaction, finishFn func([]*model.Job) (bool, error)) error {
	running, err := GetDDLJobs(txn)
	if err != nil {
		return err
	}
	done, err := finishFn(running)
	if err != nil || done {
		return err
	}
	return IterHistoryDDLJobs(txn, finishFn)
}
// RecordData is the record data composed of a handle and values.
type RecordData struct {
	Handle kv.Handle     // the row's key handle
	Values []types.Datum // the row's column values
}
// getCount runs a COUNT query pinned to the given snapshot and returns
// the single int64 value it produces.
func getCount(exec sqlexec.RestrictedSQLExecutor, snapshot uint64, sql string, args ...interface{}) (int64, error) {
	opts := []sqlexec.OptionFuncAlias{sqlexec.ExecOptionWithSnapshot(snapshot)}
	rows, _, err := exec.ExecRestrictedSQL(context.Background(), opts, sql, args...)
	if err != nil {
		return 0, errors.Trace(err)
	}
	if len(rows) != 1 {
		return 0, errors.Errorf("can not get count, rows count = %d", len(rows))
	}
	return rows[0].GetInt64(0), nil
}
// Count greater Types, returned by CheckIndicesCount.
const (
	// TblCntGreater means that the number of table rows is more than the number of index rows.
	TblCntGreater byte = 1
	// IdxCntGreater means that the number of index rows is more than the number of table rows.
	IdxCntGreater byte = 2
)
// CheckIndicesCount compares indices count with table count.
// It returns the count greater type, the index offset and an error.
// It returns nil if the count from the index is equal to the count from the table columns,
// otherwise it returns an error and the corresponding index's offset.
func CheckIndicesCount(ctx sessionctx.Context, dbName, tableName string, indices []string) (byte, int, error) {
	// Here we need check all indexes, includes invisible index
	ctx.GetSessionVars().OptimizerUseInvisibleIndexes = true
	defer func() {
		ctx.GetSessionVars().OptimizerUseInvisibleIndexes = false
	}()
	// Pin all counts to one snapshot so table and index reads agree.
	var snapshot uint64
	txn, err := ctx.Txn(false)
	if err != nil {
		return 0, 0, err
	}
	if txn.Valid() {
		snapshot = txn.StartTS()
	}
	if ctx.GetSessionVars().SnapshotTS != 0 {
		// An explicit session snapshot overrides the txn's start TS.
		snapshot = ctx.GetSessionVars().SnapshotTS
	}
	// Add `` for some names like `table name`.
	exec := ctx.(sqlexec.RestrictedSQLExecutor)
	tblCnt, err := getCount(exec, snapshot, "SELECT COUNT(*) FROM %n.%n USE INDEX()", dbName, tableName)
	if err != nil {
		return 0, 0, errors.Trace(err)
	}
	// Compare the table scan count against each index's count.
	for i, idx := range indices {
		idxCnt, err := getCount(exec, snapshot, "SELECT COUNT(*) FROM %n.%n USE INDEX(%n)", dbName, tableName, idx)
		if err != nil {
			return 0, i, errors.Trace(err)
		}
		logutil.Logger(context.Background()).Info("check indices count",
			zap.String("table", tableName), zap.Int64("cnt", tblCnt), zap.Reflect("index", idx), zap.Int64("cnt", idxCnt))
		if tblCnt == idxCnt {
			continue
		}
		var ret byte
		if tblCnt > idxCnt {
			ret = TblCntGreater
		} else if idxCnt > tblCnt {
			ret = IdxCntGreater
		}
		return ret, i, ErrAdminCheckTable.GenWithStack("table count %d != index(%s) count %d", tblCnt, idx, idxCnt)
	}
	return 0, 0, nil
}
// CheckRecordAndIndex is exported for testing.
// It scans the table's records from the smallest int handle and verifies
// that each row has a matching entry in idx, reporting inconsistencies
// through a consistency.Reporter.
func CheckRecordAndIndex(ctx context.Context, sessCtx sessionctx.Context, txn kv.Transaction, t table.Table, idx table.Index) error {
	sc := sessCtx.GetSessionVars().StmtCtx
	cols := make([]*table.Column, len(idx.Meta().Columns))
	for i, col := range idx.Meta().Columns {
		cols[i] = t.Cols()[col.Offset]
	}
	// ir builds a reporter that can re-encode record and index keys for
	// inconsistency error messages.
	ir := func() *consistency.Reporter {
		return &consistency.Reporter{
			HandleEncode: func(handle kv.Handle) kv.Key {
				return tablecodec.EncodeRecordKey(t.RecordPrefix(), handle)
			},
			IndexEncode: func(idxRow *consistency.RecordData) kv.Key {
				// Locate the table's index matching idx by name (case-insensitive).
				var matchingIdx table.Index
				for _, v := range t.Indices() {
					if strings.EqualFold(v.Meta().Name.String(), idx.Meta().Name.O) {
						matchingIdx = v
						break
					}
				}
				if matchingIdx == nil {
					return nil
				}
				k, _, err := matchingIdx.GenIndexKey(sessCtx.GetSessionVars().StmtCtx, idxRow.Values, idxRow.Handle, nil)
				if err != nil {
					return nil
				}
				return k
			},
			Tbl:  t.Meta(),
			Idx:  idx.Meta(),
			Sctx: sessCtx,
		}
	}
	startKey := tablecodec.EncodeRecordKey(t.RecordPrefix(), kv.IntHandle(math.MinInt64))
	// filterFunc checks one decoded row against the index and reports any
	// mismatch; returning true continues the scan.
	filterFunc := func(h1 kv.Handle, vals1 []types.Datum, cols []*table.Column) (bool, error) {
		for i, val := range vals1 {
			col := cols[i]
			if val.IsNull() {
				if mysql.HasNotNullFlag(col.Flag) && col.ToInfo().GetOriginDefaultValue() == nil {
					return false, errors.Errorf("Column %v define as not null, but can't find the value where handle is %v", col.Name, h1)
				}
				// NULL value is regarded as its default value.
				colDefVal, err := table.GetColOriginDefaultValue(sessCtx, col.ToInfo())
				if err != nil {
					return false, errors.Trace(err)
				}
				vals1[i] = colDefVal
			}
		}
		isExist, h2, err := idx.Exist(sc, txn, vals1, h1)
		if kv.ErrKeyExists.Equal(err) {
			// The same values exist under a different handle: inconsistency.
			record1 := &consistency.RecordData{Handle: h1, Values: vals1}
			record2 := &consistency.RecordData{Handle: h2, Values: vals1}
			return false, ir().ReportAdminCheckInconsistent(ctx, h1, record2, record1)
		}
		if err != nil {
			return false, errors.Trace(err)
		}
		if !isExist {
			// The row exists but its index entry is missing.
			record := &consistency.RecordData{Handle: h1, Values: vals1}
			return false, ir().ReportAdminCheckInconsistent(ctx, h1, nil, record)
		}
		return true, nil
	}
	err := iterRecords(sessCtx, txn, t, startKey, cols, filterFunc)
	if err != nil {
		return errors.Trace(err)
	}
	return nil
}
// makeRowDecoder builds a RowDecoder able to decode every column of t,
// using a schema derived from the table's column infos.
func makeRowDecoder(t table.Table, sctx sessionctx.Context) (*decoder.RowDecoder, error) {
	dbName := model.NewCIStr(sctx.GetSessionVars().CurrentDB)
	exprCols, _, err := expression.ColumnInfos2ColumnsAndNames(sctx, dbName, t.Meta().Name, t.Meta().Cols(), t.Meta())
	if err != nil {
		return nil, err
	}
	schema := expression.NewSchema(exprCols...)
	colMap := decoder.BuildFullDecodeColMap(t.Cols(), schema)
	return decoder.NewRowDecoder(t, t.Cols(), colMap), nil
}
// iterRecords walks the record keys of t starting at startKey, decodes
// each row into the requested columns, and feeds (handle, data) to fn
// until fn returns false or an error, or the table prefix is exhausted.
func iterRecords(sessCtx sessionctx.Context, retriever kv.Retriever, t table.Table, startKey kv.Key, cols []*table.Column, fn table.RecordIterFunc) error {
	prefix := t.RecordPrefix()
	keyUpperBound := prefix.PrefixNext()
	it, err := retriever.Iter(startKey, keyUpperBound)
	if err != nil {
		return errors.Trace(err)
	}
	defer it.Close()
	if !it.Valid() {
		return nil
	}
	logutil.BgLogger().Debug("record",
		zap.Stringer("startKey", startKey),
		zap.Stringer("key", it.Key()),
		zap.Binary("value", it.Value()))
	rowDecoder, err := makeRowDecoder(t, sessCtx)
	if err != nil {
		return err
	}
	for it.Valid() && it.Key().HasPrefix(prefix) {
		// first kv pair is row lock information.
		// TODO: check valid lock
		// get row handle
		handle, err := tablecodec.DecodeRowKey(it.Key())
		if err != nil {
			return errors.Trace(err)
		}
		rowMap, err := rowDecoder.DecodeAndEvalRowWithMap(sessCtx, handle, it.Value(), sessCtx.GetSessionVars().Location(), nil)
		if err != nil {
			return errors.Trace(err)
		}
		// Project the decoded row onto the requested columns, in order.
		data := make([]types.Datum, 0, len(cols))
		for _, col := range cols {
			data = append(data, rowMap[col.ID])
		}
		more, err := fn(handle, data, cols)
		if !more || err != nil {
			return errors.Trace(err)
		}
		// Skip any remaining KV pairs of this row before the next record.
		rk := tablecodec.EncodeRecordKey(t.RecordPrefix(), handle)
		err = kv.NextUntil(it, util.RowKeyPrefixFilter(rk))
		if err != nil {
			return errors.Trace(err)
		}
	}
	return nil
}
var (
	// ErrDDLJobNotFound indicates the job id was not found.
	ErrDDLJobNotFound = dbterror.ClassAdmin.NewStd(errno.ErrDDLJobNotFound)
	// ErrCancelFinishedDDLJob returns when canceling a finished ddl job.
	ErrCancelFinishedDDLJob = dbterror.ClassAdmin.NewStd(errno.ErrCancelFinishedDDLJob)
	// ErrCannotCancelDDLJob returns when canceling an almost finished ddl job, because canceling it now may cause data inconsistency.
	ErrCannotCancelDDLJob = dbterror.ClassAdmin.NewStd(errno.ErrCannotCancelDDLJob)
	// ErrAdminCheckTable returns when the table records are inconsistent with the index values.
	ErrAdminCheckTable = dbterror.ClassAdmin.NewStd(errno.ErrAdminCheckTable)
)
|
zap.Int64("need to canceled job ID", job.ID),
zap.Int64("current job ID", job.ID))
continue
|
<|file_name|>Sum_over_array.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
/// Returns the sum of every element in `input`; an empty slice sums to 0.
pub fn sum_array(input: &[i32]) -> i32 {
    input.iter().fold(0, |acc, &value| acc + value)
}
|
<|file_name|>push-notification-sender-admin.js<|end_file_name|><|fim▁begin|>(function () {
'use strict';
/**
* All of the code for your admin-facing JavaScript source
* should reside in this file.
*
* Note: It has been assumed you will write jQuery code here, so the
* $ function reference has been prepared for usage within the scope
* of this function.
*
* This enables you to define handlers, for when the DOM is ready:
*
* $(function() {
*
* });
*
* When the window is loaded:
*
* $( window ).load(function() {
*
* });
*
* ...and/or other possibilities.
*
* Ideally, it is not considered best practise to attach more than a
* single DOM-ready or window-load handler for a particular page.
* Although scripts in the WordPress core, Plugins and Themes may be
* practising this, we should strive to set a better example in our own work.
*/
})(jQuery);
jQuery(document).ready(function () {
	// Validate the iOS push-notification settings form on keyup and submit.
	// NOTE(review): the `errorPlacement` callback had been displaced below the
	// handler by an extraction artifact; it belongs inside the validate() options.
	jQuery("#pns_os_setting").validate({
		rules: {
			pns_upload_ios_certi: "required",
			pns_push_to_ios: "required",
			pns_send_to_ios: "required"
		},
		messages: {
			pns_upload_ios_certi: "Please Upload Valid file",
			pns_push_to_ios: "Please Select Option",
			pns_send_to_ios: "Please Select option for send notifications to ios devices"
		},
		// Place each validation message in the .error_label cell of the row
		// following the invalid field's table row.
		errorPlacement: function (error, element) {
			jQuery(element).closest('tr').next().find('.error_label').html(error);
		}
	});

	// Uploading pem file via the WordPress media modal.
	var file_frame;
	jQuery('#pns_upload_image_button').on('click', function (event) {
		event.preventDefault();
		// If the media frame already exists, reopen it.
		if (file_frame) {
			file_frame.open();
			return;
		}
		// Create the media frame.
		file_frame = wp.media.frames.file_frame = wp.media({
			title: jQuery(this).data('uploader_title'),
			button: {
				text: jQuery(this).data('uploader_button_text')
			},
			multiple: false // Set to true to allow multiple files to be selected
		});
		// When a file is selected, copy its URL and filename into the form fields.
		file_frame.on('select', function () {
			var attachment = file_frame.state().get('selection').first().toJSON();
			jQuery('#pns_upload_ios_certi').val(attachment.url);
			jQuery('#pns_upload_ios_certi_name').val(attachment.filename);
		});
		// Finally, open the modal
		file_frame.open();
	});
});
<|file_name|>statics.rs<|end_file_name|><|fim▁begin|>// This test case tests the incremental compilation hash (ICH) implementation
// for statics.
// The general pattern followed here is: Change one thing between rev1 and rev2
// and make sure that the hash has changed, then change nothing between rev2 and
// rev3 and make sure that the hash has not changed.
// build-pass (FIXME(62277): could be check-pass?)
// revisions: cfail1 cfail2 cfail3
// compile-flags: -Z query-dep-graph -Zincremental-ignore-spans
#![allow(warnings)]
#![feature(rustc_attrs)]
#![feature(linkage)]
#![feature(thread_local)]
#![crate_type="rlib"]
// Change static visibility
#[cfg(cfail1)]
static STATIC_VISIBILITY: u8 = 0;
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes")]
#[rustc_clean(cfg="cfail3")]
pub static STATIC_VISIBILITY: u8 = 0;
// Change static mutability
#[cfg(cfail1)]
static STATIC_MUTABILITY: u8 = 0;
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes")]
#[rustc_clean(cfg="cfail3")]
static mut STATIC_MUTABILITY: u8 = 0;
// Add linkage attribute
#[cfg(cfail1)]
static STATIC_LINKAGE: u8 = 0;
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[linkage="weak_odr"]
static STATIC_LINKAGE: u8 = 0;
// Add no_mangle attribute
#[cfg(cfail1)]
static STATIC_NO_MANGLE: u8 = 0;
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[no_mangle]
static STATIC_NO_MANGLE: u8 = 0;
// Add thread_local attribute
#[cfg(cfail1)]
static STATIC_THREAD_LOCAL: u8 = 0;
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[thread_local]
static STATIC_THREAD_LOCAL: u8 = 0;
// Change type from i16 to u64
#[cfg(cfail1)]
static STATIC_CHANGE_TYPE_1: i16 = 0;
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes,type_of")]
#[rustc_clean(cfg="cfail3")]
static STATIC_CHANGE_TYPE_1: u64 = 0;
// Change type from Option<i8> to Option<u16>
#[cfg(cfail1)]
static STATIC_CHANGE_TYPE_2: Option<i8> = None;
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes,type_of")]
#[rustc_clean(cfg="cfail3")]
static STATIC_CHANGE_TYPE_2: Option<u16> = None;
// Change value between simple literals
#[rustc_clean(cfg="cfail2", except="hir_owner_nodes")]
#[rustc_clean(cfg="cfail3")]
static STATIC_CHANGE_VALUE_1: i16 = {
#[cfg(cfail1)]
{ 1 }
#[cfg(not(cfail1))]
{ 2 }
};
// Change value between expressions
#[rustc_clean(cfg="cfail2", except="hir_owner_nodes")]
#[rustc_clean(cfg="cfail3")]
static STATIC_CHANGE_VALUE_2: i16 = {
#[cfg(cfail1)]
{ 1 + 1 }
#[cfg(not(cfail1))]
{ 1 + 2 }
};
#[rustc_clean(cfg="cfail2", except="hir_owner_nodes")]
#[rustc_clean(cfg="cfail3")]
static STATIC_CHANGE_VALUE_3: i16 = {
#[cfg(cfail1)]
{ 2 + 3 }
#[cfg(not(cfail1))]
{ 2 * 3 }
};
#[rustc_clean(cfg="cfail2", except="hir_owner_nodes")]
#[rustc_clean(cfg="cfail3")]
static STATIC_CHANGE_VALUE_4: i16 = {
#[cfg(cfail1)]
{ 1 + 2 * 3 }
#[cfg(not(cfail1))]
{ 1 + 2 * 4 }
};
// Change type indirectly
struct ReferencedType1;
struct ReferencedType2;
mod static_change_type_indirectly {
#[cfg(cfail1)]
use super::ReferencedType1 as Type;
#[cfg(not(cfail1))]
use super::ReferencedType2 as Type;
<|fim▁hole|> static STATIC_CHANGE_TYPE_INDIRECTLY_1: Type = Type;
#[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes,type_of")]
#[rustc_clean(cfg="cfail3")]
static STATIC_CHANGE_TYPE_INDIRECTLY_2: Option<Type> = None;
}<|fim▁end|>
|
#[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes,type_of")]
#[rustc_clean(cfg="cfail3")]
|
<|file_name|>underscore.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for Underscore 1.7.0
// Project: http://underscorejs.org/
// Definitions by: Boris Yankov <https://github.com/borisyankov/>, Josh Baldwin <https://github.com/jbaldwin/>
// Definitions: https://github.com/borisyankov/DefinitelyTyped
declare module _ {
    /**
     * underscore.js _.throttle options.
     **/
    interface ThrottleSettings {
        /**
         * If you'd like to disable the leading-edge call, pass this as false.
         **/
        leading?: boolean;

        /**
         * If you'd like to disable the execution on the trailing-edge, pass false.
         **/
        trailing?: boolean;
    }

    /**
     * underscore.js template settings, set templateSettings or pass as an argument
     * to 'template()' to override defaults.
     **/
    interface TemplateSettings {
        /**
         * Default value is '/<%([\s\S]+?)%>/g'.
         **/
        evaluate?: RegExp;

        /**
         * Default value is '/<%=([\s\S]+?)%>/g'.
         **/
        interpolate?: RegExp;

        /**
         * Default value is '/<%-([\s\S]+?)%>/g'.
         **/
        escape?: RegExp;
    }

    /**
     * Marker interface for any value Underscore treats as a collection
     * (extended below by both array-like lists and dictionaries).
     **/
    interface Collection<T> { }

    // Common interface between Arrays and jQuery objects
    /**
     * An array-like collection: numerically indexable and carrying a length.
     **/
    interface List<T> extends Collection<T> {
        [index: number]: T;
        length: number;
    }

    /**
     * A string-keyed collection (plain-object map).
     **/
    interface Dictionary<T> extends Collection<T> {
        [index: string]: T;
    }

    /**
     * Callback shape for list iteration: receives the value, its index, and the whole list.
     **/
    interface ListIterator<T, TResult> {
        (value: T, index: number, list: List<T>): TResult;
    }

    /**
     * Callback shape for object iteration: receives the value, its key, and the whole object.
     **/
    interface ObjectIterator<T, TResult> {
        (element: T, key: string, list: Dictionary<T>): TResult;
    }

    /**
     * Callback shape for reductions over lists: receives the accumulator first.
     **/
    interface MemoIterator<T, TResult> {
        (prev: TResult, curr: T, index: number, list: List<T>): TResult;
    }

    /**
     * Callback shape for reductions over objects: receives the accumulator first.
     **/
    interface MemoObjectIterator<T, TResult> {
        (prev: TResult, curr: T, key: string, list: Dictionary<T>): TResult;
    }
}
interface UnderscoreStatic {
/**
* Underscore OOP Wrapper, all Underscore functions that take an object
* as the first parameter can be invoked through this function.
* @param key First argument to Underscore object functions.
**/
<T>(value: Array<T>): Underscore<T>;
<T>(value: T): Underscore<T>;
/* *************
* Collections *
************* */
/**
* Iterates over a list of elements, yielding each in turn to an iterator function. The iterator is
* bound to the context object, if one is passed. Each invocation of iterator is called with three
* arguments: (element, index, list). If list is a JavaScript object, iterator's arguments will be
* (value, key, object). Delegates to the native forEach function if it exists.
* @param list Iterates over this list of elements.
* @param iterator Iterator function for each element `list`.
* @param context 'this' object in `iterator`, optional.
**/
each<T>(
list: _.List<T>,
iterator: _.ListIterator<T, void>,
context?: any): _.List<T>;
/**
* @see _.each
* @param object Iterates over properties of this object.
* @param iterator Iterator function for each property on `object`.
* @param context 'this' object in `iterator`, optional.
**/
each<T>(
object: _.Dictionary<T>,
iterator: _.ObjectIterator<T, void>,
context?: any): _.Dictionary<T>;
/**
* @see _.each
**/
forEach<T>(
list: _.List<T>,
iterator: _.ListIterator<T, void>,
context?: any): _.List<T>;
/**
* @see _.each
**/
forEach<T>(
object: _.Dictionary<T>,
iterator: _.ObjectIterator<T, void>,
context?: any): _.Dictionary<T>;
/**
* Produces a new array of values by mapping each value in list through a transformation function
* (iterator). If the native map method exists, it will be used instead. If list is a JavaScript
* object, iterator's arguments will be (value, key, object).
* @param list Maps the elements of this array.
* @param iterator Map iterator function for each element in `list`.
* @param context `this` object in `iterator`, optional.
* @return The mapped array result.
**/
map<T, TResult>(
list: _.List<T>,
iterator: _.ListIterator<T, TResult>,
context?: any): TResult[];
/**
* @see _.map
* @param object Maps the properties of this object.
* @param iterator Map iterator function for each property on `object`.
* @param context `this` object in `iterator`, optional.
* @return The mapped object result.
**/
map<T, TResult>(
object: _.Dictionary<T>,
iterator: _.ObjectIterator<T, TResult>,
context?: any): TResult[];
/**
* @see _.map
**/
collect<T, TResult>(
list: _.List<T>,
iterator: _.ListIterator<T, TResult>,
context?: any): TResult[];
/**
* @see _.map
**/
collect<T, TResult>(
object: _.Dictionary<T>,
iterator: _.ObjectIterator<T, TResult>,
context?: any): TResult[];
/**
* Also known as inject and foldl, reduce boils down a list of values into a single value.
* Memo is the initial state of the reduction, and each successive step of it should be
* returned by iterator. The iterator is passed four arguments: the memo, then the value
* and index (or key) of the iteration, and finally a reference to the entire list.
* @param list Reduces the elements of this array.
* @param iterator Reduce iterator function for each element in `list`.
* @param memo Initial reduce state.
* @param context `this` object in `iterator`, optional.
* @return Reduced object result.
**/
reduce<T, TResult>(
list: _.Collection<T>,
iterator: _.MemoIterator<T, TResult>,
memo?: TResult,
context?: any): TResult;
reduce<T, TResult>(
list: _.Dictionary<T>,
iterator: _.MemoObjectIterator<T, TResult>,
memo?: TResult,
context?: any): TResult;
/**
* @see _.reduce
**/
inject<T, TResult>(
list: _.Collection<T>,
iterator: _.MemoIterator<T, TResult>,
memo?: TResult,
context?: any): TResult;
/**
* @see _.reduce
**/
foldl<T, TResult>(
list: _.Collection<T>,
iterator: _.MemoIterator<T, TResult>,
memo?: TResult,
context?: any): TResult;
/**
* The right-associative version of reduce. Delegates to the JavaScript 1.8 version of
* reduceRight, if it exists. `foldr` is not as useful in JavaScript as it would be in a
* language with lazy evaluation.
* @param list Reduces the elements of this array.
* @param iterator Reduce iterator function for each element in `list`.
* @param memo Initial reduce state.
* @param context `this` object in `iterator`, optional.
* @return Reduced object result.
**/
reduceRight<T, TResult>(
list: _.Collection<T>,
iterator: _.MemoIterator<T, TResult>,
memo?: TResult,
context?: any): TResult;
/**
* @see _.reduceRight
**/
foldr<T, TResult>(
list: _.Collection<T>,
iterator: _.MemoIterator<T, TResult>,
memo?: TResult,
context?: any): TResult;
/**
* Looks through each value in the list, returning the first one that passes a truth
* test (iterator). The function returns as soon as it finds an acceptable element,
* and doesn't traverse the entire list.
* @param list Searches for a value in this list.
* @param iterator Search iterator function for each element in `list`.
* @param context `this` object in `iterator`, optional.
* @return The first acceptable found element in `list`, if nothing is found undefined/null is returned.
**/
find<T>(
list: _.List<T>,
iterator: _.ListIterator<T, boolean>,
context?: any): T;
/**
* @see _.find
**/
find<T>(
object: _.Dictionary<T>,
iterator: _.ObjectIterator<T, boolean>,
context?: any): T;
/**
* @see _.find
**/
detect<T>(
list: _.List<T>,
iterator: _.ListIterator<T, boolean>,
context?: any): T;
/**
* @see _.find
**/
detect<T>(
object: _.Dictionary<T>,
iterator: _.ObjectIterator<T, boolean>,
context?: any): T;
/**
* Looks through each value in the list, returning the index of the first one that passes a truth
* test (iterator). The function returns as soon as it finds an acceptable element,
* and doesn't traverse the entire list.
* @param list Searches for a value in this list.
* @param iterator Search iterator function for each element in `list`.
* @param context `this` object in `iterator`, optional.
* @return The index of the first acceptable found element in `list`, if nothing is found -1 is returned.
**/
findIndex<T>(
list: _.List<T>,
iterator: _.ListIterator<T, boolean>,
context?: any): number;
/**
* Looks through each value in the list, returning an array of all the values that pass a truth
* test (iterator). Delegates to the native filter method, if it exists.
* @param list Filter elements out of this list.
* @param iterator Filter iterator function for each element in `list`.
* @param context `this` object in `iterator`, optional.
* @return The filtered list of elements.
**/
filter<T>(
list: _.List<T>,
iterator: _.ListIterator<T, boolean>,
context?: any): T[];
/**
* @see _.filter
**/
filter<T>(
object: _.Dictionary<T>,
iterator: _.ObjectIterator<T, boolean>,
context?: any): T[];
/**
* @see _.filter
**/
select<T>(
list: _.List<T>,
iterator: _.ListIterator<T, boolean>,
context?: any): T[];
/**
* @see _.filter
**/
select<T>(
object: _.Dictionary<T>,
iterator: _.ObjectIterator<T, boolean>,
context?: any): T[];
/**
* Looks through each value in the list, returning an array of all the values that contain all
* of the key-value pairs listed in properties.
* @param list List to match elements again `properties`.
* @param properties The properties to check for on each element within `list`.
* @return The elements within `list` that contain the required `properties`.
**/
where<T, U extends {}>(
list: _.List<T>,
properties: U): T[];
/**
* Looks through the list and returns the first value that matches all of the key-value pairs listed in properties.
* @param list Search through this list's elements for the first object with all `properties`.
* @param properties Properties to look for on the elements within `list`.
* @return The first element in `list` that has all `properties`.
**/
findWhere<T, U extends {}>(
list: _.List<T>,
properties: U): T;
/**
* Returns the values in list without the elements that the truth test (iterator) passes.
* The opposite of filter.
* Return all the elements for which a truth test fails.
* @param list Reject elements within this list.
* @param iterator Reject iterator function for each element in `list`.
* @param context `this` object in `iterator`, optional.
* @return The rejected list of elements.
**/
reject<T>(
list: _.List<T>,
iterator: _.ListIterator<T, boolean>,
context?: any): T[];
/**
* @see _.reject
**/
reject<T>(
object: _.Dictionary<T>,
iterator: _.ObjectIterator<T, boolean>,
context?: any): T[];
/**
* Returns true if all of the values in the list pass the iterator truth test. Delegates to the
* native method every, if present.
* @param list Truth test against all elements within this list.
* @param iterator Trust test iterator function for each element in `list`.
* @param context `this` object in `iterator`, optional.
* @return True if all elements passed the truth test, otherwise false.
**/
every<T>(
list: _.List<T>,
iterator?: _.ListIterator<T, boolean>,
context?: any): boolean;
/**
* @see _.every
**/
every<T>(
list: _.Dictionary<T>,
iterator?: _.ObjectIterator<T, boolean>,
context?: any): boolean;
/**
* @see _.every
**/
all<T>(
list: _.List<T>,
iterator?: _.ListIterator<T, boolean>,
context?: any): boolean;
/**
* @see _.every
**/
all<T>(
list: _.Dictionary<T>,
iterator?: _.ObjectIterator<T, boolean>,
context?: any): boolean;
/**
* Returns true if any of the values in the list pass the iterator truth test. Short-circuits and
* stops traversing the list if a true element is found. Delegates to the native method some, if present.
* @param list Truth test against all elements within this list.
* @param iterator Trust test iterator function for each element in `list`.
* @param context `this` object in `iterator`, optional.
* @return True if any elements passed the truth test, otherwise false.
**/
some<T>(
list: _.List<T>,
iterator?: _.ListIterator<T, boolean>,
context?: any): boolean;
/**
* @see _.some
**/
some<T>(
object: _.Dictionary<T>,
iterator?: _.ObjectIterator<T, boolean>,
context?: any): boolean;
/**
* @see _.some
**/
any<T>(
list: _.List<T>,
iterator?: _.ListIterator<T, boolean>,
context?: any): boolean;
/**
* @see _.some
**/
any<T>(
object: _.Dictionary<T>,
iterator?: _.ObjectIterator<T, boolean>,
context?: any): boolean;
/**
* Returns true if the value is present in the list. Uses indexOf internally,
* if list is an Array.
* @param list Checks each element to see if `value` is present.
* @param value The value to check for within `list`.
* @return True if `value` is present in `list`, otherwise false.
**/
contains<T>(
list: _.List<T>,
value: T): boolean;
/**
* @see _.contains
**/
contains<T>(
object: _.Dictionary<T>,
value: T): boolean;
/**
* @see _.contains
**/
include<T>(
list: _.Collection<T>,
value: T): boolean;
/**
* @see _.contains
**/
include<T>(
object: _.Dictionary<T>,
value: T): boolean;
/**
* Calls the method named by methodName on each value in the list. Any extra arguments passed to
* invoke will be forwarded on to the method invocation.
* @param list The element's in this list will each have the method `methodName` invoked.
* @param methodName The method's name to call on each element within `list`.
* @param arguments Additional arguments to pass to the method `methodName`.
**/
invoke<T extends {}>(
list: _.List<T>,
methodName: string,
...arguments: any[]): any;
/**
* A convenient version of what is perhaps the most common use-case for map: extracting a list of
* property values.
* @param list The list to pluck elements out of that have the property `propertyName`.
* @param propertyName The property to look for on each element within `list`.
* @return The list of elements within `list` that have the property `propertyName`.
**/
pluck<T extends {}>(
list: _.List<T>,
propertyName: string): any[];
/**
* Returns the maximum value in list.
* @param list Finds the maximum value in this list.
* @return Maximum value in `list`.
**/
max(list: _.List<number>): number;
/**
* Returns the maximum value in list. If iterator is passed, it will be used on each value to generate
* the criterion by which the value is ranked.
* @param list Finds the maximum value in this list.
* @param iterator Compares each element in `list` to find the maximum value.
* @param context `this` object in `iterator`, optional.
* @return The maximum element within `list`.
**/
max<T>(
list: _.List<T>,
iterator?: _.ListIterator<T, any>,
context?: any): T;
/**
* Returns the minimum value in list.
* @param list Finds the minimum value in this list.
* @return Minimum value in `list`.
**/
min(list: _.List<number>): number;
/**
* Returns the minimum value in list. If iterator is passed, it will be used on each value to generate
* the criterion by which the value is ranked.
* @param list Finds the minimum value in this list.
* @param iterator Compares each element in `list` to find the minimum value.
* @param context `this` object in `iterator`, optional.
* @return The minimum element within `list`.
**/
min<T>(
list: _.List<T>,
iterator?: _.ListIterator<T, any>,
context?: any): T;
/**
* Returns a sorted copy of list, ranked in ascending order by the results of running each value
* through iterator. Iterator may also be the string name of the property to sort by (eg. length).
* @param list Sorts this list.
* @param iterator Sort iterator for each element within `list`.
* @param context `this` object in `iterator`, optional.
* @return A sorted copy of `list`.
**/
sortBy<T, TSort>(
list: _.List<T>,
iterator?: _.ListIterator<T, TSort>,
context?: any): T[];
/**
* @see _.sortBy
* @param iterator Sort iterator for each element within `list`.
**/
sortBy<T>(
list: _.List<T>,
iterator: string,
context?: any): T[];
/**
* Splits a collection into sets, grouped by the result of running each value through iterator.
* If iterator is a string instead of a function, groups by the property named by iterator on
* each of the values.
* @param list Groups this list.
* @param iterator Group iterator for each element within `list`, return the key to group the element by.
* @param context `this` object in `iterator`, optional.
* @return An object with the group names as properties where each property contains the grouped elements from `list`.
**/
groupBy<T>(
list: _.List<T>,
iterator?: _.ListIterator<T, any>,
context?: any): _.Dictionary<T[]>;
/**
* @see _.groupBy
* @param iterator Property on each object to group them by.
**/
groupBy<T>(
list: _.List<T>,
iterator: string,
context?: any): _.Dictionary<T[]>;
/**
* Given a `list`, and an `iterator` function that returns a key for each element in the list (or a property name),
* returns an object with an index of each item. Just like _.groupBy, but for when you know your keys are unique.
**/
indexBy<T>(
list: _.List<T>,
iterator: _.ListIterator<T, any>,
context?: any): _.Dictionary<T>;
/**
* @see _.indexBy
* @param iterator Property on each object to index them by.
**/
indexBy<T>(
list: _.List<T>,
iterator: string,
context?: any): _.Dictionary<T>;
/**
* Sorts a list into groups and returns a count for the number of objects in each group. Similar
* to groupBy, but instead of returning a list of values, returns a count for the number of values
* in that group.
* @param list Group elements in this list and then count the number of elements in each group.
* @param iterator Group iterator for each element within `list`, return the key to group the element by.
* @param context `this` object in `iterator`, optional.
* @return An object with the group names as properties where each property contains the number of elements in that group.
**/
countBy<T>(
list: _.List<T>,
iterator?: _.ListIterator<T, any>,
context?: any): _.Dictionary<number>;
/**
* @see _.countBy
* @param iterator Function name
**/
countBy<T>(
list: _.List<T>,
<|fim▁hole|> context?: any): _.Dictionary<number>;
/**
* Returns a shuffled copy of the list, using a version of the Fisher-Yates shuffle.
* @param list List to shuffle.
* @return Shuffled copy of `list`.
**/
shuffle<T>(list: _.Collection<T>): T[];
/**
* Produce a random sample from the `list`. Pass a number to return `n` random elements from the list. Otherwise a single random item will be returned.
* @param list List to sample.
* @return Random sample of `n` elements in `list`.
**/
sample<T>(list: _.Collection<T>, n: number): T[];
/**
* @see _.sample
**/
sample<T>(list: _.Collection<T>): T;
/**
* Converts the list (anything that can be iterated over), into a real Array. Useful for transmuting
* the arguments object.
* @param list object to transform into an array.
* @return `list` as an array.
**/
toArray<T>(list: _.Collection<T>): T[];
/**
* Return the number of values in the list.
* @param list Count the number of values/elements in this list.
* @return Number of values in `list`.
**/
size<T>(list: _.Collection<T>): number;
/**
* Split array into two arrays:
* one whose elements all satisfy predicate and one whose elements all do not satisfy predicate.
* @param array Array to split in two.
* @param iterator Filter iterator function for each element in `array`.
* @param context `this` object in `iterator`, optional.
* @return Array where Array[0] are the elements in `array` that satisfies the predicate, and Array[1] the elements that did not.
**/
partition<T>(
array: Array<T>,
iterator: _.ListIterator<T, boolean>,
context?: any): T[][];
/*********
* Arrays *
**********/
/**
* Returns the first element of an array. Passing n will return the first n elements of the array.
* @param array Retrieves the first element of this array.
* @return Returns the first element of `array`.
**/
first<T>(array: _.List<T>): T;
/**
* @see _.first
* @param n Return more than one element from `array`.
**/
first<T>(
array: _.List<T>,
n: number): T[];
/**
* @see _.first
**/
head<T>(array: _.List<T>): T;
/**
* @see _.first
**/
head<T>(
array: _.List<T>,
n: number): T[];
/**
* @see _.first
**/
take<T>(array: _.List<T>): T;
/**
* @see _.first
**/
take<T>(
array: _.List<T>,
n: number): T[];
/**
* Returns everything but the last entry of the array. Especially useful on the arguments object.
* Pass n to exclude the last n elements from the result.
* @param array Retrieve all elements except the last `n`.
* @param n Leaves this many elements behind, optional.
* @return Returns everything but the last `n` elements of `array`.
**/
initial<T>(
array: _.List<T>,
n?: number): T[];
/**
* Returns the last element of an array. Passing n will return the last n elements of the array.
* @param array Retrieves the last element of this array.
* @return Returns the last element of `array`.
**/
last<T>(array: _.List<T>): T;
/**
* @see _.last
* @param n Return more than one element from `array`.
**/
last<T>(
array: _.List<T>,
n: number): T[];
/**
* Returns the rest of the elements in an array. Pass an index to return the values of the array
* from that index onward.
* @param array The array to retrieve all but the first `index` elements.
* @param n The index to start retrieving elements forward from, optional, default = 1.
* @return Returns the elements of `array` from `index` to the end of `array`.
**/
rest<T>(
array: _.List<T>,
n?: number): T[];
/**
* @see _.rest
**/
tail<T>(
array: _.List<T>,
n?: number): T[];
/**
* @see _.rest
**/
drop<T>(
array: _.List<T>,
n?: number): T[];
/**
* Returns a copy of the array with all falsy values removed. In JavaScript, false, null, 0, "",
* undefined and NaN are all falsy.
* @param array Array to compact.
* @return Copy of `array` without false values.
**/
compact<T>(array: _.List<T>): T[];
/**
* Flattens a nested array (the nesting can be to any depth). If you pass shallow, the array will
* only be flattened a single level.
* @param array The array to flatten.
* @param shallow If true then only flatten one level, optional, default = false.
* @return `array` flattened.
**/
flatten(
array: _.List<any>,
shallow?: boolean): any[];
/**
* Returns a copy of the array with all instances of the values removed.
* @param array The array to remove `values` from.
* @param values The values to remove from `array`.
* @return Copy of `array` without `values`.
**/
without<T>(
array: _.List<T>,
...values: T[]): T[];
/**
* Computes the union of the passed-in arrays: the list of unique items, in order, that are
* present in one or more of the arrays.
* @param arrays Array of arrays to compute the union of.
* @return The union of elements within `arrays`.
**/
union<T>(...arrays: _.List<T>[]): T[];
/**
* Computes the list of values that are the intersection of all the arrays. Each value in the result
* is present in each of the arrays.
* @param arrays Array of arrays to compute the intersection of.
* @return The intersection of elements within `arrays`.
**/
intersection<T>(...arrays: _.List<T>[]): T[];
/**
* Similar to without, but returns the values from array that are not present in the other arrays.
* @param array Keeps values that are within `others`.
* @param others The values to keep within `array`.
* @return Copy of `array` with only `others` values.
**/
difference<T>(
array: _.List<T>,
...others: _.List<T>[]): T[];
/**
* Produces a duplicate-free version of the array, using === to test object equality. If you know in
* advance that the array is sorted, passing true for isSorted will run a much faster algorithm. If
* you want to compute unique items based on a transformation, pass an iterator function.
* @param array Array to remove duplicates from.
* @param isSorted True if `array` is already sorted, optional, default = false.
* @param iterator Transform the elements of `array` before comparisons for uniqueness.
* @param context 'this' object in `iterator`, optional.
* @return Copy of `array` where all elements are unique.
**/
uniq<T, TSort>(
array: _.List<T>,
isSorted?: boolean,
iterator?: _.ListIterator<T, TSort>,
context?: any): T[];
/**
* @see _.uniq
**/
uniq<T, TSort>(
array: _.List<T>,
iterator?: _.ListIterator<T, TSort>,
context?: any): T[];
/**
* @see _.uniq
**/
unique<T, TSort>(
array: _.List<T>,
iterator?: _.ListIterator<T, TSort>,
context?: any): T[];
/**
* @see _.uniq
**/
unique<T, TSort>(
array: _.List<T>,
isSorted?: boolean,
iterator?: _.ListIterator<T, TSort>,
context?: any): T[];
/**
* Merges together the values of each of the arrays with the values at the corresponding position.
* Useful when you have separate data sources that are coordinated through matching array indexes.
* If you're working with a matrix of nested arrays, zip.apply can transpose the matrix in a similar fashion.
* @param arrays The arrays to merge/zip.
* @return Zipped version of `arrays`.
**/
zip(...arrays: any[][]): any[][];
/**
* @see _.zip
**/
zip(...arrays: any[]): any[];
/**
* Converts arrays into objects. Pass either a single list of [key, value] pairs, or a
* list of keys, and a list of values.
* @param keys Key array.
* @param values Value array.
* @return An object containing the `keys` as properties and `values` as the property values.
**/
object<TResult extends {}>(
keys: _.List<string>,
values: _.List<any>): TResult;
/**
* Converts arrays into objects. Pass either a single list of [key, value] pairs, or a
* list of keys, and a list of values.
* @param keyValuePairs Array of [key, value] pairs.
* @return An object containing the `keys` as properties and `values` as the property values.
**/
object<TResult extends {}>(...keyValuePairs: any[][]): TResult;
/**
* @see _.object
**/
object<TResult extends {}>(
list: _.List<any>,
values?: any): TResult;
/**
* Returns the index at which value can be found in the array, or -1 if value is not present in the array.
* Uses the native indexOf function unless it's missing. If you're working with a large array, and you know
* that the array is already sorted, pass true for isSorted to use a faster binary search ... or, pass a number
* as the third argument in order to look for the first matching value in the array after the given index.
* @param array The array to search for the index of `value`.
* @param value The value to search for within `array`.
* @param isSorted True if the array is already sorted, optional, default = false.
* @return The index of `value` within `array`.
**/
indexOf<T>(
array: _.List<T>,
value: T,
isSorted?: boolean): number;
/**
* @see _indexof
**/
indexOf<T>(
array: _.List<T>,
value: T,
startFrom: number): number;
/**
* Returns the index of the last occurrence of value in the array, or -1 if value is not present. Uses the
* native lastIndexOf function if possible. Pass fromIndex to start your search at a given index.
* @param array The array to search for the last index of `value`.
* @param value The value to search for within `array`.
* @param from The starting index for the search, optional.
* @return The index of the last occurrence of `value` within `array`.
**/
lastIndexOf<T>(
array: _.List<T>,
value: T,
from?: number): number;
/**
* Uses a binary search to determine the index at which the value should be inserted into the list in order
* to maintain the list's sorted order. If an iterator is passed, it will be used to compute the sort ranking
* of each value, including the value you pass.
* @param list The sorted list.
* @param value The value to determine its index within `list`.
* @param iterator Iterator to compute the sort ranking of each value, optional.
* @return The index where `value` should be inserted into `list`.
**/
sortedIndex<T, TSort>(
list: _.List<T>,
value: T,
iterator?: (x: T) => TSort, context?: any): number;
/**
* A function to create flexibly-numbered lists of integers, handy for each and map loops. start, if omitted,
* defaults to 0; step defaults to 1. Returns a list of integers from start to stop, incremented (or decremented)
* by step, exclusive.
* @param start Start here.
* @param stop Stop here.
* @param step The number to count up by each iteration, optional, default = 1.
* @return Array of numbers from `start` to `stop` with increments of `step`.
**/
range(
start: number,
stop: number,
step?: number): number[];
/**
* @see _.range
* @param stop Stop here.
* @return Array of numbers from 0 to `stop` with increments of 1.
* @note If start is not specified the implementation will never pull the step (step = arguments[2] || 0)
**/
range(stop: number): number[];
/*************
* Functions *
*************/
/**
* Bind a function to an object, meaning that whenever the function is called, the value of this will
* be the object. Optionally, bind arguments to the function to pre-fill them, also known as partial application.
* @param func The function to bind `this` to `object`.
* @param context The `this` pointer whenever `fn` is called.
* @param arguments Additional arguments to pass to `fn` when called.
* @return `fn` with `this` bound to `object`.
**/
bind(
func: Function,
context: any,
...arguments: any[]): () => any;
/**
* Binds a number of methods on the object, specified by methodNames, to be run in the context of that object
* whenever they are invoked. Very handy for binding functions that are going to be used as event handlers,
* which would otherwise be invoked with a fairly useless this. If no methodNames are provided, all of the
* object's function properties will be bound to it.
* @param object The object to bind the methods `methodName` to.
* @param methodNames The methods to bind to `object`, optional and if not provided all of `object`'s
* methods are bound.
**/
bindAll(
object: any,
...methodNames: string[]): any;
/**
* Partially apply a function by filling in any number of its arguments, without changing its dynamic this value.
* A close cousin of bind. You may pass _ in your list of arguments to specify an argument that should not be
* pre-filled, but left open to supply at call-time.
* @param fn Function to partially fill in arguments.
* @param arguments The partial arguments.
* @return `fn` with partially filled in arguments.
**/
partial(
fn: Function,
...arguments: any[]): Function;
/**
* Memoizes a given function by caching the computed result. Useful for speeding up slow-running computations.
* If passed an optional hashFunction, it will be used to compute the hash key for storing the result, based
* on the arguments to the original function. The default hashFunction just uses the first argument to the
* memoized function as the key.
* @param fn Computationally expensive function that will now memoized results.
* @param hashFn Hash function for storing the result of `fn`.
* @return Memoized version of `fn`.
**/
memoize(
fn: Function,
hashFn?: (...args: any[]) => string): Function;
/**
* Much like setTimeout, invokes function after wait milliseconds. If you pass the optional arguments,
* they will be forwarded on to the function when it is invoked.
* @param func Function to delay `waitMS` amount of ms.
* @param wait The amount of milliseconds to delay `fn`.
* @arguments Additional arguments to pass to `fn`.
**/
delay(
func: Function,
wait: number,
...arguments: any[]): any;
/**
* @see _.delay
**/
delay(
func: Function,
...arguments: any[]): any;
/**
* Defers invoking the function until the current call stack has cleared, similar to using setTimeout
* with a delay of 0. Useful for performing expensive computations or HTML rendering in chunks without
* blocking the UI thread from updating. If you pass the optional arguments, they will be forwarded on
* to the function when it is invoked.
* @param fn The function to defer.
* @param arguments Additional arguments to pass to `fn`.
**/
defer(
fn: Function,
...arguments: any[]): void;
/**
* Creates and returns a new, throttled version of the passed function, that, when invoked repeatedly,
* will only actually call the original function at most once per every wait milliseconds. Useful for
* rate-limiting events that occur faster than you can keep up with.
* By default, throttle will execute the function as soon as you call it for the first time, and,
* if you call it again any number of times during the wait period, as soon as that period is over.
* If you'd like to disable the leading-edge call, pass {leading: false}, and if you'd like to disable
* the execution on the trailing-edge, pass {trailing: false}.
* @param func Function to throttle `waitMS` ms.
* @param wait The number of milliseconds to wait before `fn` can be invoked again.
* @param options Allows for disabling execution of the throttled function on either the leading or trailing edge.
* @return `fn` with a throttle of `wait`.
**/
throttle<T extends Function>(
func: T,
wait: number,
options?: _.ThrottleSettings): T;
/**
* Creates and returns a new debounced version of the passed function that will postpone its execution
* until after wait milliseconds have elapsed since the last time it was invoked. Useful for implementing
* behavior that should only happen after the input has stopped arriving. For example: rendering a preview
* of a Markdown comment, recalculating a layout after the window has stopped being resized, and so on.
*
* Pass true for the immediate parameter to cause debounce to trigger the function on the leading instead
* of the trailing edge of the wait interval. Useful in circumstances like preventing accidental double
*-clicks on a "submit" button from firing a second time.
* @param fn Function to debounce `waitMS` ms.
* @param wait The number of milliseconds to wait before `fn` can be invoked again.
* @param immediate True if `fn` should be invoked on the leading edge of `waitMS` instead of the trailing edge.
* @return Debounced version of `fn` that waits `wait` ms when invoked.
**/
debounce<T extends Function>(
fn: T,
wait: number,
immediate?: boolean): T;
/**
* Creates a version of the function that can only be called one time. Repeated calls to the modified
* function will have no effect, returning the value from the original call. Useful for initialization
* functions, instead of having to set a boolean flag and then check it later.
* @param fn Function to only execute once.
* @return Copy of `fn` that can only be invoked once.
**/
once<T extends Function>(fn: T): T;
/**
* Creates a version of the function that will only be run after first being called count times. Useful
* for grouping asynchronous responses, where you want to be sure that all the async calls have finished,
* before proceeding.
* @param count Number of times the returned function must be called before `fn` actually executes.
* @param fn The function to defer execution `count` times.
* @return Copy of `fn` that will not execute until it is invoked `count` times.
**/
after(
count: number,
fn: Function): Function;
/**
* Creates a version of the function that can be called no more than count times. The result of
* the last function call is memoized and returned when count has been reached.
* @param count The maximum number of times the function can be called.
* @param fn The function to limit the number of times it can be called.
* @return Copy of `fn` that can only be called `count` times.
**/
before(
count: number,
fn: Function): Function;
/**
* Wraps the first function inside of the wrapper function, passing it as the first argument. This allows
* the wrapper to execute code before and after the function runs, adjust the arguments, and execute it
* conditionally.
* @param fn Function to wrap.
* @param wrapper The function that will wrap `fn`.
* @return Wrapped version of `fn`.
**/
wrap(
fn: Function,
wrapper: (fn: Function, ...args: any[]) => any): Function;
/**
* Returns a new negated version of the predicate function: the returned
* function produces the logical opposite of `predicate` for the same
* arguments. (Note: _.negate returns a function, not a boolean.)
* @param predicate The predicate function to negate.
* @return A function that returns the negation of `predicate`'s result.
**/
negate(predicate: Function): Function;
/**
* Returns the composition of a list of functions, where each function consumes the return value of the
* function that follows. In math terms, composing the functions f(), g(), and h() produces f(g(h())).
* @param functions List of functions to compose.
* @return Composition of `functions`.
**/
compose(...functions: Function[]): Function;
/**********
* Objects *
***********/
/**
* Retrieve all the names of the object's properties.
* @param object Retrieve the key or property names from this object.
* @return List of all the property names on `object`.
**/
keys(object: any): string[];
/**
* Return all of the values of the object's properties.
* @param object Retrieve the values of all the properties on this object.
* @return List of all the values on `object`.
**/
values(object: any): any[];
/**
* Like map, but for objects. Transform the value of each property in turn.
* @param object The object to transform
* @param iteratee The function that transforms property values
* @param context The optional context (value of `this`) to bind to
* @return a new _.Dictionary of property values
*/
mapObject<T, U>(object: _.Dictionary<T>, iteratee: (val: T, key: string, object: _.Dictionary<T>) => U, context?: any): _.Dictionary<U>;
/**
* Like map, but for objects. Transform the value of each property in turn.
* @param object The object to transform
* @param iteratee The function that transforms property values
* @param context The optional context (value of `this`) to bind to
*/
mapObject<T>(object: any, iteratee: (val: any, key: string, object: any) => T, context?: any): _.Dictionary<T>;
/**
* Like map, but for objects. Retrieves a property from each entry in the object, as if by _.property
* @param object The object to transform
* @param iteratee The property name to retrieve
* @param context The optional context (value of `this`) to bind to
*/
mapObject(object: any, iteratee: string, context?: any): _.Dictionary<any>;
/**
* Convert an object into a list of [key, value] pairs.
* @param object Convert this object to a list of [key, value] pairs.
* @return List of [key, value] pairs on `object`.
**/
pairs(object: any): any[][];
/**
* Returns a copy of the object where the keys have become the values and the values the keys.
* For this to work, all of your object's values should be unique and string serializable.
* @param object Object to invert key/value pairs.
* @return An inverted key/value paired version of `object`.
**/
invert(object: any): any;
/**
* Returns a sorted list of the names of every method in an object - that is to say,
* the name of every function property of the object.
* @param object Object to pluck all function property names from.
* @return List of all the function names on `object`.
**/
functions(object: any): string[];
/**
* @see _.functions
**/
methods(object: any): string[];
/**
* Copy all of the properties in the source objects over to the destination object, and return
* the destination object. It's in-order, so the last source will override properties of the
* same name in previous arguments.
* @param destination Object to extend all the properties from `sources`.
* @param sources Extends `destination` with all properties from these source objects.
* @return `destination` extended with all the properties from the `sources` objects.
**/
extend(
destination: any,
...sources: any[]): any;
/**
* Like extend, but only copies own properties over to the destination object. (alias: assign)
*/
extendOwn(
destination: any,
...source: any[]): any;
/**
* Like extend, but only copies own properties over to the destination object. (alias: extendOwn)
*/
assign(
destination: any,
...source: any[]): any;
/**
* Return a copy of the object, filtered to only have values for the whitelisted keys
* (or array of valid keys).
* @param object Object to strip unwanted key/value pairs.
* @param keys The keys (or arrays of keys) to keep on `object`.
* @return Copy of `object` with only the `keys` properties.
**/
pick(
object: any,
...keys: any[]): any;
/**
* @see _.pick
**/
pick(
object: any,
fn: (value: any, key: any, object: any) => any): any;
/**
* Return a copy of the object, filtered to omit the blacklisted keys (or array of keys).
* @param object Object to strip unwanted key/value pairs.
* @param keys The key/value pairs to remove on `object`.
* @return Copy of `object` without the `keys` properties.
**/
omit(
object: any,
...keys: string[]): any;
/**
* @see _.omit
**/
omit(
object: any,
keys: string[]): any;
/**
* @see _.omit
**/
omit(
object: any,
iteratee: Function): any;
/**
* Fill in null and undefined properties in object with values from the defaults objects,
* and return the object. As soon as the property is filled, further defaults will have no effect.
* @param object Fill this object with default values.
* @param defaults The default values to add to `object`.
* @return `object` with added `defaults` values.
**/
defaults(
object: any,
...defaults: any[]): any;
/**
* Create a shallow-copied clone of the object.
* Any nested objects or arrays will be copied by reference, not duplicated.
* @param object Object to clone.
* @return Copy of `object`.
**/
clone<T>(object: T): T;
/**
* Invokes interceptor with the object, and then returns object. The primary purpose of this method
* is to "tap into" a method chain, in order to perform operations on intermediate results within the chain.
* @param object Argument to `interceptor`.
* @param intercepter The function to modify `object` before continuing the method chain.
* @return Modified `object`.
**/
tap<T>(object: T, intercepter: Function): T;
/**
* Does the object contain the given key? Identical to object.hasOwnProperty(key), but uses a safe
* reference to the hasOwnProperty function, in case it's been overridden accidentally.
* @param object Object to check for `key`.
* @param key The key to check for on `object`.
* @return True if `key` is a property on `object`, otherwise false.
**/
has(object: any, key: string): boolean;
/**
* Returns a predicate function that will tell you if a passed in object contains all of the key/value properties present in attrs.
* @param attrs Object with key values pair
* @return Predicate function
**/
matches<T, TResult>(attrs: T): _.ListIterator<T, TResult>;
/**
* Returns a function that will itself return the key property of any passed-in object.
* @param key Property of the object.
* @return Function which accept an object an returns the value of key in that object.
**/
property(key: string): (object: Object) => any;
/**
* Performs an optimized deep comparison between the two objects,
* to determine if they should be considered equal.
* @param object Compare to `other`.
* @param other Compare to `object`.
* @return True if `object` is equal to `other`.
**/
isEqual(object: any, other: any): boolean;
/**
* Returns true if object contains no values.
* @param object Check if this object has no properties or values.
* @return True if `object` is empty.
**/
isEmpty(object: any): boolean;
/**
* Returns true if object is a DOM element.
* @param object Check if this object is a DOM element.
* @return True if `object` is a DOM element, otherwise false.
**/
isElement(object: any): boolean;
/**
* Returns true if object is an Array.
* @param object Check if this object is an Array.
* @return True if `object` is an Array, otherwise false.
**/
isArray(object: any): boolean;
/**
* Returns true if value is an Object. Note that JavaScript arrays and functions are objects,
* while (normal) strings and numbers are not.
* @param object Check if this object is an Object.
* @return True of `object` is an Object, otherwise false.
**/
isObject(object: any): boolean;
/**
* Returns true if object is an Arguments object.
* @param object Check if this object is an Arguments object.
* @return True if `object` is an Arguments object, otherwise false.
**/
isArguments(object: any): boolean;
/**
* Returns true if object is a Function.
* @param object Check if this object is a Function.
* @return True if `object` is a Function, otherwise false.
**/
isFunction(object: any): boolean;
/**
* Returns true if object is a String.
* @param object Check if this object is a String.
* @return True if `object` is a String, otherwise false.
**/
isString(object: any): boolean;
/**
* Returns true if object is a Number (including NaN).
* @param object Check if this object is a Number.
* @return True if `object` is a Number, otherwise false.
**/
isNumber(object: any): boolean;
/**
* Returns true if object is a finite Number.
* @param object Check if this object is a finite Number.
* @return True if `object` is a finite Number.
**/
isFinite(object: any): boolean;
/**
* Returns true if object is either true or false.
* @param object Check if this object is a bool.
* @return True if `object` is a bool, otherwise false.
**/
isBoolean(object: any): boolean;
/**
* Returns true if object is a Date.
* @param object Check if this object is a Date.
* @return True if `object` is a Date, otherwise false.
**/
isDate(object: any): boolean;
/**
* Returns true if object is a RegExp.
* @param object Check if this object is a RegExp.
* @return True if `object` is a RegExp, otherwise false.
**/
isRegExp(object: any): boolean;
/**
* Returns true if object is NaN.
* Note: this is not the same as the native isNaN function,
* which will also return true if the variable is undefined.
* @param object Check if this object is NaN.
* @return True if `object` is NaN, otherwise false.
**/
isNaN(object: any): boolean;
/**
* Returns true if the value of object is null.
* @param object Check if this object is null.
* @return True if `object` is null, otherwise false.
**/
isNull(object: any): boolean;
/**
* Returns true if value is undefined.
* @param object Check if this object is undefined.
* @return True if `object` is undefined, otherwise false.
**/
isUndefined(value: any): boolean;
/* *********
* Utility *
********** */
/**
* Give control of the "_" variable back to its previous owner.
* Returns a reference to the Underscore object.
* @return Underscore object reference.
**/
noConflict(): any;
/**
* Returns the same value that is used as the argument. In math: f(x) = x
* This function looks useless, but is used throughout Underscore as a default iterator.
* @param value Identity of this object.
* @return `value`.
**/
identity<T>(value: T): T;
/**
* Creates a function that returns the same value that is used as the argument of _.constant
* @param value Identity of this object.
* @return Function that return value.
**/
constant<T>(value: T): () => T;
/**
* Returns undefined irrespective of the arguments passed to it. Useful as the default
* for optional callback arguments.
* Note there is no way to indicate a 'undefined' return, so it is currently typed as void.
* @return undefined
**/
noop(): void;
/**
* Invokes the given iterator function n times.
* Each invocation of iterator is called with an index argument
* @param n Number of times to invoke `iterator`.
* @param iterator Function iterator to invoke `n` times.
* @param context `this` object in `iterator`, optional.
**/
times<TResult>(n: number, iterator: (n: number) => TResult, context?: any): TResult[];
/**
* Returns a random integer between min and max, inclusive. If you only pass one argument,
* it will return a number between 0 and that number.
* @param max The maximum random number.
* @return A random number between 0 and `max`.
**/
random(max: number): number;
/**
* @see _.random
* @param min The minimum random number.
* @return A random number between `min` and `max`.
**/
random(min: number, max: number): number;
/**
* Allows you to extend Underscore with your own utility functions. Pass a hash of
* {name: function} definitions to have your functions added to the Underscore object,
* as well as the OOP wrapper.
* @param object Mixin object containing key/function pairs to add to the Underscore object.
**/
mixin(object: any): void;
/**
* A mostly-internal function to generate callbacks that can be applied to each element
* in a collection, returning the desired result -- either identity, an arbitrary callback,
* a property matcher, or a property accessor.
* @param value The value to iterate over, usually the key (string, Function, or Object).
* @param context The optional context (value of `this`) to bind to.
* @param argCount Number of arguments the generated callback accepts.
* @return Callback that can be applied to each element in a collection.
**/
iteratee(value: string): Function;
iteratee(value: Function, context?: any, argCount?: number): Function;
iteratee(value: Object): Function;
/**
* Generate a globally-unique id for client-side models or DOM elements that need one.
* If prefix is passed, the id will be appended to it. Without prefix, returns an integer.
* @param prefix A prefix string to start the unique ID with.
* @return Unique string ID beginning with `prefix`.
**/
uniqueId(prefix: string): string;
/**
* @see _.uniqueId
**/
uniqueId(): number;
/**
* Escapes a string for insertion into HTML, replacing &, <, >, ", ', and / characters.
* @param str Raw string to escape.
* @return `str` HTML escaped.
**/
escape(str: string): string;
/**
* The opposite of escape, replaces &, <, >, ", and ' with their unescaped counterparts.
* @param str HTML escaped string.
* @return `str` Raw string.
**/
unescape(str: string): string;
/**
* If the value of the named property is a function then invoke it; otherwise, return it.
* @param object Object to maybe invoke function `property` on.
* @param property The function by name to invoke on `object`.
* @return The result of invoking the function `property` on `object.
**/
result(object: any, property: string): any;
/**
* Compiles JavaScript templates into functions that can be evaluated for rendering. Useful
* for rendering complicated bits of HTML from JSON data sources. Template functions can both
* interpolate variables, using <%= ... %>, as well as execute arbitrary JavaScript code, with
* <% ... %>. If you wish to interpolate a value, and have it be HTML-escaped, use <%- ... %> When
* you evaluate a template function, pass in a data object that has properties corresponding to
* the template's free variables. If you're writing a one-off, you can pass the data object as
* the second parameter to template in order to render immediately instead of returning a template
* function. The settings argument should be a hash containing any _.templateSettings that should
* be overridden.
* @param templateString Underscore HTML template.
* @param data Data to use when compiling `templateString`.
* @param settings Settings to use while compiling.
* @return Returns the compiled Underscore HTML template.
**/
template(templateString: string, settings?: _.TemplateSettings): (...data: any[]) => string;
/**
* By default, Underscore uses ERB-style template delimiters, change the
* following template settings to use alternative delimiters.
**/
templateSettings: _.TemplateSettings;
/**
* Returns an integer timestamp for the current time, using the fastest method available in the runtime. Useful for implementing timing/animation functions.
**/
now(): number;
/* **********
* Chaining *
*********** */
/**
* Returns a wrapped object. Calling methods on this object will continue to return wrapped objects
* until value() is used.
* @param obj Object to chain.
* @return Wrapped `obj`.
**/
chain<T>(obj: T[]): _Chain<T>;
chain<T extends {}>(obj: T): _Chain<T>;
}
interface Underscore<T> {
/* *************
* Collections *
************* */
/**
* Wrapped type `any[]`.
* @see _.each
**/
each(iterator: _.ListIterator<T, void>, context?: any): T[];
/**
* @see _.each
**/
each(iterator: _.ObjectIterator<T, void>, context?: any): T[];
/**
* @see _.each
**/
forEach(iterator: _.ListIterator<T, void>, context?: any): T[];
/**
* @see _.each
**/
forEach(iterator: _.ObjectIterator<T, void>, context?: any): T[];
/**
* Wrapped type `any[]`.
* @see _.map
**/
map<TResult>(iterator: _.ListIterator<T, TResult>, context?: any): TResult[];
/**
* Wrapped type `any[]`.
* @see _.map
**/
map<TResult>(iterator: _.ObjectIterator<T, TResult>, context?: any): TResult[];
/**
* @see _.map
**/
collect<TResult>(iterator: _.ListIterator<T, TResult>, context?: any): TResult[];
/**
* @see _.map
**/
collect<TResult>(iterator: _.ObjectIterator<T, TResult>, context?: any): TResult[];
/**
* Wrapped type `any[]`.
* @see _.reduce
**/
reduce<TResult>(iterator: _.MemoIterator<T, TResult>, memo?: TResult, context?: any): TResult;
/**
* @see _.reduce
**/
inject<TResult>(iterator: _.MemoIterator<T, TResult>, memo?: TResult, context?: any): TResult;
/**
* @see _.reduce
**/
foldl<TResult>(iterator: _.MemoIterator<T, TResult>, memo?: TResult, context?: any): TResult;
/**
* Wrapped type `any[]`.
* @see _.reduceRight
**/
reduceRight<TResult>(iterator: _.MemoIterator<T, TResult>, memo?: TResult, context?: any): TResult;
/**
* @see _.reduceRight
**/
foldr<TResult>(iterator: _.MemoIterator<T, TResult>, memo?: TResult, context?: any): TResult;
/**
* Wrapped type `any[]`.
* @see _.find
**/
find(iterator: _.ListIterator<T, boolean>, context?: any): T;
/**
* @see _.find
**/
detect(iterator: _.ListIterator<T, boolean>, context?: any): T;
/**
* Wrapped type `any[]`.
* @see _.filter
**/
filter(iterator: _.ListIterator<T, boolean>, context?: any): T[];
/**
* @see _.filter
**/
select(iterator: _.ListIterator<T, boolean>, context?: any): T[];
/**
* Wrapped type `any[]`.
* @see _.where
**/
where<U extends {}>(properties: U): T[];
/**
* Wrapped type `any[]`.
* @see _.findWhere
**/
findWhere<U extends {}>(properties: U): T;
/**
* Wrapped type `any[]`.
* @see _.reject
**/
reject(iterator: _.ListIterator<T, boolean>, context?: any): T[];
/**
* Wrapped type `any[]`.
* @see _.all
**/
all(iterator?: _.ListIterator<T, boolean>, context?: any): boolean;
/**
* @see _.all
**/
every(iterator?: _.ListIterator<T, boolean>, context?: any): boolean;
/**
* Wrapped type `any[]`.
* @see _.any
**/
any(iterator?: _.ListIterator<T, boolean>, context?: any): boolean;
/**
* @see _.any
**/
some(iterator?: _.ListIterator<T, boolean>, context?: any): boolean;
/**
* Wrapped type `any[]`.
* @see _.contains
**/
contains(value: T): boolean;
/**
* Alias for 'contains'.
* @see contains
**/
include(value: T): boolean;
/**
* Wrapped type `any[]`.
* @see _.invoke
**/
invoke(methodName: string, ...arguments: any[]): any;
/**
* Wrapped type `any[]`.
* @see _.pluck
**/
pluck(propertyName: string): any[];
/**
* Wrapped type `number[]`.
* @see _.max
**/
max(): number;
/**
* Wrapped type `any[]`.
* @see _.max
**/
max(iterator: _.ListIterator<T, number>, context?: any): T;
/**
* Wrapped type `any[]`.
* @see _.max
**/
max(iterator?: _.ListIterator<T, any>, context?: any): T;
/**
* Wrapped type `number[]`.
* @see _.min
**/
min(): number;
/**
* Wrapped type `any[]`.
* @see _.min
**/
min(iterator: _.ListIterator<T, number>, context?: any): T;
/**
* Wrapped type `any[]`.
* @see _.min
**/
min(iterator?: _.ListIterator<T, any>, context?: any): T;
/**
* Wrapped type `any[]`.
* @see _.sortBy
**/
sortBy(iterator?: _.ListIterator<T, any>, context?: any): T[];
/**
* Wrapped type `any[]`.
* @see _.sortBy
**/
sortBy(iterator: string, context?: any): T[];
/**
* Wrapped type `any[]`.
* @see _.groupBy
**/
groupBy(iterator?: _.ListIterator<T, any>, context?: any): _.Dictionary<_.List<T>>;
/**
* Wrapped type `any[]`.
* @see _.groupBy
**/
groupBy(iterator: string, context?: any): _.Dictionary<T[]>;
/**
* Wrapped type `any[]`.
* @see _.indexBy
**/
indexBy(iterator: _.ListIterator<T, any>, context?: any): _.Dictionary<T>;
/**
* Wrapped type `any[]`.
* @see _.indexBy
**/
indexBy(iterator: string, context?: any): _.Dictionary<T>;
/**
* Wrapped type `any[]`.
* @see _.countBy
**/
countBy(iterator?: _.ListIterator<T, any>, context?: any): _.Dictionary<number>;
/**
* Wrapped type `any[]`.
* @see _.countBy
**/
countBy(iterator: string, context?: any): _.Dictionary<number>;
/**
* Wrapped type `any[]`.
* @see _.shuffle
**/
shuffle(): T[];
/**
* Wrapped type `any[]`.
* @see _.sample
**/
sample<T>(n: number): T[];
/**
* @see _.sample
**/
sample<T>(): T;
/**
* Wrapped type `any`.
* @see _.toArray
**/
toArray(): T[];
/**
* Wrapped type `any`.
* @see _.size
**/
size(): number;
/*********
* Arrays *
**********/
/**
* Wrapped type `any[]`.
* @see _.first
**/
first(): T;
/**
* Wrapped type `any[]`.
* @see _.first
**/
first(n: number): T[];
/**
* @see _.first
**/
head(): T;
/**
* @see _.first
**/
head(n: number): T[];
/**
* @see _.first
**/
take(): T;
/**
* @see _.first
**/
take(n: number): T[];
/**
* Wrapped type `any[]`.
* @see _.initial
**/
initial(n?: number): T[];
/**
* Wrapped type `any[]`.
* @see _.last
**/
last(): T;
/**
* Wrapped type `any[]`.
* @see _.last
**/
last(n: number): T[];
/**
* Wrapped type `any[]`.
* @see _.rest
**/
rest(n?: number): T[];
/**
* @see _.rest
**/
tail(n?: number): T[];
/**
* @see _.rest
**/
drop(n?: number): T[];
/**
* Wrapped type `any[]`.
* @see _.compact
**/
compact(): T[];
/**
* Wrapped type `any`.
* @see _.flatten
**/
flatten(shallow?: boolean): any[];
/**
* Wrapped type `any[]`.
* @see _.without
**/
without(...values: T[]): T[];
/**
* Wrapped type `any[]`.
* @see _.partition
**/
partition(iterator: _.ListIterator<T, boolean>, context?: any): T[][];
/**
* Wrapped type `any[][]`.
* @see _.union
**/
union(...arrays: _.List<T>[]): T[];
/**
* Wrapped type `any[][]`.
* @see _.intersection
**/
intersection(...arrays: _.List<T>[]): T[];
/**
* Wrapped type `any[]`.
* @see _.difference
**/
difference(...others: _.List<T>[]): T[];
/**
* Wrapped type `any[]`.
* @see _.uniq
**/
uniq(isSorted?: boolean, iterator?: _.ListIterator<T, any>): T[];
/**
* Wrapped type `any[]`.
* @see _.uniq
**/
uniq<TSort>(iterator?: _.ListIterator<T, TSort>, context?: any): T[];
/**
* @see _.uniq
**/
unique<TSort>(isSorted?: boolean, iterator?: _.ListIterator<T, TSort>): T[];
/**
* @see _.uniq
**/
unique<TSort>(iterator?: _.ListIterator<T, TSort>, context?: any): T[];
/**
* Wrapped type `any[][]`.
* @see _.zip
**/
zip(...arrays: any[][]): any[][];
/**
* Wrapped type `any[][]`.
* @see _.object
**/
object(...keyValuePairs: any[][]): any;
/**
* @see _.object
**/
object(values?: any): any;
/**
* Wrapped type `any[]`.
* @see _.indexOf
**/
indexOf(value: T, isSorted?: boolean): number;
/**
* @see _.indexOf
**/
indexOf(value: T, startFrom: number): number;
/**
* Wrapped type `any[]`.
* @see _.lastIndexOf
**/
lastIndexOf(value: T, from?: number): number;
/**
* Wrapped type `any[]`.
* @see _.sortedIndex
**/
sortedIndex(value: T, iterator?: (x: T) => any, context?: any): number;
/**
* Wrapped type `number`.
* @see _.range
**/
range(stop: number, step?: number): number[];
/**
* Wrapped type `number`.
* @see _.range
**/
range(): number[];
/* ***********
* Functions *
************ */
/**
* Wrapped type `Function`.
* @see _.bind
**/
bind(object: any, ...arguments: any[]): Function;
/**
* Wrapped type `object`.
* @see _.bindAll
**/
bindAll(...methodNames: string[]): any;
/**
* Wrapped type `Function`.
* @see _.partial
**/
partial(...arguments: any[]): Function;
/**
* Wrapped type `Function`.
* @see _.memoize
**/
memoize(hashFn?: (n: any) => string): Function;
/**
* Wrapped type `Function`.
* @see _.defer
**/
defer(...arguments: any[]): void;
/**
* Wrapped type `Function`.
* @see _.delay
**/
delay(wait: number, ...arguments: any[]): any;
/**
* @see _.delay
**/
delay(...arguments: any[]): any;
/**
* Wrapped type `Function`.
* @see _.throttle
**/
throttle(wait: number, options?: _.ThrottleSettings): Function;
/**
* Wrapped type `Function`.
* @see _.debounce
**/
debounce(wait: number, immediate?: boolean): Function;
/**
* Wrapped type `Function`.
* @see _.once
**/
once(): Function;
/**
* Wrapped type `number`.
* @see _.after
**/
after(fn: Function): Function;
/**
* Wrapped type `number`.
* @see _.before
**/
before(fn: Function): Function;
/**
* Wrapped type `Function`.
* @see _.wrap
**/
wrap(wrapper: Function): () => Function;
/**
* Wrapped type `Function`.
* @see _.negate
**/
negate(): boolean;
/**
* Wrapped type `Function[]`.
* @see _.compose
**/
compose(...functions: Function[]): Function;
/********* *
* Objects *
********** */
/**
* Wrapped type `object`.
* @see _.keys
**/
keys(): string[];
/**
* Wrapped type `object`.
* @see _.values
**/
values(): T[];
/**
* Wrapped type `object`.
* @see _.pairs
**/
pairs(): any[][];
/**
* Wrapped type `object`.
* @see _.invert
**/
invert(): any;
/**
* Wrapped type `object`.
* @see _.functions
**/
functions(): string[];
/**
* @see _.functions
**/
methods(): string[];
/**
* Wrapped type `object`.
* @see _.extend
**/
extend(...sources: any[]): any;
/**
* Wrapped type `object`.
* @see _.pick
**/
pick(...keys: any[]): any;
pick(keys: any[]): any;
pick(fn: (value: any, key: any, object: any) => any): any;
/**
* Wrapped type `object`.
* @see _.omit
**/
omit(...keys: string[]): any;
omit(keys: string[]): any;
omit(fn: Function): any;
/**
* Wrapped type `object`.
* @see _.defaults
**/
defaults(...defaults: any[]): any;
/**
* Wrapped type `any[]`.
* @see _.clone
**/
clone(): T;
/**
* Wrapped type `object`.
* @see _.tap
**/
tap(interceptor: (...as: any[]) => any): any;
/**
* Wrapped type `object`.
* @see _.has
**/
has(key: string): boolean;
/**
* Wrapped type `any[]`.
* @see _.matches
**/
matches<TResult>(): _.ListIterator<T, TResult>;
/**
* Wrapped type `string`.
* @see _.property
**/
property(): (object: Object) => any;
/**
* Wrapped type `object`.
* @see _.isEqual
**/
isEqual(other: any): boolean;
/**
* Wrapped type `object`.
* @see _.isEmpty
**/
isEmpty(): boolean;
/**
* Wrapped type `object`.
* @see _.isElement
**/
isElement(): boolean;
/**
* Wrapped type `object`.
* @see _.isArray
**/
isArray(): boolean;
/**
* Wrapped type `object`.
* @see _.isObject
**/
isObject(): boolean;
/**
* Wrapped type `object`.
* @see _.isArguments
**/
isArguments(): boolean;
/**
* Wrapped type `object`.
* @see _.isFunction
**/
isFunction(): boolean;
/**
* Wrapped type `object`.
* @see _.isString
**/
isString(): boolean;
/**
* Wrapped type `object`.
* @see _.isNumber
**/
isNumber(): boolean;
/**
* Wrapped type `object`.
* @see _.isFinite
**/
isFinite(): boolean;
/**
* Wrapped type `object`.
* @see _.isBoolean
**/
isBoolean(): boolean;
/**
* Wrapped type `object`.
* @see _.isDate
**/
isDate(): boolean;
/**
* Wrapped type `object`.
* @see _.isRegExp
**/
isRegExp(): boolean;
/**
* Wrapped type `object`.
* @see _.isNaN
**/
isNaN(): boolean;
/**
* Wrapped type `object`.
* @see _.isNull
**/
isNull(): boolean;
/**
* Wrapped type `object`.
* @see _.isUndefined
**/
isUndefined(): boolean;
/********* *
* Utility *
********** */
/**
* Wrapped type `any`.
* @see _.identity
**/
identity(): any;
/**
* Wrapped type `any`.
* @see _.constant
**/
constant(): () => T;
/**
* Wrapped type `any`.
* @see _.noop
**/
noop(): void;
/**
* Wrapped type `number`.
* @see _.times
**/
times<TResult>(iterator: (n: number) => TResult, context?: any): TResult[];
/**
* Wrapped type `number`.
* @see _.random
**/
random(): number;
/**
* Wrapped type `number`.
* @see _.random
**/
random(max: number): number;
/**
* Wrapped type `object`.
* @see _.mixin
**/
mixin(): void;
/**
* Wrapped type `string|Function|Object`.
* @see _.iteratee
**/
iteratee(context?: any, argCount?: number): Function;
/**
* Wrapped type `string`.
* @see _.uniqueId
**/
uniqueId(): string;
/**
* Wrapped type `string`.
* @see _.escape
**/
escape(): string;
/**
* Wrapped type `string`.
* @see _.unescape
**/
unescape(): string;
/**
* Wrapped type `object`.
* @see _.result
**/
result(property: string): any;
/**
* Wrapped type `string`.
* @see _.template
**/
template(settings?: _.TemplateSettings): (...data: any[]) => string;
/********** *
* Chaining *
*********** */
/**
* Wrapped type `any`.
* @see _.chain
**/
chain(): _Chain<T>;
/**
* Wrapped type `any`.
* Extracts the value of a wrapped object.
* @return Value of the wrapped object.
**/
value<TResult>(): TResult;
}
interface _Chain<T> {
/* *************
* Collections *
************* */
/**
* Wrapped type `any[]`.
* @see _.each
**/
each(iterator: _.ListIterator<T, void>, context?: any): _Chain<T>;
/**
* @see _.each
**/
each(iterator: _.ObjectIterator<T, void>, context?: any): _Chain<T>;
/**
* @see _.each
**/
forEach(iterator: _.ListIterator<T, void>, context?: any): _Chain<T>;
/**
* @see _.each
**/
forEach(iterator: _.ObjectIterator<T, void>, context?: any): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.map
**/
map<TArray>(iterator: _.ListIterator<T, TArray[]>, context?: any): _ChainOfArrays<TArray>;
/**
* Wrapped type `any[]`.
* @see _.map
**/
map<TResult>(iterator: _.ListIterator<T, TResult>, context?: any): _Chain<TResult>;
/**
* Wrapped type `any[]`.
* @see _.map
**/
map<TArray>(iterator: _.ObjectIterator<T, TArray[]>, context?: any): _ChainOfArrays<TArray>;
/**
* Wrapped type `any[]`.
* @see _.map
**/
map<TResult>(iterator: _.ObjectIterator<T, TResult>, context?: any): _Chain<TResult>;
/**
* @see _.map
**/
collect<TResult>(iterator: _.ListIterator<T, TResult>, context?: any): _Chain<TResult>;
/**
* @see _.map
**/
collect<TResult>(iterator: _.ObjectIterator<T, TResult>, context?: any): _Chain<TResult>;
/**
* Wrapped type `any[]`.
* @see _.reduce
**/
reduce<TResult>(iterator: _.MemoIterator<T, TResult>, memo?: TResult, context?: any): _ChainSingle<TResult>;
/**
* @see _.reduce
**/
inject<TResult>(iterator: _.MemoIterator<T, TResult>, memo?: TResult, context?: any): _ChainSingle<TResult>;
/**
* @see _.reduce
**/
foldl<TResult>(iterator: _.MemoIterator<T, TResult>, memo?: TResult, context?: any): _ChainSingle<TResult>;
/**
* Wrapped type `any[]`.
* @see _.reduceRight
**/
reduceRight<TResult>(iterator: _.MemoIterator<T, TResult>, memo?: TResult, context?: any): _ChainSingle<TResult>;
/**
* @see _.reduceRight
**/
foldr<TResult>(iterator: _.MemoIterator<T, TResult>, memo?: TResult, context?: any): _ChainSingle<TResult>;
/**
* Wrapped type `any[]`.
* @see _.find
**/
find(iterator: _.ListIterator<T, boolean>, context?: any): _ChainSingle<T>;
/**
* @see _.find
**/
detect(iterator: _.ListIterator<T, boolean>, context?: any): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.filter
**/
filter(iterator: _.ListIterator<T, boolean>, context?: any): _Chain<T>;
/**
* @see _.filter
**/
select(iterator: _.ListIterator<T, boolean>, context?: any): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.where
**/
where<U extends {}>(properties: U): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.findWhere
**/
findWhere<U extends {}>(properties: U): _ChainSingle<T>;
/**
* Wrapped type `any[]`.
* @see _.reject
**/
reject(iterator: _.ListIterator<T, boolean>, context?: any): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.all
**/
all(iterator?: _.ListIterator<T, boolean>, context?: any): _Chain<T>;
/**
* @see _.all
**/
every(iterator?: _.ListIterator<T, boolean>, context?: any): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.any
**/
any(iterator?: _.ListIterator<T, boolean>, context?: any): _Chain<T>;
/**
* @see _.any
**/
some(iterator?: _.ListIterator<T, boolean>, context?: any): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.contains
**/
contains(value: T): _Chain<T>;
/**
* Alias for 'contains'.
* @see contains
**/
include(value: T): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.invoke
**/
invoke(methodName: string, ...arguments: any[]): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.pluck
**/
pluck(propertyName: string): _Chain<any>;
/**
* Wrapped type `number[]`.
* @see _.max
**/
max(): _ChainSingle<T>;
/**
* Wrapped type `any[]`.
* @see _.max
**/
max(iterator: _.ListIterator<T, number>, context?: any): _ChainSingle<T>;
/**
* Wrapped type `any[]`.
* @see _.max
**/
max(iterator?: _.ListIterator<T, any>, context?: any): _ChainSingle<T>;
/**
* Wrapped type `number[]`.
* @see _.min
**/
min(): _ChainSingle<T>;
/**
* Wrapped type `any[]`.
* @see _.min
**/
min(iterator: _.ListIterator<T, number>, context?: any): _ChainSingle<T>;
/**
* Wrapped type `any[]`.
* @see _.min
**/
min(iterator?: _.ListIterator<T, any>, context?: any): _ChainSingle<T>;
/**
* Wrapped type `any[]`.
* @see _.sortBy
**/
sortBy(iterator?: _.ListIterator<T, any>, context?: any): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.sortBy
**/
sortBy(iterator: string, context?: any): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.groupBy
**/
groupBy(iterator?: _.ListIterator<T, any>, context?: any): _ChainOfArrays<T>;
/**
* Wrapped type `any[]`.
* @see _.groupBy
**/
groupBy(iterator: string, context?: any): _ChainOfArrays<T>;
/**
* Wrapped type `any[]`.
* @see _.indexBy
**/
indexBy(iterator: _.ListIterator<T, any>, context?: any): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.indexBy
**/
indexBy(iterator: string, context?: any): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.countBy
**/
countBy(iterator?: _.ListIterator<T, any>, context?: any): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.countBy
**/
countBy(iterator: string, context?: any): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.shuffle
**/
shuffle(): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.sample
**/
sample<T>(n: number): _Chain<T>;
/**
* @see _.sample
**/
sample<T>(): _Chain<T>;
/**
* Wrapped type `any`.
* @see _.toArray
**/
toArray(): _Chain<T>;
/**
* Wrapped type `any`.
* @see _.size
**/
size(): _Chain<T>;
/*********
* Arrays *
**********/
/**
* Wrapped type `any[]`.
* @see _.first
**/
first(): _ChainSingle<T>;
/**
* Wrapped type `any[]`.
* @see _.first
**/
first(n: number): _Chain<T>;
/**
* @see _.first
**/
head(): _Chain<T>;
/**
* @see _.first
**/
head(n: number): _Chain<T>;
/**
* @see _.first
**/
take(): _Chain<T>;
/**
* @see _.first
**/
take(n: number): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.initial
**/
initial(n?: number): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.last
**/
last(): _ChainSingle<T>;
/**
* Wrapped type `any[]`.
* @see _.last
**/
last(n: number): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.rest
**/
rest(n?: number): _Chain<T>;
/**
* @see _.rest
**/
tail(n?: number): _Chain<T>;
/**
* @see _.rest
**/
drop(n?: number): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.compact
**/
compact(): _Chain<T>;
/**
* Wrapped type `any`.
* @see _.flatten
**/
flatten(shallow?: boolean): _Chain<any>;
/**
* Wrapped type `any[]`.
* @see _.without
**/
without(...values: T[]): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.partition
**/
partition(iterator: _.ListIterator<T, boolean>, context?: any): _Chain<T[]>;
/**
* Wrapped type `any[][]`.
* @see _.union
**/
union(...arrays: _.List<T>[]): _Chain<T>;
/**
* Wrapped type `any[][]`.
* @see _.intersection
**/
intersection(...arrays: _.List<T>[]): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.difference
**/
difference(...others: _.List<T>[]): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.uniq
**/
uniq(isSorted?: boolean, iterator?: _.ListIterator<T, any>): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.uniq
**/
uniq<TSort>(iterator?: _.ListIterator<T, TSort>, context?: any): _Chain<T>;
/**
* @see _.uniq
**/
unique<TSort>(isSorted?: boolean, iterator?: _.ListIterator<T, TSort>): _Chain<T>;
/**
* @see _.uniq
**/
unique<TSort>(iterator?: _.ListIterator<T, TSort>, context?: any): _Chain<T>;
/**
* Wrapped type `any[][]`.
* @see _.zip
**/
zip(...arrays: any[][]): _Chain<T>;
/**
* Wrapped type `any[][]`.
* @see _.object
**/
object(...keyValuePairs: any[][]): _Chain<T>;
/**
* @see _.object
**/
object(values?: any): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.indexOf
**/
indexOf(value: T, isSorted?: boolean): _ChainSingle<T>;
/**
* @see _.indexOf
**/
indexOf(value: T, startFrom: number): _ChainSingle<T>;
/**
* Wrapped type `any[]`.
* @see _.lastIndexOf
**/
lastIndexOf(value: T, from?: number): _ChainSingle<T>;
/**
* Wrapped type `any[]`.
* @see _.sortedIndex
**/
sortedIndex(value: T, iterator?: (x: T) => any, context?: any): _Chain<T>;
/**
* Wrapped type `number`.
* @see _.range
**/
range(stop: number, step?: number): _Chain<T>;
/**
* Wrapped type `number`.
* @see _.range
**/
range(): _Chain<T>;
/* ***********
* Functions *
************ */
/**
* Wrapped type `Function`.
* @see _.bind
**/
bind(object: any, ...arguments: any[]): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.bindAll
**/
bindAll(...methodNames: string[]): _Chain<T>;
/**
* Wrapped type `Function`.
* @see _.partial
**/
partial(...arguments: any[]): _Chain<T>;
/**
* Wrapped type `Function`.
* @see _.memoize
**/
memoize(hashFn?: (n: any) => string): _Chain<T>;
/**
* Wrapped type `Function`.
* @see _.defer
**/
defer(...arguments: any[]): _Chain<T>;
/**
* Wrapped type `Function`.
* @see _.delay
**/
delay(wait: number, ...arguments: any[]): _Chain<T>;
/**
* @see _.delay
**/
delay(...arguments: any[]): _Chain<T>;
/**
* Wrapped type `Function`.
* @see _.throttle
**/
throttle(wait: number, options?: _.ThrottleSettings): _Chain<T>;
/**
* Wrapped type `Function`.
* @see _.debounce
**/
debounce(wait: number, immediate?: boolean): _Chain<T>;
/**
* Wrapped type `Function`.
* @see _.once
**/
once(): _Chain<T>;
/**
* Wrapped type `number`.
* @see _.after
**/
after(func: Function): _Chain<T>;
/**
* Wrapped type `number`.
* @see _.before
**/
before(fn: Function): _Chain<T>;
/**
* Wrapped type `Function`.
* @see _.wrap
**/
wrap(wrapper: Function): () => _Chain<T>;
/**
* Wrapped type `Function`.
* @see _.negate
**/
negate(): _Chain<T>;
/**
* Wrapped type `Function[]`.
* @see _.compose
**/
compose(...functions: Function[]): _Chain<T>;
/********* *
* Objects *
********** */
/**
* Wrapped type `object`.
* @see _.keys
**/
keys(): _Chain<string>;
/**
* Wrapped type `object`.
* @see _.values
**/
values(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.pairs
**/
pairs(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.invert
**/
invert(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.functions
**/
functions(): _Chain<T>;
/**
* @see _.functions
**/
methods(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.extend
**/
extend(...sources: any[]): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.pick
**/
pick(...keys: any[]): _Chain<T>;
pick(keys: any[]): _Chain<T>;
pick(fn: (value: any, key: any, object: any) => any): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.omit
**/
omit(...keys: string[]): _Chain<T>;
omit(keys: string[]): _Chain<T>;
omit(iteratee: Function): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.defaults
**/
defaults(...defaults: any[]): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.clone
**/
clone(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.tap
**/
tap(interceptor: (...as: any[]) => any): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.has
**/
has(key: string): _Chain<T>;
/**
* Wrapped type `any[]`.
* @see _.matches
**/
matches<TResult>(): _Chain<T>;
/**
* Wrapped type `string`.
* @see _.property
**/
property(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.isEqual
**/
isEqual(other: any): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.isEmpty
**/
isEmpty(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.isElement
**/
isElement(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.isArray
**/
isArray(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.isObject
**/
isObject(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.isArguments
**/
isArguments(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.isFunction
**/
isFunction(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.isString
**/
isString(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.isNumber
**/
isNumber(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.isFinite
**/
isFinite(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.isBoolean
**/
isBoolean(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.isDate
**/
isDate(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.isRegExp
**/
isRegExp(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.isNaN
**/
isNaN(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.isNull
**/
isNull(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.isUndefined
**/
isUndefined(): _Chain<T>;
/********* *
* Utility *
********** */
/**
* Wrapped type `any`.
* @see _.identity
**/
identity(): _Chain<T>;
/**
* Wrapped type `any`.
* @see _.constant
**/
constant(): _Chain<T>;
/**
* Wrapped type `any`.
* @see _.noop
**/
noop(): _Chain<T>;
/**
* Wrapped type `number`.
* @see _.times
**/
times<TResult>(iterator: (n: number) => TResult, context?: any): _Chain<T>;
/**
* Wrapped type `number`.
* @see _.random
**/
random(): _Chain<T>;
/**
* Wrapped type `number`.
* @see _.random
**/
random(max: number): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.mixin
**/
mixin(): _Chain<T>;
/**
* Wrapped type `string|Function|Object`.
* @see _.iteratee
**/
iteratee(context?: any, argCount?: number): _Chain<T>;
/**
* Wrapped type `string`.
* @see _.uniqueId
**/
uniqueId(): _Chain<T>;
/**
* Wrapped type `string`.
* @see _.escape
**/
escape(): _Chain<T>;
/**
* Wrapped type `string`.
* @see _.unescape
**/
unescape(): _Chain<T>;
/**
* Wrapped type `object`.
* @see _.result
**/
result(property: string): _Chain<T>;
/**
* Wrapped type `string`.
* @see _.template
**/
template(settings?: _.TemplateSettings): (...data: any[]) => _Chain<T>;
/************* *
* Array proxy *
************** */
/**
* Returns a new array comprised of the array on which it is called
* joined with the array(s) and/or value(s) provided as arguments.
* @param arr Arrays and/or values to concatenate into a new array. See the discussion below for details.
* @return A new array comprised of the array on which it is called
**/
concat(...arr: Array<T[]>): _Chain<T>;
/**
* Join all elements of an array into a string.
* @param separator Optional. Specifies a string to separate each element of the array. The separator is converted to a string if necessary. If omitted, the array elements are separated with a comma.
* @return The string conversions of all array elements joined into one string.
**/
join(separator?: any): _ChainSingle<T>;
/**
* Removes the last element from an array and returns that element.
* @return Returns the popped element.
**/
pop(): _ChainSingle<T>;
/**
* Adds one or more elements to the end of an array and returns the new length of the array.
* @param item The elements to add to the end of the array.
* @return The array with the element added to the end.
**/
push(...item: Array<T>): _Chain<T>;
/**
* Reverses an array in place. The first array element becomes the last and the last becomes the first.
* @return The reversed array.
**/
reverse(): _Chain<T>;
/**
* Removes the first element from an array and returns that element. This method changes the length of the array.
* @return The shifted element.
**/
shift(): _ChainSingle<T>;
/**
* Returns a shallow copy of a portion of an array into a new array object.
* @param start Zero-based index at which to begin extraction.
* @param end Optional. Zero-based index at which to end extraction. slice extracts up to but not including end.
* @return A shallow copy of a portion of an array into a new array object.
**/
slice(start: number, end?: number): _Chain<T>;
/**
* Sorts the elements of an array in place and returns the array. The sort is not necessarily stable. The default sort order is according to string Unicode code points.
* @param compareFn Optional. Specifies a function that defines the sort order. If omitted, the array is sorted according to each character's Unicode code point value, according to the string conversion of each element.
* @return The sorted array.
**/
sort(compareFn: (a: T, b: T) => boolean): _Chain<T>;
/**
* Changes the content of an array by removing existing elements and/or adding new elements.
* @param index Index at which to start changing the array. If greater than the length of the array, actual starting index will be set to the length of the array. If negative, will begin that many elements from the end.
* @param quantity An integer indicating the number of old array elements to remove. If deleteCount is 0, no elements are removed. In this case, you should specify at least one new element. If deleteCount is greater than the number of elements left in the array starting at index, then all of the elements through the end of the array will be deleted.
* @param items The element to add to the array. If you don't specify any elements, splice will only remove elements from the array.
* @return An array containing the deleted elements. If only one element is removed, an array of one element is returned. If no elements are removed, an empty array is returned.
**/
splice(index: number, quantity: number, ...items: Array<T>): _Chain<T>;
/**
* A string representing the specified array and its elements.
* @return A string representing the specified array and its elements.
**/
toString(): _ChainSingle<T>;
/**
* Adds one or more elements to the beginning of an array and returns the new length of the array.
* @param items The elements to add to the front of the array.
* @return The array with the element added to the beginning.
**/
unshift(...items: Array<T>): _Chain<T>;
/********** *
* Chaining *
*********** */
/**
* Wrapped type `any`.
* @see _.chain
**/
chain(): _Chain<T>;
/**
* Wrapped type `any`.
* @see _.value
**/
value<TResult>(): T[];
}
interface _ChainSingle<T> {
value(): T;
}
interface _ChainOfArrays<T> extends _Chain<T[]> {
flatten(): _Chain<T>;
}
declare var _: UnderscoreStatic;
declare module "underscore" {
export = _;
}<|fim▁end|>
|
iterator: string,
|
<|file_name|>pow.ts<|end_file_name|><|fim▁begin|>/**
* Calculates the complex number raised to some power
*
* @param {numeric} c The power to which the complex number should be raised
* @return {Complex}
*/
pow(c) : Complex {
var re, im, abs, arg;
if (MathLib.type(c) === 'complex') {
re = c.re;
im = c.im;
abs = this.abs();
arg = this.arg();
// Fixes inf^(2+5i) = inf and 0^(2+5i) = 0
if ((this.isZero() || this.re === Infinity) && !(c.isZero() || c.re === Infinity || MathLib.isNaN(c.re))) {
return new MathLib.Complex(this.re, this.im);
}
return MathLib.Complex.polar(
MathLib.times(
MathLib.pow(abs, re),
MathLib.exp(
MathLib.negative(
MathLib.times(im, arg)
)
)
),
MathLib.plus(MathLib.times(re, arg), MathLib.times(im, MathLib.ln(abs)))
);
}
else {
// The naive pow method has some rounding errrors. For example
// (2+5i)^3 = -142.00000000000006-64.99999999999999i
// instead of -142-65i which are errors of magnitude around 1e-14.
// This error increases quickly for increasing exponents.
// (2+5i)^21 has an error of 5.8 in the real part
// return MathLib.Complex.polar(MathLib.pow(abs, c), MathLib.times(arg, c));
<|fim▁hole|> // TODO: Improve the algorithm.
var i,
int = MathLib.floor(Math.abs(c)),
res = new MathLib.Complex(1),
power = this,
bin = int.toString(2);
// If the exponent is not an integer we use the naive approach
if (c % 1) {
abs = this.abs();
arg = this.arg();
return MathLib.Complex.polar(MathLib.pow(abs, c), MathLib.times(arg, c));
}
// The imaginary part of (2+5i)^-0 should be -0 not +0.
if (MathLib.isZero(c)) {
return new MathLib.Complex(1, c);
}
for (i = bin.length - 1; i >= 0; i--) {
if (bin[i] === '1') {
res = MathLib.times(res, power);
}
power = MathLib.times(power, power);
}
if (c < 0) {
res = res.inverse();
}
return res;
}
}<|fim▁end|>
|
// The following algorithm uses a different approach for integer exponents,
// where it yields exact results.
// Non integer exponents are evaluated using the naive approach.
|
<|file_name|>bitten.py<|end_file_name|><|fim▁begin|>import codecs
unicode_string = "Hello Python 3 String"
bytes_object = b"Hello Python 3 Bytes"
print(unicode_string, type(unicode_string))
print(bytes_object, type(bytes_object))
#decode to unicode_string
ux = str(object=bytes_object, encoding="utf-8", errors="strict")
print(ux, type(ux))
ux = bytes_object.decode(encoding="utf-8", errors="strict")
print(ux, type(ux))
hex_bytes = codecs.encode(b"Binary Object", "hex_codec")
def string_to_bytes( text ):
return bin(int.from_bytes(text.encode(), 'big'))
def bytes_to_string( btext ):
#btext = int('0b110100001100101011011000110110001101111', 2)
return btext.to_bytes((btext.bit_length() + 7) // 8, 'big').decode()
def char_to_bytes(char):
return bin(ord(char))
def encodes(text):
bext = text.encode(encoding="utf-8")
enc_bext = codecs.encode(bext, "hex_codec")<|fim▁hole|>def decodes():
pass
if __name__ == "__main__":
print( encodes("walla") )<|fim▁end|>
|
return enc_bext.decode("utf-8")
|
<|file_name|>dst-bad-coerce1.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Attempt to change the type as well as unsizing.
struct Fat<Sized? T> {
ptr: T
}
struct Foo;
trait Bar {}
pub fn main() {
// With a vec of ints.<|fim▁hole|>
// With a trait.
let f1 = Fat { ptr: Foo };
let f2: &Fat<Foo> = &f1;
let f3: &Fat<Bar> = f2;
//~^ ERROR the trait `Bar` is not implemented for the type `Foo`
}<|fim▁end|>
|
let f1 = Fat { ptr: [1, 2, 3] };
let f2: &Fat<[int, ..3]> = &f1;
let f3: &Fat<[uint]> = f2;
//~^ ERROR mismatched types: expected `&Fat<[uint]>`, found `&Fat<[int, ..3]>`
|
<|file_name|>_reader.py<|end_file_name|><|fim▁begin|>"""
The basic module about log readers
"""
import os
import re
from ..utils.gzip2 import GzipFile
__author__ = 'chenxm'
__all__ = ["FileReader"]
class FileReader(object):
@staticmethod
def open_file(filename, mode='rb'):
""" open plain or compressed file
@return file handler
"""
parts = os.path.basename(filename).split('.')
try:
assert parts[-1] == 'gz'
fh = GzipFile(mode=mode, filename = filename)<|fim▁hole|>
@staticmethod
def list_files(folder, regex_str=r'.', match=True):
""" find all files under 'folder' with names matching
some reguler expression
"""
assert os.path.isdir(folder)
all_files_path = []
for root, dirs, files in os.walk(folder):
for filename in files:
if match and re.match(regex_str, filename, re.IGNORECASE):
all_files_path.append(os.path.join(root, filename))
elif not match and re.search(regex_str, filename, re.IGNORECASE):
all_files_path.append(os.path.join(root, filename))
return all_files_path
class LogEntry(object):
def __init__(self):
self.data = {}
def get(self, property):
try:
return self[property]
except KeyError:
return None
def set(self, property, value):
self[property] = value
def __getitem__(self, property):
return self.data[property]
def __setitem__(self, property, value):
self.data[property] = value
def __str__(self):
return str(self.data)
class LogReader(object):
def __init__(self, filename):
self.filename = filename
self.filehandler = FileReader.open_file(filename)
def __iter__(self):
return self
def next(self):
try:
new_line = self.filehandler.next()
return new_line
except StopIteration:
self.filehandler.close()
raise StopIteration<|fim▁end|>
|
except:
fh = open(filename, mode)
return fh
|
<|file_name|>init.py<|end_file_name|><|fim▁begin|>from stard.services import BaseService
class Service(BaseService):
def init_service(self):<|fim▁hole|> self.add_parent('dhcpcd')
self.add_parent('getty', terminal=1)<|fim▁end|>
|
self.add_parent('multiuser')
|
<|file_name|>commswave.py<|end_file_name|><|fim▁begin|>"""
commswave
=========
Takes device communications up and down according to a timefunction.
Comms will be working whenever the timefunction returns non-zero.
Configurable parameters::
{
"timefunction" : A timefunction definition
"threshold" : (optional) Comms will only work when the timefunction is returning >= threshold. If missing then any non-zero value will make comms work.
"gate_properties" : (optional) ["list", "of", "properties"] If this is defined, then instead of taking whole comms up and down, only these specific properties are gated
}
Device properties created::
{<|fim▁hole|>
from .device import Device
from common import importer
import logging
class Commswave(Device):
def __init__(self, instance_name, time, engine, update_callback, context, params):
"""Take Comms up and down according to some time function"""
tf = params["commswave"]["timefunction"]
self.comms_timefunction = importer.get_class("timefunction", list(tf.keys())[0])(engine, self, tf[list(tf.keys())[0]])
self.comms_tf_threshold = params["commswave"].get("threshold", None)
self.comms_gate_properties = params["commswave"].get("gate_properties", None)
self.messages_sent = 0
self.messages_attempted = 0
super(Commswave,self).__init__(instance_name, time, engine, update_callback, context, params)
def timefunction_says_communicate(self):
thresh = 0.0
if self.comms_tf_threshold is not None:
thresh = self.comms_tf_threshold
return self.comms_timefunction.state() > thresh
def comms_ok(self):
if self.comms_gate_properties is not None: # If we're gating individual properties, then don't gate overall comms
return super(Commswave, self).comms_ok()
else:
self.messages_attempted += 1
is_ok = super(Commswave, self).comms_ok()
is_ok = is_ok and self.timefunction_says_communicate()
if is_ok:
self.messages_sent += 1
return is_ok
def transmit(self, the_id, ts, properties, force_comms):
if self.comms_gate_properties is not None: # We're gating properties
if not self.timefunction_says_communicate():
for p in self.comms_gate_properties:
properties.pop(p, None) # Remove the property, if it's there
super(Commswave, self).transmit(the_id, ts, properties, force_comms)
def external_event(self, event_name, arg):
super(Commswave, self).external_event(event_name, arg)
def close(self):
super(Commswave,self).close()
logging.info("Comms report for " + str(self.properties["$id"]) + " " +
str(self.messages_sent) + " sent ("+str(100 * self.messages_sent/self.messages_attempted) + "%) from " +
str(self.messages_attempted) + " total")
# Private methods
## (we don't actually need to tick, as we can instantaneously look up timefunction state whenever we need to)
## def tick_commswave(self, _):
## self.ok_commswave = self.comms_timefunction.state()
## self.engine.register_event_at(self.comms_timefunction.next_change(), self.tick_commswave, self, self)<|fim▁end|>
|
}
"""
|
<|file_name|>query14.rs<|end_file_name|><|fim▁begin|>use timely::order::TotalOrder;
use timely::dataflow::*;
use timely::dataflow::operators::probe::Handle as ProbeHandle;
use differential_dataflow::operators::*;
use differential_dataflow::operators::arrange::ArrangeBySelf;
use differential_dataflow::difference::DiffPair;
use differential_dataflow::lattice::Lattice;
use {Arrangements, Experiment, Collections};
use ::types::create_date;
// -- $ID$
// -- TPC-H/TPC-R Promotion Effect Query (Q14)
// -- Functional Query Definition
// -- Approved February 1998
// :x
// :o
// select
// 100.00 * sum(case<|fim▁hole|>// else 0
// end) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue
// from
// lineitem,
// part
// where
// l_partkey = p_partkey
// and l_shipdate >= date ':1'
// and l_shipdate < date ':1' + interval '1' month;
// :n -1
/// Returns true when `source` begins with the byte sequence `query`.
///
/// Delegates to the standard library's `slice::starts_with`, which performs
/// the same length check and prefix comparison as the hand-rolled original.
fn starts_with(source: &[u8], query: &[u8]) -> bool {
    source.starts_with(query)
}
/// TPC-H Q14 "promotion effect" over streamed collections.
///
/// Lineitems shipped in [1995-09-01, 1995-10-01) are exploded with a
/// difference equal to the discounted revenue in integer arithmetic
/// (`extended_price * (100 - discount) / 100`; discounts appear to be
/// stored as whole percents — TODO confirm against the loader). Parts
/// carry a `DiffPair(1, is_promo)` where `is_promo` is 1 for types with
/// the "PROMO" prefix, so the join/count accumulates total and PROMO
/// weights together. The final `100 * promo / total` division from the
/// reference SQL is not computed here; only the sums are maintained.
pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>)
where G::Timestamp: Lattice+TotalOrder+Ord {

    // Lineitems restricted to the one-month ship window, arranged by part key.
    let lineitems =
        collections
            .lineitems()
            .explode(|l|
                if create_date(1995,9,1) <= l.ship_date && l.ship_date < create_date(1995,10,1) {
                    Some((l.part_key, (l.extended_price * (100 - l.discount) / 100) as isize ))
                }
                else { None }
            )
            .arrange_by_self();

    // Parts weighted by DiffPair(1, promo-flag); joining multiplies the
    // two differences, yielding (revenue, promo revenue) per part match.
    collections
        .parts()
        .explode(|p| Some((p.part_key, DiffPair::new(1, if starts_with(&p.typ.as_bytes(), b"PROMO") { 1 } else { 0 }))))
        .arrange_by_self()
        .join_core(&lineitems, |&_part_key, _, _| Some(()))
        .count_total()
        // .inspect(|x| println!("{:?}", x))
        .probe_with(probe);
}
/// TPC-H Q14 variant that reuses a pre-built arrangement of `part`.
///
/// Same filter and weighting as `query` above, but lineitems come from the
/// experiment's stream and the PROMO flag is computed inside `join_core`
/// against the shared `arrangements.part` index. Results are reduced to a
/// single `()` key so `count_total` maintains one global DiffPair.
pub fn query_arranged<G: Scope<Timestamp=usize>>(
    scope: &mut G,
    probe: &mut ProbeHandle<usize>,
    experiment: &mut Experiment,
    arrangements: &mut Arrangements,
)
where
    G::Timestamp: Lattice+TotalOrder+Ord
{
    // Bring the shared arrangements into this scope for this experiment.
    let arrangements = arrangements.in_scope(scope, experiment);

    experiment
        .lineitem(scope)
        .explode(|l|
            // September 1995 ship window; weight = discounted revenue.
            if create_date(1995,9,1) <= l.ship_date && l.ship_date < create_date(1995,10,1) {
                Some((l.part_key, (l.extended_price * (100 - l.discount) / 100) as isize ))
            }
            else { None }
        )
        .arrange_by_self()
        // DiffPair(1, promo) multiplies with the lineitem revenue weight.
        .join_core(&arrangements.part, |_pk,&(),p| Some(DiffPair::new(1, if starts_with(&p.typ.as_bytes(), b"PROMO") { 1 } else { 0 })))
        .explode(|dp| Some(((),dp)))
        .count_total()
        .probe_with(probe);
}<|fim▁end|>
|
// when p_type like 'PROMO%'
// then l_extendedprice * (1 - l_discount)
|
<|file_name|>nfs.py<|end_file_name|><|fim▁begin|># coding=utf-8
"""
The NfsCollector collects nfs utilization metrics using /proc/net/rpc/nfs.
#### Dependencies
* /proc/net/rpc/nfs
"""
import diamond.collector
import os
class NfsCollector(diamond.collector.Collector):
PROC = '/proc/net/rpc/nfs'
def get_default_config_help(self):
    """Return help text for config options; adds nothing beyond the base collector."""
    config_help = super(NfsCollector, self).get_default_config_help()
    config_help.update({
    })
    return config_help
def get_default_config(self):
    """
    Returns the default collector settings
    """
    config = super(NfsCollector, self).get_default_config()
    config.update({
        'enabled': False,  # disabled unless explicitly enabled in diamond config
        'path': 'nfs'      # metric path prefix for published values
    })
    return config
def collect(self):
"""
Collect stats
"""
if os.access(self.PROC, os.R_OK):
results = {}
# Open file
file = open(self.PROC)
for line in file:
line = line.split()
if line[0] == 'net':
results['net.packets'] = line[1]
results['net.udpcnt'] = line[2]
results['net.tcpcnt'] = line[3]
results['net.tcpconn'] = line[4]
elif line[0] == 'rpc':
results['rpc.calls'] = line[1]
results['rpc.retrans'] = line[2]
results['rpc.authrefrsh'] = line[3]
elif line[0] == 'proc2':
results['v2.null'] = line[1]
results['v2.getattr'] = line[2]
results['v2.setattr'] = line[3]
results['v2.root'] = line[4]
results['v2.lookup'] = line[5]
results['v2.readlink'] = line[6]
results['v2.read'] = line[7]
results['v2.wrcache'] = line[8]
results['v2.write'] = line[9]
results['v2.create'] = line[10]
results['v2.remove'] = line[11]
results['v2.rename'] = line[12]
results['v2.link'] = line[13]
results['v2.symlink'] = line[14]
results['v2.mkdir'] = line[15]
results['v2.rmdir'] = line[16]
results['v2.readdir'] = line[17]
results['v2.fsstat'] = line[18]
elif line[0] == 'proc3':
results['v3.null'] = line[1]
results['v3.getattr'] = line[2]
results['v3.setattr'] = line[3]
results['v3.lookup'] = line[4]
results['v3.access'] = line[5]
results['v3.readlink'] = line[6]
results['v3.read'] = line[7]
results['v3.write'] = line[8]
results['v3.create'] = line[9]
results['v3.mkdir'] = line[10]
results['v3.symlink'] = line[11]
results['v3.mknod'] = line[12]
results['v3.remove'] = line[13]
results['v3.rmdir'] = line[14]
results['v3.rename'] = line[15]
results['v3.link'] = line[16]
results['v3.readdir'] = line[17]
results['v3.readdirplus'] = line[18]
results['v3.fsstat'] = line[19]
results['v3.fsinfo'] = line[20]
results['v3.pathconf'] = line[21]
results['v3.commit'] = line[22]
elif line[0] == 'proc4':
results['v4.null'] = line[1]
results['v4.read'] = line[2]
results['v4.write'] = line[3]
results['v4.commit'] = line[4]
results['v4.open'] = line[5]
results['v4.open_conf'] = line[6]
results['v4.open_noat'] = line[7]
results['v4.open_dgrd'] = line[8]
results['v4.close'] = line[9]
results['v4.setattr'] = line[10]
results['v4.fsinfo'] = line[11]
results['v4.renew'] = line[12]
results['v4.setclntid'] = line[13]
results['v4.confirm'] = line[14]
results['v4.lock'] = line[15]
results['v4.lockt'] = line[16]
results['v4.locku'] = line[17]
results['v4.access'] = line[18]
results['v4.getattr'] = line[19]
results['v4.lookup'] = line[20]
results['v4.lookup_root'] = line[21]
results['v4.remove'] = line[22]
results['v4.rename'] = line[23]
results['v4.link'] = line[24]
results['v4.symlink'] = line[25]
results['v4.create'] = line[26]
results['v4.pathconf'] = line[27]
results['v4.statfs'] = line[28]
results['v4.readlink'] = line[29]
results['v4.readdir'] = line[30]
try:
results['v4.server_caps'] = line[31]
except IndexError:
pass
try:
results['v4.delegreturn'] = line[32]
except IndexError:
pass
try:
results['v4.getacl'] = line[33]
except IndexError:
pass
try:
results['v4.setacl'] = line[34]
except IndexError:
pass
try:
results['v4.fs_locations'] = line[35]
except IndexError:
pass
try:
results['v4.rel_lkowner'] = line[36]
except IndexError:
pass
try:
results['v4.exchange_id'] = line[37]
except IndexError:
pass
try:
results['v4.create_ses'] = line[38]
except IndexError:
pass
try:
results['v4.destroy_ses'] = line[39]
except IndexError:
pass
try:
results['v4.sequence'] = line[40]
except IndexError:
pass
try:
results['v4.get_lease_t'] = line[41]
except IndexError:
pass
try:
results['v4.reclaim_comp'] = line[42]
except IndexError:
pass
try:
results['v4.layoutget'] = line[43]
except IndexError:
pass
try:
results['v4.layoutcommit'] = line[44]
except IndexError:
pass
try:
results['v4.layoutreturn'] = line[45]
except IndexError:
pass
try:
results['v4.getdevlist'] = line[46]
except IndexError:
pass
try:
results['v4.getdevinfo'] = line[47]
except IndexError:
pass
try:
results['v4.ds_write'] = line[48]
except IndexError:
pass
try:
results['v4.ds_commit'] = line[49]
except IndexError:
pass
try:
results['v4.getdevlist'] = line[50]
except IndexError:
pass
# Close File
file.close()
for stat in results.keys():
metric_name = '.' + stat
metric_value = long(float(results[stat]))
metric_value = self.derivative(metric_name, metric_value)
self.publish(metric_name, metric_value)
return True
<|fim▁hole|><|fim▁end|>
|
return False
|
<|file_name|>sale.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# (c) 2016 Alfredo de la Fuente - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import models, fields, api
class SaleOrder(models.Model):
_inherit = 'sale.order'
@api.multi
def onchange_template_id(self, template_id, partner=False,
                         fiscal_position=False):
    """Extend the quote-template onchange: after the standard lines are
    generated, copy the month/week/day recurrence flags from the matching
    quote-template lines onto them."""
    res = super(SaleOrder, self).onchange_template_id(
        template_id, partner=partner, fiscal_position=fiscal_position)
    # Only post-process when a template was chosen and lines were produced.
    if (template_id and res.get('value', False) and
            res.get('value')['order_line']):
        res = self._catch_month_week_day_information(template_id, res)
    return res
def _catch_month_week_day_information(self, template_id, res):
    """Match each generated order line back to its quote-template line and
    copy the recurrence (month/week/day) flags onto it.

    The original repeated six near-identical "if value: append condition"
    stanzas; they are folded into one data-driven loop. Behavior is
    unchanged: a field only narrows the search when its value is truthy,
    and the line's ``product_uom`` maps to ``product_uom_id`` on the
    quote line.

    :param template_id: id of the sale.quote.template being applied
    :param res: onchange result dict whose value['order_line'] is updated
    :return: the same ``res`` dict, lines enriched in place
    """
    quote_obj = self.env['sale.quote.line']
    order_lines = res.get('value')['order_line']
    # (key in the onchange line dict, corresponding sale.quote.line field)
    field_map = [
        ('price_unit', 'price_unit'),
        ('discount', 'discount'),
        ('product_uom_qty', 'product_uom_qty'),
        ('product_id', 'product_id'),
        ('product_uom', 'product_uom_id'),
        ('website_description', 'website_description'),
    ]
    for line in order_lines:
        if len(line) > 1:
            dic = line[2]
            cond = [('quote_id', '=', template_id)]
            for dic_key, quote_field in field_map:
                value = dic.get(dic_key, False)
                if value:
                    cond.append((quote_field, '=', value))
            template = quote_obj.search(cond)
            if len(template) > 1:
                # Ambiguous match: fall back to template-only lines (no
                # concrete product) carrying the same description.
                cond = [('quote_id', '=', template_id),
                        ('product_template', '!=', False),
                        ('product_id', '=', False),
                        ('name', '=', dic.get('name'))]
                template = quote_obj.search(cond, limit=1)
            line = self._sale_line_with_sale_quote_information(
                template, line)
            if template.product_id:
                line[2].update({'product_id': template.product_id.id})
    return res
@api.multi
def _sale_line_with_sale_quote_information(self, template, line):
line[2].update({
'january': template.january,
'february': template.february,
'march': template.march,
'april': template.april,
'may': template.may,
'june': template.june,
'july': template.july,
'august': template.august,
'september': template.september,
'november': template.november,
'december': template.december,
'week1': template.week1,
'week2': template.week2,<|fim▁hole|> 'week4': template.week4,
'week5': template.week5,
'week6': template.week6,
'monday': template.monday,
'tuesday': template.tuesday,
'wednesday': template.wednesday,
'thursday': template.thursday,
'friday': template.friday,
'saturday': template.saturday,
'sunday': template.sunday})
return line
class SaleOrderLine(models.Model):
    """Sale order line extended with recurrence flags.

    The boolean month / week-of-month / weekday flags are copied from the
    matching sale.quote.line when an order is built from a quote template.
    """
    _inherit = 'sale.order.line'

    # Mirrors product_id.recurring_service.
    recurring_service = fields.Boolean(
        string='Recurring Service', related='product_id.recurring_service')
    # Months in which the recurring service applies.
    january = fields.Boolean('January')
    february = fields.Boolean('February')
    march = fields.Boolean('March')
    april = fields.Boolean('April')
    may = fields.Boolean('May')
    june = fields.Boolean('June')
    july = fields.Boolean('July')
    august = fields.Boolean('August')
    september = fields.Boolean('September')
    october = fields.Boolean('October')
    november = fields.Boolean('November')
    december = fields.Boolean('December')
    # Weeks of the month in which the service applies.
    week1 = fields.Boolean('Week 1')
    week2 = fields.Boolean('Week 2')
    week3 = fields.Boolean('Week 3')
    week4 = fields.Boolean('Week 4')
    week5 = fields.Boolean('Week 5')
    week6 = fields.Boolean('Week 6')
    # Days of the week on which the service applies.
    monday = fields.Boolean('Monday')
    tuesday = fields.Boolean('Tuesday')
    wednesday = fields.Boolean('Wednesday')
    thursday = fields.Boolean('Thursday')
    friday = fields.Boolean('Friday')
    saturday = fields.Boolean('Saturday')
    sunday = fields.Boolean('Sunday')
class SaleQuoteLine(models.Model):
    """Quote template line extended with recurrence flags and an optional
    product template (a concrete variant may be chosen later)."""
    _inherit = 'sale.quote.line'

    # product_id becomes optional: a line may reference only a template.
    product_id = fields.Many2one(required=False)
    product_template = fields.Many2one(
        comodel_name='product.template', string='Product Template')
    # Months in which the service applies.
    january = fields.Boolean('January')
    february = fields.Boolean('February')
    march = fields.Boolean('March')
    april = fields.Boolean('April')
    may = fields.Boolean('May')
    june = fields.Boolean('June')
    july = fields.Boolean('July')
    august = fields.Boolean('August')
    september = fields.Boolean('September')
    october = fields.Boolean('October')
    november = fields.Boolean('November')
    december = fields.Boolean('December')
    # Weeks of the month in which the service applies.
    week1 = fields.Boolean('Week 1')
    week2 = fields.Boolean('Week 2')
    week3 = fields.Boolean('Week 3')
    week4 = fields.Boolean('Week 4')
    week5 = fields.Boolean('Week 5')
    week6 = fields.Boolean('Week 6')
    # Days of the week on which the service applies.
    monday = fields.Boolean('Monday')
    tuesday = fields.Boolean('Tuesday')
    wednesday = fields.Boolean('Wednesday')
    thursday = fields.Boolean('Thursday')
    friday = fields.Boolean('Friday')
    saturday = fields.Boolean('Saturday')
    sunday = fields.Boolean('Sunday')

    @api.multi
    @api.onchange('product_template')
    def onchange_product_template(self):
        """On template change: sync unit of measure and name, auto-select
        the single variant when the template has no attribute lines, and
        restrict the product_id domain to variants of the chosen template."""
        self.ensure_one()
        if not self.product_template:
            self.product_id = False
        else:
            self.product_uom_id = self.product_template.uom_id.id
            self.name = self.product_template.name
            if not self.product_template.attribute_line_ids:
                # No attributes: there is (at most) one variant; pick it.
                self.product_id = (
                    self.product_template.product_variant_ids and
                    self.product_template.product_variant_ids[0])
        return {'domain': {'product_id': [('product_tmpl_id', '=',
                                           self.product_template.id)]}}

    @api.multi
    def on_change_product_id(self, product):
        """Keep product_template in sync when a concrete product is set."""
        result = super(SaleQuoteLine, self).on_change_product_id(product)
        if 'value' in result and product:
            prod = self.env['product.product'].browse(product)
            result['value']['product_template'] = prod.product_tmpl_id.id
return result<|fim▁end|>
|
'week3': template.week3,
|
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>/**
* @license<|fim▁hole|> * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
export * from './toolbar.component';<|fim▁end|>
|
* Copyright (C) 2017 Jonas Bürkel
*
* You should have received a copy of the GNU General Public License
|
<|file_name|>_template.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
package/module TEST
Descripción del test.
Autor: PABLO PIZARRO @ github.com/ppizarror
Fecha: AGOSTO 2016
Licencia: GPLv2
"""
__author__ = "ppizarror"
# Importación de librerías
# noinspection PyUnresolvedReferences
from _testpath import * # @UnusedWildImport
import unittest
# Constantes de los test
DISABLE_HEAVY_TESTS = True
DISABLE_HEAVY_TESTS_MSG = "Se desactivaron los tests pesados"
VERBOSE = False
# Se cargan argumentos desde la consola
if __name__ == '__main__':
from bin.arguments import argument_parser_factory
argparser = argument_parser_factory("Template Test", verbose=True, version=True,
enable_skipped_test=True).parse_args()
DISABLE_HEAVY_TESTS = argparser.enableHeavyTest
VERBOSE = argparser.verbose
# Clase UnitTest
class ModuleTest(unittest.TestCase):
def setUp(self):
"""
Inicio de los test.
:return: void
:rtype: None
"""
pass
# noinspection PyMethodMayBeStatic
def testA(self):
<|fim▁hole|> :rtype: None
"""
pass
@unittest.skipIf(DISABLE_HEAVY_TESTS, DISABLE_HEAVY_TESTS_MSG)
def testSkipped(self):
"""
Ejemplo de test saltado.
:return: void
:rtype: None
"""
pass
# Main test
if __name__ == '__main__':
runner = unittest.TextTestRunner()
itersuite = unittest.TestLoader().loadTestsFromTestCase(ModuleTest)
runner.run(itersuite)<|fim▁end|>
|
"""
Ejemplo de test.
:return: void
|
<|file_name|>plugin.py<|end_file_name|><|fim▁begin|>import logging
from ..DataUploader import Plugin as DataUploaderPlugin
from .reader import AndroidReader, AndroidStatsReader
from ...common.interfaces import AbstractPlugin
try:
from volta.core.core import Core as VoltaCore
except Exception:
raise RuntimeError("Please install volta. https://github.com/yandex-load/volta")
logger = logging.getLogger(__name__)
class Plugin(AbstractPlugin):
SECTION = "android"
SECTION_META = "meta"
def __init__(self, core, cfg, name):
    """Validate the 'volta_options' config section and build the Volta core.

    Raises RuntimeError when a top-level volta option value is not a dict.
    NOTE(review): if reading cfg['volta_options'] raises AttributeError,
    self.cfg is never assigned and VoltaCore(self.cfg) below will fail with
    AttributeError — confirm whether that path can occur in practice.
    """
    self.stats_reader = None
    self.reader = None
    super(Plugin, self).__init__(core, cfg, name)
    self.device = None
    try:
        self.cfg = cfg['volta_options']
        for key, value in self.cfg.items():
            if not isinstance(value, dict):
                logger.debug('Malformed VoltaConfig key: %s value %s', key, value)
                raise RuntimeError('Malformed VoltaConfig passed, key: %s. Should by dict' % key)
    except AttributeError:
        logger.error('Failed to read Volta config', exc_info=True)
    self.volta_core = VoltaCore(self.cfg)
@staticmethod
def get_key():
    """Return the unique plugin key (this module's file path)."""
    return __file__
def get_available_options(self):
    """Names of the config options this plugin understands."""
    opts = ["volta_options"]
    return opts
<|fim▁hole|> def configure(self):
self.volta_core.configure()
def get_reader(self):
    """Lazily create and cache the payload reader."""
    if self.reader is None:
        self.reader = AndroidReader()
    return self.reader
def get_stats_reader(self):
    """Lazily create and cache the statistics reader."""
    if self.stats_reader is None:
        self.stats_reader = AndroidStatsReader()
    return self.stats_reader
def prepare_test(self):
    """Register Volta output files (currents log and per-event logs) as
    test artifacts.

    Idiom fix: the original used a list comprehension purely for its side
    effects; replaced with a plain loop (no behavior change).
    """
    self.core.add_artifact_file(self.volta_core.currents_fname)
    for fname in self.volta_core.event_fnames.values():
        self.core.add_artifact_file(fname)
def start_test(self):
    """Start the Volta test; returns 1 on failure (tank retcode convention),
    None (success) otherwise."""
    try:
        self.volta_core.start_test()
    # FIXME raise/catch appropriate exception here
    except:  # noqa: E722
        logger.info('Failed to start test of Android plugin', exc_info=True)
        return 1
def is_test_finished(self):
    """Poll the phone-side test performer.

    Returns -1 while the phone test is still running, the performer's
    retcode once finished, and 1 on any error or a missing performer
    (which interrupts the test).
    NOTE(review): when volta_core has no 'phone'/'test_performer'
    attribute the function falls through and returns None — confirm the
    caller treats that as "not finished".
    """
    try:
        if hasattr(self.volta_core, 'phone'):
            if hasattr(self.volta_core.phone, 'test_performer'):
                if not self.volta_core.phone.test_performer:
                    logger.warning('There is no test performer process on the phone, interrupting test')
                    return 1
                if not self.volta_core.phone.test_performer.is_finished():
                    logger.debug('Waiting for phone test to finish...')
                    return -1
                else:
                    return self.volta_core.phone.test_performer.retcode
    # FIXME raise/catch appropriate exception here
    except:  # noqa: E722
        logger.error('Unknown exception of Android plugin. Interrupting test', exc_info=True)
        return 1
def end_test(self, retcode):
    """Stop Volta and link the mobile job to every data-uploader job.

    Returns the incoming retcode, overridden to 1 if anything fails.
    """
    try:
        self.volta_core.end_test()
        uploaders = self.core.get_plugins_of_type(DataUploaderPlugin)
        for uploader in uploaders:
            response = uploader.lp_job.api_client.link_mobile_job(
                lp_key=uploader.lp_job.number,
                mobile_key=self.volta_core.uploader.jobno
            )
            logger.info(
                'Linked mobile job %s to %s for plugin: %s. Response: %s',
                self.volta_core.uploader.jobno, uploader.lp_job.number, uploader.backend_type, response
            )
    # FIXME raise/catch appropriate exception here
    except:  # noqa: E722
        logger.error('Failed to complete end_test of Android plugin', exc_info=True)
        retcode = 1
    return retcode
def get_info(self):
    """Return a static AndroidInfo descriptor for this job."""
    return AndroidInfo()
def post_process(self, retcode):
    """Run Volta post-processing; any failure forces retcode 1."""
    try:
        self.volta_core.post_process()
    # FIXME raise/catch appropriate exception here
    except:  # noqa: E722
        logger.error('Failed to complete post_process of Android plugin', exc_info=True)
        retcode = 1
    return retcode
class AndroidInfo(object):
    """Inert job-info object returned by Plugin.get_info().

    All attributes are placeholder defaults: Android/Volta tests have no
    ammo file, load target, or shooting schedule of their own.
    """
    def __init__(self):
        self.address = ''    # no load target for device tests
        self.port = 80
        self.ammo_file = ''
        self.duration = 0
        self.loop_count = 1
        self.instances = 1
self.rps_schedule = ''<|fim▁end|>
| |
<|file_name|>youtube.d.ts<|end_file_name|><|fim▁begin|>import * as React from 'react';<|fim▁hole|>import { IconBaseProps } from 'react-icon-base';
export default class FaYoutube extends React.Component<IconBaseProps> { }<|fim▁end|>
| |
<|file_name|>test_legendre_q.cpp<|end_file_name|><|fim▁begin|>// Copyright John Maddock 2015.
// Use, modification and distribution are subject to the
// Boost Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#ifdef _MSC_VER
# pragma warning (disable : 4224)
#endif
#include <boost/math/special_functions/legendre.hpp>
#include <boost/array.hpp>
#include <boost/lexical_cast.hpp>
#include "../../test/table_type.hpp"
#include "table_helper.hpp"
#include "performance.hpp"
#include <iostream>
typedef double T;
#define SC_(x) static_cast<double>(x)
int main()
{
# include "legendre_p.ipp"
# include "legendre_p_large.ipp"
add_data(legendre_p);
add_data(legendre_p_large);
unsigned data_total = data.size();
screen_data([](const std::vector<double>& v){ return boost::math::legendre_q(v[0], v[1]); }, [](const std::vector<double>& v){ return v[3]; });
#if defined(TEST_GSL) && !defined(COMPILER_COMPARISON_TABLES)
screen_data([](const std::vector<double>& v){ return gsl_sf_legendre_Ql(v[0], v[1]); }, [](const std::vector<double>& v){ return v[3]; });
#endif
unsigned data_used = data.size();
std::string function = "legendre Q[br](" + boost::lexical_cast<std::string>(data_used) + "/" + boost::lexical_cast<std::string>(data_total) + " tests selected)";
std::string function_short = "legendre Q";
double time;<|fim▁hole|> std::cout << time << std::endl;
#if !defined(COMPILER_COMPARISON_TABLES) && (defined(TEST_GSL) || defined(TEST_RMATH))
report_execution_time(time, std::string("Library Comparison with ") + std::string(compiler_name()) + std::string(" on ") + platform_name(), function, boost_name());
#endif
report_execution_time(time, std::string("Compiler Comparison on ") + std::string(platform_name()), function_short, compiler_name() + std::string("[br]") + boost_name());
//
// Boost again, but with promotion to long double turned off:
//
#if !defined(COMPILER_COMPARISON_TABLES)
if(sizeof(long double) != sizeof(double))
{
time = exec_timed_test([](const std::vector<double>& v){ return boost::math::legendre_q(v[0], v[1], boost::math::policies::make_policy(boost::math::policies::promote_double<false>())); });
std::cout << time << std::endl;
#if !defined(COMPILER_COMPARISON_TABLES) && (defined(TEST_GSL) || defined(TEST_RMATH))
report_execution_time(time, std::string("Library Comparison with ") + std::string(compiler_name()) + std::string(" on ") + platform_name(), function, boost_name() + "[br]promote_double<false>");
#endif
report_execution_time(time, std::string("Compiler Comparison on ") + std::string(platform_name()), function_short, compiler_name() + std::string("[br]") + boost_name() + "[br]promote_double<false>");
}
#endif
#if defined(TEST_GSL) && !defined(COMPILER_COMPARISON_TABLES)
time = exec_timed_test([](const std::vector<double>& v){ return gsl_sf_legendre_Ql(v[0], v[1]); });
std::cout << time << std::endl;
report_execution_time(time, std::string("Library Comparison with ") + std::string(compiler_name()) + std::string(" on ") + platform_name(), function, "GSL " GSL_VERSION);
#endif
return 0;
}<|fim▁end|>
|
time = exec_timed_test([](const std::vector<double>& v){ return boost::math::legendre_q(v[0], v[1]); });
|
<|file_name|>base58.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
##
## Copyright 2009 Adriana Lukas & Alec Muffett
##
## Licensed under the Apache License, Version 2.0 (the "License"); you
## may not use this file except in compliance with the License. You
## may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
## implied. See the License for the specific language governing
## permissions and limitations under the License.
##<|fim▁hole|>
"""docstring goes here""" # :-)
# spec: http://www.flickr.com/groups/api/discuss/72157616713786392/
# Flickr-compatible base58 alphabet (no 0, O, I or l to avoid confusion).
__b58chars = '123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ'
__b58base = len(__b58chars)  # derive the base instead of hard-coding 58


def b58encode(value):
    """
    encode integer 'value' as a base58 string; returns string
    """
    # Collect least-significant digits first, then reverse once at the end.
    digits = []
    while value >= __b58base:
        value, remainder = divmod(value, __b58base)
        digits.append(__b58chars[remainder])
    digits.append(__b58chars[value])  # most significant digit
    return ''.join(reversed(digits))


def b58decode(encoded):
    """
    decodes base58 string 'encoded' to return integer
    """
    # Horner's scheme, scanning most-significant digit first.
    value = 0
    for char in encoded:
        value = value * __b58base + __b58chars.index(char)
    return value
# Smoke test (Python 2 print syntax): prints each result next to its
# expected value for eyeball comparison.
if __name__ == '__main__':
    x = b58encode(12345678)
    print x, '26gWw'
print b58decode(x), 12345678<|fim▁end|>
| |
<|file_name|>table.rs<|end_file_name|><|fim▁begin|>// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use self::BucketState::*;
use clone::Clone;
use cmp;
use hash::{Hash, Hasher};
use iter::{Iterator, ExactSizeIterator};
use marker::{Copy, Send, Sync, Sized, self};
use mem::{align_of, size_of};
use mem;
use num::wrapping::OverflowingOps;
use ops::{Deref, DerefMut, Drop};
use option::Option;
use option::Option::{Some, None};
use ptr::{self, Unique};
use rt::heap::{allocate, deallocate, EMPTY};
use collections::hash_state::HashState;
const EMPTY_BUCKET: u64 = 0;
/// The raw hashtable, providing safe-ish access to the unzipped and highly
/// optimized arrays of hashes, keys, and values.
///
/// This design uses less memory and is a lot faster than the naive
/// `Vec<Option<u64, K, V>>`, because we don't pay for the overhead of an
/// option on every element, and we get a generally more cache-aware design.
///
/// Essential invariants of this structure:
///
/// - if t.hashes[i] == EMPTY_BUCKET, then `Bucket::at_index(&t, i).raw`
/// points to 'undefined' contents. Don't read from it. This invariant is
/// enforced outside this module with the `EmptyBucket`, `FullBucket`,
/// and `SafeHash` types.
///
/// - An `EmptyBucket` is only constructed at an index with
/// a hash of EMPTY_BUCKET.
///
/// - A `FullBucket` is only constructed at an index with a
/// non-EMPTY_BUCKET hash.
///
/// - A `SafeHash` is only constructed for non-`EMPTY_BUCKET` hash. We get
/// around hashes of zero by changing them to 0x8000_0000_0000_0000,
/// which will likely map to the same bucket, while not being confused
/// with "empty".
///
/// - All three "arrays represented by pointers" are the same length:
/// `capacity`. This is set at creation and never changes. The arrays
/// are unzipped to save space (we don't have to pay for the padding
/// between odd sized elements, such as in a map from u64 to u8), and
/// be more cache aware (scanning through 8 hashes brings in at most
/// 2 cache lines, since they're all right beside each other).
///
/// You can kind of think of this module/data structure as a safe wrapper
/// around just the "table" part of the hashtable. It enforces some
/// invariants at the type level and employs some performance trickery,
/// but in general is just a tricked out `Vec<Option<u64, K, V>>`.
#[unsafe_no_drop_flag]
pub struct RawTable<K, V> {
capacity: usize,
size: usize,
hashes: Unique<u64>,
// Because K/V do not appear directly in any of the types in the struct,
// inform rustc that in fact instances of K and V are reachable from here.
marker: marker::PhantomData<(K,V)>,
}
unsafe impl<K: Send, V: Send> Send for RawTable<K, V> {}
unsafe impl<K: Sync, V: Sync> Sync for RawTable<K, V> {}
struct RawBucket<K, V> {
hash: *mut u64,
key: *mut K,
val: *mut V,
_marker: marker::PhantomData<(K,V)>,
}
impl<K,V> Copy for RawBucket<K,V> {}
impl<K,V> Clone for RawBucket<K,V> {
fn clone(&self) -> RawBucket<K, V> { *self }
}
pub struct Bucket<K, V, M> {
raw: RawBucket<K, V>,
idx: usize,
table: M
}
impl<K,V,M:Copy> Copy for Bucket<K,V,M> {}
impl<K,V,M:Copy> Clone for Bucket<K,V,M> {
fn clone(&self) -> Bucket<K,V,M> { *self }
}
pub struct EmptyBucket<K, V, M> {
raw: RawBucket<K, V>,
idx: usize,
table: M
}
pub struct FullBucket<K, V, M> {
raw: RawBucket<K, V>,
idx: usize,
table: M
}
pub type EmptyBucketImm<'table, K, V> = EmptyBucket<K, V, &'table RawTable<K, V>>;
pub type FullBucketImm<'table, K, V> = FullBucket<K, V, &'table RawTable<K, V>>;
pub type EmptyBucketMut<'table, K, V> = EmptyBucket<K, V, &'table mut RawTable<K, V>>;
pub type FullBucketMut<'table, K, V> = FullBucket<K, V, &'table mut RawTable<K, V>>;
pub enum BucketState<K, V, M> {
Empty(EmptyBucket<K, V, M>),
Full(FullBucket<K, V, M>),
}
// A GapThenFull encapsulates the state of two consecutive buckets at once.
// The first bucket, called the gap, is known to be empty.
// The second bucket is full.
struct GapThenFull<K, V, M> {
gap: EmptyBucket<K, V, ()>,
full: FullBucket<K, V, M>,
}
/// A hash that is not zero, since we use a hash of zero to represent empty
/// buckets.
#[derive(PartialEq, Copy, Clone)]
pub struct SafeHash {
hash: u64,
}
impl SafeHash {
/// Peek at the hash value, which is guaranteed to be non-zero.
#[inline(always)]
pub fn inspect(&self) -> u64 { self.hash }
}
/// We need to remove hashes of 0. That's reserved for empty buckets.
/// This function wraps up `hash_keyed` to be the only way outside this
/// module to generate a SafeHash.
pub fn make_hash<T: ?Sized, S>(hash_state: &S, t: &T) -> SafeHash
where T: Hash, S: HashState
{
let mut state = hash_state.hasher();
t.hash(&mut state);
// We need to avoid 0 in order to prevent collisions with
// EMPTY_HASH. We can maintain our precious uniform distribution
// of initial indexes by unconditionally setting the MSB,
// effectively reducing 64-bits hashes to 63 bits.
SafeHash { hash: 0x8000_0000_0000_0000 | state.finish() }
}
// `replace` casts a `*u64` to a `*SafeHash`. Since we statically
// ensure that a `FullBucket` points to an index with a non-zero hash,
// and a `SafeHash` is just a `u64` with a different name, this is
// safe.
//
// This test ensures that a `SafeHash` really IS the same size as a
// `u64`. If you need to change the size of `SafeHash` (and
// consequently made this test fail), `replace` needs to be
// modified to no longer assume this.
#[test]
fn can_alias_safehash_as_u64() {
assert_eq!(size_of::<SafeHash>(), size_of::<u64>())
}
impl<K, V> RawBucket<K, V> {
    /// Returns a copy of this bucket's pointers advanced by `count`
    /// entries in each of the three parallel arrays (hashes, keys, vals).
    ///
    /// Unsafe: the caller must ensure the resulting pointers remain
    /// within (or one past the end of) the table's allocation.
    unsafe fn offset(self, count: isize) -> RawBucket<K, V> {
        RawBucket {
            hash: self.hash.offset(count),
            key: self.key.offset(count),
            val: self.val.offset(count),
            _marker: marker::PhantomData,
        }
    }
}
// Buckets hold references to the table.
impl<K, V, M> FullBucket<K, V, M> {
/// Borrow a reference to the table.
pub fn table(&self) -> &M {
&self.table
}
/// Move out the reference to the table.
pub fn into_table(self) -> M {
self.table
}
/// Get the raw index.
pub fn index(&self) -> usize {
self.idx
}
}
impl<K, V, M> EmptyBucket<K, V, M> {
/// Borrow a reference to the table.
pub fn table(&self) -> &M {
&self.table
}
/// Move out the reference to the table.
pub fn into_table(self) -> M {
self.table
}
}
impl<K, V, M> Bucket<K, V, M> {
/// Move out the reference to the table.
pub fn into_table(self) -> M {
self.table
}
/// Get the raw index.
pub fn index(&self) -> usize {
self.idx
}
}
impl<K, V, M: Deref<Target=RawTable<K, V>>> Bucket<K, V, M> {
pub fn new(table: M, hash: SafeHash) -> Bucket<K, V, M> {
Bucket::at_index(table, hash.inspect() as usize)
}
pub fn at_index(table: M, ib_index: usize) -> Bucket<K, V, M> {
// if capacity is 0, then the RawBucket will be populated with bogus pointers.
// This is an uncommon case though, so avoid it in release builds.
debug_assert!(table.capacity() > 0, "Table should have capacity at this point");
let ib_index = ib_index & (table.capacity() - 1);
Bucket {
raw: unsafe {
table.first_bucket_raw().offset(ib_index as isize)
},
idx: ib_index,
table: table
}
}
pub fn first(table: M) -> Bucket<K, V, M> {
Bucket {
raw: table.first_bucket_raw(),
idx: 0,
table: table
}
}
/// Reads a bucket at a given index, returning an enum indicating whether
/// it's initialized or not. You need to match on this enum to get
/// the appropriate types to call most of the other functions in
/// this module.
pub fn peek(self) -> BucketState<K, V, M> {
match unsafe { *self.raw.hash } {
EMPTY_BUCKET =>
Empty(EmptyBucket {
raw: self.raw,
idx: self.idx,
table: self.table
}),
_ =>
Full(FullBucket {
raw: self.raw,
idx: self.idx,
table: self.table
})
}
}
/// Modifies the bucket pointer in place to make it point to the next slot.
pub fn next(&mut self) {
// Branchless bucket iteration step.
// As we reach the end of the table...
// We take the current idx: 0111111b
// Xor it by its increment: ^ 1000000b
// ------------
// 1111111b
// Then AND with the capacity: & 1000000b
// ------------
// to get the backwards offset: 1000000b
// ... and it's zero at all other times.
let maybe_wraparound_dist = (self.idx ^ (self.idx + 1)) & self.table.capacity();
// Finally, we obtain the offset 1 or the offset -cap + 1.
let dist = 1 - (maybe_wraparound_dist as isize);
self.idx += 1;
unsafe {
self.raw = self.raw.offset(dist);
}
}
}
impl<K, V, M: Deref<Target=RawTable<K, V>>> EmptyBucket<K, V, M> {
#[inline]
pub fn next(self) -> Bucket<K, V, M> {
let mut bucket = self.into_bucket();
bucket.next();
bucket
}
#[inline]
pub fn into_bucket(self) -> Bucket<K, V, M> {
Bucket {
raw: self.raw,
idx: self.idx,
table: self.table
}
}
pub fn gap_peek(self) -> Option<GapThenFull<K, V, M>> {
let gap = EmptyBucket {
raw: self.raw,
idx: self.idx,
table: ()
};
match self.next().peek() {
Full(bucket) => {
Some(GapThenFull {
gap: gap,
full: bucket
})
}
Empty(..) => None
}
}
}
impl<K, V, M: Deref<Target=RawTable<K, V>> + DerefMut> EmptyBucket<K, V, M> {
    /// Puts given key and value pair, along with the key's hash,
    /// into this bucket in the hashtable. Note how `self` is 'moved' into
    /// this function, because this slot will no longer be empty when
    /// we return! A `FullBucket` is returned for later use, pointing to
    /// the newly-filled slot in the hashtable.
    ///
    /// Use `make_hash` to construct a `SafeHash` to pass to this function.
    pub fn put(mut self, hash: SafeHash, key: K, value: V)
               -> FullBucket<K, V, M> {
        unsafe {
            // Writing the hash marks the slot occupied; key/value bytes are
            // written raw since the slot held no previous live values.
            *self.raw.hash = hash.inspect();
            ptr::write(self.raw.key, key);
            ptr::write(self.raw.val, value);
        }
        // Keep the element count in sync with the newly-filled slot.
        self.table.size += 1;
        FullBucket { raw: self.raw, idx: self.idx, table: self.table }
    }
}
impl<K, V, M: Deref<Target=RawTable<K, V>>> FullBucket<K, V, M> {
    /// Advances to the next slot, discarding the "known full" state.
    #[inline]
    pub fn next(self) -> Bucket<K, V, M> {
        let mut bucket = self.into_bucket();
        bucket.next();
        bucket
    }
    /// Converts back into a generic `Bucket` at the same position.
    #[inline]
    pub fn into_bucket(self) -> Bucket<K, V, M> {
        Bucket {
            raw: self.raw,
            idx: self.idx,
            table: self.table
        }
    }
    /// Get the distance between this bucket and the 'ideal' location
    /// as determined by the key's hash stored in it.
    ///
    /// In the cited blog posts above, this is called the "distance to
    /// initial bucket", or DIB. Also known as "probe count".
    pub fn distance(&self) -> usize {
        // Calculates the distance one has to travel when going from
        // `hash mod capacity` onwards to `idx mod capacity`, wrapping around
        // if the destination is not reached before the end of the table.
        // (The mask is valid because capacity is a power of two.)
        (self.idx.wrapping_sub(self.hash().inspect() as usize)) & (self.table.capacity() - 1)
    }
    /// Returns the stored hash of this bucket's entry.
    #[inline]
    pub fn hash(&self) -> SafeHash {
        unsafe {
            SafeHash {
                hash: *self.raw.hash
            }
        }
    }
    /// Gets references to the key and value at a given index.
    pub fn read(&self) -> (&K, &V) {
        unsafe {
            (&*self.raw.key,
             &*self.raw.val)
        }
    }
}
impl<K, V, M: Deref<Target=RawTable<K, V>> + DerefMut> FullBucket<K, V, M> {
    /// Removes this bucket's key and value from the hashtable.
    ///
    /// This works similarly to `put`, building an `EmptyBucket` out of the
    /// taken bucket.
    pub fn take(mut self) -> (EmptyBucket<K, V, M>, K, V) {
        self.table.size -= 1;
        unsafe {
            // Tagging the hash EMPTY_BUCKET is what frees the slot; the
            // key/value bytes are moved out and must not be dropped here.
            *self.raw.hash = EMPTY_BUCKET;
            (
                EmptyBucket {
                    raw: self.raw,
                    idx: self.idx,
                    table: self.table
                },
                ptr::read(self.raw.key),
                ptr::read(self.raw.val)
            )
        }
    }
    /// Swaps in a new hash, key and value for this slot, returning the
    /// previous triple. The slot stays full and `size` is unchanged.
    pub fn replace(&mut self, h: SafeHash, k: K, v: V) -> (SafeHash, K, V) {
        unsafe {
            let old_hash = ptr::replace(self.raw.hash as *mut SafeHash, h);
            let old_key = ptr::replace(self.raw.key, k);
            let old_val = ptr::replace(self.raw.val, v);
            (old_hash, old_key, old_val)
        }
    }
    /// Gets mutable references to the key and value at a given index.
    pub fn read_mut(&mut self) -> (&mut K, &mut V) {
        unsafe {
            (&mut *self.raw.key,
             &mut *self.raw.val)
        }
    }
}
impl<'t, K, V, M: Deref<Target=RawTable<K, V>> + 't> FullBucket<K, V, M> {
    /// Exchange a bucket state for immutable references into the table.
    /// Because the underlying reference to the table is also consumed,
    /// no further changes to the structure of the table are possible;
    /// in exchange for this, the returned references have a longer lifetime
    /// than the references returned by `read()`.
    pub fn into_refs(self) -> (&'t K, &'t V) {
        unsafe {
            (&*self.raw.key,
             &*self.raw.val)
        }
    }
}
impl<'t, K, V, M: Deref<Target=RawTable<K, V>> + DerefMut + 't> FullBucket<K, V, M> {
    /// This works similarly to `into_refs`, exchanging a bucket state
    /// for mutable references into the table.
    pub fn into_mut_refs(self) -> (&'t mut K, &'t mut V) {
        unsafe {
            (&mut *self.raw.key,
             &mut *self.raw.val)
        }
    }
}
impl<K, V, M> BucketState<K, V, M> {
    /// Unwraps this state into its `FullBucket`, panicking when the
    /// bucket turned out to be empty. Convenience for call sites that
    /// have already established the slot must be occupied.
    pub fn expect_full(self) -> FullBucket<K, V, M> {
        if let Full(full) = self {
            full
        } else {
            panic!("Expected full bucket")
        }
    }
}
impl<K, V, M: Deref<Target=RawTable<K, V>>> GapThenFull<K, V, M> {
    /// Borrows the trailing full bucket of the gap/full pair.
    #[inline]
    pub fn full(&self) -> &FullBucket<K, V, M> {
        &self.full
    }
    /// Moves the full bucket's entry backwards into the gap, then advances
    /// the pair one slot. Returns `None` when the slot after the old full
    /// bucket is empty, i.e. there is nothing left to shift.
    pub fn shift(mut self) -> Option<GapThenFull<K, V, M>> {
        unsafe {
            // Move hash, key and value into the gap. The hash swap leaves
            // EMPTY_BUCKET behind, so the stale key/value bytes in the old
            // slot are ignored by every routine that checks the hash first.
            *self.gap.raw.hash = mem::replace(&mut *self.full.raw.hash, EMPTY_BUCKET);
            ptr::copy_nonoverlapping(self.full.raw.key, self.gap.raw.key, 1);
            ptr::copy_nonoverlapping(self.full.raw.val, self.gap.raw.val, 1);
        }
        // The just-emptied slot becomes the new gap.
        let FullBucket { raw: prev_raw, idx: prev_idx, .. } = self.full;
        match self.full.next().peek() {
            Full(bucket) => {
                self.gap.raw = prev_raw;
                self.gap.idx = prev_idx;
                self.full = bucket;
                Some(self)
            }
            Empty(..) => None
        }
    }
}
/// Rounds `unrounded` up to the nearest multiple of `target_alignment`.
/// Returns the smallest multiple of `target_alignment` that is greater
/// than or equal to `unrounded`.
///
/// # Panics
///
/// Panics if `target_alignment` is not a power of two.
fn round_up_to_next(unrounded: usize, target_alignment: usize) -> usize {
    assert!(target_alignment.is_power_of_two());
    // For a power-of-two alignment, adding the mask and clearing the low
    // bits rounds up without any division.
    let mask = target_alignment - 1;
    (unrounded + mask) & !mask
}
// Covers the zero case, values strictly inside an alignment window,
// an exact multiple, and the first value past a multiple.
#[test]
fn test_rounding() {
    assert_eq!(round_up_to_next(0, 4), 0);
    assert_eq!(round_up_to_next(1, 4), 4);
    assert_eq!(round_up_to_next(2, 4), 4);
    assert_eq!(round_up_to_next(3, 4), 4);
    assert_eq!(round_up_to_next(4, 4), 4);
    assert_eq!(round_up_to_next(5, 4), 8);
}
// Returns a tuple of (keys_offset, vals_offset, oflo), measured from the
// start of a mallocated array. `oflo` is true when the addition of
// `keys_size` overflowed usize, in which case the offsets are invalid.
#[inline]
fn calculate_offsets(hashes_size: usize,
                     keys_size: usize, keys_align: usize,
                     vals_align: usize)
                     -> (usize, usize, bool) {
    // Keys start at the first suitably-aligned byte after the hashes;
    // values start at the first suitably-aligned byte after the keys.
    let keys_offset = round_up_to_next(hashes_size, keys_align);
    let (end_of_keys, oflo) = keys_offset.overflowing_add(keys_size);
    let vals_offset = round_up_to_next(end_of_keys, vals_align);
    (keys_offset, vals_offset, oflo)
}
// Returns a tuple of (minimum required malloc alignment, hash_offset,
// array_size, oflo), from the start of a mallocated array. `oflo` is true
// when any intermediate size computation overflowed usize.
fn calculate_allocation(hash_size: usize, hash_align: usize,
                        keys_size: usize, keys_align: usize,
                        vals_size: usize, vals_align: usize)
                        -> (usize, usize, usize, bool) {
    // Hashes always sit at the start of the single backing buffer.
    let hash_offset = 0;
    let (_, vals_offset, oflo) = calculate_offsets(hash_size,
                                                   keys_size, keys_align,
                                                   vals_align);
    let (end_of_vals, oflo2) = vals_offset.overflowing_add(vals_size);
    // The buffer must satisfy the strictest of the three alignments.
    let align = cmp::max(hash_align, cmp::max(keys_align, vals_align));
    (align, hash_offset, end_of_vals, oflo || oflo2)
}
// Exercises both helpers with mixed sizes/alignments, including cases
// where padding is required between the subarrays.
#[test]
fn test_offset_calculation() {
    assert_eq!(calculate_allocation(128, 8, 15, 1, 4, 4), (8, 0, 148, false));
    assert_eq!(calculate_allocation(3, 1, 2, 1, 1, 1), (1, 0, 6, false));
    assert_eq!(calculate_allocation(6, 2, 12, 4, 24, 8), (8, 0, 48, false));
    assert_eq!(calculate_offsets(128, 15, 1, 4), (128, 144, false));
    assert_eq!(calculate_offsets(3, 2, 1, 1), (3, 5, false));
    assert_eq!(calculate_offsets(6, 12, 4, 8), (8, 24, false));
}
impl<K, V> RawTable<K, V> {
/// Does not initialize the buckets. The caller should ensure they,
/// at the very least, set every hash to EMPTY_BUCKET.
unsafe fn new_uninitialized(capacity: usize) -> RawTable<K, V> {
    if capacity == 0 {
        // Zero-capacity tables own no allocation; they share the `EMPTY`
        // sentinel pointer (and `Drop` skips capacity == 0 accordingly).
        return RawTable {
            size: 0,
            capacity: 0,
            hashes: Unique::new(EMPTY as *mut u64),
            marker: marker::PhantomData,
        };
    }
    // No need for `checked_mul` before a more restrictive check performed
    // later in this method.
    let hashes_size = capacity * size_of::<u64>();
    let keys_size = capacity * size_of::< K >();
    let vals_size = capacity * size_of::< V >();
    // Allocating hashmaps is a little tricky. We need to allocate three
    // arrays, but since we know their sizes and alignments up front,
    // we just allocate a single array, and then have the subarrays
    // point into it.
    //
    // This is great in theory, but in practice getting the alignment
    // right is a little subtle. Therefore, calculating offsets has been
    // factored out into a different function.
    let (malloc_alignment, hash_offset, size, oflo) =
        calculate_allocation(
            hashes_size, align_of::<u64>(),
            keys_size, align_of::< K >(),
            vals_size, align_of::< V >());
    assert!(!oflo, "capacity overflow");
    // One check for overflow that covers calculation and rounding of size.
    let size_of_bucket = size_of::<u64>().checked_add(size_of::<K>()).unwrap()
                                         .checked_add(size_of::<V>()).unwrap();
    assert!(size >= capacity.checked_mul(size_of_bucket)
                            .expect("capacity overflow"),
            "capacity overflow");
    let buffer = allocate(size, malloc_alignment);
    if buffer.is_null() { ::alloc::oom() }
    // Hashes live at the front of the buffer (hash_offset is 0).
    let hashes = buffer.offset(hash_offset as isize) as *mut u64;
    RawTable {
        capacity: capacity,
        size: 0,
        hashes: Unique::new(hashes),
        marker: marker::PhantomData,
    }
}
/// Computes the raw pointers to the start of the hash, key and value
/// subarrays within the single backing allocation.
fn first_bucket_raw(&self) -> RawBucket<K, V> {
    let hashes_size = self.capacity * size_of::<u64>();
    let keys_size = self.capacity * size_of::<K>();
    let buffer = *self.hashes as *mut u8;
    let (keys_offset, vals_offset, oflo) =
        calculate_offsets(hashes_size,
                          keys_size, align_of::<K>(),
                          align_of::<V>());
    // A successful allocation already proved these offsets fit.
    debug_assert!(!oflo, "capacity overflow");
    unsafe {
        RawBucket {
            hash: *self.hashes,
            key: buffer.offset(keys_offset as isize) as *mut K,
            val: buffer.offset(vals_offset as isize) as *mut V,
            _marker: marker::PhantomData,
        }
    }
}
/// Creates a new raw table from a given capacity. All buckets are
/// initially empty.
pub fn new(capacity: usize) -> RawTable<K, V> {
    unsafe {
        let ret = RawTable::new_uninitialized(capacity);
        // Zeroing the hash array marks every bucket empty (this assumes
        // EMPTY_BUCKET is the zero hash — confirm against its definition).
        ptr::write_bytes(*ret.hashes, 0, capacity);
        ret
    }
}
self.capacity
}
/// The number of elements ever `put` in the hashtable, minus the number
/// of elements ever `take`n.
pub fn size(&self) -> usize {
    self.size
}
/// Builds the raw cursor shared by all of the iterators below, spanning
/// from the first bucket up to one-past-the-last hash slot.
fn raw_buckets(&self) -> RawBuckets<K, V> {
    RawBuckets {
        raw: self.first_bucket_raw(),
        hashes_end: unsafe {
            self.hashes.offset(self.capacity as isize)
        },
        marker: marker::PhantomData,
    }
}
/// Returns an iterator yielding shared references to the entries.
pub fn iter(&self) -> Iter<K, V> {
    Iter {
        iter: self.raw_buckets(),
        elems_left: self.size(),
    }
}
/// Returns an iterator yielding mutable references to the values.
pub fn iter_mut(&mut self) -> IterMut<K, V> {
    IterMut {
        iter: self.raw_buckets(),
        elems_left: self.size(),
    }
}
/// Consumes the table, returning an owning iterator over its entries.
pub fn into_iter(self) -> IntoIter<K, V> {
    let RawBuckets { raw, hashes_end, .. } = self.raw_buckets();
    // Replace the marker regardless of lifetime bounds on parameters.
    IntoIter {
        iter: RawBuckets {
            raw: raw,
            hashes_end: hashes_end,
            marker: marker::PhantomData,
        },
        table: self,
    }
}
/// Returns a draining iterator that moves every entry out while the
/// table itself stays allocated.
pub fn drain(&mut self) -> Drain<K, V> {
    let RawBuckets { raw, hashes_end, .. } = self.raw_buckets();
    // Replace the marker regardless of lifetime bounds on parameters.
    Drain {
        iter: RawBuckets {
            raw: raw,
            hashes_end: hashes_end,
            marker: marker::PhantomData,
        },
        table: self,
    }
}
/// Returns an iterator that copies out each entry. Used while the table
/// is being dropped.
unsafe fn rev_move_buckets(&mut self) -> RevMoveBuckets<K, V> {
    let raw_bucket = self.first_bucket_raw();
    RevMoveBuckets {
        // Start one past the last bucket and walk backwards (see the
        // `Iterator` impl for `RevMoveBuckets`).
        raw: raw_bucket.offset(self.capacity as isize),
        hashes_end: raw_bucket.hash,
        elems_left: self.size,
        marker: marker::PhantomData,
    }
}
}
/// A raw iterator. The basis for some other iterators in this module. Although
/// this interface is safe, it's not used outside this module.
struct RawBuckets<'a, K, V> {
    raw: RawBucket<K, V>,
    // One past the last hash slot; iteration stops when `raw.hash` hits it.
    hashes_end: *mut u64,
    // Strictly speaking, this should be &'a (K,V), but that would
    // require that K:'a, and we often use RawBuckets<'static...> for
    // move iterations, so that messes up a lot of other things. So
    // just use `&'a (K,V)` as this is not a publicly exposed type
    // anyway.
    marker: marker::PhantomData<&'a ()>,
}
// FIXME(#19839) Remove in favor of `#[derive(Clone)]`
impl<'a, K, V> Clone for RawBuckets<'a, K, V> {
    fn clone(&self) -> RawBuckets<'a, K, V> {
        RawBuckets {
            raw: self.raw,
            hashes_end: self.hashes_end,
            marker: marker::PhantomData,
        }
    }
}
impl<'a, K, V> Iterator for RawBuckets<'a, K, V> {
    type Item = RawBucket<K, V>;
    // Scans forward, skipping empty slots, until a full bucket or the end
    // of the hash array is reached.
    fn next(&mut self) -> Option<RawBucket<K, V>> {
        while self.raw.hash != self.hashes_end {
            unsafe {
                // We are swapping out the pointer to a bucket and replacing
                // it with the pointer to the next one.
                let prev = ptr::replace(&mut self.raw, self.raw.offset(1));
                if *prev.hash != EMPTY_BUCKET {
                    return Some(prev);
                }
            }
        }
        None
    }
}
/// An iterator that moves out buckets in reverse order. It leaves the table
/// in an inconsistent state and should only be used for dropping
/// the table's remaining entries. It's used in the implementation of Drop.
struct RevMoveBuckets<'a, K, V> {
    raw: RawBucket<K, V>,
    // NOTE: despite the name, this is the *first* hash slot here — the
    // traversal runs backwards from one past the end (see `rev_move_buckets`).
    hashes_end: *mut u64,
    elems_left: usize,
    // As above, `&'a (K,V)` would seem better, but we often use
    // 'static for the lifetime, and this is not a publicly exposed
    // type.
    marker: marker::PhantomData<&'a ()>,
}
impl<'a, K, V> Iterator for RevMoveBuckets<'a, K, V> {
    type Item = (K, V);
    fn next(&mut self) -> Option<(K, V)> {
        if self.elems_left == 0 {
            return None;
        }
        // `elems_left > 0` guarantees a full bucket still exists before the
        // start marker, so this loop terminates.
        loop {
            debug_assert!(self.raw.hash != self.hashes_end);
            unsafe {
                self.raw = self.raw.offset(-1);
                if *self.raw.hash != EMPTY_BUCKET {
                    self.elems_left -= 1;
                    return Some((
                        ptr::read(self.raw.key),
                        ptr::read(self.raw.val)
                    ));
                }
            }
        }
    }
}
/// Iterator over shared references to entries in a table.
pub struct Iter<'a, K: 'a, V: 'a> {
    iter: RawBuckets<'a, K, V>,
    elems_left: usize,
}
// Shared-reference iteration: both Send and Sync only need the element
// types to be Sync, since the iterator never hands out ownership.
unsafe impl<'a, K: Sync, V: Sync> Sync for Iter<'a, K, V> {}
unsafe impl<'a, K: Sync, V: Sync> Send for Iter<'a, K, V> {}
// FIXME(#19839) Remove in favor of `#[derive(Clone)]`
impl<'a, K, V> Clone for Iter<'a, K, V> {
    fn clone(&self) -> Iter<'a, K, V> {
        Iter {
            iter: self.iter.clone(),
            elems_left: self.elems_left
        }
    }
}
/// Iterator over mutable references to entries in a table.
pub struct IterMut<'a, K: 'a, V: 'a> {
    iter: RawBuckets<'a, K, V>,
    elems_left: usize,
}
unsafe impl<'a, K: Sync, V: Sync> Sync for IterMut<'a, K, V> {}
// Both K: Sync and K: Send are correct for IterMut's Send impl,
// but Send is the more useful bound
unsafe impl<'a, K: Send, V: Send> Send for IterMut<'a, K, V> {}
/// Iterator over the entries in a table, consuming the table.
pub struct IntoIter<K, V> {
    table: RawTable<K, V>,
    iter: RawBuckets<'static, K, V>
}
unsafe impl<K: Sync, V: Sync> Sync for IntoIter<K, V> {}
unsafe impl<K: Send, V: Send> Send for IntoIter<K, V> {}
/// Iterator over the entries in a table, clearing the table.
pub struct Drain<'a, K: 'a, V: 'a> {
    table: &'a mut RawTable<K, V>,
    iter: RawBuckets<'static, K, V>,
}
unsafe impl<'a, K: Sync, V: Sync> Sync for Drain<'a, K, V> {}
unsafe impl<'a, K: Send, V: Send> Send for Drain<'a, K, V> {}
impl<'a, K, V> Iterator for Iter<'a, K, V> {
    type Item = (&'a K, &'a V);
    fn next(&mut self) -> Option<(&'a K, &'a V)> {
        self.iter.next().map(|bucket| {
            self.elems_left -= 1;
            unsafe {
                (&*bucket.key,
                 &*bucket.val)
            }
        })
    }
    // `elems_left` is exact, so the hint is tight in both directions.
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.elems_left, Some(self.elems_left))
    }
}
impl<'a, K, V> ExactSizeIterator for Iter<'a, K, V> {
    fn len(&self) -> usize { self.elems_left }
}
impl<'a, K, V> Iterator for IterMut<'a, K, V> {
    type Item = (&'a K, &'a mut V);
    fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
        self.iter.next().map(|bucket| {
            self.elems_left -= 1;
            unsafe {
                // The key is handed out shared; only the value is mutable.
                (&*bucket.key,
                 &mut *bucket.val)
            }
        })
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.elems_left, Some(self.elems_left))
    }
}
impl<'a, K, V> ExactSizeIterator for IterMut<'a, K, V> {
    fn len(&self) -> usize { self.elems_left }
}
impl<K, V> Iterator for IntoIter<K, V> {
    type Item = (SafeHash, K, V);
    fn next(&mut self) -> Option<(SafeHash, K, V)> {
        self.iter.next().map(|bucket| {
            // Decrementing size keeps the table's eventual Drop from
            // double-dropping entries that were already moved out here.
            self.table.size -= 1;
            unsafe {
                (
                    SafeHash {
                        hash: *bucket.hash,
                    },
                    ptr::read(bucket.key),
                    ptr::read(bucket.val)
                )
            }
        })
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        let size = self.table.size();
        (size, Some(size))
    }
}
impl<K, V> ExactSizeIterator for IntoIter<K, V> {
    fn len(&self) -> usize { self.table.size() }
}
impl<'a, K, V> Iterator for Drain<'a, K, V> {
    type Item = (SafeHash, K, V);
    #[inline]
    fn next(&mut self) -> Option<(SafeHash, K, V)> {
        self.iter.next().map(|bucket| {
            self.table.size -= 1;
            unsafe {
                (
                    SafeHash {
                        // Unlike IntoIter, each drained slot is immediately
                        // tagged EMPTY_BUCKET so the table stays usable.
                        hash: ptr::replace(bucket.hash, EMPTY_BUCKET),
                    },
                    ptr::read(bucket.key),
                    ptr::read(bucket.val)
                )
            }
        })
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        let size = self.table.size();
        (size, Some(size))
    }
}
impl<'a, K, V> ExactSizeIterator for Drain<'a, K, V> {
    fn len(&self) -> usize { self.table.size() }
}
impl<'a, K: 'a, V: 'a> Drop for Drain<'a, K, V> {
    fn drop(&mut self) {
        // Exhaust the iterator so every remaining entry is moved out
        // (and therefore dropped) and its slot marked empty.
        for _ in self.by_ref() {}
    }
}
impl<K: Clone, V: Clone> Clone for RawTable<K, V> {
    fn clone(&self) -> RawTable<K, V> {
        unsafe {
            // Allocate an uninitialized table of the same capacity, then
            // walk both tables slot by slot: clone key/value for full
            // buckets, and tag empty slots with EMPTY_BUCKET (the new
            // table's hash array starts uninitialized).
            let mut new_ht = RawTable::new_uninitialized(self.capacity());
            {
                let cap = self.capacity();
                let mut new_buckets = Bucket::first(&mut new_ht);
                let mut buckets = Bucket::first(self);
                while buckets.index() != cap {
                    match buckets.peek() {
                        Full(full) => {
                            let (h, k, v) = {
                                let (k, v) = full.read();
                                (full.hash(), k.clone(), v.clone())
                            };
                            *new_buckets.raw.hash = h.inspect();
                            ptr::write(new_buckets.raw.key, k);
                            ptr::write(new_buckets.raw.val, v);
                        }
                        Empty(..) => {
                            *new_buckets.raw.hash = EMPTY_BUCKET;
                        }
                    }
                    new_buckets.next();
                    buckets.next();
                }
            };
            new_ht.size = self.size();
            new_ht
        }
    }
}
impl<K, V> Drop for RawTable<K, V> {
fn drop(&mut self) {
    // Zero-capacity tables own no allocation. POST_DROP_USIZE is
    // presumably the filling-drop sentinel — skip if already dropped.
    if self.capacity == 0 || self.capacity == mem::POST_DROP_USIZE {
        return;
    }
    // This is done in reverse because we've likely partially taken
    // some elements out with `.into_iter()` from the front.
    // Check if the size is 0, so we don't do a useless scan when
    // dropping empty tables such as on resize.
    // Also avoid double drop of elements that have been already moved out.
    unsafe {
        for _ in self.rev_move_buckets() {}
    }
    // Recompute the backing buffer's layout so it is freed with the same
    // size and alignment it was allocated with.
    let hashes_size = self.capacity * size_of::<u64>();
    let keys_size = self.capacity * size_of::<K>();
    let vals_size = self.capacity * size_of::<V>();
    let (align, _, size, oflo) =
        calculate_allocation(hashes_size, align_of::<u64>(),
                             keys_size, align_of::<K>(),
                             vals_size, align_of::<V>());
    debug_assert!(!oflo, "should be impossible");
    unsafe {
        deallocate(*self.hashes as *mut u8, size, align);
        // Remember how everything was allocated out of one buffer
        // during initialization? We only need one call to free here.
    }
}
}<|fim▁end|>
|
/// The hashtable's capacity, similar to a vector's.
|
<|file_name|>listController.js<|end_file_name|><|fim▁begin|>TaskManager.module('ContentModule.List', function (List, App, Backbone) {
'use strict';
List.Controller = Marionette.Controller.extend({
initialize: function (options) {
var tasksList = App.request('taskList'),
listView = this.getView(tasksList);
if (options.region) {<|fim▁hole|> this.listenTo(listView, this.close);
this.region.show(listView);
}
},
getView: function (tasksList) {
return new List.View({collection: tasksList});
}
});
});<|fim▁end|>
|
this.region = options.region;
|
<|file_name|>jsb_boot.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2014 Chukong Technologies Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
//
// cocos2d boot
//
var cc = cc || {};
var window = window || this;
/**<|fim▁hole|> * @param {function} iterator
* @param {object} [context]
*/
cc.each = function (obj, iterator, context) {
if (!obj)
return;
if (obj instanceof Array) {
for (var i = 0, li = obj.length; i < li; i++) {
if (iterator.call(context, obj[i], i) === false)
return;
}
} else {
for (var key in obj) {
if (iterator.call(context, obj[key], key) === false)
return;
}
}
};
/**
* Copy all of the properties in source objects to target object and return the target object.
* @param {object} target
* @param {object} *sources
* @returns {object}
*/
cc.extend = function(target) {
var sources = arguments.length >= 2 ? Array.prototype.slice.call(arguments, 1) : [];
cc.each(sources, function(src) {
for(var key in src) {
if (src.hasOwnProperty(key)) {
target[key] = src[key];
}
}
});
return target;
};
/**
* Check the obj whether is function or not
* @param {*} obj
* @returns {boolean}
*/
cc.isFunction = function(obj) {
return typeof obj == 'function';
};
/**
* Check the obj whether is number or not
* @param {*} obj
* @returns {boolean}
*/
cc.isNumber = function(obj) {
return typeof obj == 'number' || Object.prototype.toString.call(obj) == '[object Number]';
};
/**
* Check the obj whether is string or not
* @param {*} obj
* @returns {boolean}
*/
cc.isString = function(obj) {
return typeof obj == 'string' || Object.prototype.toString.call(obj) == '[object String]';
};
/**
* Check the obj whether is array or not
* @param {*} obj
* @returns {boolean}
*/
cc.isArray = function(obj) {
return Object.prototype.toString.call(obj) == '[object Array]';
};
/**
* Check the obj whether is undefined or not
* @param {*} obj
* @returns {boolean}
*/
cc.isUndefined = function(obj) {
return typeof obj == 'undefined';
};
/**
* Check the obj whether is object or not
* @param {*} obj
* @returns {boolean}
*/
cc.isObject = function(obj) {
var type = typeof obj;
return type == 'function' || (obj && type == 'object');
};
/**
* Check the url whether cross origin
* @param {String} url
* @returns {boolean}
*/
cc.isCrossOrigin = function (url) {
return false;
};
/**
* Common getter setter configuration function
* @function
* @param {Object} proto A class prototype or an object to config
* @param {String} prop Property name
* @param {function} getter Getter function for the property
* @param {function} setter Setter function for the property
*/
cc.defineGetterSetter = function (proto, prop, getter, setter){
var desc = { enumerable: false, configurable: true };
getter && (desc.get = getter);
setter && (desc.set = setter);
Object.defineProperty(proto, prop, desc);
};
//+++++++++++++++++++++++++something about async begin+++++++++++++++++++++++++++++++
/**
* Async Pool class, a helper of cc.async
* @param {Object|Array} srcObj
* @param {Number} limit the limit of parallel number
* @param {function} iterator
* @param {function} onEnd
* @param {object} target
* @constructor
*/
cc.AsyncPool = function(srcObj, limit, iterator, onEnd, target){
var self = this;
self._srcObj = srcObj;
self._limit = limit;
self._pool = [];
self._iterator = iterator;
self._iteratorTarget = target;
self._onEnd = onEnd;
self._onEndTarget = target;
self._results = srcObj instanceof Array ? [] : {};
self._isErr = false;
cc.each(srcObj, function(value, index){
self._pool.push({index : index, value : value});
});
self.size = self._pool.length;
self.finishedSize = 0;
self._workingSize = 0;
self._limit = self._limit || self.size;
self.onIterator = function(iterator, target){
self._iterator = iterator;
self._iteratorTarget = target;
};
self.onEnd = function(endCb, endCbTarget){
self._onEnd = endCb;
self._onEndTarget = endCbTarget;
};
self._handleItem = function(){
var self = this;
if(self._pool.length == 0)
return; //return directly if the array's length = 0
if(self._workingSize >= self._limit)
return; //return directly if the working size great equal limit number
var item = self._pool.shift();
var value = item.value, index = item.index;
self._workingSize++;
self._iterator.call(self._iteratorTarget, value, index, function(err){
if(self._isErr)
return;
self.finishedSize++;
self._workingSize--;
if(err) {
self._isErr = true;
if(self._onEnd)
self._onEnd.call(self._onEndTarget, err);
return
}
var arr = Array.prototype.slice.call(arguments, 1);
self._results[this.index] = arr[0];
if(self.finishedSize == self.size) {
if(self._onEnd)
self._onEnd.call(self._onEndTarget, null, self._results);
return;
}
self._handleItem();
}.bind(item), self);
};
self.flow = function(){
var self = this;
if(self._pool.length == 0) {
if(self._onEnd)
self._onEnd.call(self._onEndTarget, null, []);
return;
}
for(var i = 0; i < self._limit; i++)
self._handleItem();
}
};
cc.async = {
/**
* Do tasks series.
* @param {Array|Object} tasks
* @param {function} [cb] callback
* @param {Object} [target]
* @return {cc.AsyncPool}
*/
series : function(tasks, cb, target){
var asyncPool = new cc.AsyncPool(tasks, 1, function(func, index, cb1){
func.call(target, cb1);
}, cb, target);
asyncPool.flow();
return asyncPool;
},
/**
* Do tasks parallel.
* @param {Array|Object} tasks
* @param {function} cb callback
* @param {Object} [target]
* @return {cc.AsyncPool}
*/
parallel : function(tasks, cb, target){
var asyncPool = new cc.AsyncPool(tasks, 0, function(func, index, cb1){
func.call(target, cb1);
}, cb, target);
asyncPool.flow();
return asyncPool;
},
/**
* Do tasks waterfall.
* @param {Array|Object} tasks
* @param {function} cb callback
* @param {Object} [target]
* @return {cc.AsyncPool}
*/
waterfall : function(tasks, cb, target){
var args = [];
var asyncPool = new cc.AsyncPool(tasks, 1,
function (func, index, cb1) {
args.push(function (err) {
args = Array.prototype.slice.call(arguments, 1);
cb1.apply(null, arguments);
});
func.apply(target, args);
}, function (err, results) {
if (!cb)
return;
if (err)
return cb.call(target, err);
cb.call(target, null, results[results.length - 1]);
});
asyncPool.flow();
return asyncPool;
},
/**
* Do tasks by iterator.
* @param {Array|Object} tasks
* @param {function|Object} iterator
* @param {function} cb callback
* @param {Object} [target]
* @return {cc.AsyncPool}
*/
map : function(tasks, iterator, cb, target){
var locIterator = iterator;
if(typeof(iterator) == "object"){
cb = iterator.cb;
target = iterator.iteratorTarget;
locIterator = iterator.iterator;
}
var asyncPool = new cc.AsyncPool(tasks, 0, locIterator, cb, target);
asyncPool.flow();
return asyncPool;
},
/**
* Do tasks by iterator limit.
* @param {Array|Object} tasks
* @param {Number} limit
* @param {function} iterator
* @param {function} cb callback
* @param {Object} [target]
*/
mapLimit : function(tasks, limit, iterator, cb, target){
var asyncPool = new cc.AsyncPool(tasks, limit, iterator, cb, target);
asyncPool.flow();
return asyncPool;
}
};
//+++++++++++++++++++++++++something about async end+++++++++++++++++++++++++++++++++
//+++++++++++++++++++++++++something about path begin++++++++++++++++++++++++++++++++
cc.path = {
/**
* Join strings to be a path.
* @example
cc.path.join("a", "b.png");//-->"a/b.png"
cc.path.join("a", "b", "c.png");//-->"a/b/c.png"
cc.path.join("a", "b");//-->"a/b"
cc.path.join("a", "b", "/");//-->"a/b/"
cc.path.join("a", "b/", "/");//-->"a/b/"
* @returns {string}
*/
join : function(){
var l = arguments.length;
var result = "";
for(var i = 0; i < l; i++) {
result = (result + (result == "" ? "" : "/") + arguments[i]).replace(/(\/|\\\\)$/, "");
}
return result;
},
/**
* Get the ext name of a path.
* @example
cc.path.extname("a/b.png");//-->".png"
cc.path.extname("a/b.png?a=1&b=2");//-->".png"
cc.path.extname("a/b");//-->null
cc.path.extname("a/b?a=1&b=2");//-->null
* @param pathStr
* @returns {*}
*/
extname : function(pathStr){
var index = pathStr.indexOf("?");
if(index > 0) pathStr = pathStr.substring(0, index);
index = pathStr.lastIndexOf(".");
if(index < 0) return null;
return pathStr.substring(index, pathStr.length);
},
/**
* Get the file name of a file path.
* @example
cc.path.basename("a/b.png");//-->"b.png"
cc.path.basename("a/b.png?a=1&b=2");//-->"b.png"
cc.path.basename("a/b.png", ".png");//-->"b"
cc.path.basename("a/b.png?a=1&b=2", ".png");//-->"b"
cc.path.basename("a/b.png", ".txt");//-->"b.png"
* @param pathStr
* @param extname
* @returns {*}
*/
basename : function(pathStr, extname){
var index = pathStr.indexOf("?");
if(index > 0) pathStr = pathStr.substring(0, index);
var reg = /(\/|\\\\)([^(\/|\\\\)]+)$/g;
var result = reg.exec(pathStr.replace(/(\/|\\\\)$/, ""));
if(!result) return null;
var baseName = result[2];
if(extname && pathStr.substring(pathStr.length - extname.length).toLowerCase() == extname.toLowerCase())
return baseName.substring(0, baseName.length - extname.length);
return baseName;
},
/**
 * Get the directory name of a file path.
 * @example
 cc.path.dirname("a/b/c.png");//-->"a/b"
 cc.path.dirname("a/b/c.png?a=1&b=2");//-->"a/b"
* @param {String} pathStr
* @returns {*}
*/
dirname : function(pathStr){
return pathStr.replace(/(\/|\\\\)$/, "").replace(/(\/|\\\\)[^(\/|\\\\)]+$/, "");
},
/**
* Change extname of a file path.
* @example
cc.path.changeExtname("a/b.png", ".plist");//-->"a/b.plist"
cc.path.changeExtname("a/b.png?a=1&b=2", ".plist");//-->"a/b.plist?a=1&b=2"
* @param pathStr
* @param extname
* @returns {string}
*/
changeExtname : function(pathStr, extname){
extname = extname || "";
var index = pathStr.indexOf("?");
var tempStr = "";
if(index > 0) {
tempStr = pathStr.substring(index);
pathStr = pathStr.substring(0, index);
};
index = pathStr.lastIndexOf(".");
if(index < 0) return pathStr + extname + tempStr;
return pathStr.substring(0, index) + extname + tempStr;
},
/**
* Change file name of a file path.
* @example
cc.path.changeBasename("a/b/c.plist", "b.plist");//-->"a/b/b.plist"
cc.path.changeBasename("a/b/c.plist?a=1&b=2", "b.plist");//-->"a/b/b.plist?a=1&b=2"
cc.path.changeBasename("a/b/c.plist", ".png");//-->"a/b/c.png"
cc.path.changeBasename("a/b/c.plist", "b");//-->"a/b/b"
cc.path.changeBasename("a/b/c.plist", "b", true);//-->"a/b/b.plist"
* @param {String} pathStr
* @param {String} basename
* @param [{Boolean}] isSameExt
* @returns {string}
*/
changeBasename : function(pathStr, basename, isSameExt){
if(basename.indexOf(".") == 0) return this.changeExtname(pathStr, basename);
var index = pathStr.indexOf("?");
var tempStr = "";
var ext = isSameExt ? this.extname(pathStr) : "";
if(index > 0) {
tempStr = pathStr.substring(index);
pathStr = pathStr.substring(0, index);
};
index = pathStr.lastIndexOf("/");
index = index <= 0 ? 0 : index+1;
return pathStr.substring(0, index) + basename + ext + tempStr;
}
};
//+++++++++++++++++++++++++something about path end++++++++++++++++++++++++++++++++
//+++++++++++++++++++++++++something about loader start+++++++++++++++++++++++++++
cc.loader = {
_resPath : "",
_audioPath : "",
_register : {},//register of loaders
cache : {},//cache for data loaded
_langPathCache : {},//cache for lang path
/**
* Get XMLHttpRequest.
* @returns {XMLHttpRequest}
*/
getXMLHttpRequest : function () {
return new XMLHttpRequest();
},
//@MODE_BEGIN DEV
_jsCache : {},//cache for js
_getArgs4Js : function(args){
var a0 = args[0], a1 = args[1], a2 = args[2], results = ["", null, null];
if(args.length == 1){
results[1] = a0 instanceof Array ? a0 : [a0];
}else if(args.length == 2){
if(typeof a1 == "function"){
results[1] = a0 instanceof Array ? a0 : [a0];
results[2] = a1;
}else{
results[0] = a0 || "";
results[1] = a1 instanceof Array ? a1 : [a1];
}
}else if(args.length == 3){
results[0] = a0 || "";
results[1] = a1 instanceof Array ? a1 : [a1];
results[2] = a2;
}else throw "arguments error to load js!";
return results;
},
/**
* Load js files.
* @param {?string=} baseDir The pre path for jsList.
* @param {array.<string>} jsList List of js path.
* @param {function} cb Callback function
*
* If the arguments.length == 2, then the baseDir turns to be "".
* @returns {*}
*/
loadJs : function(baseDir, jsList, cb){
var self = this, localJsCache = self._jsCache,
args = self._getArgs4Js(arguments);
baseDir = args[0];
jsList = args[1];
cb = args[2];
var ccPath = cc.path;
for(var i = 0, li = jsList.length; i < li; ++i){
require(ccPath.join(baseDir, jsList[i]));
}
if(cb) cb();
},
/**
* Load js width loading image.
* @param {?string} baseDir
* @param {array} jsList
* @param {function} cb
*/
loadJsWithImg : function(baseDir, jsList, cb){
this.loadJs.apply(this, arguments);
},
//@MODE_END DEV
/**
* Load a single resource as txt.
* @param {!string} url
* @param {function} cb arguments are : err, txt
*/
loadTxt : function(url, cb){
cb(null, jsb.fileUtils.getStringFromFile(url));
},
loadJson : function(url, cb){
this.loadTxt(url, function(err, txt){
try{
err ? cb(err) : cb(null, JSON.parse(txt));
}catch(e){
throw e;
cb("load json [" + url + "] failed : " + e);
}
});
},
    /**
     * Load a single image, preferring the texture cache.
     * Remote (http/ftp) urls are fetched asynchronously via jsb.loadRemoteImg;
     * local paths go synchronously through the native texture cache.
     * @param {!string} url
     * @param [{object}] option Optional; may be omitted (two-argument form).
     * @param {function} cb cb(err, texture)
     * @returns {Image}
     */
    loadImg : function(url, option, cb){
        var l = arguments.length;
        // Two-argument form: (url, cb) — the option slot was omitted.
        if(l == 2) cb = option;
        var cachedTex = cc.textureCache.getTextureForKey(url);
        if (cachedTex) {
            cb && cb(null, cachedTex);
        }
        else if (url.match(jsb.urlRegExp)) {
            // Remote image: asynchronous native download.
            jsb.loadRemoteImg(url, function(succeed, tex) {
                if (succeed) {
                    cb && cb(null, tex);
                }
                else {
                    cb && cb("Load image failed");
                }
            });
        }
        else {
            // Local file: synchronous add through the texture cache.
            var tex = cc.textureCache._addImage(url);
            if (tex instanceof cc.Texture2D)
                cb && cb(null, tex);
            else cb && cb("Load image failed");
        }
    },
    /**
     * Load binary data by url. Synchronous in JSB; err is always null.
     * @param {String} url
     * @param {Function} cb cb(err, data)
     */
    loadBinary : function(url, cb){
        cb(null, jsb.fileUtils.getDataFromFile(url));
    },
    // Synchronous variant of loadBinary; returns the data directly.
    loadBinarySync : function(url){
        return jsb.fileUtils.getDataFromFile(url);
    },
    /**
     * Iterator used by cc.loader.load to resolve a single resource.
     * `item` is either a plain url string or an object {type, src?, name?}.
     * Successful results are memoized in self.cache.
     * @param {object|string} item
     * @param {number} index Position in the resource list (unused here).
     * @param {function} cb cb(err, data)
     * @returns {*}
     * @private
     */
    _loadResIterator : function(item, index, cb){
        var self = this, url = null;
        var type = item.type;
        if (type) {
            // Object form: derive the url from src, or name + ".type".
            type = "." + type.toLowerCase();
            url = item.src ? item.src : item.name + type;
        } else {
            url = item;
            type = cc.path.extname(url);
        }
        // Serve from cache when the resource was already loaded.
        var obj = self.cache[url];
        if (obj)
            return cb(null, obj);
        var loader = null;
        if (type) {
            loader = self._register[type.toLowerCase()];
        }
        if (!loader) {
            cc.error("loader for [" + type + "] not exists!");
            return cb();
        }
        var basePath = loader.getBasePath ? loader.getBasePath() : self.resPath;
        var realUrl = self.getUrl(basePath, url);
        // JSB loaders are synchronous: load() returns the data directly.
        var data = loader.load(realUrl, url);
        if (data) {
            self.cache[url] = data;
            cb(null, data);
        } else {
            // NOTE(review): a failed load completes without an error argument,
            // so the overall load() continues instead of aborting.
            self.cache[url] = null;
            delete self.cache[url];
            cb();
        }
    },
    /**
     * Resolve a url against a base path, applying per-extension loader base
     * paths and language substitution for files under a /lang/ directory.
     * @param [{string}] basePath Optional; derived from the registered loader
     *                            (or cc.loader.resPath) when omitted.
     * @param {string} url
     * @returns {*}
     */
    getUrl : function(basePath, url){
        var self = this, langPathCache = self._langPathCache, path = cc.path;
        if(arguments.length == 1){
            // Single-argument form: (url) — pick basePath from the loader
            // registered for the url's extension.
            url = basePath;
            var type = path.extname(url);
            type = type ? type.toLowerCase() : "";
            var loader = self._register[type];
            if(!loader)
                basePath = self.resPath;
            else
                basePath = loader.getBasePath ? loader.getBasePath() : self.resPath;
        }
        url = cc.path.join(basePath || "", url);
        // Files inside a "lang" directory get the current language code
        // appended to their base name (e.g. ui.png -> ui_en.png), memoized
        // in _langPathCache.
        // NOTE(review): the character class [\/(\\\\)] also matches literal
        // parentheses, not only path separators — presumably unintended; verify.
        if(url.match(/[\/(\\\\)]lang[\/(\\\\)]/i)){
            if(langPathCache[url])
                return langPathCache[url];
            var extname = path.extname(url) || "";
            url = langPathCache[url] = url.substring(0, url.length - extname.length) + "_" + cc.sys.language + extname;
        }
        return url;
    },
    /**
     * Load a list of resources, then call the callback.
     * Accepted forms:
     *   load(res, cb)
     *   load(res, trigger, cb)   — trigger(result, total, finished) per item
     *   load(res, {cb, cbTarget, trigger, triggerTarget})
     * @param {[string]|string} resources
     * @param [{function}|{}] option
     * @param {function} cb
     * @returns {cc.AsyncPool} The pool driving the load (already flowing).
     */
    load : function(resources, option, cb){
        var self = this;
        var len = arguments.length;
        if(len == 0)
            throw "arguments error!";
        // Normalize the three calling conventions into an option object.
        if(len == 3){
            if(typeof option == "function"){
                if(typeof cb == "function")
                    option = {trigger : option, cb : cb };
                else
                    option = { cb : option, cbTarget : cb};
            }
        }else if(len == 2){
            if(typeof option == "function")
                option = {cb : option};
        }else if(len == 1){
            option = {};
        }
        if(!(resources instanceof Array))
            resources = [resources];
        // Pool size 0 means no concurrency limit; each item runs through
        // _loadResIterator and reports progress via the optional trigger.
        var asyncPool = new cc.AsyncPool(resources, 0, function(value, index, cb1, aPool){
            self._loadResIterator(value, index, function(err){
                if(err)
                    return cb1(err);
                var arr = Array.prototype.slice.call(arguments, 1);
                if(option.trigger)
                    option.trigger.call(option.triggerTarget, arr[0], aPool.size, aPool.finishedSize); //call trigger
                cb1(null, arr[0]);
            });
        }, option.cb, option.cbTarget);
        asyncPool.flow();
        return asyncPool;
    },
    /**
     * Loads a filename-alias map from a plist file and installs it into the
     * native file utils (jsb.fileUtils.loadFilenameLookup).
     *
     * The plist must contain a "filenames" dict mapping requested names to
     * actual files, e.g.
     *   <key>sounds/click.wav</key><string>sounds/click.caf</string>
     * plus a "metadata" dict with an integer "version" entry.
     * @param {String} url The plist file name.
     * @param {Function} cb Called synchronously after the lookup is installed.
     */
    loadAliases : function(url, cb){
        jsb.fileUtils.loadFilenameLookup(url);
        if(cb) cb();
    },
    /**
     * Register a resource loader for one or more file extensions.
     * NOTE(review): in the string form the extension is stored exactly as
     * given, so callers must include the leading dot (".png"); in the array
     * form the dot is prepended automatically ("png"). Confirm all callers
     * before unifying this asymmetry.
     * @param {string|array.<string>} extNames
     * @param {{load : function}} loader
     */
    register : function(extNames, loader){
        if(!extNames || !loader) return;
        var self = this;
        if(typeof extNames == "string")
            return this._register[extNames.trim().toLowerCase()] = loader;
        for(var i = 0, li = extNames.length; i < li; i++) {
            self._register["." + extNames[i].trim().toLowerCase()] = loader;
        }
    },
    /**
     * Get resource data by url, preferring the cache; on a miss, loads it
     * synchronously through the registered loader (without caching the result).
     * @param url
     * @returns {*} The resource data, or undefined when no loader is registered.
     */
    getRes : function(url){
        var cached = this.cache[url];
        if (cached)
            return cached;
        var type = cc.path.extname(url);
        var loader = this._register[type.toLowerCase()];
        if(!loader) return cc.log("loader for [" + type + "] not exists!");
        var basePath = loader.getBasePath ? loader.getBasePath() : this.resPath;
        var realUrl = this.getUrl(basePath, url);
        return loader.load(realUrl, url);
    },
    /**
     * Release the cache of resource by url.
     * No-op in JSB: the native caches manage their own lifetime.
     * @param url
     */
    release : function(url){//do nothing in jsb
    },
    /**
     * Release all cached resources. No-op in JSB (see release).
     */
    releaseAll : function(){//do nothing in jsb
    }
};
// resPath: base search path for resources; assigning it also registers the
// path with the native file utils search paths.
cc.defineGetterSetter(cc.loader, "resPath", function(){
    return this._resPath;
}, function(resPath){
    this._resPath = resPath || "";
    jsb.fileUtils.addSearchPath(this._resPath);
});
// audioPath: base search path for audio files, mirrored to the native side.
cc.defineGetterSetter(cc.loader, "audioPath", function(){
    return this._audioPath;
}, function(audioPath){
    this._audioPath = audioPath || "";
    jsb.fileUtils.addSearchPath(this._audioPath);
});
//+++++++++++++++++++++++++something about loader end+++++++++++++++++++++++++++++
//+++++++++++++++++++++++++something about format string begin+++++++++++++++++++++++++++++
/**
 * A string tool to construct a string from a format string, e.g.
 * cc.formatStr("a: %d, b: %s", a, b).
 * Numbers fill the first %d or %s placeholder; other values fill the first
 * %s; anything left over is appended separated by spaces. If the first
 * argument is an object, no substitution is performed at all.
 * @param {String} formatStr format String
 * @returns {String}
 */
cc.formatStr = function(){
    var argv = arguments;
    var count = argv.length;
    if(count < 1)
        return "";
    var out = argv[0];
    // A non-string (object) first argument disables placeholder substitution.
    var substitute = typeof out != "object";
    for(var k = 1; k < count; ++k){
        var piece = argv[k];
        if(!substitute){
            out += " " + piece;
            continue;
        }
        if(typeof piece == "number" && out.match(/(%d)|(%s)/)){
            // A number may fill either a %d or a %s slot (first one wins).
            out = out.replace(/(%d)|(%s)/, piece);
        }else if(out.match(/%s/)){
            out = out.replace(/%s/, piece);
        }else{
            // No placeholder left: append with a space separator.
            out += " " + piece;
        }
    }
    return out;
};
//+++++++++++++++++++++++Define singleton format string end+++++++++++++++++++++++++++
//+++++++++++++++++++++++Define singleton objects begin+++++++++++++++++++++++++++
// Define singleton objects
/**
* @type {cc.Director}
* @name cc.director
*/
cc.director = cc.Director.getInstance();
/**
* @type {cc.Size}
* @name cc.winSize
* cc.winSize is the alias object for the size of the current game window.
*/
cc.winSize = cc.director.getWinSize();
/**
* @type {cc.EGLView}
* @name cc.view
* cc.view is the shared view object.
*/
cc.view = cc.director.getOpenGLView();
cc.view.getDevicePixelRatio = function () {
var sys = cc.sys;
return (sys.os == sys.OS_IOS || sys.os == sys.OS_OSX) ? 2 : 1;
};
cc.view.convertToLocationInView = function (tx, ty, relatedPos) {
var _devicePixelRatio = cc.view.getDevicePixelRatio();
return {x: _devicePixelRatio * (tx - relatedPos.left), y: _devicePixelRatio * (relatedPos.top + relatedPos.height - ty)};
};
cc.view.enableRetina = function(enabled) {};
cc.view.isRetinaEnabled = function() {
var sys = cc.sys;
return (sys.os == sys.OS_IOS || sys.os == sys.OS_OSX) ? true : false;
};
cc.view.adjustViewPort = function() {};
cc.view.resizeWithBrowserSize = function () {return;};
cc.view.setResizeCallback = function() {return;};
cc.view.enableAutoFullScreen = function () {return;};
cc.view.isAutoFullScreenEnabled = function() {return true;};
cc.view._setDesignResolutionSize = cc.view.setDesignResolutionSize;
cc.view.setDesignResolutionSize = function(width,height,resolutionPolicy){
cc.view._setDesignResolutionSize(width,height,resolutionPolicy);
cc.winSize = cc.director.getWinSize();
cc.visibleRect.init();
};
cc.view.setResolutionPolicy = function(resolutionPolicy){
var size = cc.view.getDesignResolutionSize();
cc.view.setDesignResolutionSize(size.width,size.height,resolutionPolicy);
};
cc.view.setContentTranslateLeftTop = function(){return;};
cc.view.getContentTranslateLeftTop = function(){return null;};
cc.view.setFrameZoomFactor = function(){return;};
cc.DENSITYDPI_DEVICE = "device-dpi";
cc.DENSITYDPI_HIGH = "high-dpi";
cc.DENSITYDPI_MEDIUM = "medium-dpi";
cc.DENSITYDPI_LOW = "low-dpi";
cc.view.setTargetDensityDPI = function() {};
cc.view.getTargetDensityDPI = function() {return cc.DENSITYDPI_DEVICE;};
/**
* @type {Object}
* @name cc.eventManager
*/
cc.eventManager = cc.director.getEventDispatcher();
/**
* @type {cc.AudioEngine}
* @name cc.audioEngine
* A simple Audio Engine engine API.
*/
cc.audioEngine = cc.AudioEngine.getInstance();
cc.audioEngine.end = function(){
this.stopMusic();
this.stopAllEffects();
};
/**
* @type {Object}
* @name cc.configuration
* cc.configuration contains some openGL variables
*/
cc.configuration = cc.Configuration.getInstance();
/**
* @type {Object}
* @name cc.textureCache
* cc.textureCache is the global cache for cc.Texture2D
*/
cc.textureCache = cc.director.getTextureCache();
// Keep the native synchronous addImage available under _addImage.
cc.TextureCache.prototype._addImage = cc.TextureCache.prototype.addImage;
/**
 * Adds an image to the texture cache with the web-style (url, cb, target)
 * signature, delegating to cc.loader.loadImg.
 * NOTE(review): the return value is only meaningful when loadImg completes
 * synchronously (cached textures and local files); for remote urls it
 * returns null and the texture is delivered via the callback only.
 */
cc.TextureCache.prototype.addImage = function(url, cb, target) {
    var localTex = null;
    cc.loader.loadImg(url, function(err, tex) {
        if (err) tex = null;
        if (cb) {
            cb.call(target, tex);
        }
        localTex = tex;
    });
    return localTex;
};
/**
* @type {Object}
* @name cc.shaderCache
* cc.shaderCache is a singleton object that stores manages GL shaders
*/
cc.shaderCache = cc.ShaderCache.getInstance();
/**
* @type {Object}
* @name cc.animationCache
*/
cc.animationCache = cc.AnimationCache.getInstance();
/**
* @type {Object}
* @name cc.spriteFrameCache
*/
cc.spriteFrameCache = cc.SpriteFrameCache.getInstance();
/**
* @type {cc.PlistParser}
* @name cc.plistParser
* A Plist Parser
*/
cc.plistParser = cc.PlistParser.getInstance();
//cc.tiffReader;
//cc.imeDispatcher;
// File utils (Temporary, won't be accessible)
cc.fileUtils = cc.FileUtils.getInstance();
cc.fileUtils.setPopupNotify(false);
/**
 * @type {Object}
 * @name cc.screen
 * The fullscreen API provides an easy way for web content to be presented
 * using the user's entire screen. On native platforms the game always owns
 * the screen, so these are stubs kept for web-API compatibility.
 * It's invalid on safari, QQbrowser and android browser.
 */
cc.screen = {
    init: function() {},
    // Reports fullscreen state; always true on native platforms.
    fullScreen: function() {
        return true;
    },
    requestFullScreen: function(element, onFullScreenChange) {
        // Already fullscreen: just notify the caller.
        onFullScreenChange.call();
    },
    exitFullScreen: function() {
        return false;
    },
    autoFullScreen: function(element, onFullScreenChange) {
        onFullScreenChange.call();
    }
};
//+++++++++++++++++++++++Define singleton objects end+++++++++++++++++++++++++++
//+++++++++++++++++++++++++Redefine JSB only APIs+++++++++++++++++++++++++++
/**
* @namespace jsb
* @name jsb
*/
var jsb = jsb || {};
/**
* @type {Object}
* @name jsb.fileUtils
* jsb.fileUtils is the native file utils singleton object,
* please refer to Cocos2d-x API to know how to use it.
* Only available in JSB
*/
jsb.fileUtils = cc.fileUtils;
delete cc.FileUtils;
delete cc.fileUtils;
/**
* @type {Object}
* @name jsb.reflection
* jsb.reflection is a bridge to let you invoke Java static functions.
* please refer to this document to know how to use it: http://www.cocos2d-x.org/docs/manual/framework/html5/v3/reflection/en
* Only available on Android platform
*/
jsb.reflection = {
callStaticMethod : function(){
cc.log("not supported on current platform");
}
};
//+++++++++++++++++++++++++Redefine JSB only APIs+++++++++++++++++++++++++++++
//+++++++++++++++++++++++++something about window events begin+++++++++++++++++++++++++++
cc.winEvents = {//TODO register hidden and show callback for window
hiddens : [],
shows : []
};
//+++++++++++++++++++++++++something about window events end+++++++++++++++++++++++++++++
//+++++++++++++++++++++++++something about sys begin+++++++++++++++++++++++++++++
/**
 * Builds the cc.sys singleton (system/environment information) on top of the
 * JSB-provided global `sys`.
 * @param {Object} config The parsed game config (unused here directly).
 * @param {Object} CONFIG_KEY cc.game.CONFIG_KEY (unused here directly).
 */
cc._initSys = function(config, CONFIG_KEY){
    // `sys` is assumed to be predefined by the native bootstrap — TODO confirm.
    var locSys = cc.sys = sys || {};

    // --- Language codes (possible values of cc.sys.language) ---
    locSys.LANGUAGE_ENGLISH    = "en";
    locSys.LANGUAGE_CHINESE    = "zh";
    locSys.LANGUAGE_FRENCH     = "fr";
    locSys.LANGUAGE_ITALIAN    = "it";
    locSys.LANGUAGE_GERMAN     = "de";
    locSys.LANGUAGE_SPANISH    = "es";
    // NOTE(review): historical cocos2d value is "du" (not ISO "nl"); a dead
    // assignment of "nl" that was immediately overwritten has been removed.
    locSys.LANGUAGE_DUTCH      = "du";
    locSys.LANGUAGE_RUSSIAN    = "ru";
    locSys.LANGUAGE_KOREAN     = "ko";
    locSys.LANGUAGE_JAPANESE   = "ja";
    locSys.LANGUAGE_HUNGARIAN  = "hu";
    locSys.LANGUAGE_PORTUGUESE = "pt";
    locSys.LANGUAGE_ARABIC     = "ar";
    locSys.LANGUAGE_NORWEGIAN  = "no";
    locSys.LANGUAGE_POLISH     = "pl";

    // --- Operating system names (possible values of cc.sys.os) ---
    locSys.OS_WINDOWS = "Windows";
    locSys.OS_IOS     = "iOS";
    locSys.OS_OSX     = "OS X";
    locSys.OS_UNIX    = "UNIX";
    locSys.OS_LINUX   = "Linux";
    locSys.OS_ANDROID = "Android";
    locSys.OS_UNKNOWN = "unknown";

    // --- Platform identifiers (possible values of cc.sys.platform) ---
    locSys.WINDOWS    = 0;
    locSys.LINUX      = 1;
    locSys.MACOS      = 2;
    locSys.ANDROID    = 3;
    locSys.IPHONE     = 4;
    locSys.IPAD       = 5;
    locSys.BLACKBERRY = 6;
    locSys.NACL       = 7;
    locSys.EMSCRIPTEN = 8;
    locSys.TIZEN      = 9;
    locSys.WINRT      = 10;
    locSys.WP8        = 11;
    locSys.MOBILE_BROWSER  = 100;
    locSys.DESKTOP_BROWSER = 101;

    // --- Browser type names (unused in JSB; kept for web-API parity) ---
    locSys.BROWSER_TYPE_WECHAT    = "wechat";
    locSys.BROWSER_TYPE_ANDROID   = "androidbrowser";
    locSys.BROWSER_TYPE_IE        = "ie";
    locSys.BROWSER_TYPE_QQ        = "qqbrowser";
    locSys.BROWSER_TYPE_MOBILE_QQ = "mqqbrowser";
    locSys.BROWSER_TYPE_UC        = "ucbrowser";
    locSys.BROWSER_TYPE_360       = "360browser";
    locSys.BROWSER_TYPE_BAIDU_APP = "baiduboxapp";
    locSys.BROWSER_TYPE_BAIDU     = "baidubrowser";
    locSys.BROWSER_TYPE_MAXTHON   = "maxthon";
    locSys.BROWSER_TYPE_OPERA     = "opera";
    locSys.BROWSER_TYPE_MIUI      = "miuibrowser";
    locSys.BROWSER_TYPE_FIREFOX   = "firefox";
    locSys.BROWSER_TYPE_SAFARI    = "safari";
    locSys.BROWSER_TYPE_CHROME    = "chrome";
    locSys.BROWSER_TYPE_UNKNOWN   = "unknown";

    /**
     * Is native ? This is set to be true in jsb auto.
     * @constant
     * @default
     * @type {Boolean}
     */
    locSys.isNative = true;
    /** Get the os of system */
    locSys.os = __getOS();
    /** Get the target platform of system */
    locSys.platform = __getPlatform();

    // Forces a garbage-collection pass in the JS VM.
    locSys.garbageCollect = function() {
        __jsc__.garbageCollect();
    };
    // Dumps the VM's rooted objects (debugging aid).
    locSys.dumpRoot = function() {
        __jsc__.dumpRoot();
    };
    // Restarts the JS VM.
    locSys.restartVM = function() {
        __restartVM();
    };
    // Unloads a single previously-required script.
    locSys.cleanScript = function(jsFile) {
        __cleanScript(jsFile);
    };
    // Logs a human-readable summary of the system information.
    locSys.dump = function(){
        var self = this;
        var str = "";
        str += "isMobile : " + self.isMobile + "\r\n";
        str += "language : " + self.language + "\r\n";
        str += "browserType : " + self.browserType + "\r\n";
        str += "capabilities : " + JSON.stringify(self.capabilities) + "\r\n";
        str += "os : " + self.os + "\r\n";
        str += "platform : " + self.platform + "\r\n";
        cc.log(str);
    };
    locSys.isMobile = (locSys.os == locSys.OS_ANDROID || locSys.os == locSys.OS_IOS) ? true : false;
    // Map the native language enum onto the string codes declared above.
    locSys.language = (function(){
        var language = cc.Application.getInstance().getCurrentLanguage();
        switch(language){
            case 0: return locSys.LANGUAGE_ENGLISH;
            case 1: return locSys.LANGUAGE_CHINESE;
            case 2: return locSys.LANGUAGE_FRENCH;
            case 3: return locSys.LANGUAGE_ITALIAN;
            case 4: return locSys.LANGUAGE_GERMAN;
            case 5: return locSys.LANGUAGE_SPANISH;
            case 6: return locSys.LANGUAGE_DUTCH;
            case 7: return locSys.LANGUAGE_RUSSIAN;
            case 8: return locSys.LANGUAGE_KOREAN;
            case 9: return locSys.LANGUAGE_JAPANESE;
            case 10: return locSys.LANGUAGE_HUNGARIAN;
            case 11: return locSys.LANGUAGE_PORTUGUESE;
            case 12: return locSys.LANGUAGE_ARABIC;
            case 13: return locSys.LANGUAGE_NORWEGIAN;
            case 14: return locSys.LANGUAGE_POLISH;
            default : return locSys.LANGUAGE_ENGLISH;
        }
    })();
    /** The type of browser (always null in JSB). */
    locSys.browserType = null;//null in jsb
    var capabilities = locSys.capabilities = {"opengl":true};
    if( locSys.isMobile ) {
        capabilities["accelerometer"] = true;
        capabilities["touches"] = true;
    } else {
        // desktop
        capabilities["keyboard"] = true;
        capabilities["mouse"] = true;
    }
};
//+++++++++++++++++++++++++something about sys end+++++++++++++++++++++++++++++
//+++++++++++++++++++++++++something about log start++++++++++++++++++++++++++++
/**
 * Init Debug setting: installs cc.log / cc.warn / cc.error / cc.assert
 * according to the requested debug mode (see cc.game.DEBUG_MODE_*).
 * @function
 * @param {Number} mode One of the cc.game.DEBUG_MODE_* constants.
 */
cc._initDebugSetting = function (mode) {
    var ccGame = cc.game;
    // Fall back through any previously installed logger to the global log.
    var bakLog = cc._cocosplayerLog || cc.log || log;
    // Default: silence everything, then re-enable below per the mode.
    cc.log = cc.warn = cc.error = cc.assert = function(){};
    if(mode == ccGame.DEBUG_MODE_NONE){
    }else{
        // Errors are reported in every non-NONE mode.
        cc.error = function(){
            bakLog.call(this, "ERROR : " + cc.formatStr.apply(cc, arguments));
        };
        cc.assert = function(cond, msg) {
            if (!cond && msg) {
                // Format the message with any extra arguments after `cond`.
                var args = [];
                for (var i = 1; i < arguments.length; i++)
                    args.push(arguments[i]);
                bakLog("Assert: " + cc.formatStr.apply(cc, args));
            }
        };
        // Warnings for every mode above ERROR.
        if(mode != ccGame.DEBUG_MODE_ERROR && mode != ccGame.DEBUG_MODE_ERROR_FOR_WEB_PAGE){
            cc.warn = function(){
                bakLog.call(this, "WARN : " + cc.formatStr.apply(cc, arguments));
            };
        }
        // Plain logs only in the INFO modes.
        if(mode == ccGame.DEBUG_MODE_INFO || mode == ccGame.DEBUG_MODE_INFO_FOR_WEB_PAGE){
            cc.log = function(){
                bakLog.call(this, cc.formatStr.apply(cc, arguments));
            };
        }
    }
};
//+++++++++++++++++++++++++something about log end+++++++++++++++++++++++++++++
//+++++++++++++++++++++++++something about CCGame begin+++++++++++++++++++++++++++
/**
 * @type {Object}
 * @name cc.game
 * An object to boot the game.
 */
cc.game = {
    // Debug modes (consumed by cc._initDebugSetting); the *_FOR_WEB_PAGE
    // variants exist for web-API parity.
    DEBUG_MODE_NONE : 0,
    DEBUG_MODE_INFO : 1,
    DEBUG_MODE_WARN : 2,
    DEBUG_MODE_ERROR : 3,
    DEBUG_MODE_INFO_FOR_WEB_PAGE : 4,
    DEBUG_MODE_WARN_FOR_WEB_PAGE : 5,
    DEBUG_MODE_ERROR_FOR_WEB_PAGE : 6,
    // Event names dispatched when the game window hides/shows.
    EVENT_HIDE: "game_on_hide",
    EVENT_SHOW: "game_on_show",
    /**
     * Keys recognized in project.json / this.config.
     * @constant
     * @default
     * @type {Object}
     */
    CONFIG_KEY : {
        engineDir : "engineDir",
        dependencies : "dependencies",
        debugMode : "debugMode",
        showFPS : "showFPS",
        frameRate : "frameRate",
        id : "id",
        renderMode : "renderMode",
        jsList : "jsList",
        classReleaseMode : "classReleaseMode"
    },
    _prepareCalled : false,//whether the prepare function has been called
    _prepared : false,//whether the engine has prepared
    _paused : true,//whether the game is paused
    _intervalId : null,//interval target of main
    /**
     * Config of game, populated by _initConfig from project.json.
     * @type {Object}
     */
    config : null,
    /**
     * Callback when the scripts of engine have been load.
     * @type {Function}
     */
    onStart : null,
    /**
     * Callback when game exits.
     * @type {Function}
     */
    onExit : null,
    /**
     * Callback before game resumes.
     * @type {Function}
     */
    onBeforeResume : null,
    /**
     * Callback after game resumes.
     * @type {Function}
     */
    onAfterResume : null,
    /**
     * Callback before game pauses.
     * @type {Function}
     */
    onBeforePause : null,
    /**
     * Callback after game pauses.
     * @type {Function}
     */
    onAfterPause : null,
    /**
     * Set frameRate of game: updates the config and the director interval.
     * @param frameRate
     */
    setFrameRate : function(frameRate){
        var self = this, config = self.config, CONFIG_KEY = self.CONFIG_KEY;
        config[CONFIG_KEY.frameRate] = frameRate;
        cc.director.setAnimationInterval(1.0/frameRate);
    },
    /**
     * Restart game by restarting the JS VM.
     */
    restart: function () {
        __restartVM();
    },
    /**
     * Run game: prepares the engine on first call, then invokes onStart.
     */
    run : function(){
        var self = this;
        if(!self._prepareCalled){
            self.prepare(function(){
                self.onStart();
            });
        }else{
            self.onStart();
        }
    },
    /**
     * Init config from project.json, falling back to defaults when the file
     * is missing or malformed; also applies debug/FPS/frame-rate settings.
     * @returns {*}
     * @private
     */
    _initConfig : function(){
        // Bootstrap logging at INFO so config errors below are visible;
        // re-initialized with the configured debugMode afterwards.
        cc._initDebugSetting(1);
        var self = this, CONFIG_KEY = self.CONFIG_KEY;
        var _init = function(cfg){
            // Fill in defaults for every key the engine needs.
            cfg[CONFIG_KEY.engineDir] = cfg[CONFIG_KEY.engineDir] || "frameworks/cocos2d-html5";
            cfg[CONFIG_KEY.debugMode] = cfg[CONFIG_KEY.debugMode] || 0;
            cfg[CONFIG_KEY.frameRate] = cfg[CONFIG_KEY.frameRate] || 60;
            cfg[CONFIG_KEY.renderMode] = cfg[CONFIG_KEY.renderMode] || 0;
            cfg[CONFIG_KEY.showFPS] = cfg[CONFIG_KEY.showFPS] === false ? false : true;
            return cfg;
        };
        try{
            var txt = jsb.fileUtils.getStringFromFile("project.json");
            var data = JSON.parse(txt);
            this.config = _init(data || {});
        }catch(e){
            cc.log("Failed to read or parse project.json");
            this.config = _init({});
        }
        cc._initDebugSetting(this.config[CONFIG_KEY.debugMode]);
        cc.director.setDisplayStats(this.config[CONFIG_KEY.showFPS]);
        cc.director.setAnimationInterval(1.0/this.config[CONFIG_KEY.frameRate]);
        cc._initSys(this.config, CONFIG_KEY);
    },
    //cache for js and module that has added into jsList to be loaded.
    _jsAddedCache : {},
    /**
     * Recursively expands a module name into the flat list of its .js files,
     * skipping anything already recorded in _jsAddedCache.
     * @private
     */
    _getJsListOfModule : function(moduleMap, moduleName, dir){
        var jsAddedCache = this._jsAddedCache;
        if(jsAddedCache[moduleName]) return null;
        dir = dir || "";
        var jsList = [];
        var tempList = moduleMap[moduleName];
        if(!tempList) throw "can not find module [" + moduleName + "]";
        var ccPath = cc.path;
        for(var i = 0, li = tempList.length; i < li; i++){
            var item = tempList[i];
            if(jsAddedCache[item]) continue;
            var extname = ccPath.extname(item);
            if(!extname) {
                // No extension: treat the entry as a nested module name.
                var arr = this._getJsListOfModule(moduleMap, item, dir);
                if(arr) jsList = jsList.concat(arr);
            }else if(extname.toLowerCase() == ".js") jsList.push(ccPath.join(dir, item));
            jsAddedCache[item] = true;
        }
        return jsList;
    },
    /**
     * Prepare game: loads the JSB glue scripts and the project's jsList.
     * @param cb Called once all scripts are loaded.
     */
    prepare : function(cb){
        var self = this, config = self.config, CONFIG_KEY = self.CONFIG_KEY, loader = cc.loader;
        require("script/jsb.js");
        self._prepareCalled = true;
        loader.loadJsWithImg("", config[CONFIG_KEY.jsList] || [], function(err){
            if(err) throw err;
            self._prepared = true;
            if(cb) cb();
        });
    }
};
cc.game._initConfig();
//+++++++++++++++++++++++++something about CCGame end+++++++++++++++++++++++++++++
//+++++++++++++++++++++++++other initializations+++++++++++++++++++++++++++++
// JS to Native bridges
if(window.JavascriptJavaBridge && cc.sys.os == cc.sys.OS_ANDROID){
jsb.reflection = new JavascriptJavaBridge();
cc.sys.capabilities["keyboard"] = true;
}
else if(window.JavaScriptObjCBridge && (cc.sys.os == cc.sys.OS_IOS || cc.sys.os == cc.sys.OS_OSX)){
jsb.reflection = new JavaScriptObjCBridge();
}
jsb.urlRegExp = new RegExp(
"^" +
// protocol identifier
"(?:(?:https?|ftp)://)" +
// user:pass authentication
"(?:\\S+(?::\\S*)?@)?" +
"(?:" +
// IP address dotted notation octets
// excludes loopback network 0.0.0.0
// excludes reserved space >= 224.0.0.0
// excludes network & broacast addresses
// (first & last IP address of each class)
"(?:[1-9]\\d?|1\\d\\d|2[01]\\d|22[0-3])" +
"(?:\\.(?:1?\\d{1,2}|2[0-4]\\d|25[0-5])){2}" +
"(?:\\.(?:[1-9]\\d?|1\\d\\d|2[0-4]\\d|25[0-4]))" +
"|" +
// host name
"(?:(?:[a-z\\u00a1-\\uffff0-9]-*)*[a-z\\u00a1-\\uffff0-9]+)" +
// domain name
"(?:\\.(?:[a-z\\u00a1-\\uffff0-9]-*)*[a-z\\u00a1-\\uffff0-9]+)*" +
// TLD identifier
"(?:\\.(?:[a-z\\u00a1-\\uffff]{2,}))" +
")" +
// port number
"(?::\\d{2,5})?" +
// resource path
"(?:/\\S*)?" +
"$", "i"
);
//+++++++++++++++++++++++++other initializations end+++++++++++++++++++++++++++++<|fim▁end|>
|
* Iterate over an object or an array, executing a function for each matched element.
* @param {object|array} obj
|
<|file_name|>SetLastBlock.ts<|end_file_name|><|fim▁begin|>import { RpcRequest } from '../RpcRequest'
import { RpcResponse } from '../RpcResponse'
/**
* JSON-RPC request for the *setlastblock* command.
*/
export interface SetLastBlockRequest extends RpcRequest {
readonly method: 'setlastblock'
readonly params?: any[]
}
/**
* JSON-RPC response for the *setlastblock* command.
*/
export interface SetLastBlockResponse extends RpcResponse {
readonly result: SetLastBlockResult | null
}
/**
* Result of the *setlastblock* command.
*/
export type SetLastBlockResult = any
export function SetLastBlock(...params: any[]): SetLastBlockRequest {
return params.length === 0 ? { method: 'setlastblock' } : { method: 'setlastblock', params }<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>xilinx_zcu104.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
#
# This file is part of LiteX-Boards.
#
# Copyright (c) 2020 Antmicro <www.antmicro.com>
# Copyright (c) 2019 David Shah <[email protected]>
# SPDX-License-Identifier: BSD-2-Clause
import os
import argparse
from migen import *
from migen.genlib.resetsync import AsyncResetSynchronizer
from litex_boards.platforms import zcu104
from litex.soc.cores.clock import *
from litex.soc.integration.soc_core import *
from litex.soc.integration.builder import *
from litex.soc.cores.led import LedChaser<|fim▁hole|>from litedram.phy import usddrphy
# CRG ----------------------------------------------------------------------------------------------
class _CRG(Module):
    """Clock/reset generator for the ZCU104.

    Derives the clock domains from the board's 125 MHz input:
    sys / sys4x (sys4x = 4x sys, kept phase-aligned via BUFGCE dividers)
    and a 500 MHz idelay reference for the IDELAYCTRL blocks.
    """
    def __init__(self, platform, sys_clk_freq):
        self.rst = Signal()
        self.clock_domains.cd_sys = ClockDomain()
        self.clock_domains.cd_sys4x = ClockDomain(reset_less=True)
        self.clock_domains.cd_pll4x = ClockDomain(reset_less=True)
        self.clock_domains.cd_idelay = ClockDomain()

        # # #

        # The MMCM generates only the 4x clock; the 1x clock is divided from
        # it below so both stay phase-aligned.
        self.submodules.pll = pll = USMMCM(speedgrade=-2)
        self.comb += pll.reset.eq(self.rst)
        pll.register_clkin(platform.request("clk125"), 125e6)
        pll.create_clkout(self.cd_pll4x, sys_clk_freq*4, buf=None, with_reset=False)
        pll.create_clkout(self.cd_idelay, 500e6)
        platform.add_false_path_constraints(self.cd_sys.clk, pll.clkin) # Ignore sys_clk to pll.clkin path created by SoC's rst.

        self.specials += [
            # sys = pll4x / 4; sys4x = pll4x buffered directly.
            Instance("BUFGCE_DIV", name="main_bufgce_div",
                p_BUFGCE_DIVIDE=4,
                i_CE=1, i_I=self.cd_pll4x.clk, o_O=self.cd_sys.clk),
            Instance("BUFGCE", name="main_bufgce",
                i_CE=1, i_I=self.cd_pll4x.clk, o_O=self.cd_sys4x.clk),
        ]

        self.submodules.idelayctrl = USIDELAYCTRL(cd_ref=self.cd_idelay, cd_sys=self.cd_sys)
# BaseSoC ------------------------------------------------------------------------------------------
class BaseSoC(SoCCore):
    """LiteX base SoC for the ZCU104 board.

    Instantiates the CRG, the DDR4 SDRAM controller (unless an integrated
    main RAM is configured) and an optional LED chaser.
    """
    def __init__(self, sys_clk_freq=int(125e6), with_led_chaser=True, **kwargs):
        platform = zcu104.Platform()

        # SoCCore ----------------------------------------------------------------------------------
        SoCCore.__init__(self, platform, sys_clk_freq,
            ident = "LiteX SoC on ZCU104",
            **kwargs)

        # CRG --------------------------------------------------------------------------------------
        self.submodules.crg = _CRG(platform, sys_clk_freq)

        # DDR4 SDRAM -------------------------------------------------------------------------------
        # Skipped when an integrated (BRAM) main RAM is requested.
        if not self.integrated_main_ram_size:
            self.submodules.ddrphy = usddrphy.USPDDRPHY(platform.request("ddram"),
                memtype          = "DDR4",
                sys_clk_freq     = sys_clk_freq,
                iodelay_clk_freq = 500e6)
            self.add_sdram("sdram",
                phy           = self.ddrphy,
                module        = MTA4ATF51264HZ(sys_clk_freq, "1:4"),
                size          = 0x40000000,
                l2_cache_size = kwargs.get("l2_size", 8192)
            )

        # Leds -------------------------------------------------------------------------------------
        if with_led_chaser:
            self.submodules.leds = LedChaser(
                pads         = platform.request_all("user_led"),
                sys_clk_freq = sys_clk_freq)
# Build --------------------------------------------------------------------------------------------
def main():
    """Command-line entry point: parse arguments, elaborate the SoC and
    optionally build the bitstream and/or load it onto the board."""
    parser = argparse.ArgumentParser(description="LiteX SoC on ZCU104")
    parser.add_argument("--build",        action="store_true", help="Build bitstream.")
    parser.add_argument("--load",         action="store_true", help="Load bitstream.")
    parser.add_argument("--sys-clk-freq", default=125e6,       help="System clock frequency.")
    builder_args(parser)
    soc_core_args(parser)
    args = parser.parse_args()

    soc = BaseSoC(
        sys_clk_freq = int(float(args.sys_clk_freq)),
        **soc_core_argdict(args)
    )
    builder = Builder(soc, **builder_argdict(args))
    # Elaborate always; run synthesis/place-and-route only with --build.
    builder.build(run=args.build)

    if args.load:
        prog = soc.platform.create_programmer()
        prog.load_bitstream(os.path.join(builder.gateware_dir, soc.build_name + ".bit"))
# Standard script guard (stray extraction artifact removed from main() line).
if __name__ == "__main__":
    main()
|
from litex.soc.cores.bitbang import I2CMaster
from litedram.modules import MTA4ATF51264HZ
|
<|file_name|>websockets.py<|end_file_name|><|fim▁begin|>import json
from django.core import serializers
from django.core.serializers.json import DjangoJSONEncoder
from .base import Binding
from ..generic.websockets import WebsocketDemultiplexer
from ..sessions import enforce_ordering
class WebsocketBinding(Binding):
    """
    Websocket-specific outgoing binding subclass that uses JSON encoding
    and the built-in JSON/WebSocket multiplexer.

    To implement outbound, implement:
     - group_names, which returns a list of group names to send to

    To implement inbound, implement:
     - has_permission, which says if the user can do the action on an instance

    Optionally also implement:
     - serialize_data, which returns JSON-safe data from a model instance
     - create, which takes incoming data and makes a model instance
     - update, which takes incoming data and a model instance and applies one to the other
    """

    # Mark as abstract
    model = None

    # Stream multiplexing name
    stream = None

    # Decorators
    strict_ordering = False
    slight_ordering = False

    # Outbound
    @classmethod
    def encode(cls, stream, payload):
        return WebsocketDemultiplexer.encode(stream, payload)

    def serialize(self, instance, action):
        # Wrap the instance in the standard binding payload envelope.
        payload = {
            "action": action,
            "pk": instance.pk,
            "data": self.serialize_data(instance),
            "model": self.model_label,
        }
        return payload

    def serialize_data(self, instance):
        """
        Serializes model data into JSON-compatible types.
        """
        if self.fields == ['__all__']:
            fields = None
        else:
            fields = self.fields
        data = serializers.serialize('json', [instance], fields=fields)
        return json.loads(data)[0]['fields']

    # Inbound
    @classmethod
    def get_handler(cls):
        """
        Adds ordering decorators to trigger_inbound, if configured.
        """
        # Get super-handler
        handler = super(WebsocketBinding, cls).get_handler()
        # Ordering decorators
        if cls.strict_ordering:
            return enforce_ordering(handler, slight=False)
        elif cls.slight_ordering:
            return enforce_ordering(handler, slight=True)
        else:
            return handler

    def deserialize(self, message):
        """
        You must hook this up behind a Deserializer, so we expect the JSON
        already dealt with.
        """
        action = message['action']
        pk = message.get('pk', None)
        data = message.get('data', None)
        return action, pk, data

    # NOTE: a stray extraction artifact that preceded this method (breaking
    # the module) has been removed; the code is otherwise unchanged.
    def _hydrate(self, pk, data):
        """
        Given a raw "data" section of an incoming message, returns a
        DeserializedObject.
        """
        s_data = [
            {
                "pk": pk,
                "model": self.model_label,
                "fields": data,
            }
        ]
        # TODO: Avoid the JSON roundtrip by using encoder directly?
        return list(serializers.deserialize("json", json.dumps(s_data)))[0]

    def create(self, data):
        self._hydrate(None, data).save()

    def update(self, pk, data):
        instance = self.model.objects.get(pk=pk)
        hydrated = self._hydrate(pk, data)
        # Only copy over fields this binding is configured to expose.
        for name in data.keys():
            if name in self.fields or self.fields == ['__all__']:
                setattr(instance, name, getattr(hydrated.object, name))
        instance.save()
class WebsocketBindingWithMembers(WebsocketBinding):
"""
Outgoing binding binding subclass based on WebsocketBinding.
Additionally enables sending of member variables, properties and methods.
Member methods can only have self as a required argument.
Just add the name of the member to the send_members-list.
Example:
class MyModel(models.Model):
my_field = models.IntegerField(default=0)
my_var = 3
@property
def my_property(self):
return self.my_var + self.my_field
def my_function(self):
return self.my_var - self.my_vield
class MyBinding(BindingWithMembersMixin, WebsocketBinding):
model = MyModel
stream = 'mystream'
send_members = ['my_var', 'my_property', 'my_function']
"""
model = None
send_members = []
encoder = DjangoJSONEncoder()
def serialize_data(self, instance):
data = super(WebsocketBindingWithMembers, self).serialize_data(instance)
member_data = {}
for m in self.send_members:
member = instance
for s in m.split('.'):
member = getattr(member, s)
if callable(member):
member_data[m.replace('.', '__')] = member()
else:
member_data[m.replace('.', '__')] = member
member_data = json.loads(self.encoder.encode(member_data))
# the update never overwrites any value from data,
# because an object can't have two attributes with the same name
data.update(member_data)
return data<|fim▁end|>
| |
<|file_name|>freq_scale.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
'''
@file freq_scale.py
@brief Sandbox for various frequency scale generators
@author gm
@copyright gm 2014
This file is part of Chartreuse
Chartreuse is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Chartreuse is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of<|fim▁hole|>
You should have received a copy of the GNU General Public License
along with Chartreuse. If not, see <http://www.gnu.org/licenses/>.
'''
import numpy
import pylab
class LogFreqScale(object):
'''
Log frequency scale
'''
def __init__(self, length, dft_length, sampling_freq):
self.length = length
self.dft_length = dft_length
self.sampling_freq = sampling_freq
self._Synthesize()
def _Synthesize(self):
'''
Actual processing function for generating the scale
'''
kLowBound = 2.0 * self.sampling_freq / self.dft_length
kHighBound = self.sampling_freq * 0.5
tmp = numpy.linspace(kLowBound, kHighBound, self.length)
tmp[0] = self.sampling_freq / (self.dft_length * (3.0 / 4.0))
self.data = numpy.log2(tmp * 0.001)
if __name__ == "__main__":
import utilities
sampling_freq = 48000.0
dft_bins_count = 2048
low_edge = 62.5
high_edge = 1500.0
low_edge_idx = numpy.ceil(low_edge * dft_bins_count / sampling_freq)
high_edge_idx = dft_bins_count / 2 + 1
length = high_edge_idx - low_edge_idx + 1
generator = LogFreqScale(length, dft_bins_count, sampling_freq)
out_data = generator.data
print(utilities.PrintMetadata(utilities.GetMetadata(out_data)))
pylab.plot(out_data, label = "out")
pylab.legend()
pylab.show()<|fim▁end|>
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
|
<|file_name|>enum_messages.rs<|end_file_name|><|fim▁begin|>use proc_macro2::TokenStream;
use quote::quote;
use syn::{Data, DeriveInput};
use crate::helpers::{non_enum_error, HasStrumVariantProperties, HasTypeProperties};
pub fn enum_message_inner(ast: &DeriveInput) -> syn::Result<TokenStream> {
let name = &ast.ident;
let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();
let variants = match &ast.data {
Data::Enum(v) => &v.variants,
_ => return Err(non_enum_error()),
};
let type_properties = ast.get_type_properties()?;
let strum_module_path = type_properties.crate_module_path();
let mut arms = Vec::new();
let mut detailed_arms = Vec::new();
let mut serializations = Vec::new();
for variant in variants {
let variant_properties = variant.get_variant_properties()?;
let messages = variant_properties.message.as_ref();
let detailed_messages = variant_properties.detailed_message.as_ref();
let ident = &variant.ident;
use syn::Fields::*;
let params = match variant.fields {
Unit => quote! {},
Unnamed(..) => quote! { (..) },
Named(..) => quote! { {..} },
};
// You can't disable getting the serializations.
{
let serialization_variants =
variant_properties.get_serializations(type_properties.case_style);
let count = serialization_variants.len();
serializations.push(quote! {
&#name::#ident #params => {
static ARR: [&'static str; #count] = [#(#serialization_variants),*];
&ARR
}
});
}
// But you can disable the messages.
if variant_properties.disabled.is_some() {
continue;
}
if let Some(msg) = messages {
let params = params.clone();
// Push the simple message.
let tokens = quote! { &#name::#ident #params => ::core::option::Option::Some(#msg) };
arms.push(tokens.clone());
if detailed_messages.is_none() {
detailed_arms.push(tokens);
}
}
if let Some(msg) = detailed_messages {
let params = params.clone();
// Push the simple message.
detailed_arms
.push(quote! { &#name::#ident #params => ::core::option::Option::Some(#msg) });
}
}
if arms.len() < variants.len() {
arms.push(quote! { _ => ::core::option::Option::None });
}
if detailed_arms.len() < variants.len() {
detailed_arms.push(quote! { _ => ::core::option::Option::None });
}
Ok(quote! {
impl #impl_generics #strum_module_path::EnumMessage for #name #ty_generics #where_clause {
fn get_message(&self) -> ::core::option::Option<&'static str> {
match self {
#(#arms),*
}
}
fn get_detailed_message(&self) -> ::core::option::Option<&'static str> {
match self {
#(#detailed_arms),*
}
}
fn get_serializations(&self) -> &'static [&'static str] {
match self {<|fim▁hole|> }
}
}
})
}<|fim▁end|>
|
#(#serializations),*
|
<|file_name|>statstring.py<|end_file_name|><|fim▁begin|>#The Diablo II statstring parsing within this module makes use of findings by
#iago, DarkMinion, and RealityRipple, among others
from pbuffer import debug_output
long_name = {'SSHR': 'Starcraft Shareware',
'JSTR': 'Starcraft Japanese',
'STAR': 'Starcraft',
'SEXP': 'Starcraft Broodwar',
'DSHR': 'Diablo Shareware',
'DRTL': 'Diablo I',
'D2DV': 'Diablo II',
'D2XP': 'Diablo II: Lord of Destruction',
'W2BN': 'Warcraft II: Battle.net Edition',
'WAR3': 'Warcraft III',
'W3XP': 'Warcraft III: The Frozen Throne'}
titles = ['Sir', 'Lord', 'Baron', 'Dame', 'Lady', 'Baroness', #MMMFFF normal
'Count', 'Duke', 'King', 'Countess', 'Duchess', 'Queen', #MMMFFF hardcore
'Slayer', 'Champion', 'Patriarch', 'Destroyer', 'Conquerer',
'Guardian', 'Matriarch']
classes_d1 = ['Warrior', 'Rogue', 'Sorcerer', 'Unknown Class']
classes = ['Amazon', 'Sorceress', 'Necromancer', 'Paladin',
'Barbarian', 'Druid', 'Assassin', 'Unknown Class']
helms = {4: 'Cap', 57: 'Cap', 5: 'Skullcap', 58: 'Skullcap', 6: 'Helm',
59: 'Helm', 7: 'Fullhelm', 60: 'Fullhelm', 8: 'Greathelm',
61: 'Greathelm', 10: 'Mask', 63: 'Mask', 40: 'Bonehelm',
82: 'Bonehelm', 89: 'Fanged helm', 90: 'Warhelm', 91: 'Winged Helm',
255: 'No helm (Circlet?)'}
weapons = {4: 'Hatchet/Waraze', 5: 'Axe', 6: 'Large Axe 6', 7: 'Large Axe 7',
8: 'Greate Axe', 9: 'Wand 9', 10: 'Wand 10', 11: 'Wand 11',
12: 'Spiked Club', 13: 'Scepter', 14: 'Hammer', 15: 'Flail',
16: 'Maul', 17: 'Short Sword', 18: 'Scimitar/Saber', 19: 'Warsword',
20: 'Crystal Sword', 21: 'Sword 21', 22: 'Sword 22', 23: 'Sword 23',
24: 'Sword 24', 25: 'Dagger', 26: 'Dirk/Kris', 27: 'Unk 27',
28: 'Unk 28', 29: 'Unk 29', 30: 'Spear', 31: 'Trident', 32: 'Spetum',
33: 'Pike', 34: 'Bardiche/Halberd', 35: 'Sickle', 36: 'Poleaxe',
37: 'Staff 37', 38: 'Staff 38', 39: 'Staff 39', 40: 'Staff 40',
49: 'Unk 49', 50: 'Unk 50', 53: 'Orb', 56: 'Unk 56', 121: 'Unk 121',
122: 'Unk 122', 123: 'Unk 123', 124: 'Unk 124', 125: 'Poison Potion',
126: 'Fulmigating Potion', 127: 'Potion 3', 128: 'Potion 4',
129: 'Potion 5'}
shields = {79: 'Small Shield', 80: 'Buckler', 81: 'Kite Shield',
82: 'Tower Shield', 84: 'Bone Shield', 85: 'Spiked shield',
92: 'Rondache', 94: 'Crown Shield'}
chan_flags = {0x02: 'moderated',
0x04: 'restricted',
0x08: 'silent',
0x10: 'system',
0x20: 'product-specific',
0x1000: 'globally accessible'}
def kill_null(string):
res = string.find('\0')
if res == -1:
return string
else:
return string[:string.find('\0')]
def safe_int(string):
return int(kill_null(string).zfill(1))
def parse_chan_flags(flags):
if (flags & 0x01) == 0x01:
build = 'public'
else:
build = 'private'
for k, v in chan_flags.iteritems():
if (flags & k) == k:
build += ', ' + v
return build
def str_reverse(string):
rev = reversed(string)
build = ''
for char in rev:
build += char
return build
def statstring(text):
product = str_reverse(text[:4])
results = {'product': product,
'statstring': text}
try:
results['display'] = long_name[product]
except KeyError:
results['display'] = 'Unknown (%s)' % product
return results
if product in ['STAR', 'SEXP', 'JSTR', 'W2BN', 'SSHR']:
results.update(stats_star(product, text[5:]))
if results['wins'] != 0:
results['display'] += ' (' +\
str(results['wins']) + (results['wins'] == 1 and\
' win' or ' wins') +\
(results['spawned'] and ', spawned)' or ')')
elif product in ['DRTL', 'DSHR']:
results.update(stats_diablo(product, text[5:]))
if results['class_num'] != -1:
results['display'] += ' (' +\
results['class'] + \
', level ' + str(results['level']) + ')'
elif product in ['D2DV', 'D2XP']:
results.update(stats_diablo2(product, text[4:]))
if results['open']:
results['display'] += ' (Open character)'
else:
results['display'] += ' (Level ' + str(results['level']) + ' ' +\
results['class'] + ', ' +\
results['title'] +\
results['char_name'] + ' of ' +\
results['realm'] + ' using a ' +\
results['helm'] + ', ' +\
results['weapon'] + ', and ' +\
results['shield'] + ')'
elif product in ['WAR3', 'W3XP']:
results.update(stats_war3(product, text[5:]))
if results['clan'] == '':
if results['level'] != 0:
results['display'] += ' (Level %s)' % str(results['level'])
else:
if results['level'] == 0:
results['display'] += ' (Clan %s)' % results['clan']
else:
results['display'] += ' (Level %s of Clan %s)' % (str(results['level']),
results['clan'])
results['display'] += '.'
return results
def stats_star(product, text):
field = text.split(' ')
try:
return {'ladder_rating': safe_int(field[0]),
'ladder_rank': safe_int(field[1]),
'wins': safe_int(field[2]),
'spawned': bool(safe_int(field[3])),
#'unknown': field[4],
'high_ladder_rating': safe_int(field[5]),
#'unknown': field[6],
#'unknown': field[7],
'icon': str_reverse(field[8])}
except:
return {'ladder_rating': 0,
'ladder_rank': 0,
'wins': 0,
'spawned': False,
#'unknown': field[4],
'high_ladder_rating': 0,
#'unknown': field[6],
#'unknown': field[7],
'icon': ''}
def stats_diablo(product, text):
field = text.split(' ')
try:
char_class = classes_d1[safe_int(field[1])]
return {'level': safe_int(field[0]),
'class': char_class,
'class_num': safe_int(field[1]),<|fim▁hole|> 'vitality': safe_int(field[6]),
'gold': safe_int(field[7]),
#'unknown': field[8],
}
except: #You can't really trust these
return {'level': 0,
'class': 'Open character',
'class_num': -1,
'dots': 0,
'strength': 0,
'magic': 0,
'dexterity': 0,
'vitality': 0,
'gold': 0}
def stats_war3(product, text):
if text == '':
return {'icon': '',
'level': 0,
'clan': ''}
field = text.split(' ')
res = {'icon': str_reverse(field[0]),
'level': safe_int(field[1]),
'clan': ''}
if len(field) == 3:
clan = str_reverse(field[2].strip('\xFF\0 '))
if len(clan) in range(1, 5):
res['clan'] = clan
return res
def stats_diablo2(product, text):
if text == '':
return {'open': True}
field = text.split(',', 2)
if len(field) < 3:
return {'open': True}
realm = field[0]
char_name = field[1]
text = field[2][2:]
if len(text) < 29:
return {'open': True}
flags = ord(text[24])
hardcore = bool(flags & 0x04)
dead = bool(flags & 0x08)
expansion = bool(flags & 0x20)
char_class = ord(text[11]) - 1
char_class = char_class > 6 and 7 or char_class
gender = (char_class in [0, 1, 6]) and 'Female' or 'Male'
class_name = classes[char_class]
acts = (ord(text[25]) & 0x3E) >> 1
level = acts / 5 - 1
if level == -1:
title = ''
else:
title_idx = level
if expansion:
title_idx += 12
if hardcore:
title_idx += 3
else:
if gender == 'Female' and level == 2:
title_idx += 4
else:
if gender == 'Female':
title_idx += 3
if hardcore:
title_idx += 6
title = titles[title_idx] + ' '
helm_idx = ord(text[0])
try:
helm = helms[helm_idx]
except KeyError:
helm = 'No helm (' + str(helm_idx) + ')'
weap_idx = ord(text[5])
try:
weapon = weapons[weap_idx]
except KeyError:
weapon = 'No weapon (' + str(weap_idx) + ')'
shield_idx = ord(text[7])
try:
shield = shields[shield_idx]
except KeyError:
shield = 'No shield (' + str(shield_idx) + ')'
if class_name == 'Unknown Class':
icon = product
else:
if product == 'D2XP':
icon = 'DX' + class_name[:2].upper()
else:
icon = 'D2' + class_name[:2].upper()
return {'level': ord(text[23]),
'ladder': not ord(text[28]) == 255,
'class': class_name,
'title': title,
'hardcore': hardcore,
'dead': dead,
'expansion': expansion,
'realm': realm,
'char_name': char_name,
'open': False,
'gender': gender,
'acts': acts,
'helm': helm,
'weapon': weapon,
'shield': shield,
'icon': icon
}<|fim▁end|>
|
'dots': safe_int(field[2]),
'strength': safe_int(field[3]),
'magic': safe_int(field[4]),
'dexterity': safe_int(field[5]),
|
<|file_name|>v1_routing.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia<|fim▁hole|># www.navitia.io
from jormungandr.interfaces.v1 import Uri
from jormungandr.interfaces.v1 import Coverage
from jormungandr.interfaces.v1 import Journeys
from jormungandr.interfaces.v1 import Schedules
from jormungandr.interfaces.v1 import Places
from jormungandr.interfaces.v1 import Ptobjects
from jormungandr.interfaces.v1 import Coord
from jormungandr.interfaces.v1 import Disruptions
from jormungandr.interfaces.v1 import Calendars
from jormungandr.interfaces.v1 import converters_collection_type
from jormungandr.interfaces.v1 import Status
from werkzeug.routing import BaseConverter, FloatConverter, PathConverter
from jormungandr.modules_loader import AModule
from resources import Index
class RegionConverter(BaseConverter):
""" The region you want to query"""
def __init__(self, *args, **kwargs):
BaseConverter.__init__(self, *args, **kwargs)
self.type_ = "string"
self.regex = '[^(/;)]+'
class LonConverter(FloatConverter):
""" The longitude of where the coord you want to query"""
def __init__(self, *args, **kwargs):
FloatConverter.__init__(self, *args, **kwargs)
self.type_ = "float"
self.regex = '-?\\d+(\\.\\d+)?'
class LatConverter(FloatConverter):
""" The latitude of where the coord you want to query"""
def __init__(self, *args, **kwargs):
FloatConverter.__init__(self, *args, **kwargs)
self.type_ = "float"
self.regex = '-?\\d+(\\.\\d+)?'
class UriConverter(PathConverter):
"""First part of the uri"""
def __init__(self, *args, **kwargs):
PathConverter.__init__(self, *args, **kwargs)
self.type_ = "string"
class IdConverter(BaseConverter):
"""Id of the object you want to query"""
def __init__(self, *args, **kwargs):
BaseConverter.__init__(self, *args, **kwargs)
self.type_ = "string"
class V1Routing(AModule):
def __init__(self, api, name):
super(V1Routing, self).__init__(api, name,
description='Current version of navitia API',
status='current',
index_endpoint='index')
def setup(self):
self.api.app.url_map.converters['region'] = RegionConverter
self.api.app.url_map.converters['lon'] = LonConverter
self.api.app.url_map.converters['lat'] = LatConverter
self.api.app.url_map.converters['uri'] = UriConverter
self.api.app.url_map.converters['id'] = IdConverter
self.api.app.url_map.strict_slashes = False
self.module_resources_manager.register_resource(Index.Index())
self.add_resource(Index.Index,
'/',
'',
endpoint='index')
self.module_resources_manager.register_resource(Index.TechnicalStatus())
self.add_resource(Index.TechnicalStatus,
'/status',
endpoint='technical_status')
coverage = '/coverage/'
region = coverage + '<region:region>/'
coord = coverage + '<lon:lon>;<lat:lat>/'
self.add_resource(Coverage.Coverage,
coverage,
region,
coord,
endpoint='coverage')
self.add_resource(Coord.Coord,
'/coord/<lon:lon>;<lat:lat>',
'/coords/<lon:lon>;<lat:lat>',
endpoint='coord')
collecs = converters_collection_type.collections_to_resource_type.keys()
for collection in collecs:
self.add_resource(getattr(Uri, collection)(True),
region + collection,
coord + collection,
region + '<uri:uri>/' + collection,
coord + '<uri:uri>/' + collection,
endpoint=collection + '.collection')
self.add_resource(getattr(Uri, collection)(False),
region + collection + '/<id:id>',
coord + collection + '/<id:id>',
region + '<uri:uri>/' + collection + '/<id:id>',
coord + '<uri:uri>/' + collection + '/<id:id>',
endpoint=collection + '.id')
collecs = ["routes", "lines", "line_groups", "networks", "stop_areas", "stop_points",
"vehicle_journeys"]
for collection in collecs:
self.add_resource(getattr(Uri, collection)(True),
'/' + collection,
endpoint=collection + '.external_codes')
self.add_resource(Places.Places,
region + 'places',
coord + 'places',
'/places',
endpoint='places')
self.add_resource(Ptobjects.Ptobjects,
region + 'pt_objects',
coord + 'pt_objects',
endpoint='pt_objects')
self.add_resource(Places.PlaceUri,
region + 'places/<id:id>',
coord + 'places/<id:id>',
endpoint='place_uri')
self.add_resource(Places.PlacesNearby,
region + 'places_nearby',
coord + 'places_nearby',
region + '<uri:uri>/places_nearby',
coord + '<uri:uri>/places_nearby',
endpoint='places_nearby')
self.add_resource(Journeys.Journeys,
region + '<uri:uri>/journeys',
coord + '<uri:uri>/journeys',
region + 'journeys',
coord + 'journeys',
'/journeys',
endpoint='journeys')
self.add_resource(Schedules.RouteSchedules,
region + '<uri:uri>/route_schedules',
coord + '<uri:uri>/route_schedules',
'/route_schedules',
endpoint='route_schedules')
self.add_resource(Schedules.NextArrivals,
region + '<uri:uri>/arrivals',
coord + '<uri:uri>/arrivals',
region + 'arrivals',
coord + 'arrivals',
endpoint='arrivals')
self.add_resource(Schedules.NextDepartures,
region + '<uri:uri>/departures',
coord + '<uri:uri>/departures',
region + 'departures',
coord + 'departures',
endpoint='departures')
self.add_resource(Schedules.StopSchedules,
region + '<uri:uri>/stop_schedules',
coord + '<uri:uri>/stop_schedules',
'/stop_schedules',
endpoint='stop_schedules')
self.add_resource(Disruptions.TrafficReport,
region + 'traffic_reports',
region + '<uri:uri>/traffic_reports',
endpoint='traffic_reports')
self.add_resource(Status.Status,
region + 'status',
endpoint='status')
self.add_resource(Calendars.Calendars,
region + 'calendars',
region + '<uri:uri>/calendars',
region + "calendars/<id:id>",
endpoint="calendars")<|fim▁end|>
| |
<|file_name|>d07.py<|end_file_name|><|fim▁begin|>import re
with open('d07.txt') as f:
raw_input = f.readlines()
test_input = """abba[mnop]qrst
abcd[bddb]xyyx
aaaa[qwer]tyui
ioxxoj[asdfgh]zxcvbn
asdfasdf[qwerqwer]asdffdsa[12341234]zcxvzcv""".splitlines()
def group_finder(s):<|fim▁hole|> if tail:
yield from group_finder(tail)
re_abba = re.compile(r'.*([a-z])(?!\1)([a-z])\2\1')
total = 0
for line in raw_input:
line_groups = list(group_finder(line.replace(']', '[')))
ips = line_groups[::2]
hns = line_groups[1::2]
if any(re_abba.match(ip) for ip in ips) and not any(re_abba.match(hn) for hn in hns):
total += 1
print(total)
# part 2!
test_input = """aba[bab]xyz
xyx[xyx]xyx
aaa[kek]eke
zazbz[bzb]cdb""".splitlines()
import regex
re_aba = regex.compile(r'([a-z])(?!\1)([a-z])\1')
total = 0
for line in raw_input:
line_groups = list(group_finder(line.replace(']', '[')))
ips = line_groups[::2]
hns = line_groups[1::2]
match = False
for ip in ips:
for a, b in re_aba.findall(ip, overlapped=True):
if any(b + a + b in hn for hn in hns):
match = True
if match:
total += 1
print(total)<|fim▁end|>
|
head, _, tail = s.partition('[')
yield head
|
<|file_name|>gpu_test_expectations.py<|end_file_name|><|fim▁begin|># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import test_expectations
# Valid expectation conditions are:
#
# Operating systems:
# win, xp, vista, win7, mac, leopard, snowleopard, lion, mountainlion,
# mavericks, yosemite, linux, chromeos, android
#
# GPU vendors:
# amd, arm, broadcom, hisilicon, intel, imagination, nvidia, qualcomm,
# vivante
#
# Browser types:
# android-webview-shell, android-content-shell, debug
#
# ANGLE renderer:
# d3d9, d3d11, opengl
#
# Specific GPUs can be listed as a tuple with vendor name and device ID.
# Examples: ('nvidia', 0x1234), ('arm', 'Mali-T604')
# Device IDs must be paired with a GPU vendor.
#
# Sample usage in SetExpectations in subclasses:
# self.Fail('gl-enable-vertex-attrib.html',
# ['mac', 'amd', ('nvidia', 0x1234)], bug=123)
ANGLE_MODIFIERS = ['d3d9', 'd3d11', 'opengl']
BROWSER_TYPE_MODIFIERS = [
'android-webview-shell', 'android-content-shell', 'debug' ]
class _FlakyExpectation(object):
def __init__(self, expectation, max_num_retries):
self.expectation = expectation
self.max_num_retries = max_num_retries
class GpuTestExpectations(test_expectations.TestExpectations):
def __init__(self):
self._flaky_expectations = []
super(GpuTestExpectations, self).__init__()
def Flaky(self, url_pattern, conditions=None, bug=None, max_num_retries=2):
expectation = _FlakyExpectation(self.CreateExpectation(
'pass', url_pattern, conditions, bug), max_num_retries)
self._flaky_expectations.append(expectation)
def GetFlakyRetriesForPage(self, page, browser):
for fe in self._flaky_expectations:
e = fe.expectation
if self.ExpectationAppliesToPage(e, browser, page):
return fe.max_num_retries
return 0
def IsValidUserDefinedCondition(self, condition):
# Add support for d3d9, d3d11 and opengl-specific expectations.
if condition in ANGLE_MODIFIERS:
return True
# Add support for browser-type-specific expectations.
if condition in BROWSER_TYPE_MODIFIERS:
return True
return super(GpuTestExpectations,
self).IsValidUserDefinedCondition(condition)
def ModifiersApply(self, browser, expectation):
if not super(GpuTestExpectations, self).ModifiersApply(
browser, expectation):
return False
# We'll only get here if the OS and GPU matched the expectation.
# TODO(kbr): refactor _Expectation to be a public class so that
# the GPU-specific properties can be moved into a subclass, and
# run the unit tests from this directory on the CQ and the bots.
# crbug.com/495868 crbug.com/495870
# Check for presence of Android WebView.
browser_expectations = [x for x in expectation.user_defined_conditions
if x in BROWSER_TYPE_MODIFIERS]
browser_matches = ((not browser_expectations) or
browser.browser_type in browser_expectations)
if not browser_matches:
return False<|fim▁hole|> if browser.supports_system_info:
gpu_info = browser.GetSystemInfo().gpu
if gpu_info and gpu_info.aux_attributes:
gl_renderer = gpu_info.aux_attributes.get('gl_renderer')
if gl_renderer:
if 'Direct3D11' in gl_renderer:
angle_renderer = 'd3d11'
elif 'Direct3D9' in gl_renderer:
angle_renderer = 'd3d9'
elif 'OpenGL' in gl_renderer:
angle_renderer = 'opengl'
angle_expectations = [x for x in expectation.user_defined_conditions
if x in ANGLE_MODIFIERS]
angle_matches = ((not angle_expectations) or
angle_renderer in angle_expectations)
return angle_matches<|fim▁end|>
|
angle_renderer = ''
gpu_info = None
|
<|file_name|>test_legacy_application.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from mock import patch
from oauthlib.oauth2 import LegacyApplicationClient
from ....unittest import TestCase
@patch('time.time', new=lambda: 1000)
class LegacyApplicationClientTest(TestCase):
client_id = "someclientid"
scope = ["/profile"]
kwargs = {
"some": "providers",
"require": "extra arguments"
}
username = "foo"
password = "bar"
body = "not=empty"
body_up = "not=empty&grant_type=password&username=%s&password=%s" % (username, password)
body_kwargs = body_up + "&some=providers&require=extra+arguments"
token_json = ('{ "access_token":"2YotnFZFEjr1zCsicMWpAA",'
' "token_type":"example",'
' "expires_in":3600,'
' "scope":"/profile",'
' "refresh_token":"tGzv3JOkF0XG5Qx2TlKWIA",'
' "example_parameter":"example_value"}')
token = {
"access_token": "2YotnFZFEjr1zCsicMWpAA",
"token_type": "example",
"expires_in": 3600,
"expires_at": 4600,
"scope": scope,
"refresh_token": "tGzv3JOkF0XG5Qx2TlKWIA",
"example_parameter": "example_value"
}
def test_request_body(self):<|fim▁hole|> body = client.prepare_request_body(self.username, self.password,
body=self.body)
self.assertFormBodyEqual(body, self.body_up)
# With extra parameters
body = client.prepare_request_body(self.username, self.password,
body=self.body, **self.kwargs)
self.assertFormBodyEqual(body, self.body_kwargs)
def test_parse_token_response(self):
client = LegacyApplicationClient(self.client_id)
# Parse code and state
response = client.parse_request_body_response(self.token_json, scope=self.scope)
self.assertEqual(response, self.token)
self.assertEqual(client.access_token, response.get("access_token"))
self.assertEqual(client.refresh_token, response.get("refresh_token"))
self.assertEqual(client.token_type, response.get("token_type"))
# Mismatching state
self.assertRaises(Warning, client.parse_request_body_response, self.token_json, scope="invalid")<|fim▁end|>
|
client = LegacyApplicationClient(self.client_id)
# Basic, no extra arguments
|
<|file_name|>ScriptFieldsFetchSubPhase.java<|end_file_name|><|fim▁begin|>/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fetch.subphase;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.script.FieldScript;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
public final class ScriptFieldsFetchSubPhase implements FetchSubPhase {
@Override
public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException {
if (context.hasScriptFields() == false) {
return;<|fim▁hole|> hits = hits.clone(); // don't modify the incoming hits
Arrays.sort(hits, Comparator.comparingInt(SearchHit::docId));
int lastReaderId = -1;
FieldScript[] leafScripts = null;
List<ScriptFieldsContext.ScriptField> scriptFields = context.scriptFields().fields();
final IndexReader reader = context.searcher().getIndexReader();
for (SearchHit hit : hits) {
int readerId = ReaderUtil.subIndex(hit.docId(), reader.leaves());
LeafReaderContext leafReaderContext = reader.leaves().get(readerId);
if (readerId != lastReaderId) {
leafScripts = createLeafScripts(leafReaderContext, scriptFields);
lastReaderId = readerId;
}
int docId = hit.docId() - leafReaderContext.docBase;
for (int i = 0; i < leafScripts.length; i++) {
leafScripts[i].setDocument(docId);
final Object value;
try {
value = leafScripts[i].execute();
CollectionUtils.ensureNoSelfReferences(value, "ScriptFieldsFetchSubPhase leaf script " + i);
} catch (RuntimeException e) {
if (scriptFields.get(i).ignoreException()) {
continue;
}
throw e;
}
if (hit.fieldsOrNull() == null) {
hit.fields(new HashMap<>(2));
}
String scriptFieldName = scriptFields.get(i).name();
DocumentField hitField = hit.getFields().get(scriptFieldName);
if (hitField == null) {
final List<Object> values;
if (value instanceof Collection) {
values = new ArrayList<>((Collection<?>) value);
} else {
values = Collections.singletonList(value);
}
hitField = new DocumentField(scriptFieldName, values);
hit.getFields().put(scriptFieldName, hitField);
}
}
}
}
private FieldScript[] createLeafScripts(LeafReaderContext context,
List<ScriptFieldsContext.ScriptField> scriptFields) {
FieldScript[] scripts = new FieldScript[scriptFields.size()];
for (int i = 0; i < scripts.length; i++) {
try {
scripts[i] = scriptFields.get(i).script().newInstance(context);
} catch (IOException e1) {
throw new IllegalStateException("Failed to load script " + scriptFields.get(i).name(), e1);
}
}
return scripts;
}
}<|fim▁end|>
|
}
|
<|file_name|>inMageAzureV2EventDetails.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
const models = require('./index');
/**
* Model class for event details of a VMwareAzureV2 event.
*
* @extends models['EventProviderSpecificDetails']
*/
class InMageAzureV2EventDetails extends models['EventProviderSpecificDetails'] {
/**
* Create a InMageAzureV2EventDetails.
* @member {string} [eventType] InMage Event type. Takes one of the values of
* {InMageDataContract.InMageMonitoringEventType}.
* @member {string} [category] InMage Event Category.
* @member {string} [component] InMage Event Component.
* @member {string} [correctiveAction] Corrective Action string for the
* event.
* @member {string} [details] InMage Event Details.
* @member {string} [summary] InMage Event Summary.
* @member {string} [siteName] VMware Site name.
*/
constructor() {
super();
}
/**
* Defines the metadata of InMageAzureV2EventDetails
*<|fim▁hole|> *
*/
mapper() {
return {
required: false,
serializedName: 'InMageAzureV2',
type: {
name: 'Composite',
polymorphicDiscriminator: {
serializedName: 'instanceType',
clientName: 'instanceType'
},
uberParent: 'EventProviderSpecificDetails',
className: 'InMageAzureV2EventDetails',
modelProperties: {
instanceType: {
required: true,
serializedName: 'instanceType',
isPolymorphicDiscriminator: true,
type: {
name: 'String'
}
},
eventType: {
required: false,
serializedName: 'eventType',
type: {
name: 'String'
}
},
category: {
required: false,
serializedName: 'category',
type: {
name: 'String'
}
},
component: {
required: false,
serializedName: 'component',
type: {
name: 'String'
}
},
correctiveAction: {
required: false,
serializedName: 'correctiveAction',
type: {
name: 'String'
}
},
details: {
required: false,
serializedName: 'details',
type: {
name: 'String'
}
},
summary: {
required: false,
serializedName: 'summary',
type: {
name: 'String'
}
},
siteName: {
required: false,
serializedName: 'siteName',
type: {
name: 'String'
}
}
}
}
};
}
}
module.exports = InMageAzureV2EventDetails;<|fim▁end|>
|
* @returns {object} metadata of InMageAzureV2EventDetails
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 Smile (<http://www.smile.fr>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#<|fim▁hole|># along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import test_access_control
import test_users
import test_groups<|fim▁end|>
|
# You should have received a copy of the GNU General Public License
|
<|file_name|>qscintilla.py<|end_file_name|><|fim▁begin|>#############################################################################
##
## Copyright (c) 2011 Riverbank Computing Limited <[email protected]>
##
## This file is part of PyQt.
##
## This file may be used under the terms of the GNU General Public
## License versions 2.0 or 3.0 as published by the Free Software
## Foundation and appearing in the files LICENSE.GPL2 and LICENSE.GPL3
## included in the packaging of this file. Alternatively you may (at
## your option) use any later version of the GNU General Public
## License if such license has been publicly approved by Riverbank
## Computing Limited (or its successors, if any) and the KDE Free Qt
## Foundation. In addition, as a special exception, Riverbank gives you
## certain additional rights. These rights are described in the Riverbank
## GPL Exception version 1.1, which can be found in the file
## GPL_EXCEPTION.txt in this package.
##
## Please review the following information to ensure GNU General
## Public Licensing requirements will be met:
## http://trolltech.com/products/qt/licenses/licensing/opensource/. If
## you are unsure which license is appropriate for your use, please
## review the following information:
## http://trolltech.com/products/qt/licenses/licensing/licensingoverview
## or contact the sales department at [email protected].
##
## This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
## WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
##
#############################################################################
# If pluginType is MODULE, the plugin loader will call moduleInformation. The
# variable MODULE is inserted into the local namespace by the plugin loader.<|fim▁hole|>pluginType = MODULE
# moduleInformation() must return a tuple (module, widget_list). If "module"
# is "A" and any widget from this module is used, the code generator will write
# "import A". If "module" is "A[.B].C", the code generator will write
# "from A[.B] import C". Each entry in "widget_list" must be unique.
def moduleInformation():
return "PyQt4.Qsci", ("QsciScintilla", )<|fim▁end|>
| |
<|file_name|>filter.d.ts<|end_file_name|><|fim▁begin|>/**<|fim▁hole|>/**
* Filters a collection based on a filter function.
* Optionally a from & to key can be provided for partial filtering.
*
* @generator
*
* @param collection - Collection to filter.
* @param iteratee - Filter function.
* @param from - Key to start filtering.
* @param to - Key (exclusive) to stop filtering.
*
* @returns Array containing filtered values.
*/
export default function filter<T>(collection: Collection<T>, iteratee: (value, key, collection: Collection<T>) => boolean, from?: any, to?: any): any[];<|fim▁end|>
|
* Created by Rogier on 13/04/2017.
*/
import { Collection } from '../types';
|
<|file_name|>if_else_rob_brace.go<|end_file_name|><|fim▁begin|>/* If statements */
package main
func main() {
x := true
/* Robillard braces on else, no statement */
if x {<|fim▁hole|> }
}<|fim▁end|>
|
//Do nothing
} else
{
println(x)
|
<|file_name|>permissions.py<|end_file_name|><|fim▁begin|>"""A simple example of Google Analytics batched user permissions."""
import json
from apiclient.errors import HttpError
from apiclient.http import BatchHttpRequest
def call_back(request_id, response, exception):
"""Handle batched request responses."""
print request_id
if exception is not None:
if isinstance(exception, HttpError):
message = json.loads(exception.content)['error']['message']
print ('Request %s returned API error : %s : %s ' %
(request_id, exception.resp.status, message))
else:
print response
def add_users(users, permissions):
"""Adds users to every view (profile) with the given permissions.
Args:
users: A list of user email addresses.
permissions: A list of user permissions.
Note: this code assumes you have MANAGE_USERS level permissions
to each profile and an authorized Google Analytics service object.
"""
# Get the a full set of account summaries.
account_summaries = analytics.management().accountSummaries().list().execute()
# Loop through each account.
for account in account_summaries.get('items', []):
account_id = account.get('id')
# Loop through each user.
for user in users:
# Create the BatchHttpRequest object.
batch = BatchHttpRequest(callback=call_back)
# Loop through each property.
for property_summary in account.get('webProperties', []):
property_id = property_summary.get('id')
# Loop through each view (profile).
for view in property_summary.get('profiles', []):
view_id = view.get('id')
# Construct the Profile User Link.
link = analytics.management().profileUserLinks().insert(
accountId=account_id,
webPropertyId=property_id,
profileId=view_id,
body={
'permissions': {
'local': permissions
},
'userRef': {
'email': user
}
}
)
batch.add(link)
# Execute the batch request for each user.<|fim▁hole|> # Construct a list of users.
emails = ['[email protected]', '[email protected]', '[email protected]', '[email protected]']
# call the add_users function with the list of desired permissions.
add_users(emails, ['READ_AND_ANALYZE'])<|fim▁end|>
|
batch.execute()
if __name__ == '__main__':
|
<|file_name|>EntityAIMoveIndoors.java<|end_file_name|><|fim▁begin|>package net.minecraft.entity.ai;
<|fim▁hole|>import net.minecraft.entity.EntityCreature;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.Vec3d;
import net.minecraft.village.Village;
import net.minecraft.village.VillageDoorInfo;
public class EntityAIMoveIndoors extends EntityAIBase
{
private final EntityCreature entityObj;
private VillageDoorInfo doorInfo;
private int insidePosX = -1;
private int insidePosZ = -1;
public EntityAIMoveIndoors(EntityCreature entityObjIn)
{
this.entityObj = entityObjIn;
this.setMutexBits(1);
}
/**
* Returns whether the EntityAIBase should begin execution.
*/
public boolean shouldExecute()
{
BlockPos blockpos = new BlockPos(this.entityObj);
if ((!this.entityObj.world.isDaytime() || this.entityObj.world.isRaining() && !this.entityObj.world.getBiome(blockpos).canRain()) && this.entityObj.world.provider.hasSkyLight())
{
if (this.entityObj.getRNG().nextInt(50) != 0)
{
return false;
}
else if (this.insidePosX != -1 && this.entityObj.getDistanceSq((double)this.insidePosX, this.entityObj.posY, (double)this.insidePosZ) < 4.0D)
{
return false;
}
else
{
Village village = this.entityObj.world.getVillageCollection().getNearestVillage(blockpos, 14);
if (village == null)
{
return false;
}
else
{
this.doorInfo = village.getDoorInfo(blockpos);
return this.doorInfo != null;
}
}
}
else
{
return false;
}
}
/**
* Returns whether an in-progress EntityAIBase should continue executing
*/
public boolean continueExecuting()
{
return !this.entityObj.getNavigator().noPath();
}
/**
* Execute a one shot task or start executing a continuous task
*/
public void startExecuting()
{
this.insidePosX = -1;
BlockPos blockpos = this.doorInfo.getInsideBlockPos();
int i = blockpos.getX();
int j = blockpos.getY();
int k = blockpos.getZ();
if (this.entityObj.getDistanceSq(blockpos) > 256.0D)
{
Vec3d vec3d = RandomPositionGenerator.findRandomTargetBlockTowards(this.entityObj, 14, 3, new Vec3d((double)i + 0.5D, (double)j, (double)k + 0.5D));
if (vec3d != null)
{
this.entityObj.getNavigator().tryMoveToXYZ(vec3d.xCoord, vec3d.yCoord, vec3d.zCoord, 1.0D);
}
}
else
{
this.entityObj.getNavigator().tryMoveToXYZ((double)i + 0.5D, (double)j, (double)k + 0.5D, 1.0D);
}
}
/**
* Resets the task
*/
public void resetTask()
{
this.insidePosX = this.doorInfo.getInsideBlockPos().getX();
this.insidePosZ = this.doorInfo.getInsideBlockPos().getZ();
this.doorInfo = null;
}
}<|fim▁end|>
| |
<|file_name|>step4.rs<|end_file_name|><|fim▁begin|>println!("11. Creating and storing CREDENTAIL DEFINITION using anoncreds as Trust Anchor, for the given Schema");
let config_json = r#"{ "support_revocation": false }"#;<|fim▁hole|>let tag = r#"TAG1"#;
let (_cred_def_id, _cred_def_json) = anoncreds::issuer_create_and_store_credential_def(wallet_handle, &trustee_did, &schema_json, tag, None, config_json).wait().unwrap();
// CLEAN UP
println!("12. Close and delete wallet");
indy::wallet::close_wallet(wallet_handle).wait().unwrap();
indy::wallet::delete_wallet(&config, USEFUL_CREDENTIALS).wait().unwrap();
println!("13. Close pool and delete pool ledger config");
pool::close_pool_ledger(pool_handle).wait().unwrap();
pool::delete_pool_ledger(&pool_name).wait().unwrap();<|fim▁end|>
| |
<|file_name|>control_manual.cpp<|end_file_name|><|fim▁begin|>#include "Sub.h"
// manual_init - initialise manual controller
bool Sub::manual_init()
{
// set target altitude to zero for reporting
pos_control.set_alt_target(0);
// attitude hold inputs become thrust inputs in manual mode
// set to neutral to prevent chaotic behavior (esp. roll/pitch)
set_neutral_controls();
return true;
}
// manual_run - runs the manual (passthrough) controller
// should be called at 100hz or more
void Sub::manual_run()
{
// if not armed set throttle to zero and exit immediately
if (!motors.armed()) {
motors.set_desired_spool_state(AP_Motors::DESIRED_GROUND_IDLE);
attitude_control.set_throttle_out_unstabilized(0,true,g.throttle_filt);
return;
}
<|fim▁hole|> motors.set_desired_spool_state(AP_Motors::DESIRED_THROTTLE_UNLIMITED);
motors.set_roll(channel_roll->norm_input());
motors.set_pitch(channel_pitch->norm_input());
motors.set_yaw(channel_yaw->norm_input() * g.acro_yaw_p / ACRO_YAW_P);
motors.set_throttle(channel_throttle->norm_input());
motors.set_forward(channel_forward->norm_input());
motors.set_lateral(channel_lateral->norm_input());
}<|fim▁end|>
| |
<|file_name|>thread.rs<|end_file_name|><|fim▁begin|>use std::thread::{Builder, JoinHandle};
/// Like `thread::spawn`, but with a `name` argument.
pub fn spawn_named<F, T, S>(name: S, f: F) -> JoinHandle<T>
where
F: FnOnce() -> T + Send + 'static,
T: Send + 'static,<|fim▁hole|> Builder::new().name(name.into()).spawn(f).expect("thread spawn works")
}<|fim▁end|>
|
S: Into<String>,
{
|
<|file_name|>commi.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2.7
# -*- coding:utf-8 -*-
#
# Copyright (c) 2017 by Tsuyoshi Hamada. All rights reserved.
#
import os
import logging as LG
import random
import commands
import shelve
import pickle
import sys
import hashlib
import re as REGEXP
# -- set encode for your terminal --
config_term_encode = 'euc-jp'
# -- set filename for your database --
config_db_filename = '/t m p/g i t commit- '
def get_logger(str_position = ''):
log_basename = __file__
# Don't use Python's hasattr()
# unless you're writing Python 3-only code
# and understand how it works.
if getattr(get_logger, "__count_called", None) is not None:
log_basename = "%s @%s" % (__file__, str_position)
get_logger.__count_called = get_logger.__count_called + 1
'''
print "----------------- %d times called!!" % (get_logger.__count_called)<|fim▁hole|> '''
print "----------------- first time called!!"
'''
# create logger
logger = LG.getLogger(os.path.basename(log_basename))
logger.setLevel(LG.DEBUG)
# create console handler and set level to debug
ch = LG.StreamHandler()
ch.setLevel(LG.DEBUG)
# create formatter
formatter = LG.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# add formatter to ch
ch.setFormatter(formatter)
# add ch to logger
logger.addHandler(ch)
# 'application' code
## logger.debug('debug message')
## logger.info('info message')
## logger.warn('warn message')
## logger.error('error message')
## logger.critical('critical message')
return logger
def get_quotes():
result = [ ]
result.append(u"生きる -- 谷川俊太郎")
# --
result.append(u"生きているということ")
result.append(u"いま生きているということ")
result.append(u"それはのどがかわくということ")
result.append(u"木漏れ日がまぶしいということ")
result.append(u"ふっと或るメロディを思い出すということ")
result.append(u"くしゃみをすること")
result.append(u"あなたと手をつなぐこと")
# --
result.append(u"生きているということ")
result.append(u"いま生きているということ")
result.append(u"それはミニスカート")
result.append(u"それはプラネタリウム")
result.append(u"それはヨハン・シュトラウス")
result.append(u"それはピカソ")
result.append(u"それはアルプス")
result.append(u"すべての美しいものに出会うということ")
result.append(u"そして")
result.append(u"かくされた悪を注意深くこばむこと")
# --
result.append(u"生きているということ")
result.append(u"いま生きているということ")
result.append(u"泣けるということ")
result.append(u"笑えるということ")
result.append(u"怒れるということ")
result.append(u"自由ということ")
# --
result.append(u"生きているということ")
result.append(u"いま生きているということ")
result.append(u"いま遠くで犬が吠えるということ")
result.append(u"いま地球が廻っているということ")
result.append(u"いまどこかで産声があがるということ")
result.append(u"いまどこかで兵士が傷つくということ")
result.append(u"いまぶらんこがゆれているということ")
result.append(u"いまいまがすぎてゆくこと")
# --
result.append(u"生きているということ")
result.append(u"いま生きてるということ")
result.append(u"鳥ははばたくということ")
result.append(u"海はとどろくということ")
result.append(u"かたつむりははうということ")
result.append(u"人は愛するということ")
result.append(u"あなたの手のぬくみ")
result.append(u"いのちということ")
result.append(u":-) ;-)")
return result
def get_shelve(fname, logger=None):
if logger is None: logger = get_logger('get_shelve()')
keyname = 'count'
pickle_protocol = pickle.HIGHEST_PROTOCOL
try :
dic = shelve.open(fname, protocol=pickle_protocol)
except Exception as e:
logger.error(e)
logger.error(fname)
sys.exit(-1)
keys = dic.keys()
if keyname not in keys: dic[keyname] = 0
count = dic[keyname]
dic[keyname] = count + 1
dic.close()
return count
def do_uncompress(filename, logger=None):
if logger is None: logger = get_logger('do_uncompress()')
check = commands.getoutput("hostname;time bzip2 -d %s.db.bz2" % filename )
# logger.debug("%s", check)
return True
def do_compress(filename, logger=None):
if logger is None: logger = get_logger('do_compress()')
check = commands.getoutput("hostname;time bzip2 -9 %s.db" % filename )
# logger.debug("%s", check)
return True
def get_id_git(logger=None):
if logger is None: logger = get_logger('get_id_git()')
check = commands.getoutput("git remote -v")
# logger.debug(check)
md5 = hashlib.md5()
md5.update(check)
md5sum = md5.hexdigest()
# logger.debug(md5sum)
return md5sum
def cut_space_str(str):
return REGEXP.sub(r' +', '', str)
if __name__ == "__main__":
msg = ''
logger = get_logger()
md5sum = get_id_git()
db_filename = cut_space_str(config_db_filename + md5sum)
do_uncompress(db_filename)
count = get_shelve(db_filename)
do_compress(db_filename)
qs = get_quotes()
msg = ("%d: %s" % (count+1, qs[count % len(qs)]))
logger.info('# %s', db_filename.encode(config_term_encode))
logger.info('# %s', msg.encode(config_term_encode))
cmd = 'git commit -m "' + msg + '"; git push origin master;'
print cmd.encode(config_term_encode)<|fim▁end|>
|
'''
else:
get_logger.__count_called = 1
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (C) 2011-2014 Swift Navigation Inc.
# Contact: Fergus Noble <[email protected]>
#
# This source is subject to the license found in the file 'LICENSE' which must
# be be distributed together with this source. All other rights reserved.
#
# THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
# EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE.
import base64
import struct
__version__ = "0.23"
SBP_PREAMBLE = 0x55
crc16_tab = [0x0000,0x1021,0x2042,0x3063,0x4084,0x50a5,0x60c6,0x70e7,
0x8108,0x9129,0xa14a,0xb16b,0xc18c,0xd1ad,0xe1ce,0xf1ef,
0x1231,0x0210,0x3273,0x2252,0x52b5,0x4294,0x72f7,0x62d6,
0x9339,0x8318,0xb37b,0xa35a,0xd3bd,0xc39c,0xf3ff,0xe3de,
0x2462,0x3443,0x0420,0x1401,0x64e6,0x74c7,0x44a4,0x5485,
0xa56a,0xb54b,0x8528,0x9509,0xe5ee,0xf5cf,0xc5ac,0xd58d,
0x3653,0x2672,0x1611,0x0630,0x76d7,0x66f6,0x5695,0x46b4,
0xb75b,0xa77a,0x9719,0x8738,0xf7df,0xe7fe,0xd79d,0xc7bc,
0x48c4,0x58e5,0x6886,0x78a7,0x0840,0x1861,0x2802,0x3823,
0xc9cc,0xd9ed,0xe98e,0xf9af,0x8948,0x9969,0xa90a,0xb92b,
0x5af5,0x4ad4,0x7ab7,0x6a96,0x1a71,0x0a50,0x3a33,0x2a12,
0xdbfd,0xcbdc,0xfbbf,0xeb9e,0x9b79,0x8b58,0xbb3b,0xab1a,
0x6ca6,0x7c87,0x4ce4,0x5cc5,0x2c22,0x3c03,0x0c60,0x1c41,
0xedae,0xfd8f,0xcdec,0xddcd,0xad2a,0xbd0b,0x8d68,0x9d49,
0x7e97,0x6eb6,0x5ed5,0x4ef4,0x3e13,0x2e32,0x1e51,0x0e70,
0xff9f,0xefbe,0xdfdd,0xcffc,0xbf1b,0xaf3a,0x9f59,0x8f78,
0x9188,0x81a9,0xb1ca,0xa1eb,0xd10c,0xc12d,0xf14e,0xe16f,
0x1080,0x00a1,0x30c2,0x20e3,0x5004,0x4025,0x7046,0x6067,
0x83b9,0x9398,0xa3fb,0xb3da,0xc33d,0xd31c,0xe37f,0xf35e,
0x02b1,0x1290,0x22f3,0x32d2,0x4235,0x5214,0x6277,0x7256,
0xb5ea,0xa5cb,0x95a8,0x8589,0xf56e,0xe54f,0xd52c,0xc50d,
0x34e2,0x24c3,0x14a0,0x0481,0x7466,0x6447,0x5424,0x4405,
0xa7db,0xb7fa,0x8799,0x97b8,0xe75f,0xf77e,0xc71d,0xd73c,
0x26d3,0x36f2,0x0691,0x16b0,0x6657,0x7676,0x4615,0x5634,
0xd94c,0xc96d,0xf90e,0xe92f,0x99c8,0x89e9,0xb98a,0xa9ab,
0x5844,0x4865,0x7806,0x6827,0x18c0,0x08e1,0x3882,0x28a3,
0xcb7d,0xdb5c,0xeb3f,0xfb1e,0x8bf9,0x9bd8,0xabbb,0xbb9a,
0x4a75,0x5a54,0x6a37,0x7a16,0x0af1,0x1ad0,0x2ab3,0x3a92,
0xfd2e,0xed0f,0xdd6c,0xcd4d,0xbdaa,0xad8b,0x9de8,0x8dc9,
0x7c26,0x6c07,0x5c64,0x4c45,0x3ca2,0x2c83,0x1ce0,0x0cc1,
0xef1f,0xff3e,0xcf5d,0xdf7c,0xaf9b,0xbfba,0x8fd9,0x9ff8,
0x6e17,0x7e36,0x4e55,0x5e74,0x2e93,0x3eb2,0x0ed1,0x1ef0]
def crc16(s, crc=0):
"""CRC16 implementation acording to CCITT standards.
"""
for ch in s:
crc = ((crc<<8)&0xFFFF) ^ crc16_tab[ ((crc>>8)&0xFF) ^ (ord(ch)&0xFF) ]
crc &= 0xFFFF
return crc
class SBP(object):
"""Swift Binary Protocol container.
"""
def __init__(self, msg_type=None, sender=None,
length=None, payload=None, crc=None):
self.preamble = SBP_PREAMBLE
self.msg_type = msg_type
self.sender = sender
self.length = length
self.payload = payload
self.crc = crc
def __eq__(self, other):
return self.__dict__ == other.__dict__
def pack(self):
"""Pack to framed binary message.
"""
framed_msg = struct.pack('<BHHB',
self.preamble,
self.msg_type,
self.sender,
len(self.payload))
framed_msg += self.payload
crc = crc16(framed_msg[1:], 0)
framed_msg += struct.pack('<H', crc)
return framed_msg
def __repr__(self):
p = (self.preamble, self.msg_type, self.sender, self.length,
self.payload, self.crc)
fmt = "<SBP (preamble=0x%X, msg_type=0x%X, sender=%s, length=%d, payload=%s, crc=0x%X)>"
return fmt % p
@staticmethod
def from_json_dict(data):
msg_type = data['msg_type']
sender = data['sender']
length = data['length']
payload = base64.standard_b64decode(data['payload'])
crc = data['crc']
return SBP(msg_type, sender, length, payload, crc)
def to_json_dict(self):
return {'preamble': self.preamble,
'msg_type': self.msg_type,
'sender': self.sender,
'length': self.length,
'payload': base64.standard_b64encode(self.payload),<|fim▁hole|> 'crc': self.crc}<|fim▁end|>
| |
<|file_name|>api.cpp<|end_file_name|><|fim▁begin|>#include "game/network/api.hpp"
namespace Game {
namespace Network {
Api::Api(QObject *parent) : QObject(parent) {
// Manager to send REST petitions
manager = new QNetworkAccessManager(this);
result = "";
limit = 5;
// URL information
QString host = Game::Settings::load("network:api:host").toString();
QString port = Game::Settings::load("network:api:port").toString();
base_url = QString("http://%1:%2").arg(host, port);
// Signals && Slots
connect(manager, SIGNAL(finished(QNetworkReply*)), this, SLOT(requestResult(QNetworkReply*)));
}
void Api::uploadScore(QString name, int score) {
// Name of resource
QString resource = "/games";
// Create a JSON object
QtJson::JsonObject json;
json["name"] = name;
json["score"] = score;
json["timestamp"] = QString::number(QDateTime::currentDateTimeUtc().toTime_t());
// Serialize object
QByteArray data = QtJson::serialize(json);
// Send post petition
postMethod(resource, data);
}
void Api::getHighscores(int limit) {
// Set up limit of top highscores
this->limit = limit;
// Name of resource
QString resource = QString("/topscores");
// Send get petition
getMethod(resource);
// Emit signal with response
QTimer *timer = new QTimer(this);
timer->setSingleShot(true);
connect(timer, SIGNAL(timeout()), this, SLOT(parseHighscores()));
timer->start(max_timeout);
}
void Api::requestResult(QNetworkReply *reply) {<|fim▁hole|> if (reply->error() != QNetworkReply::NoError) {
result = "";
return;
}
result = (QString)reply->readAll();
}
void Api::parseHighscores() {
if(result.isEmpty()) {
return;
}
// Deserialize JSON data
bool ok;
QList<QVariant> data = QtJson::parse(result, ok).toList();
if (!ok) {
return;
}
// Store top highscores
QVector<QVector<QString> > highscores;
for (int i = 0; i < limit; i++) {
QVector<QString> element;
element << data[i].toMap()["name"].toString();
element << data[i].toMap()["score"].toString();
highscores << element;
}
emit topHighscores(highscores);
}
void Api::getMethod(QString resource) {
QUrl url(base_url.append(resource));
QNetworkRequest request(url);
manager->get(request);
}
void Api::postMethod(QString resource, QByteArray data) {
QUrl url(base_url.append(resource));
QNetworkRequest request(url);
// Specify a JSON object is sent
request.setHeader(QNetworkRequest::ContentTypeHeader, "application/json");
manager->post(request, data);
}
} // namespace Network
} // namespace Game<|fim▁end|>
|
// Error response
|
<|file_name|>util.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::*;
use core::hash::Streaming;
use core::hashmap::linear::LinearMap;
use rustc::driver::{driver, session};
use rustc::metadata::filesearch;
use std::getopts::groups::getopts;
use std::semver;
use std::{json, term, sort, getopts};
use syntax::ast_util::*;
use syntax::codemap::{dummy_sp, spanned};
use syntax::ext::base::{mk_ctxt, ext_ctxt};
use syntax::ext::build;
use syntax::{ast, attr, codemap, diagnostic, fold};
pub struct Package {
id: ~str,
vers: semver::Version,
bins: ~[~str],
libs: ~[~str],
}
pub fn root() -> Path {
match filesearch::get_rustpkg_root() {
result::Ok(path) => path,
result::Err(err) => fail!(err)
}
}
pub fn is_cmd(cmd: ~str) -> bool {
let cmds = &[~"build", ~"clean", ~"do", ~"info", ~"install", ~"prefer",
~"test", ~"uninstall", ~"unprefer"];
vec::contains(cmds, &cmd)
}
pub fn parse_name(id: ~str) -> result::Result<~str, ~str> {
let mut last_part = None;
for str::each_split_char(id, '.') |part| {
for str::each_char(part) |char| {
if char::is_whitespace(char) {
return result::Err(
~"could not parse id: contains whitespace");
} else if char::is_uppercase(char) {
return result::Err(
~"could not parse id: should be all lowercase");
}
}
last_part = Some(part.to_owned());
}
if last_part.is_none() { return result::Err(~"could not parse id: is empty"); }
result::Ok(last_part.unwrap())
}
struct ListenerFn {
cmds: ~[~str],
span: codemap::span,
path: ~[ast::ident]
}
struct ReadyCtx {
sess: session::Session,
crate: @ast::crate,
ext_cx: @ext_ctxt,
path: ~[ast::ident],
fns: ~[ListenerFn]
}
fn fold_mod(_ctx: @mut ReadyCtx,
m: &ast::_mod,
fold: @fold::ast_fold) -> ast::_mod {
fn strip_main(item: @ast::item) -> @ast::item {
@ast::item {
attrs: do item.attrs.filtered |attr| {
*attr::get_attr_name(attr) != ~"main"
},
.. copy *item
}
}
fold::noop_fold_mod(&ast::_mod {
items: do m.items.map |item| {
strip_main(*item)
},
.. copy *m
}, fold)
}
fn fold_item(ctx: @mut ReadyCtx,
item: @ast::item,
fold: @fold::ast_fold) -> Option<@ast::item> {
ctx.path.push(item.ident);
let attrs = attr::find_attrs_by_name(item.attrs, ~"pkg_do");
if attrs.len() > 0 {
let mut cmds = ~[];
for attrs.each |attr| {
match attr.node.value.node {
ast::meta_list(_, mis) => {
for mis.each |mi| {
match mi.node {
ast::meta_word(cmd) => cmds.push(copy *cmd),
_ => {}
};
}
}
_ => cmds.push(~"build")
};
}
ctx.fns.push(ListenerFn {
cmds: cmds,
span: item.span,
path: /*bad*/copy ctx.path
});
}
let res = fold::noop_fold_item(item, fold);
ctx.path.pop();
res
}
fn add_pkg_module(ctx: @mut ReadyCtx, m: ast::_mod) -> ast::_mod {
let listeners = mk_listener_vec(ctx);
let ext_cx = ctx.ext_cx;
let item = quote_item! (
mod __pkg {
extern mod rustpkg (vers="0.6");
static listeners : &[rustpkg::Listener] = $listeners;<|fim▁hole|> rustpkg::run(listeners);
}
}
);
ast::_mod {
items: vec::append_one(/*bad*/copy m.items, item.get()),
.. m
}
}
fn mk_listener_vec(ctx: @mut ReadyCtx) -> @ast::expr {
let fns = ctx.fns;
let descs = do fns.map |listener| {
mk_listener_rec(ctx, *listener)
};
let ext_cx = ctx.ext_cx;
build::mk_slice_vec_e(ext_cx, dummy_sp(), descs)
}
fn mk_listener_rec(ctx: @mut ReadyCtx, listener: ListenerFn) -> @ast::expr {
let span = listener.span;
let cmds = do listener.cmds.map |&cmd| {
let ext_cx = ctx.ext_cx;
build::mk_base_str(ext_cx, span, cmd)
};
let ext_cx = ctx.ext_cx;
let cmds_expr = build::mk_slice_vec_e(ext_cx, span, cmds);
let cb_expr = build::mk_path(ext_cx, span, copy listener.path);
quote_expr!(
Listener {
cmds: $cmds_expr,
cb: $cb_expr
}
)
}
/// Generate/filter main function, add the list of commands, etc.
pub fn ready_crate(sess: session::Session,
crate: @ast::crate) -> @ast::crate {
let ctx = @mut ReadyCtx {
sess: sess,
crate: crate,
ext_cx: mk_ctxt(sess.parse_sess, copy sess.opts.cfg),
path: ~[],
fns: ~[]
};
let precursor = @fold::AstFoldFns {
// fold_crate: fold::wrap(|a, b| fold_crate(ctx, a, b)),
fold_item: |a, b| fold_item(ctx, a, b),
fold_mod: |a, b| fold_mod(ctx, a, b),
.. *fold::default_ast_fold()
};
let fold = fold::make_fold(precursor);
@fold.fold_crate(crate)
}
pub fn parse_vers(vers: ~str) -> result::Result<semver::Version, ~str> {
match semver::parse(vers) {
Some(vers) => result::Ok(vers),
None => result::Err(~"could not parse version: invalid")
}
}
pub fn need_dir(s: &Path) {
if !os::path_is_dir(s) && !os::make_dir(s, 493_i32) {
fail!(fmt!("can't create dir: %s", s.to_str()));
}
}
pub fn note(msg: ~str) {
let out = io::stdout();
if term::color_supported() {
term::fg(out, term::color_green);
out.write_str(~"note: ");
term::reset(out);
out.write_line(msg);
} else {
out.write_line(~"note: " + msg);
}
}
pub fn warn(msg: ~str) {
let out = io::stdout();
if term::color_supported() {
term::fg(out, term::color_yellow);
out.write_str(~"warning: ");
term::reset(out);
out.write_line(msg);
} else {
out.write_line(~"warning: " + msg);
}
}
pub fn error(msg: ~str) {
let out = io::stdout();
if term::color_supported() {
term::fg(out, term::color_red);
out.write_str(~"error: ");
term::reset(out);
out.write_line(msg);
} else {
out.write_line(~"error: " + msg);
}
}
pub fn hash(data: ~str) -> ~str {
let hasher = &hash::default_state();
hasher.write_str(data);
hasher.result_str()
}
pub fn temp_change_dir<T>(dir: &Path, cb: &fn() -> T) {
let cwd = os::getcwd();
os::change_dir(dir);
cb();
os::change_dir(&cwd);
}
pub fn touch(path: &Path) {
match io::mk_file_writer(path, ~[io::Create]) {
result::Ok(writer) => writer.write_line(~""),
_ => {}
}
}
pub fn remove_dir_r(path: &Path) {
for os::walk_dir(path) |&file| {
let mut cdir = file;
loop {
if os::path_is_dir(&cdir) {
os::remove_dir(&cdir);
} else {
os::remove_file(&cdir);
}
cdir = cdir.dir_path();
if cdir == *path { break; }
}
}
os::remove_dir(path);
}
pub fn wait_for_lock(path: &Path) {
if os::path_exists(path) {
warn(fmt!("the database appears locked, please wait (or rm %s)",
path.to_str()));
loop {
if !os::path_exists(path) { break; }
}
}
}
fn _add_pkg(packages: ~[json::Json], pkg: &Package) -> ~[json::Json] {
for packages.each |&package| {
match &package {
&json::Object(ref map) => {
let mut has_id = false;
match map.get(&~"id") {
&json::String(ref str) => {
if pkg.id == *str {
has_id = true;
}
}
_ => {}
}
match map.get(&~"vers") {
&json::String(ref str) => {
if has_id && pkg.vers.to_str() == *str {
return copy packages;
}
}
_ => {}
}
}
_ => {}
}
}
let mut map = ~LinearMap::new();
map.insert(~"id", json::String(pkg.id));
map.insert(~"vers", json::String(pkg.vers.to_str()));
map.insert(~"bins", json::List(do pkg.bins.map |&bin| {
json::String(bin)
}));
map.insert(~"libs", json::List(do pkg.libs.map |&lib| {
json::String(lib)
}));
vec::append(packages, ~[json::Object(map)])
}
fn _rm_pkg(packages: ~[json::Json], pkg: &Package) -> ~[json::Json] {
do packages.filter_mapped |&package| {
match &package {
&json::Object(ref map) => {
let mut has_id = false;
match map.get(&~"id") {
&json::String(str) => {
if pkg.id == str {
has_id = true;
}
}
_ => {}
}
match map.get(&~"vers") {
&json::String(ref str) => {
if has_id && pkg.vers.to_str() == *str {
None
} else {
Some(copy package)
}
}
_ => { Some(copy package) }
}
}
_ => { Some(copy package) }
}
}
}
pub fn load_pkgs() -> result::Result<~[json::Json], ~str> {
let root = root();
let db = root.push(~"db.json");
let db_lock = root.push(~"db.json.lck");
wait_for_lock(&db_lock);
touch(&db_lock);
let packages = if os::path_exists(&db) {
match io::read_whole_file_str(&db) {
result::Ok(str) => {
match json::from_str(str) {
result::Ok(json) => {
match json {
json::List(list) => list,
_ => {
os::remove_file(&db_lock);
return result::Err(
~"package db's json is not a list");
}
}
}
result::Err(err) => {
os::remove_file(&db_lock);
return result::Err(
fmt!("failed to parse package db: %s",
err.to_str()));
}
}
}
result::Err(err) => {
os::remove_file(&db_lock);
return result::Err(fmt!("failed to read package db: %s",
err));
}
}
} else { ~[] };
os::remove_file(&db_lock);
result::Ok(packages)
}
// Looks up a package in the database by `id` (or by the short name that
// `parse_name` derives from it), optionally constrained to an exact
// version.
//
// When `vers` is given and an exact id+version match exists, that
// package is returned. When `vers` is None, all id/name matches are
// collected and the one with the highest version wins. Returns Err if
// the id does not parse, the db cannot be loaded, a db record is
// malformed, or nothing matches.
pub fn get_pkg(id: ~str,
               vers: Option<~str>) -> result::Result<Package, ~str> {
    let name = match parse_name(id) {
        result::Ok(name) => name,
        result::Err(err) => return result::Err(err)
    };
    let packages = match load_pkgs() {
        result::Ok(packages) => packages,
        result::Err(err) => return result::Err(err)
    };
    let mut sel = None;     // exact id+version match, if one is found
    let mut possibs = ~[];  // candidates matching id/name only
    let mut err = None;     // first parse error seen while scanning
    for packages.each |&package| {
        match package {
            json::Object(map) => {
                // Records without a string "id"/"vers" are skipped
                // (`loop` here continues to the next iteration).
                let pid = match map.get(&~"id") {
                    &json::String(str) => str,
                    _ => loop
                };
                let pname = match parse_name(pid) {
                    result::Ok(pname) => pname,
                    result::Err(perr) => {
                        err = Some(perr);
                        break;
                    }
                };
                let pvers = match map.get(&~"vers") {
                    &json::String(str) => str,
                    _ => loop
                };
                if pid == id || pname == name {
                    // Collect binary names; non-string entries become "".
                    let bins = match map.get(&~"bins") {
                        &json::List(ref list) => {
                            do list.map |&bin| {
                                match bin {
                                    json::String(str) => str,
                                    _ => ~""
                                }
                            }
                        }
                        _ => ~[]
                    };
                    // Collect library names the same way.
                    let libs = match map.get(&~"libs") {
                        &json::List(ref list) => {
                            do list.map |&lib| {
                                match lib {
                                    json::String(str) => str,
                                    _ => ~""
                                }
                            }
                        }
                        _ => ~[]
                    };
                    let package = Package {
                        id: pid,
                        vers: match parse_vers(pvers) {
                            result::Ok(vers) => vers,
                            result::Err(verr) => {
                                err = Some(verr);
                                break;
                            }
                        },
                        bins: bins,
                        libs: libs
                    };
                    if !vers.is_none() && vers.get() == pvers {
                        sel = Some(package);
                    }
                    else {
                        possibs.push(package);
                    }
                }
            }
            _ => {}
        }
    }
    if !err.is_none() {
        return result::Err(err.get());
    }
    if !sel.is_none() {
        return result::Ok(sel.get());
    }
    // An explicit version that was never matched, or no candidates at
    // all, is a lookup failure.
    if !vers.is_none() || possibs.len() < 1 {
        return result::Err(~"package not found");
    }
    // No version requested: pick the newest of the candidates.
    let possibs = sort::merge_sort(possibs, |v1, v2| {
        v1.vers <= v2.vers
    });
    result::Ok(copy *possibs.last())
}
// Persists `pkg` into the package database, rewriting db.json with the
// list returned by `_add_pkg`. Returns true on success, false (after
// printing an error) on failure.
//
// NOTE(review): load_pkgs() acquires and releases the db lock
// internally, so there is a window between loading the db and
// re-acquiring the lock below in which another process could modify
// the file — confirm whether this race is acceptable.
pub fn add_pkg(pkg: &Package) -> bool {
    let root = root();
    let db = root.push(~"db.json");
    let db_lock = root.push(~"db.json.lck");
    let packages = match load_pkgs() {
        result::Ok(packages) => packages,
        result::Err(err) => {
            error(err);
            return false;
        }
    };
    // Take the lock, then replace the db file wholesale.
    wait_for_lock(&db_lock);
    touch(&db_lock);
    os::remove_file(&db);
    match io::mk_file_writer(&db, ~[io::Create]) {
        result::Ok(writer) => {
            writer.write_line(json::to_pretty_str(&json::List(
                _add_pkg(packages, pkg))));
        }
        result::Err(err) => {
            error(fmt!("failed to dump package db: %s", err));
            os::remove_file(&db_lock);
            return false;
        }
    }
    os::remove_file(&db_lock);
    true
}
// Removes `pkg` from the package database, rewriting db.json with the
// list returned by `_rm_pkg`. Returns true on success, false (after
// printing an error) on failure. Mirrors add_pkg().
//
// NOTE(review): as in add_pkg(), the db is loaded before the lock is
// taken below, so a concurrent writer could slip in between — confirm
// whether this race is acceptable.
pub fn remove_pkg(pkg: &Package) -> bool {
    let root = root();
    let db = root.push(~"db.json");
    let db_lock = root.push(~"db.json.lck");
    let packages = match load_pkgs() {
        result::Ok(packages) => packages,
        result::Err(err) => {
            error(err);
            return false;
        }
    };
    // Take the lock, then replace the db file wholesale.
    wait_for_lock(&db_lock);
    touch(&db_lock);
    os::remove_file(&db);
    match io::mk_file_writer(&db, ~[io::Create]) {
        result::Ok(writer) => {
            writer.write_line(json::to_pretty_str(&json::List(
                _rm_pkg(packages, pkg))));
        }
        result::Err(err) => {
            error(fmt!("failed to dump package db: %s", err));
            os::remove_file(&db_lock);
            return false;
        }
    }
    os::remove_file(&db_lock);
    true
}
// Compiles `input` (a file or string) into `dir`, routing the output to
// dir/bin, dir/lib, or dir/test depending on the crate's attributes and
// the `test` flag. Returns true on success, false after printing an
// error when the crate lacks the mandatory link attributes.
//
// The crate is first parsed (cu_parse) so its attributes can be
// inspected; compilation is then resumed (compile_rest) once the output
// paths are known.
pub fn compile_input(sysroot: Option<Path>, input: driver::input, dir: &Path,
                     flags: ~[~str], cfgs: ~[~str], opt: bool, test: bool) -> bool {
    let lib_dir = dir.push(~"lib");
    let bin_dir = dir.push(~"bin");
    let test_dir = dir.push(~"test");
    let binary = os::args()[0];
    let matches = getopts(flags, driver::optgroups()).get();
    let options = @session::options {
        crate_type: session::unknown_crate,
        optimize: if opt { session::Aggressive } else { session::No },
        test: test,
        maybe_sysroot: sysroot,
        .. *driver::build_session_options(binary, &matches, diagnostic::emit)
    };
    // Fold the user-supplied --cfg values into the session config.
    let mut crate_cfg = options.cfg;
    for cfgs.each |&cfg| {
        crate_cfg.push(attr::mk_word_item(@cfg));
    }
    let options = @session::options {
        cfg: vec::append(options.cfg, crate_cfg),
        .. *options
    };
    let sess = driver::build_session(options, diagnostic::emit);
    let cfg = driver::build_configuration(sess, binary, input);
    let mut outputs = driver::build_output_filenames(input, &None, &None,
                                                    sess);
    // Parse only, so the crate attributes can be examined below.
    let (crate, _) = driver::compile_upto(sess, cfg, input, driver::cu_parse,
                                          Some(outputs));

    let mut name = None;
    let mut vers = None;
    let mut uuid = None;
    let mut crate_type = None;

    // Extracts (name, vers, uuid) from the meta items of a #[link(...)]
    // attribute; absent entries stay None.
    fn load_link_attr(mis: ~[@ast::meta_item]) -> (Option<~str>,
                                                   Option<~str>,
                                                   Option<~str>) {
        let mut name = None;
        let mut vers = None;
        let mut uuid = None;
        for mis.each |a| {
            match a.node {
                ast::meta_name_value(v, spanned {node: ast::lit_str(s),
                                                 span: _}) => {
                    match *v {
                        ~"name" => name = Some(*s),
                        ~"vers" => vers = Some(*s),
                        ~"uuid" => uuid = Some(*s),
                        _ => { }
                    }
                }
                _ => {}
            }
        }
        (name, vers, uuid)
    }

    // Scan the crate attributes for crate_type and the link metadata.
    for crate.node.attrs.each |a| {
        match a.node.value.node {
            ast::meta_name_value(v, spanned {node: ast::lit_str(s),
                                             span: _}) => {
                match *v {
                    ~"crate_type" => crate_type = Some(*s),
                    _ => {}
                }
            }
            ast::meta_list(v, mis) => {
                match *v {
                    ~"link" => {
                        let (n, v, u) = load_link_attr(mis);
                        name = n;
                        vers = v;
                        uuid = u;
                    }
                    _ => {}
                }
            }
            _ => {}
        }
    }
    if name.is_none() || vers.is_none() || uuid.is_none() {
        error(~"link attr without (name, vers, uuid) values");
        return false;
    }
    let name = name.get();
    let vers = vers.get();
    let uuid = uuid.get();

    // Decide whether we are producing an executable or a library.
    let is_bin = match crate_type {
        Some(crate_type) => {
            match crate_type {
                ~"bin" => true,
                ~"lib" => false,
                _ => {
                    warn(~"unknown crate_type, falling back to lib");
                    false
                }
            }
        }
        None => {
            warn(~"missing crate_type attr, assuming lib");
            false
        }
    };

    // Route output: test builds to test/, binaries get a hashed name in
    // bin/, libraries go to lib/.
    if test {
        need_dir(&test_dir);
        outputs = driver::build_output_filenames(input, &Some(test_dir),
                                                 &None, sess)
    }
    else if is_bin {
        need_dir(&bin_dir);
        let path = bin_dir.push(fmt!("%s-%s-%s%s", name,
                                     hash(name + uuid + vers),
                                     vers, exe_suffix()));
        outputs = driver::build_output_filenames(input, &None, &Some(path),
                                                 sess);
    } else {
        need_dir(&lib_dir);
        outputs = driver::build_output_filenames(input, &Some(lib_dir),
                                                 &None, sess)
    }
    // Resume compilation from the already-parsed crate.
    driver::compile_rest(sess, cfg, driver::cu_everything,
                         Some(outputs), Some(crate));
    true
}
// Executable file suffix for the host platform: ".exe" on Windows,
// empty elsewhere.
#[cfg(windows)]
pub fn exe_suffix() -> ~str { ~".exe" }

#[cfg(target_os = "linux")]
#[cfg(target_os = "android")]
#[cfg(target_os = "freebsd")]
#[cfg(target_os = "macos")]
pub fn exe_suffix() -> ~str { ~"" }
// FIXME (#4432): Use workcache to only compile when needed
// Convenience wrapper around compile_input() for compiling a crate
// identified by its source-file path.
pub fn compile_crate(sysroot: Option<Path>, crate: &Path, dir: &Path,
                     flags: ~[~str], cfgs: ~[~str], opt: bool,
                     test: bool) -> bool {
    compile_input(sysroot, driver::file_input(*crate), dir, flags, cfgs,
                  opt, test)
}
// Convenience wrapper around compile_input() for compiling a crate
// supplied directly as source text.
pub fn compile_str(sysroot: Option<Path>, code: ~str, dir: &Path,
                   flags: ~[~str], cfgs: ~[~str], opt: bool,
                   test: bool) -> bool {
    compile_input(sysroot, driver::str_input(code), dir, flags, cfgs,
                  opt, test)
}
// Windows stub: linking executables is not implemented, so this always
// reports failure.
#[cfg(windows)]
pub fn link_exe(_src: &Path, _dest: &Path) -> bool {
    /* FIXME (#1768): Investigate how to do this on win32
       Node wraps symlinks by having a .bat,
       but that won't work with minGW. */
    false
}
// Hard-links `src` to `dest` and marks the destination executable
// (rwxr-xr-x). Returns true only if both the link(2) and the chmod(2)
// calls succeed.
#[cfg(target_os = "linux")]
#[cfg(target_os = "android")]
#[cfg(target_os = "freebsd")]
#[cfg(target_os = "macos")]
pub fn link_exe(src: &Path, dest: &Path) -> bool {
    unsafe {
        do str::as_c_str(src.to_str()) |src_buf| {
            do str::as_c_str(dest.to_str()) |dest_buf| {
                // BUG FIX: the mode was previously the decimal literal
                // 755 (== 0o1363), which sets the sticky bit and bogus
                // permission bits; rwxr-xr-x is octal 0o755.
                libc::link(src_buf, dest_buf) == 0 as libc::c_int &&
                    libc::chmod(dest_buf, 0o755) == 0 as libc::c_int
            }
        }
    }
}
// Verifies that every supported rustpkg subcommand name is accepted by
// is_cmd().
#[test]
fn test_is_cmd() {
    assert!(is_cmd(~"build"));
    assert!(is_cmd(~"clean"));
    assert!(is_cmd(~"do"));
    assert!(is_cmd(~"info"));
    assert!(is_cmd(~"install"));
    assert!(is_cmd(~"prefer"));
    assert!(is_cmd(~"test"));
    assert!(is_cmd(~"uninstall"));
    assert!(is_cmd(~"unprefer"));
}
// parse_name() extracts the final dotted segment of a package id and
// rejects ids containing whitespace.
#[test]
fn test_parse_name() {
    assert!(parse_name(~"org.mozilla.servo").get() == ~"servo");
    assert!(parse_name(~"org. mozilla.servo 2131").is_err());
}
|
#[main]
fn main() {
|
<|file_name|>XmlResultParser.UnitTests.cpp<|end_file_name|><|fim▁begin|>/*
DISKSPD
Copyright(c) Microsoft Corporation
All rights reserved.
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
#include "StdAfx.h"
#include "XmlResultParser.UnitTests.h"
#include "Common.h"
#include "xmlresultparser.h"
#include <stdlib.h>
#include <vector><|fim▁hole|>using namespace std;
namespace UnitTests
{
void XmlResultParserUnitTests::Test_ParseResults()
{
Profile profile;
TimeSpan timeSpan;
Target target;
XmlResultParser parser;
Results results;
results.fUseETW = false;
double fTime = 120.0;
results.ullTimeCount = PerfTimer::SecondsToPerfTime(fTime);
// First group has 1 core
SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION systemProcessorInfo = {};
systemProcessorInfo.UserTime.QuadPart = static_cast<LONGLONG>(fTime * 30 * 100000);
systemProcessorInfo.IdleTime.QuadPart = static_cast<LONGLONG>(fTime * 45 * 100000);
systemProcessorInfo.KernelTime.QuadPart = static_cast<LONGLONG>(fTime * 70 * 100000);
results.vSystemProcessorPerfInfo.push_back(systemProcessorInfo);
// Second group has a maximum of 4 cores with 2 active
SYSTEM_PROCESSOR_PERFORMANCE_INFORMATION zeroSystemProcessorInfo = { 0 };
zeroSystemProcessorInfo.UserTime.QuadPart = static_cast<LONGLONG>(fTime * 0 * 100000);
zeroSystemProcessorInfo.IdleTime.QuadPart = static_cast<LONGLONG>(fTime * 100 * 100000);
zeroSystemProcessorInfo.KernelTime.QuadPart = static_cast<LONGLONG>(fTime * 100 * 100000);
results.vSystemProcessorPerfInfo.push_back(zeroSystemProcessorInfo);
results.vSystemProcessorPerfInfo.push_back(zeroSystemProcessorInfo);
results.vSystemProcessorPerfInfo.push_back(zeroSystemProcessorInfo);
results.vSystemProcessorPerfInfo.push_back(zeroSystemProcessorInfo);
// TODO: multiple target cases, full profile/result variations
target.SetPath("testfile1.dat");
target.SetCacheMode(TargetCacheMode::DisableOSCache);
target.SetWriteThroughMode(WriteThroughMode::On);
target.SetThroughputIOPS(1000);
timeSpan.AddTarget(target);
timeSpan.SetCalculateIopsStdDev(true);
TargetResults targetResults;
targetResults.sPath = "testfile1.dat";
targetResults.ullFileSize = 10 * 1024 * 1024;
targetResults.ullReadBytesCount = 4 * 1024 * 1024;
targetResults.ullReadIOCount = 6;
targetResults.ullWriteBytesCount = 2 * 1024 * 1024;
targetResults.ullWriteIOCount = 10;
targetResults.ullBytesCount = targetResults.ullReadBytesCount + targetResults.ullWriteBytesCount;
targetResults.ullIOCount = targetResults.ullReadIOCount + targetResults.ullWriteIOCount;
// TODO: Histogram<float> readLatencyHistogram;
// TODO: Histogram<float> writeLatencyHistogram;
// TODO: IoBucketizer writeBucketizer;
targetResults.readBucketizer.Initialize(1000, timeSpan.GetDuration());
for (size_t i = 0; i < timeSpan.GetDuration(); i++)
{
// add an io halfway through the bucket's time interval
targetResults.readBucketizer.Add(i*1000 + 500, 0);
}
ThreadResults threadResults;
threadResults.vTargetResults.push_back(targetResults);
results.vThreadResults.push_back(threadResults);
vector<Results> vResults;
vResults.push_back(results);
// just throw away the computername and reset the timestamp - for the ut, it's
// as useful (and simpler) to verify statics as anything else. Reconstruct
// processor topo to a fixed example as well.
SystemInformation system;
system.ResetTime();
system.sComputerName.clear();
system.processorTopology._ulProcCount = 5;
system.processorTopology._ulActiveProcCount = 3;
system.processorTopology._vProcessorGroupInformation.clear();
system.processorTopology._vProcessorGroupInformation.emplace_back((BYTE)1, (BYTE)1, (WORD)0, (KAFFINITY)0x1);
system.processorTopology._vProcessorGroupInformation.emplace_back((BYTE)4, (BYTE)2, (WORD)1, (KAFFINITY)0x6);
system.processorTopology._vProcessorNumaInformation.clear();
system.processorTopology._vProcessorNumaInformation.emplace_back((DWORD)0, (WORD)0, (KAFFINITY)0x1);
system.processorTopology._vProcessorNumaInformation.emplace_back((DWORD)1, (WORD)1, (KAFFINITY)0x6);
ProcessorSocketInformation socket;
socket._vProcessorMasks.emplace_back((WORD)0, (KAFFINITY)0x1);
socket._vProcessorMasks.emplace_back((WORD)1, (KAFFINITY)0x6);
system.processorTopology._vProcessorSocketInformation.clear();
system.processorTopology._vProcessorSocketInformation.push_back(socket);
system.processorTopology._vProcessorHyperThreadInformation.clear();
system.processorTopology._vProcessorHyperThreadInformation.emplace_back((WORD)0, (KAFFINITY)0x1);
system.processorTopology._vProcessorHyperThreadInformation.emplace_back((WORD)1, (KAFFINITY)0x6);
// finally, add the timespan to the profile and dump.
profile.AddTimeSpan(timeSpan);
string sResults = parser.ParseResults(profile, system, vResults);
// stringify random text, quoting "'s and adding newline/preserving tabs
// gc some.txt |% { write-host $("`"{0}\n`"" -f $($_ -replace "`"","\`"" -replace "`t","\t")) }
const char *pcszExpectedOutput = \
"<Results>\n"
" <System>\n"
" <ComputerName></ComputerName>\n"
" <Tool>\n"
" <Version>" DISKSPD_NUMERIC_VERSION_STRING "</Version>\n"
" <VersionDate>" DISKSPD_DATE_VERSION_STRING "</VersionDate>\n"
" </Tool>\n"
" <RunTime></RunTime>\n"
" <ProcessorTopology>\n"
" <Group Group=\"0\" MaximumProcessors=\"1\" ActiveProcessors=\"1\" ActiveProcessorMask=\"0x1\"/>\n"
" <Group Group=\"1\" MaximumProcessors=\"4\" ActiveProcessors=\"2\" ActiveProcessorMask=\"0x6\"/>\n"
" <Node Node=\"0\" Group=\"0\" Processors=\"0x1\"/>\n"
" <Node Node=\"1\" Group=\"1\" Processors=\"0x6\"/>\n"
" <Socket>\n"
" <Group Group=\"0\" Processors=\"0x1\"/>\n"
" <Group Group=\"1\" Processors=\"0x6\"/>\n"
" </Socket>\n"
" <HyperThread Group=\"0\" Processors=\"0x1\"/>\n"
" <HyperThread Group=\"1\" Processors=\"0x6\"/>\n"
" </ProcessorTopology>\n"
" </System>\n"
" <Profile>\n"
" <Progress>0</Progress>\n"
" <ResultFormat>text</ResultFormat>\n"
" <Verbose>false</Verbose>\n"
" <TimeSpans>\n"
" <TimeSpan>\n"
" <CompletionRoutines>false</CompletionRoutines>\n"
" <MeasureLatency>false</MeasureLatency>\n"
" <CalculateIopsStdDev>true</CalculateIopsStdDev>\n"
" <DisableAffinity>false</DisableAffinity>\n"
" <Duration>10</Duration>\n"
" <Warmup>5</Warmup>\n"
" <Cooldown>0</Cooldown>\n"
" <ThreadCount>0</ThreadCount>\n"
" <RequestCount>0</RequestCount>\n"
" <IoBucketDuration>1000</IoBucketDuration>\n"
" <RandSeed>0</RandSeed>\n"
" <Targets>\n"
" <Target>\n"
" <Path>testfile1.dat</Path>\n"
" <BlockSize>65536</BlockSize>\n"
" <BaseFileOffset>0</BaseFileOffset>\n"
" <SequentialScan>false</SequentialScan>\n"
" <RandomAccess>false</RandomAccess>\n"
" <TemporaryFile>false</TemporaryFile>\n"
" <UseLargePages>false</UseLargePages>\n"
" <DisableOSCache>true</DisableOSCache>\n"
" <WriteThrough>true</WriteThrough>\n"
" <WriteBufferContent>\n"
" <Pattern>sequential</Pattern>\n"
" </WriteBufferContent>\n"
" <ParallelAsyncIO>false</ParallelAsyncIO>\n"
" <StrideSize>65536</StrideSize>\n"
" <InterlockedSequential>false</InterlockedSequential>\n"
" <ThreadStride>0</ThreadStride>\n"
" <MaxFileSize>0</MaxFileSize>\n"
" <RequestCount>2</RequestCount>\n"
" <WriteRatio>0</WriteRatio>\n"
" <Throughput unit=\"IOPS\">1000</Throughput>\n"
" <ThreadsPerFile>1</ThreadsPerFile>\n"
" <IOPriority>3</IOPriority>\n"
" <Weight>1</Weight>\n"
" </Target>\n"
" </Targets>\n"
" </TimeSpan>\n"
" </TimeSpans>\n"
" </Profile>\n"
" <TimeSpan>\n"
" <TestTimeSeconds>120.00</TestTimeSeconds>\n"
" <ThreadCount>1</ThreadCount>\n"
" <RequestCount>0</RequestCount>\n"
" <ProcCount>3</ProcCount>\n"
" <CpuUtilization>\n"
" <CPU>\n"
" <Group>0</Group>\n"
" <Id>0</Id>\n"
" <UsagePercent>55.00</UsagePercent>\n"
" <UserPercent>30.00</UserPercent>\n"
" <KernelPercent>25.00</KernelPercent>\n"
" <IdlePercent>45.00</IdlePercent>\n"
" </CPU>\n"
" <CPU>\n"
" <Group>1</Group>\n"
" <Id>1</Id>\n"
" <UsagePercent>0.00</UsagePercent>\n"
" <UserPercent>0.00</UserPercent>\n"
" <KernelPercent>0.00</KernelPercent>\n"
" <IdlePercent>100.00</IdlePercent>\n"
" </CPU>\n"
" <CPU>\n"
" <Group>1</Group>\n"
" <Id>2</Id>\n"
" <UsagePercent>0.00</UsagePercent>\n"
" <UserPercent>0.00</UserPercent>\n"
" <KernelPercent>0.00</KernelPercent>\n"
" <IdlePercent>100.00</IdlePercent>\n"
" </CPU>\n"
" <Average>\n"
" <UsagePercent>18.33</UsagePercent>\n"
" <UserPercent>10.00</UserPercent>\n"
" <KernelPercent>8.33</KernelPercent>\n"
" <IdlePercent>81.67</IdlePercent>\n"
" </Average>\n"
" </CpuUtilization>\n"
" <Iops>\n"
" <ReadIopsStdDev>0.000</ReadIopsStdDev>\n"
" <IopsStdDev>0.000</IopsStdDev>\n"
" <Bucket SampleMillisecond=\"1000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"2000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"3000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"4000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"5000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"6000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"7000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"8000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"9000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"10000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" </Iops>\n"
" <Thread>\n"
" <Id>0</Id>\n"
" <Target>\n"
" <Path>testfile1.dat</Path>\n"
" <BytesCount>6291456</BytesCount>\n"
" <FileSize>10485760</FileSize>\n"
" <IOCount>16</IOCount>\n"
" <ReadBytes>4194304</ReadBytes>\n"
" <ReadCount>6</ReadCount>\n"
" <WriteBytes>2097152</WriteBytes>\n"
" <WriteCount>10</WriteCount>\n"
" <Iops>\n"
" <ReadIopsStdDev>0.000</ReadIopsStdDev>\n"
" <IopsStdDev>0.000</IopsStdDev>\n"
" <Bucket SampleMillisecond=\"1000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"2000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"3000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"4000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"5000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"6000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"7000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"8000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"9000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" <Bucket SampleMillisecond=\"10000\" Read=\"1\" Write=\"0\" Total=\"1\" ReadMinLatencyMilliseconds=\"0.000\" ReadMaxLatencyMilliseconds=\"0.000\" ReadAvgLatencyMilliseconds=\"0.000\" ReadLatencyStdDev=\"0.000\" WriteMinLatencyMilliseconds=\"0.000\" WriteMaxLatencyMilliseconds=\"0.000\" WriteAvgLatencyMilliseconds=\"0.000\" WriteLatencyStdDev=\"0.000\"/>\n"
" </Iops>\n"
" </Target>\n"
" </Thread>\n"
" </TimeSpan>\n"
"</Results>";
#if 0
HANDLE h;
DWORD written;
h = CreateFileW(L"g:\\xmlresult-received.txt", GENERIC_WRITE, FILE_SHARE_READ, NULL, CREATE_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL);
WriteFile(h, sResults.c_str(), (DWORD)sResults.length(), &written, NULL);
VERIFY_ARE_EQUAL(sResults.length(), written);
CloseHandle(h);
h = CreateFileW(L"g:\\xmlresult-expected.txt", GENERIC_WRITE, FILE_SHARE_READ, NULL, CREATE_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL);
WriteFile(h, pcszExpectedOutput, (DWORD)strlen(pcszExpectedOutput), &written, NULL);
VERIFY_ARE_EQUAL((DWORD)strlen(pcszExpectedOutput), written);
CloseHandle(h);
printf("--\n%s\n", sResults.c_str());
printf("-------------------------------------------------\n");
printf("--\n%s\n", pcszExpectedOutput);
#endif
VERIFY_ARE_EQUAL(0, strcmp(sResults.c_str(), pcszExpectedOutput));
}
// Verifies that a default-constructed Profile containing one TimeSpan
// with one default Target serializes to the exact expected <Profile>
// XML document.
void XmlResultParserUnitTests::Test_ParseProfile()
{
    Profile profile;
    XmlResultParser parser;
    TimeSpan timeSpan;
    Target target;
    timeSpan.AddTarget(target);
    profile.AddTimeSpan(timeSpan);
    string s = parser.ParseProfile(profile);
    // Golden expected output for an all-defaults profile.
    const char *pcszExpectedOutput = "<Profile>\n"
        "  <Progress>0</Progress>\n"
        "  <ResultFormat>text</ResultFormat>\n"
        "  <Verbose>false</Verbose>\n"
        "  <TimeSpans>\n"
        "    <TimeSpan>\n"
        "      <CompletionRoutines>false</CompletionRoutines>\n"
        "      <MeasureLatency>false</MeasureLatency>\n"
        "      <CalculateIopsStdDev>false</CalculateIopsStdDev>\n"
        "      <DisableAffinity>false</DisableAffinity>\n"
        "      <Duration>10</Duration>\n"
        "      <Warmup>5</Warmup>\n"
        "      <Cooldown>0</Cooldown>\n"
        "      <ThreadCount>0</ThreadCount>\n"
        "      <RequestCount>0</RequestCount>\n"
        "      <IoBucketDuration>1000</IoBucketDuration>\n"
        "      <RandSeed>0</RandSeed>\n"
        "      <Targets>\n"
        "        <Target>\n"
        "          <Path></Path>\n"
        "          <BlockSize>65536</BlockSize>\n"
        "          <BaseFileOffset>0</BaseFileOffset>\n"
        "          <SequentialScan>false</SequentialScan>\n"
        "          <RandomAccess>false</RandomAccess>\n"
        "          <TemporaryFile>false</TemporaryFile>\n"
        "          <UseLargePages>false</UseLargePages>\n"
        "          <WriteBufferContent>\n"
        "            <Pattern>sequential</Pattern>\n"
        "          </WriteBufferContent>\n"
        "          <ParallelAsyncIO>false</ParallelAsyncIO>\n"
        "          <StrideSize>65536</StrideSize>\n"
        "          <InterlockedSequential>false</InterlockedSequential>\n"
        "          <ThreadStride>0</ThreadStride>\n"
        "          <MaxFileSize>0</MaxFileSize>\n"
        "          <RequestCount>2</RequestCount>\n"
        "          <WriteRatio>0</WriteRatio>\n"
        "          <Throughput>0</Throughput>\n"
        "          <ThreadsPerFile>1</ThreadsPerFile>\n"
        "          <IOPriority>3</IOPriority>\n"
        "          <Weight>1</Weight>\n"
        "        </Target>\n"
        "      </Targets>\n"
        "    </TimeSpan>\n"
        "  </TimeSpans>\n"
        "</Profile>\n";
    // Length check first to give a clearer failure, then exact compare.
    //VERIFY_ARE_EQUAL(pcszExpectedOutput, s.c_str());
    VERIFY_ARE_EQUAL(strlen(pcszExpectedOutput), s.length());
    VERIFY_IS_TRUE(!strcmp(pcszExpectedOutput, s.c_str()));
}
// Verifies Target::GetXml() for the three throughput-limit variants:
// no limit, an IOPS limit (emitted with unit="IOPS"), and a
// bytes-per-millisecond limit (emitted without a unit attribute).
void XmlResultParserUnitTests::Test_ParseTargetProfile()
{
    Target target;
    string sResults;
    char pszExpectedOutput[4096];
    int nWritten;
    // Template with two %s holes: the Throughput unit attribute and the
    // Throughput value.
    const char *pcszOutputTemplate = \
        "<Target>\n"
        "  <Path>testfile1.dat</Path>\n"
        "  <BlockSize>65536</BlockSize>\n"
        "  <BaseFileOffset>0</BaseFileOffset>\n"
        "  <SequentialScan>false</SequentialScan>\n"
        "  <RandomAccess>false</RandomAccess>\n"
        "  <TemporaryFile>false</TemporaryFile>\n"
        "  <UseLargePages>false</UseLargePages>\n"
        "  <DisableOSCache>true</DisableOSCache>\n"
        "  <WriteThrough>true</WriteThrough>\n"
        "  <WriteBufferContent>\n"
        "    <Pattern>sequential</Pattern>\n"
        "  </WriteBufferContent>\n"
        "  <ParallelAsyncIO>false</ParallelAsyncIO>\n"
        "  <StrideSize>65536</StrideSize>\n"
        "  <InterlockedSequential>false</InterlockedSequential>\n"
        "  <ThreadStride>0</ThreadStride>\n"
        "  <MaxFileSize>0</MaxFileSize>\n"
        "  <RequestCount>2</RequestCount>\n"
        "  <WriteRatio>0</WriteRatio>\n"
        "  <Throughput%s>%s</Throughput>\n" // 2 param
        "  <ThreadsPerFile>1</ThreadsPerFile>\n"
        "  <IOPriority>3</IOPriority>\n"
        "  <Weight>1</Weight>\n"
        "</Target>\n";
    target.SetPath("testfile1.dat");
    target.SetCacheMode(TargetCacheMode::DisableOSCache);
    target.SetWriteThroughMode(WriteThroughMode::On);
    // Base case - no limit
    nWritten = sprintf_s(pszExpectedOutput, sizeof(pszExpectedOutput),
                         pcszOutputTemplate, "", "0");
    VERIFY_IS_GREATER_THAN(nWritten, 0);
    sResults = target.GetXml(0);
    VERIFY_ARE_EQUAL(sResults, pszExpectedOutput);
    // IOPS - with units
    target.SetThroughputIOPS(1000);
    nWritten = sprintf_s(pszExpectedOutput, sizeof(pszExpectedOutput),
                         pcszOutputTemplate, " unit=\"IOPS\"", "1000");
    VERIFY_IS_GREATER_THAN(nWritten, 0);
    sResults = target.GetXml(0);
    VERIFY_ARE_EQUAL(sResults, pszExpectedOutput);
    // BPMS - not specified with units in output
    target.SetThroughput(1000);
    nWritten = sprintf_s(pszExpectedOutput, sizeof(pszExpectedOutput),
                         pcszOutputTemplate, "", "1000");
    VERIFY_IS_GREATER_THAN(nWritten, 0);
    sResults = target.GetXml(0);
    VERIFY_ARE_EQUAL(sResults, pszExpectedOutput);
}
}<|fim▁end|>
|
using namespace WEX::TestExecution;
using namespace WEX::Logging;
|
<|file_name|>rectification_geometry.py<|end_file_name|><|fim▁begin|>################################################################################
# #
# Copyright (C) 2010,2011,2012,2013,2014, 2015,2016 The ESPResSo project #
# #
# This file is part of ESPResSo. #
# #
# ESPResSo is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# ESPResSo is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
################################################################################
# #
# Active Matter: Rectification System Setup #
# #
################################################################################
from __future__ import print_function
from math import cos, pi, sin
import os
import sys

import numpy as np

import espressomd
from espressomd import assert_features, lb
from espressomd.lbboundaries import LBBoundary
from espressomd.shapes import Cylinder, Wall, HollowCone
assert_features(["LB_GPU","LB_BOUNDARIES_GPU"])
# Setup constants
# Output directory for the generated VTK boundary geometry; created on first run.
outdir = "./RESULTS_RECTIFICATION_GEOMETRY/"
try:
    os.makedirs(outdir)
except OSError:
    # Directory already exists (or cannot be created). A bare `except:` here
    # would also swallow KeyboardInterrupt/SystemExit; catch OSError only.
    print("INFO: Directory \"{}\" exists".format(outdir))
# Setup the box (we pad the diameter to ensure that the LB boundaries
# and therefore the constraints, are away from the edge of the box)
length = 100
diameter = 20
dt = 0.01
# Setup the MD parameters
# BUG FIX: "dieameter" was a typo that raised NameError at runtime; the box
# cross-section is the padded channel diameter in both y and z.
system = espressomd.System(box_l=[length, diameter + 4, diameter + 4])
system.cell_system.skin = 0.1
system.time_step = dt
# Minimum global interaction cutoff; required for the virtual-site constraints.
system.min_global_cut = 0.5
# Setup LB parameters (these are irrelevant here) and fluid
agrid = 1
vskin = 0.1
frict = 20.0
<|fim▁hole|>visco = 1.0
densi = 1.0
lbf = lb.LBFluidGPU(agrid=agrid, dens=densi, visc=visco, tau=dt, fric=frict)
system.actors.add(lbf)
################################################################################
#
# Now we set up the three LB boundaries that form the rectifying geometry.
# The cylinder boundary/constraint is actually already capped, but we put
# in two planes for safety's sake. If you want to create an cylinder of
# 'infinite length' using the periodic boundaries, then the cylinder must
# extend over the boundary.
#
################################################################################
# Setup cylinder
cylinder = LBBoundary(shape=Cylinder(center=[length/2.0, (diameter+4)/2.0, (diameter+4)/2.0],
axis=[1,0,0],
radius=diameter/2.0,
length=length,
direction=-1))
system.lbboundaries.add(cylinder)
# Setup walls
wall = LBBoundary(shape=Wall(dist=2, normal=[1,0,0]))
system.lbboundaries.add(wall)
wall = LBBoundary(shape=Wall(dist=-(length - 2), normal=[-1,0,0]))
system.lbboundaries.add(wall)
# Setup cone
irad = 4.0
angle = pi/4.0
orad = (diameter - irad)/sin(angle)
shift = 0.25*orad*cos(angle)
hollow_cone = LBBoundary(shape=HollowCone(position_x=length/2.0 - shift,
position_y=(diameter+4)/2.0,
position_z=(diameter+4)/2.0,
orientation_x=1,
orientation_y=0,
orientation_z=0,
outer_radius=orad,
inner_radius=irad,
width=2.0,
opening_angle=angle,
direction=1))
system.lbboundaries.add(hollow_cone)
################################################################################
# Output the geometry
lbf.print_vtk_boundary("{}/boundary.vtk".format(outdir))
################################################################################<|fim▁end|>
| |
<|file_name|>ExcludeInternalRepoByRegexTest.java<|end_file_name|><|fim▁begin|>/**
* JBoss, Home of Professional Open Source.
* Copyright 2014-2022 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.indyrepositorymanager;
import org.apache.commons.io.IOUtils;
import org.commonjava.indy.client.core.Indy;
import org.commonjava.indy.client.core.util.UrlUtils;
import org.commonjava.indy.model.core.Group;
import org.commonjava.indy.model.core.RemoteRepository;
import org.commonjava.indy.model.core.StoreKey;
import org.commonjava.indy.model.core.StoreType;
import org.jboss.pnc.enums.RepositoryType;
import org.jboss.pnc.indyrepositorymanager.fixture.TestBuildExecution;
import org.jboss.pnc.model.Artifact;
import org.jboss.pnc.spi.repositorymanager.BuildExecution;
import org.jboss.pnc.spi.repositorymanager.RepositoryManagerResult;
import org.jboss.pnc.spi.repositorymanager.model.RepositorySession;
import org.jboss.pnc.test.category.ContainerTest;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.commonjava.indy.pkg.maven.model.MavenPackageTypeDescriptor.MAVEN_PKG_KEY;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.jboss.pnc.indyrepositorymanager.IndyRepositoryConstants.PUBLIC_GROUP_ID;
import static org.jboss.pnc.indyrepositorymanager.IndyRepositoryConstants.SHARED_IMPORTS_ID;
@Category(ContainerTest.class)
public class ExcludeInternalRepoByRegexTest extends AbstractImportTest {
private static final String INTERNAL = "internal";
private static final String EXTERNAL = "external";
@Override
protected List<String> getIgnoredRepoPatterns() {<|fim▁hole|> List<String> result = new ArrayList<>();
result.add("maven:.+:in.+");
return result;
}
@Test
public void extractBuildArtifacts_ContainsTwoDownloads() throws Exception {
// create a remote repo pointing at our server fixture's 'repo/test' directory.
indy.stores()
.create(
new RemoteRepository(MAVEN_PKG_KEY, INTERNAL, server.formatUrl(INTERNAL)),
"Creating internal test remote repo",
RemoteRepository.class);
indy.stores()
.create(
new RemoteRepository(MAVEN_PKG_KEY, EXTERNAL, server.formatUrl(EXTERNAL)),
"Creating external test remote repo",
RemoteRepository.class);
StoreKey publicKey = new StoreKey(MAVEN_PKG_KEY, StoreType.group, PUBLIC_GROUP_ID);
StoreKey internalKey = new StoreKey(MAVEN_PKG_KEY, StoreType.remote, INTERNAL);
StoreKey externalKey = new StoreKey(MAVEN_PKG_KEY, StoreType.remote, EXTERNAL);
Group publicGroup = indy.stores().load(publicKey, Group.class);
if (publicGroup == null) {
publicGroup = new Group(MAVEN_PKG_KEY, PUBLIC_GROUP_ID, internalKey, externalKey);
indy.stores().create(publicGroup, "creating public group", Group.class);
} else {
publicGroup.setConstituents(Arrays.asList(internalKey, externalKey));
indy.stores().update(publicGroup, "adding test remotes to public group");
}
String internalPath = "org/foo/internal/1.0/internal-1.0.pom";
String externalPath = "org/foo/external/1.1/external-1.1.pom";
String content = "This is a test " + System.currentTimeMillis();
// setup the expectation that the remote repo pointing at this server will request this file...and define its
// content.
server.expect(server.formatUrl(INTERNAL, internalPath), 200, content);
server.expect(server.formatUrl(EXTERNAL, externalPath), 200, content);
// create a dummy non-chained build execution and repo session based on it
BuildExecution execution = new TestBuildExecution();
RepositorySession rc = driver.createBuildRepository(
execution,
accessToken,
accessToken,
RepositoryType.MAVEN,
Collections.emptyMap(),
false);
assertThat(rc, notNullValue());
String baseUrl = rc.getConnectionInfo().getDependencyUrl();
// download the two files via the repo session's dependency URL, which will proxy the test http server
// using the expectations above
assertThat(download(UrlUtils.buildUrl(baseUrl, internalPath)), equalTo(content));
assertThat(download(UrlUtils.buildUrl(baseUrl, externalPath)), equalTo(content));
// extract the build artifacts, which should contain the two imported deps.
// This will also trigger promoting imported artifacts into the shared-imports hosted repo
RepositoryManagerResult repositoryManagerResult = rc.extractBuildArtifacts(true);
List<Artifact> deps = repositoryManagerResult.getDependencies();
System.out.println(deps);
assertThat(deps, notNullValue());
assertThat(deps.size(), equalTo(2));
Indy indy = driver.getIndy(accessToken);
StoreKey sharedImportsKey = new StoreKey(MAVEN_PKG_KEY, StoreType.hosted, SHARED_IMPORTS_ID);
// check that the imports from external locations are available from shared-imports
InputStream stream = indy.content().get(sharedImportsKey, externalPath);
String downloaded = IOUtils.toString(stream, (String) null);
assertThat(downloaded, equalTo(content));
stream.close();
// check that the imports from internal/trusted locations are NOT available from shared-imports
stream = indy.content().get(sharedImportsKey, internalPath);
assertThat(stream, nullValue());
}
}<|fim▁end|>
| |
<|file_name|>angle.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Computed angles.<|fim▁hole|>use euclid::Radians;
use std::{f32, f64, fmt};
use std::f64::consts::PI;
use style_traits::ToCss;
use values::CSSFloat;
use values::animated::{Animate, Procedure};
use values::distance::{ComputeSquaredDistance, SquaredDistance};
/// A computed angle.
#[animate(fallback = "Self::animate_fallback")]
#[cfg_attr(feature = "servo", derive(HeapSizeOf, Deserialize, Serialize))]
#[derive(Animate, Clone, Copy, Debug, PartialEq)]
#[derive(PartialOrd, ToAnimatedZero)]
pub enum Angle {
/// An angle with degree unit.
Degree(CSSFloat),
/// An angle with gradian unit.
Gradian(CSSFloat),
/// An angle with radian unit.
Radian(CSSFloat),
/// An angle with turn unit.
Turn(CSSFloat),
}
impl Angle {
/// Creates a computed `Angle` value from a radian amount.
pub fn from_radians(radians: CSSFloat) -> Self {
Angle::Radian(radians)
}
/// Returns the amount of radians this angle represents.
#[inline]
pub fn radians(&self) -> CSSFloat {
self.radians64().min(f32::MAX as f64).max(f32::MIN as f64) as f32
}
/// Returns the amount of radians this angle represents as a `f64`.
///
/// Gecko stores angles as singles, but does this computation using doubles.
/// See nsCSSValue::GetAngleValueInRadians.
/// This is significant enough to mess up rounding to the nearest
/// quarter-turn for 225 degrees, for example.
#[inline]
pub fn radians64(&self) -> f64 {
const RAD_PER_DEG: f64 = PI / 180.0;
const RAD_PER_GRAD: f64 = PI / 200.0;
const RAD_PER_TURN: f64 = PI * 2.0;
let radians = match *self {
Angle::Degree(val) => val as f64 * RAD_PER_DEG,
Angle::Gradian(val) => val as f64 * RAD_PER_GRAD,
Angle::Turn(val) => val as f64 * RAD_PER_TURN,
Angle::Radian(val) => val as f64,
};
radians.min(f64::MAX).max(f64::MIN)
}
/// Returns an angle that represents a rotation of zero radians.
pub fn zero() -> Self {
Angle::Radian(0.0)
}
/// https://drafts.csswg.org/css-transitions/#animtype-number
#[inline]
fn animate_fallback(&self, other: &Self, procedure: Procedure) -> Result<Self, ()> {
Ok(Angle::from_radians(self.radians().animate(&other.radians(), procedure)?))
}
}
impl ComputeSquaredDistance for Angle {
#[inline]
fn compute_squared_distance(&self, other: &Self) -> Result<SquaredDistance, ()> {
// Use the formula for calculating the distance between angles defined in SVG:
// https://www.w3.org/TR/SVG/animate.html#complexDistances
self.radians64().compute_squared_distance(&other.radians64())
}
}
impl ToCss for Angle {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result
where
W: fmt::Write,
{
let mut write = |value: CSSFloat, unit: &str| {
value.to_css(dest)?;
dest.write_str(unit)
};
match *self {
Angle::Degree(val) => write(val, "deg"),
Angle::Gradian(val) => write(val, "grad"),
Angle::Radian(val) => write(val, "rad"),
Angle::Turn(val) => write(val, "turn"),
}
}
}
impl From<Angle> for Radians<CSSFloat> {
#[inline]
fn from(a: Angle) -> Self {
Radians::new(a.radians())
}
}<|fim▁end|>
| |
<|file_name|>models.py<|end_file_name|><|fim▁begin|><|fim▁hole|> self.name = obj[u'full_name']
    def __getattr__(self, attr):
        # Fallback attribute lookup: delegate unknown attributes to the
        # wrapped object so this class behaves as a transparent proxy.
        # NOTE(review): Python only invokes __getattr__ after normal lookup
        # has already failed, so `attr in self.__dict__` should be False by
        # the time we get here and the first branch looks unreachable —
        # confirm before relying on it.
        if attr in self.__dict__:
            return getattr(self, attr)
        else:
            # Raises AttributeError from the wrapped object if the attribute
            # is missing there as well.
            return getattr(self._wrapped_obj, attr)<|fim▁end|>
|
class Repository(object):
def __init__(self, obj):
self._wrapped_obj = obj
self.language = obj[u'language'] or u'unknown'
|
<|file_name|>pushy_left.js<|end_file_name|><|fim▁begin|>/*! Pushy - v0.9.1 - 2013-9-16
* Pushy is a responsive off-canvas navigation menu using CSS transforms & transitions.
* https://github.com/christophery/pushy/
* by Christopher Yee */
$(window).load(function () {
var e = false;
if (/Android|webOS|iPhone|iPad|iPod|BlackBerry/i.test(navigator.userAgent)) {
e = true
}
if (e == false) {
menuBtn = $('.menu-btn-left') //css classes to toggle the menu
} else {
menuBtn = $('.menu-btn-left, .pushy a') //css classes to toggle the menu
}
$(function() {
var pushy = $('.pushy'), //menu css class
body = $('body'),
container = $('#wrapper'), //container css class
container2 = $('section#home'), //container css class
push = $('.push-left'), //css class to add pushy capability
siteOverlay = $('.site-overlay'), //site overlay
pushyClass = "pushy-left pushy-open-left", //menu position & menu open class
pushyActiveClass = "pushy-active", //css class to toggle site overlay
containerClass = "container-push-left", //container open class
pushClass = "push-push-left", //css class to add pushy capability
//menuBtn = $('.menu-btn-left'), //css classes to toggle the menu
menuSpeed = 200, //jQuery fallback menu speed
menuWidth = pushy.width() + "px"; //jQuery fallback menu width
function togglePushy(){
body.toggleClass(pushyActiveClass); //toggle site overlay
pushy.toggleClass(pushyClass);
container.toggleClass(containerClass);
container2.toggleClass(containerClass);
push.toggleClass(pushClass); //css class to add pushy capability
}
function openPushyFallback(){
body.addClass(pushyActiveClass);
pushy.animate({left: "0px"}, menuSpeed);
container.animate({left: menuWidth}, menuSpeed);
push.animate({left: menuWidth}, menuSpeed); //css class to add pushy capability
}
function closePushyFallback(){
body.removeClass(pushyActiveClass);
pushy.animate({left: "-" + menuWidth}, menuSpeed);
container.animate({left: "0px"}, menuSpeed);
push.animate({left: "0px"}, menuSpeed); //css class to add pushy capability<|fim▁hole|> menuBtn.click(function() {
togglePushy();
});
//close menu when clicking site overlay
siteOverlay.click(function(){
togglePushy();
});
}else{
//jQuery fallback
pushy.css({left: "-" + menuWidth}); //hide menu by default
container.css({"overflow-x": "hidden"}); //fixes IE scrollbar issue
//keep track of menu state (open/close)
var state = true;
//toggle menu
menuBtn.click(function() {
if (state) {
openPushyFallback();
state = false;
} else {
closePushyFallback();
state = true;
}
});
//close menu when clicking site overlay
siteOverlay.click(function(){
if (state) {
openPushyFallback();
state = false;
} else {
closePushyFallback();
state = true;
}
});
}
});
});<|fim▁end|>
|
}
if(Modernizr.csstransforms3d){
//toggle menu
|
<|file_name|>BakedTextRender.java<|end_file_name|><|fim▁begin|>package alexiil.mc.mod.load.baked.render;
import org.lwjgl.opengl.GL11;
import net.minecraft.client.gui.FontRenderer;
import alexiil.mc.mod.load.render.MinecraftDisplayerRenderer;
import buildcraft.lib.expression.api.IExpressionNode.INodeDouble;
import buildcraft.lib.expression.api.IExpressionNode.INodeLong;
import buildcraft.lib.expression.node.value.NodeVariableDouble;
import buildcraft.lib.expression.node.value.NodeVariableObject;
public abstract class BakedTextRender extends BakedRenderPositioned {
protected final NodeVariableObject<String> varText;
protected final INodeDouble scale;
protected final INodeDouble x;
protected final INodeDouble y;
protected final INodeLong colour;
protected final String fontTexture;
private String _text;
private double _scale;
private double _width;
private long _colour;
private double _x, _y;
    /**
     * Creates a text render element driven by expression nodes.
     *
     * @param varText expression variable that will receive the current text
     * @param varWidth expression variable published with the rendered width
     * @param varHeight expression variable published with the rendered height
     * @param scale text scale factor, evaluated each frame
     * @param x horizontal position, evaluated each frame
     * @param y vertical position, evaluated each frame
     * @param colour packed ARGB colour, evaluated each frame
     * @param fontTexture identifier of the font renderer to draw with
     */
    public BakedTextRender(
        NodeVariableObject<String> varText, NodeVariableDouble varWidth, NodeVariableDouble varHeight,
        INodeDouble scale, INodeDouble x, INodeDouble y, INodeLong colour, String fontTexture
    ) {
        super(varWidth, varHeight);
        this.varText = varText;
        this.scale = scale;
        this.x = x;
        this.y = y;
        this.colour = colour;
        this.fontTexture = fontTexture;
    }
    /** Evaluates all expression nodes once and caches the results for {@link #render}. */
    @Override
    public void evaluateVariables(MinecraftDisplayerRenderer renderer) {
        _text = getText();
        _scale = scale.evaluate();
        FontRenderer font = renderer.fontRenderer(fontTexture);
        // Publish the scaled text dimensions back into the expression
        // variables so other elements can position themselves relative to us.
        _width = (int) (font.getStringWidth(_text) * _scale);
        varWidth.value = _width;
        varHeight.value = font.FONT_HEIGHT * _scale;
        _x = x.evaluate();
        _y = y.evaluate();
        _colour = colour.evaluate();
        if ((_colour & 0xFF_00_00_00) == 0) {
            // No alpha specified: treat the colour as fully opaque.
            _colour |= 0xFF_00_00_00;
        } else if ((_colour & 0xFF_00_00_00) == 0x01_00_00_00) {
            // NOTE(review): an alpha of exactly 0x01 is stripped to 0 —
            // presumably a sentinel meaning "explicitly transparent";
            // confirm the intended semantics with the expression format.
            _colour &= 0xFF_FF_FF;
        }
    }
    /** Draws the cached text using the state computed in {@link #evaluateVariables}. */
    @Override
    public void render(MinecraftDisplayerRenderer renderer) {
        FontRenderer font = renderer.fontRenderer(fontTexture);
        GL11.glPushMatrix();
        // Translate first, then scale, so the scale applies around the
        // text's own origin rather than the screen origin.
        GL11.glTranslated(_x, _y, 0);
        GL11.glScaled(_scale, _scale, _scale);
        font.drawString(_text, 0, 0, (int) _colour, false);
        GL11.glPopMatrix();
        // Reset the GL colour so later draw calls are not tinted.
        GL11.glColor4f(1, 1, 1, 1);
    }
<|fim▁hole|> return fontTexture;
}
}<|fim▁end|>
|
public abstract String getText();
@Override
public String getLocation() {
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! Simple library for working with N-dimensional arrays (where N=2)
#![crate_name="arrays"]
#![warn(missing_docs)]
<|fim▁hole|><|fim▁end|>
|
/// Two dimensional array module
pub mod array2d;
// re-export
pub use array2d::Array2D;
|
<|file_name|>tasks.py<|end_file_name|><|fim▁begin|>import logging
import traceback
import sys
from celery import Celery
from .callbacks import STATUS_LOADING_DATA
from .config import get_engine, _set_connection_string
from .loader import FDPLoader
from .callbacks import do_request, STATUS_INITIALIZING, STATUS_FAIL, STATUS_DONE
app = Celery('fdp_loader')
app.config_from_object('babbage_fiscal.celeryconfig')
root = logging.getLogger()
root.setLevel(logging.INFO)
ch = logging.StreamHandler(sys.stderr)
ch.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
root.addHandler(ch)
class ProgressSender(object):
    """Callable that reports loader progress to the registered callback URL.

    Remembers the last reported count and the last error so that repeated
    invocations without an explicit ``count`` reuse the previous value.
    """

    def __init__(self, callback, package):
        self.count = 0
        self.callback = callback
        self.package = package
        self.error = None

    def __call__(self, status=STATUS_LOADING_DATA, count=None, data=None, error=None):
        if error is not None:
            self.error = error
        # Remember an explicit count; otherwise fall back to the last one seen.
        if count is not None:
            self.count = count
        else:
            count = self.count
        logging.info('CALLBACK: %s %s (%s / %s)',
                     '/'.join(self.package.split('/')[4:]),
                     status, count, error)
        do_request(self.callback, self.package, status,
                   progress=count, error=error, data=data)
@app.task
def load_fdp_task(package, callback, connection_string=None):
send_progress = ProgressSender(callback, package)
if connection_string is not None:
_set_connection_string(connection_string)
try:<|fim▁hole|>
except:
exc = traceback.format_exc()
send_progress(status=STATUS_FAIL, error=str(exc))
success = False
print("Failed to load %s: %s" % (package, exc))
if not success:
raise RuntimeError(send_progress.error)<|fim▁end|>
|
logging.info("Starting to load %s" % package)
send_progress(status=STATUS_INITIALIZING)
success = FDPLoader(get_engine()).load_fdp_to_db(package, send_progress)
logging.info("Finished to load %s" % package)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.