prompt | completion |
---|---|
<|file_name|>nlpserver.py<|end_file_name|><｜fim▁begin｜>import socket
import nlp
class NLPServer(object):
def __init__(self, ip, port):
self.sock = socket.socket()
self.sock.bind((ip, port))
self.processor = nlp.NLPProcessor()
print "Established Server"
def listen(self):
import thread
self.sock.listen(5)
print "Started listening at port."
while True:
c = self.sock.accept()
cli_sock, cli_addr = c
<｜fim▁hole｜> thread.start_new_thread(self.manageRequest, (cli_sock,))
except Exception, Argument:
print Argument
self.sock.close()
quit()
def manageRequest(self, cli_sock):
data = cli_sock.recv(8192)
result = self.processor.processQuestion(data)
cli_sock.send(str(result))
cli_sock.close()
# server = NLPServer('127.0.0.1', 3369)
import sys
server = NLPServer(str(sys.argv[1]), int(sys.argv[2]))
server.listen()<｜fim▁end｜> | try:
print 'Got connection from', cli_addr |
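The prompt in this first row is Python 2 throughout (`print` statements, the `thread` module, `except Exception, Argument`). The row is left as-is, but for reference a rough Python 3 port of the completed file could look like the sketch below; the `nlp.NLPProcessor` interface is assumed to match the original.

```python
# Hypothetical Python 3 port of the completed nlpserver.py row above
# (not part of the dataset); assumes nlp.NLPProcessor().processQuestion(text).
import socket
import sys
import threading

import nlp


class NLPServer:
    def __init__(self, ip, port):
        self.sock = socket.socket()
        self.sock.bind((ip, port))
        self.processor = nlp.NLPProcessor()
        print("Established server")

    def listen(self):
        self.sock.listen(5)
        print("Started listening at port.")
        try:
            while True:
                cli_sock, cli_addr = self.sock.accept()
                print("Got connection from", cli_addr)
                threading.Thread(target=self.manage_request, args=(cli_sock,)).start()
        except Exception as exc:
            print(exc)
            self.sock.close()

    def manage_request(self, cli_sock):
        # recv() returns bytes in Python 3; decoding to str is an assumption here.
        data = cli_sock.recv(8192).decode()
        result = self.processor.processQuestion(data)
        cli_sock.send(str(result).encode())
        cli_sock.close()


if __name__ == "__main__":
    server = NLPServer(sys.argv[1], int(sys.argv[2]))
    server.listen()
```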
<|file_name|>serializers.py<|end_file_name|><｜fim▁begin｜>from django.apps import apps
from rest_framework.serializers import SlugRelatedField, ModelSerializer, ValidationError
from rest_framework_gis.serializers import GeoFeatureModelSerializer
from rest_polymorphic.serializers import PolymorphicSerializer
from taxonomy.models import Community, Taxon
from occurrence.models import (
AreaEncounter,
TaxonAreaEncounter,
CommunityAreaEncounter,
Landform,
RockType,
SoilType,
SoilColour,
Drainage,
SurveyMethod,
SoilCondition,
CountAccuracy,
CountMethod,
CountSubject,
PlantCondition,
DetectionMethod,
Confidence,
ReproductiveMaturity,
AnimalHealth,
AnimalSex,
CauseOfDeath,
SecondarySigns,
SampleType,
SampleDestination,
PermitType,
ObservationGroup,
FireHistory,
FileAttachment,
PlantCount,
AssociatedSpecies,
VegetationClassification,
HabitatComposition,
HabitatCondition,
AreaAssessment,
PhysicalSample,
AnimalObservation,
)
class OccurrenceAreaEncounterPolySerializer(GeoFeatureModelSerializer):
"""Serializer for Occurrence AreaEncounter.
"""
class Meta:
model = AreaEncounter
fields = (
"id", "code", "label", "name", "description", "as_html", "source", "source_id", "status",
"encountered_on", "encountered_by", "area_type", "accuracy", "northern_extent",
)
geo_field = "geom"
class OccurrenceAreaEncounterPointSerializer(GeoFeatureModelSerializer):
"""Serializer for Occurrence Area.
"""
class Meta:
model = AreaEncounter
fields = (
"id", "code", "label", "name", "description", "as_html", "source", "source_id", "status",
"encountered_on", "encountered_by", "area_type", "accuracy", "northern_extent",
)
geo_field = "point"
class OccurrenceTaxonAreaEncounterPolyInlineSerializer(GeoFeatureModelSerializer):
"""Serializer for Occurrence TaxonAreaEncounter to be used inline in TaxonSerializer.
"""
class Meta:
exclude = ("taxon", )
model = TaxonAreaEncounter
id_field = "id"
geo_field = "geom"
class OccurrenceTaxonAreaEncounterPolySerializer(GeoFeatureModelSerializer):
"""Serializer for Occurrence TaxonAreaEncounter.
"""
taxon = SlugRelatedField(
queryset=Taxon.objects.all(), slug_field="name_id")
class Meta:
model = TaxonAreaEncounter
fields = (
"taxon",
"id",
"code",
"label",
"name",
"description",
"as_html",
"source",
"source_id",
"status",
"encounter_type",
"encountered_on",
"encountered_by",
"area_type",
"geolocation_capture_method",
"accuracy",
"northern_extent",
"point"
)
id_field = "id"
geo_field = "geom"
class OccurrenceTaxonAreaEncounterPointSerializer(OccurrenceTaxonAreaEncounterPolySerializer):
"""Serializer for Occurrence TaxonAreaEncounter.
"""
class Meta(OccurrenceTaxonAreaEncounterPolySerializer.Meta):
geo_field = "point"
class OccurrenceCommunityAreaEncounterPolyInlineSerializer(GeoFeatureModelSerializer):
"""Serializer for Occurrence CommunityAreaEncounter to be used inline in CommunitySerializer.
"""
class Meta:
model = CommunityAreaEncounter
exclude = ("community", )
id_field = "id"
geo_field = "geom"
class OccurrenceCommunityAreaEncounterPolySerializer(GeoFeatureModelSerializer):
community = SlugRelatedField(
queryset=Community.objects.all(), slug_field="code")
class Meta:
model = CommunityAreaEncounter
fields = (
"community", "id", "code", "label", "name", "description", "as_html", "source", "source_id",
"status", "encountered_on", "encountered_by", "area_type", "accuracy", "northern_extent",
"point",
)
id_field = "id"
geo_field = "geom"
class OccurrenceCommunityAreaEncounterPointSerializer(OccurrenceCommunityAreaEncounterPolySerializer):
community = SlugRelatedField(
queryset=Community.objects.all(), slug_field="code")
class Meta(OccurrenceCommunityAreaEncounterPolySerializer.Meta):
geo_field = "point"
class LandformSerializer(ModelSerializer):
class Meta:
model = Landform
fields = "__all__"
class RockTypeSerializer(ModelSerializer):
class Meta:
model = RockType
fields = "__all__"
class SoilTypeSerializer(ModelSerializer):
class Meta:
model = SoilType
fields = "__all__"
class SoilColourSerializer(ModelSerializer):
class Meta:
model = SoilColour
fields = "__all__"
class DrainageSerializer(ModelSerializer):
class Meta:
model = Drainage
fields = "__all__"
class SurveyMethodSerializer(ModelSerializer):
class Meta:
model = SurveyMethod
fields = "__all__"
class SoilConditionSerializer(ModelSerializer):
class Meta:
model = SoilCondition
fields = "__all__"
class CountAccuracySerializer(ModelSerializer):
class Meta:
model = CountAccuracy
fields = "__all__"
<｜fim▁hole｜> class Meta:
model = CountMethod
fields = "__all__"
class CountSubjectSerializer(ModelSerializer):
class Meta:
model = CountSubject
fields = "__all__"
class PlantConditionSerializer(ModelSerializer):
class Meta:
model = PlantCondition
fields = "__all__"
class DetectionMethodSerializer(ModelSerializer):
class Meta:
model = DetectionMethod
fields = "__all__"
class ConfidenceSerializer(ModelSerializer):
class Meta:
model = Confidence
fields = "__all__"
class ReproductiveMaturitySerializer(ModelSerializer):
class Meta:
model = ReproductiveMaturity
fields = "__all__"
class AnimalHealthSerializer(ModelSerializer):
class Meta:
model = AnimalHealth
fields = "__all__"
class AnimalSexSerializer(ModelSerializer):
class Meta:
model = AnimalSex
fields = "__all__"
class CauseOfDeathSerializer(ModelSerializer):
class Meta:
model = CauseOfDeath
fields = "__all__"
class SecondarySignsSerializer(ModelSerializer):
class Meta:
model = SecondarySigns
fields = "__all__"
class SampleTypeSerializer(ModelSerializer):
class Meta:
model = SampleType
fields = "__all__"
class SampleDestinationSerializer(ModelSerializer):
class Meta:
model = SampleDestination
fields = "__all__"
class PermitTypeSerializer(ModelSerializer):
class Meta:
model = PermitType
fields = "__all__"
class ObservationGroupSerializer(ModelSerializer):
"""The ObservationGroup serializer resolves its polymorphic subclasses.
ObservationGroups have polymorphic subclasses.
A plain DRF serializer would simply return the shared ObservationGroup
fields, but not the individual fields particular to each subclass.
By overriding the `to_representation` method, this serializer inspects
the object for its real instance and calls `to_representation`
on the matching subclass's serializer.
`Credits <http://stackoverflow.com/a/19976203/2813717>`_
`Author <http://stackoverflow.com/users/1514427/michael-van-de-waeter>`_
"""
# as_latex = ReadOnlyField()
encounter = OccurrenceAreaEncounterPointSerializer(read_only=True)
class Meta:
model = ObservationGroup
fields = "__all__"
def validate(self, data):
"""Raise ValidateError on missing AreaEncounter(source, source_id).
"""
if not AreaEncounter.objects.filter(
source=int(self.initial_data["source"]),
source_id=str(self.initial_data["source_id"])
).exists():
raise ValidationError(
"AreaEncounter with source {0} and source_id {1}"
" does not exist, skipping.".format(
int(self.initial_data["source"]),
str(self.initial_data["source_id"])))
return data
def create(self, validated_data):
"""Create one new object, resolve AreaEncounter from source and source_id.
"""
validated_data["encounter"] = AreaEncounter.objects.get(
source=int(self.initial_data["source"]),
source_id=str(self.initial_data["source_id"]))
return self.Meta.model.objects.create(**validated_data)
class FileAttachmentSerializer(ObservationGroupSerializer):
class Meta:
model = FileAttachment
fields = "__all__"
class HabitatCompositionSerializer(ObservationGroupSerializer):
class Meta:
model = HabitatComposition
fields = "__all__"
class HabitatConditionSerializer(ObservationGroupSerializer):
class Meta:
model = HabitatCondition
fields = "__all__"
class AreaAssessmentSerializer(ObservationGroupSerializer):
class Meta:
model = AreaAssessment
fields = "__all__"
class FireHistorySerializer(ObservationGroupSerializer):
class Meta:
model = FireHistory
fields = "__all__"
class VegetationClassificationSerializer(ObservationGroupSerializer):
class Meta:
model = VegetationClassification
fields = "__all__"
class PlantCountSerializer(ObservationGroupSerializer):
count_method = SlugRelatedField(
queryset=CountMethod.objects.all(), slug_field='code', required=False)
count_accuracy = SlugRelatedField(
queryset=CountAccuracy.objects.all(), slug_field='code', required=False)
class Meta:
model = PlantCount
fields = "__all__"
class AssociatedSpeciesSerializer(ObservationGroupSerializer):
class Meta:
model = AssociatedSpecies
fields = "__all__"
class AnimalObservationSerializer(ObservationGroupSerializer):
class Meta:
model = AnimalObservation
fields = "__all__"
def to_internal_value(self, data):
"""Override to_internal_value and check the value of the optional `secondary_signs` key.
This key value might be present in a couple of different ways, which all need to be handled:
- /api/path/?secondary_signs=eggs
- /api/path/?secondary_signs=eggs,fur
- /api/path/?secondary_signs=eggs&secondary_signs=fur
We also need to convert comma-separated strings into a list of PKs for the equivalent
SecondarySign objects, for the purposes of setting M2M relationships.
References:
- https://www.django-rest-framework.org/api-guide/serializers/#read-write-baseserializer-classes
- https://stackoverflow.com/questions/31281938/overriding-django-rest-framework-serializer-is-valid-method
"""
data_update = dict(data)
if 'secondary_signs' in data_update:
# I.e. ['eggs,fur'] instead of ['eggs', 'fur']
if len(data_update['secondary_signs']) == 1:
data_update['secondary_signs'] = data_update[
'secondary_signs'][0].split(',')
# Change secondary_signs from a comma-separated list of strings
# into a list of PKs.
data_update['secondary_signs'] = [
SecondarySigns.objects.get(
code=i).pk for i in data_update['secondary_signs']]
return super(AnimalObservationSerializer, self).to_internal_value(data_update)
return super(AnimalObservationSerializer, self).to_internal_value(data)
def create(self, validated_data):
"""Create new object, resolve AreaEncounter from source and source_id.
"""
validated_data["encounter"] = AreaEncounter.objects.get(
source=int(self.initial_data["source"]),
source_id=str(self.initial_data["source_id"]))
# Pop the secondary_signs list out of validated data so that we can use set() after creating the new object
# because we can't make the M2M link before the object exists.
# At this point, it should be a list of PKs.
secondary_signs = validated_data.pop(
'secondary_signs') if 'secondary_signs' in validated_data else []
obj = self.Meta.model.objects.create(**validated_data)
if secondary_signs:
obj.secondary_signs.add(*secondary_signs)
return obj
class PhysicalSampleSerializer(ObservationGroupSerializer):
sample_type = SlugRelatedField(
queryset=SampleType.objects.all(), slug_field="code", required=False, allow_null=True)
sample_destination = SlugRelatedField(
queryset=SampleDestination.objects.all(), slug_field="code", required=False, allow_null=True)
permit_type = SlugRelatedField(
queryset=PermitType.objects.all(), slug_field='code', required=False, allow_null=True)
class Meta:
model = PhysicalSample
fields = "__all__"
class ObservationGroupPolymorphicSerializer(PolymorphicSerializer):
"""Polymorphic seralizer for ObservationGroup.
https://github.com/apirobot/django-rest-polymorphic
https://django-polymorphic.readthedocs.io/en/stable/third-party.html#django-rest-framework-support
"""
model_serializer_mapping = {
ObservationGroup: ObservationGroupSerializer,
FireHistory: FireHistorySerializer,
FileAttachment: FileAttachmentSerializer,
PlantCount: PlantCountSerializer,
AssociatedSpecies: AssociatedSpeciesSerializer,
VegetationClassification: VegetationClassificationSerializer,
HabitatCondition: HabitatConditionSerializer,
AreaAssessment: AreaAssessmentSerializer,
HabitatComposition: HabitatCompositionSerializer,
PhysicalSample: PhysicalSampleSerializer,
AnimalObservation: AnimalObservationSerializer
}
resource_type_field_name = 'obstype'
def to_internal_value(self, data):
"""Gate checks for data sanity."""
return super(ObservationGroupPolymorphicSerializer, self).to_internal_value(data)<｜fim▁end｜> | class CountMethodSerializer(ModelSerializer):
|
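The `ObservationGroupSerializer` docstring in the row above describes resolving each object's real polymorphic subclass inside `to_representation`, crediting a StackOverflow answer, but no such override appears in the row itself (the polymorphic dispatch is handled by `rest_polymorphic` further down). A minimal sketch of the credited pattern, for illustration only:

```python
# Sketch of the to_representation override the docstring describes; the branch
# list is abbreviated, and this method is not part of the row's actual code.
def to_representation(self, obj):
    """Serialize an ObservationGroup via its real subclass's serializer."""
    if isinstance(obj, PlantCount):
        return PlantCountSerializer(obj, context=self.context).to_representation(obj)
    if isinstance(obj, AnimalObservation):
        return AnimalObservationSerializer(obj, context=self.context).to_representation(obj)
    # ...one isinstance branch per remaining subclass; a guard is needed so the
    # subclass serializers (which inherit this method) do not recurse forever.
    return super().to_representation(obj)
```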
<|file_name|>index.d.ts<|end_file_name|><｜fim▁begin｜>// Type definitions for jui-core 2.0
// Project: https://github.com/juijs/jui-core
// Definitions by: JinHo Park <https://github.com/easylogic>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.3
/// <reference types="jquery"/>
export const jui: JuiStatic;
export interface UtilBase {
/**
* @property browser check browser agent
* @property {Boolean} browser.webkit Webkit browser check
* @property {Boolean} browser.mozilla Mozilla browser check
* @property {Boolean} browser.msie IE browser check
*/
browser: {
webkit: boolean,
mozilla: boolean,
msie: boolean
};
/**
* @property {Boolean} isTouch
* check touch device
*/
isTouch: boolean;
/**
* @method inherit
*
* provides prototype-based inheritance
*
* @param {Function} ctor base Class
* @param {Function} superCtor super Class
*/
inherit(ctor: ((...args: any[]) => any), superCtor: ((...args: any[]) => any)): void;
/**
* @method extend
*
* implements object extend
*
* @param {Object|Function} origin
* @param {Object|Function} add
* @param {Boolean} skip
* @return {Object}
*/
extend(origin: any, add: any, skip: boolean): any;
/**
* convert px to integer
* @param {String or Number} px
* @return {Number}
*/
pxToInt(px: string | number): number;
/**
* @method clone
* implements object clone
* @param {Array/Object} obj object to clone
* @return {Array}
*/
clone(obj: any): any[];
/**
* @method deepClone
* implements object deep clone
* @param obj
* @param emit
* @return {*}
*/
deepClone(obj: any, emit: any): any[];
/**
* @method sort
* use QuickSort
* @param {Array} array
* @return {QuickSort}
*/
sort(array: any[]): UtilQuickSort;
/**
* @method runtime
*
* calculate callback runtime
*
* @param {String} name
* @param {Function} callback
*/
runtime(name: string, callback: ((...args: any[]) => void)): void;
/**
* @method template
* parsing template string
* @param html
* @param obj
*/
template(html: string, obj?: any): ((obj: any) => string) | string;
/**
* @method resize
* add event in window resize event
* @param {Function} callback
* @param {Number} ms delay time
*/
resize(callback: ((...args: any[]) => void), ms: number): void;
/**
* @method index
*
* create an IndexParser object
*
* @return {KeyParser}
*/
index(): UtilKeyParser;
/**
* @method chunk
* split array by length
* @param {Array} arr
* @param {Number} len
* @return {Array}
*/
chunk(arr: any[], len: number): any[];
/**
* @method typeCheck
* check data type
* @param {String} t type string
* @param {Object} v value object
* @return {Boolean}
*/
typeCheck(typeName: string, value: any): boolean;
typeCheckObj(uiObj: any, list: any): void;
/**
* @method dataToCsv
*
* convert data to csv
*
* @param {Array} keys
* @param {Array} dataList
* @param {Number} dataSize
* @return {String} converted csv string
*/
dataToCsv(keys: string[], dataList: any[], dataSize: number): string;
/**
* @method dataToCsv2
*
* @param {Object} options
* @return {String}
*/
dataToCsv2(options: any): string;
/**
* @method fileToCsv
*
* load csv content from a file
*
* @param {File} file
* @param {Function} callback
*/
fileToCsv(fileText: string, callback: ((data: any) => void)): void;
/**
* @method csvToBase64
*
* convert csv to a download link
*
* @param {String} csv
* @return {String} data uri string
*/
csvToBase64(csv: string): string;
/**
* @method csvToData
*
* @param {Array} keys
* @param {String} csv
* @param {Number} csvNumber
* @return {Array}
*/
csvToData(keys: string[], csv: string, csvNumber: number): any[];
/**
* @method getCsvFields
*
* get fields from csv
*
* @param {Array} fields
* @param {Array} csvFields
* @return {Array}
*/
getCsvFields(fields: string[], csvFields: string[]): string[];
/**
* @method svgToBase64
*
* create an svg data uri from an xml string
*
* @param {String} xml
* @return {String} converted data uri link
*/
svgToBase64(xml: string): string;
/**
* @method dateFormat
*
* implements date format function
*
* yyyy : 4 digits year
* yy : 2 digits year
* y : 1 digit year
*
* @param {Date} date
* @param {String} format date format string
* @param {Boolean} utc
* @return {string}
*/
dateFormat(date: Date, format: string, utc?: boolean): string;
/**
* @method createId
*
* create a unique id
*
* @param {String} key prefix string
* @return {String} generated id string
*/
createId(key: string): string;
/**
* @method btoa
*
* Base64 encoding
*
* @return {String}
*/
btoa(input: any): string;
/**
* @method atob
*
* Base64 decoding
*
* @return {Any}
*/
atob(input: string): any;
/**
* implement async loop without blocking ui
*
* @param {Number} total loop count
* @param {Object} context
* @returns {Function}
*/
timeLoop(total: number, context?: any): ((index: number) => void);
/**
* @method loop
*
* create an optimized loop (divided into 5 steps)
*
* @param {Number} total loop count
* @param {Object} [context=null]
* @return {Function} optimized loop callback (receives two parameters: index and groupIndex)
*/
loop(total: number, context?: any): ((index: number, groupIndex: number) => void);
/**
* @method loopArray
*
* create an optimized loop over an array
*
*
* @param {Array} data array to iterate over
* @param {Object} [context=null]
* @return {Function} optimized loop callback (receives three parameters: data, index and groupIndex)
*/
loopArray(data: any[], context?: any): ((data: any, index: number, groupIndex: number) => void);
/**
* @method makeIndex
*
* create a key-based index for an array
*
* a multi-index is created per distinct value
*
* @param {Array} data
* @param {String} keyField
* @return {Object} generated index
*/
makeIndex(data: any[], keyField: string): any;
/**
* @method startsWith
* Check that it matches the starting string search string.
*
* @param {String} string
* @param {String} searchString
* @param {Number} [position=0]
* @return {Number} position
*/
startsWith(str: string, searchString: string, position?: number): number;
/**
* @method endsWith
* Check that it matches the end of a string search string.
*
* @param {String} string
* @param {String} searchString
* @return {Number} position
*/
endsWith(str: string, searchString: string, position?: number): number;
inArray(target: any, list: any[]): number;
trim(text: string): string;
ready: ((...args: any[]) => void);
param(data: any): string;
ajax(data: any): void;
scrollWidth(): number;
}
export interface JuiStatic {
/**
* @method ready
*
* define a callback to run at ready time
*
* @param {Function} callback
*/
ready(depends?: string[], callback?: (...args: any[]) => void): void;
/**
* @method defineUI
*
* define a UI class that users can actually use
*
* @param {String} name the name used for module loading and inheritance
* @param {Array} depends classes or objects defined with 'define' or 'defineUI' may be passed as dependencies
* @param {Function} callback the UI class must be implemented and returned as a class inside this callback
*/
defineUI(name: string, depends: string[], callback: () => void, parent?: string): void;
/**
* @method define
*
* define a class to be used inside UI classes, which can be freely inherited
*
* @param {String} name the name used for module loading and inheritance
* @param {Array} depends classes or objects defined with 'define' or 'defineUI' may be passed as dependencies
* @param {Function} callback the UI class must be implemented and returned as a class inside this callback
* @param {String} parent the class to inherit from
*/
define(name: string, depends: string[], callback: () => void, parent?: string): void;
/**
* @method redefine
*
* define a class to be used inside UI classes, which can be freely inherited
*
* @param {String} name the name used for module loading and inheritance
* @param {Array} depends classes or objects defined with 'define' or 'defineUI' may be passed as dependencies
* @param {Function} callback the UI class must be implemented and returned as a class inside this callback
* @param {String} parent the class to inherit from
*/
redefine(name: string, depends: string[], callback: () => void, parent?: string): void;
/**
* get a class or object defined with define or defineUI.
*
* @param name the name of the class or object to get
* @return {*}
*/
include(name: string): any;
/**
* get all classes and objects defined with define and defineUI.
*
* @return {Array}
*/
includeAll(): any[];
/**
* @method add
* Adds a component object created
*
* @param {Object} ui UI instance
*/
add(uiIns: any): void;
/**
* @method emit
* Generates a custom event to an applicable component
*
* @param {String} key Selector or UI type
* @param {String} type Event type
* @param {Array} args Event arguments
*/
emit(key: string, type: string, args: any[]): void;
/**
* @method get
* Gets a component currently created
*
* @param {Integer/String} key
* @returns {Object/Array} UI instance
*/
get(key: number | string): any;
/**
* @method getAll
* Gets all components currently created
*
* @return {Array} UI instances
*/
getAll(): any[];
/**
* @method create
* It is possible to create a component dynamically after the ready point
*
* @param {String} type UI type
* @param {String/DOMElement} selector
* @param {Object} options
* @return {Object}
*/
create(type: string, selector: any, options?: {}): any;
}
export interface UICollection {
destroy(): void;
}
export interface UICore {
tpl?: any;
event?: any;
root?: any;
/**
* @method emit
* Generates a custom event. The first parameter is the type of a custom event. A function defined as an option or on method is called
*
* @param {String} type Event type
* @param {Function} args Event Arguments
* @return {Mixed}
*/
emit(type: string, args: () => void): any;
/**
* @method on
* A callback function defined as an on method is run when an emit method is called
*
* @param {String} type Event type
* @param {Function} callback
*/
on(type: string, callback: () => void): void;
/**
* @method off
* Removes a custom event of an applicable type or callback handler
*
* @param {String} type Event type
*/
off(type: string): void;
/**
* @method addValid
* Check the parameter type of a UI method and generates an alarm when a wrong value is entered
*
* @param {String} name Method name
* @param {Array} params Parameters
*/
addValid(name: string, params: any[]): void;
/**
* @method callBefore
* Sets a callback function that is called before a UI method is run
*
* @param {String} name Method name
* @param {Function} callback
* @return {Mixed}
*/
callBefore(name: string, callback: () => void): void;
/**
* @method callAfter
* Sets a callback function that is called after a UI method is run
*
* @param {String} name Method name
* @param {Function} callback
* @return {Mixed}
*/
callAfter(name: string, callback: () => void): void;
/**
* @method callDelay
* Sets a callback function and the delay time before/after a UI method is run
*
* @param {String} name Method name
* @param {Function} callback
*/
callDelay(name: string, callObj: () => void): void;
/**
* @method setTpl
* Dynamically defines the template method of a UI
*
* @param {String} name Template name
* @param {String} html Template markup
*/
setTpl(name: string, html: string): void;
/**
* @method setOption
* Dynamically defines the options of a UI
*
* @param {String} key
* @param {Mixed} value
*/
setOption(key: string, value: any): void;
/**
* @method destroy
* Removes all events set in a UI obejct and the DOM element
*
*/
destroy(): void;
}
//noinspection TypeScriptUnresolvedVariable
export interface UIEvent extends UICore {
root?: any;
/**
* @method find
* Get the child element of the root element
*
* @param {String/HTMLElement} Selector
* @returns {*|jQuery}
*/
find(selector: any): JQuery;
}
export class ColorScale {
domain(color: string, color2: string): this;
ticks(max: number): this;
}
export interface UtilColor {
/**
* @method format
*
* convert color to format string
*
* // hex
* color.format({ r : 255, g : 255, b : 255 }, 'hex') // #FFFFFF
*
* // rgb
* color.format({ r : 255, g : 255, b : 255 }, 'rgb') // rgba(255, 255, 255, 0.5);
*
* // rgba
* color.format({ r : 255, g : 255, b : 255, a : 0.5 }, 'rgb') // rgba(255, 255, 255, 0.5);
*
* @param {Object} obj obj has r, g, b and a attributes
* @param {"hex"/"rgb"} type format string type
* @returns {*}
*/
format(obj: any, type: string): string;
/**
* @method scale
*
* get color scale
*
* var c = color.scale().domain('#FF0000', '#00FF00');
*
* // get middle color
* c(0.5) == #808000
*
* // get middle color list
* c.ticks(20); // return array , [startColor, ......, endColor ]
*
* @returns {func} scale function
*/
scale(): ColorScale;
/**
* @method map
*
* create color map
*
* var colorList = color.map(['#352a87', '#0f5cdd', '#00b5a6', '#ffc337', '#fdff00'], count)
*
* @param {Array} color_list
* @param {Number} count a divide number
* @returns {Array} converted color list
*/
map(color_list: string[], count: number): string[];
/**
* @method rgb
*
* parse string to rgb color
*
* color.rgb("#FF0000") === { r : 255, g : 0, b : 0 }
*
* color.rgb("rgb(255, 0, 0)") == { r : 255, g : 0, b : }
*
* @param {String} str color string
* @returns {Object} rgb object
*/
rgb(str: string): any;
/**
* @method HSVtoRGB
*
* convert hsv to rgb
*
* color.HSVtoRGB(0,0,1) === #FFFFFF === { r : 255, g : 255, b : 255 }
*
* @param {Number} H hue color number (min : 0, max : 360)
* @param {Number} S Saturation number (min : 0, max : 1)
* @param {Number} V Value number (min : 0, max : 1 )
* @returns {Object}
*/
HSVtoRGB(H: number, S: number, V: number): any;
/**
* @method RGBtoHSV
*
* convert rgb to hsv
*
* color.RGBtoHSV(0, 0, 255) === { h : 240, s : 1, v : 1 } === '#0000FF'
*
* @param {Number} R red color value
* @param {Number} G green color value
* @param {Number} B blue color value
* @return {Object} hsv color code
*/
RGBtoHSV(R: number, G: number, B: number): any;
/**
* @method lighten
*
* convert an rgb color to a lighter shade
*
* @param {String} color RGB color code
* @param {Number} rate lightening rate
* @return {String}
*/
lighten(color: string, rate: number): string;
/**
* @method darken
*
* convert an rgb color to a darker shade
*
* @param {String} color RGB color code
* @param {Number} rate darkening rate
* @return {String}
*/
darken(color: string, rate: number): string;
}
export interface UtilBase64 {
encode(input: string): string;
decode(input: string): string;
}
export interface UtilKeyParser {
/**
* @method isIndexDepth
*
* @param {String} index
* @return {Boolean}
*/
isIndexDepth(index: string): boolean;
/**
* @method getIndexList
*
* @param {String} index
* @return {Array}
*/
getIndexList(index: string): any[];
/**
* @method changeIndex
*
*
* @param {String} index
* @param {String} targetIndex
* @param {String} rootIndex
* @return {String}
*/
changeIndex(index: string, targetIndex: string, rootIndex: string): string;
/**
* @method getNextIndex
*
* @param {String} index
* @return {String}
*/
getNextIndex(index: string): string;
/**
* @method getParentIndex
*
*
* @param {String} index
* @returns {*}
*/
getParentIndex(index: string): string;
}
export interface UtilMath {
/**
* @method rotate
*
* 2d rotate
*
* @param {Number} x
* @param {Number} y
* @param {Number} radian radian to rotate by
* @return {Object}
* @return {Number} return.x transformed x
* @return {Number} return.y transformed y
*
*/
rotate(x: number, y: number, radian: number): any;
resize(maxWidth: number, maxHeight: number, objectWidth: number, objectHeight: number): any;
<｜fim▁hole｜> * convert degree to radian
*
* @param {Number} degree
* @return {Number} radian
*/
radian(degree: number): number;
/**
* @method degree
*
* convert radian to degree
*
* @param {Number} radian
* @return {Number} degree
*/
degree(radian: number): number;
angle(x1: number, y1: number, x2: number, y2: number): number;
/**
* @method interpolateNumber
*
* build a callback for computing interpolated values between a and b
*
* @param {Number} a first value
* @param {Number} b second value
* @return {Function}
*/
interpolateNumber(a: number, b: number): () => void;
// compute the interpolated value, rounded
interpolateRound(a: number, b: number): () => void;
round(num: number, fixed: number): number;
plus(a: number, b: number): number;
minus(a: number, b: number): number;
multi(a: number, b: number): number;
div(a: number, b: number): number;
remain(a: number, b: number): number;
/**
* automatically compute nice values for a given interval
*
* @param {Object} min
* @param {Object} max
* @param {Object} ticks
* @param {Object} isNice
*/
nice(min: number, max: number, ticks: number, isNice: boolean): any;
matrix<T>(a: T[], b: T[]): T[];
matrix3d<T>(a: T[], b: T[]): T[];
inverseMatrix3d(a: any[]): any[];
}
export interface UtilScaleOrdinal extends Function {
(x: number): number;
domain(values: any[]): UtilScaleOrdinal;
range(values: any[]): UtilScaleOrdinal;
rangePoints(interval: number, padding?: number): () => void;
rangeBands(interval: number, padding?: number, outerPadding?: number): () => void;
invert(x: number): number;
}
export type UtilQuickSort = (array: number[], isClone: boolean) => this;<｜fim▁end｜> | /**
* @method radian
* |
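The `UtilColor.scale` docs in the row above give concrete anchors: a `#FF0000`→`#00FF00` scale returns `#808000` at `0.5`, and `ticks(n)` returns `[startColor, ..., endColor]`. A rough Python sketch of the linear RGB interpolation those examples imply (helper names are mine, not jui-core's):

```python
# Two-color scale as described by the UtilColor.scale docs: linear
# interpolation in RGB space. Illustrative only; names are not jui-core's.
def make_scale(start_hex: str, end_hex: str):
    def rgb(h):  # "#RRGGBB" -> (r, g, b)
        h = h.lstrip("#")
        return tuple(int(h[i:i + 2], 16) for i in (0, 2, 4))

    a, b = rgb(start_hex), rgb(end_hex)

    def scale(t: float) -> str:
        # round-half-even puts the 0.5 midpoint of 0..255 at 0x80
        return "#%02X%02X%02X" % tuple(
            round(a[i] + (b[i] - a[i]) * t) for i in range(3)
        )

    scale.ticks = lambda n: [scale(i / (n - 1)) for i in range(n)]
    return scale

c = make_scale("#FF0000", "#00FF00")
assert c(0.5) == "#808000"        # the documented midpoint
assert len(c.ticks(20)) == 20     # [startColor, ..., endColor]
```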
<|file_name|>completionsImport_notFromIndex.ts<|end_file_name|><｜fim▁begin｜>/// <reference path="fourslash.ts" />
// @Filename: /src/a.ts
////export const x = 0;
// @Filename: /src/index.ts
////export { x } from "./a";
// @Filename: /0.ts
////x/*0*/
// @Filename: /src/1.ts
////x/*1*/
// @Filename: /src/inner/2.ts
////x/*2*/
for (const [marker, sourceDisplay] of [["0", "./src"], ["1", "./a"], ["2", "../a"]]) {
verify.completions({
marker,
includes: {
name: "x",
source: "/src/a",
sourceDisplay,
text: "const x: 0",
kind: "const",
kindModifiers: "export",
hasAction: true,
sortText: completion.SortText.AutoImportSuggestions
<｜fim▁hole｜> verify.applyCodeActionFromCompletion(marker, {
name: "x",
source: "/src/a",
description: `Add import from "${sourceDisplay}"`,
newFileContent: `import { x } from "${sourceDisplay}";\n\nx`,
});
}<｜fim▁end｜> | },
preferences: { includeCompletionsForModuleExports: true },
});
|
<|file_name|>WebDriverExtensionsTests.java<|end_file_name|><｜fim▁begin｜>package io.ravitej.selenium.extensions.tests;
import io.ravitej.selenium.extensions.WebDriverExtensions;
import org.apache.commons.lang3.tuple.Pair;
import org.assertj.core.api.SoftAssertions;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.openqa.selenium.*;
import org.openqa.selenium.WebDriver.TargetLocator;
import java.io.File;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* Tests for WebDriverExtensions.
*
* @author Ravitej Aluru
*/
public class WebDriverExtensionsTests {
private WebDriver mockWebDriver = mock(WebDriver.class, Mockito.withSettings().extraInterfaces(TakesScreenshot.class));
private TargetLocator mockTargetLocator = mock(TargetLocator.class);
private Alert mockAlert = mock(Alert.class);
private File mockFile = mock(File.class);
@Before
public void beforeTest() {
when(mockWebDriver.switchTo()).thenReturn(mockTargetLocator);
when(mockTargetLocator.alert()).thenReturn(mockAlert);
}
@Test<｜fim▁hole｜>
final String alertText = "some mockAlert text";
when(mockAlert.getText()).thenReturn(alertText);
Pair<Boolean, String> p = WebDriverExtensions.isAlertDisplayed(mockWebDriver);
assertThat(p).extracting("key", "value").containsExactly(true, alertText);
}
@Test
public void is_alert_displayed_should_return_false_and_empty_string_if_alert_is_not_displayed() {
when(mockAlert.getText()).thenThrow(NoAlertPresentException.class);
Pair<Boolean, String> p = WebDriverExtensions.isAlertDisplayed(mockWebDriver);
assertThat(p).extracting("key", "value").containsExactly(false, "");
}
@Test
public void take_screenshot_should_return_file_if_successful_on_first_attempt() {
when(((TakesScreenshot) mockWebDriver).getScreenshotAs(OutputType.FILE)).thenReturn(mockFile);
File screenshot = WebDriverExtensions.takeScreenshot(mockWebDriver);
SoftAssertions.assertSoftly(softly -> {
softly.assertThat(screenshot)
.isInstanceOf(File.class)
.isNotNull();
});
}
@Test
public void take_screenshot_should_return_file_if_successful_on_second_attempt() {
when(((TakesScreenshot) mockWebDriver).getScreenshotAs(OutputType.FILE))
.thenThrow(new WebDriverException())
.thenReturn(mockFile);
File screenshot = WebDriverExtensions.takeScreenshot(mockWebDriver);
SoftAssertions.assertSoftly(softly -> {
softly.assertThat(screenshot)
.isInstanceOf(File.class)
.isNotNull();
});
}
@Test
public void take_screenshot_should_throw_webdriverexception_if_not_successful_on_second_attempt() {
when(((TakesScreenshot) mockWebDriver).getScreenshotAs(OutputType.FILE))
.thenThrow(new WebDriverException());
assertThatExceptionOfType(WebDriverException.class).isThrownBy(() -> {
WebDriverExtensions.takeScreenshot(mockWebDriver);
});
}
@Test
public void take_screenshot_should_wait_for_given_amount_of_time_if_not_successful_on_first_attempt() {
when(((TakesScreenshot) mockWebDriver).getScreenshotAs(OutputType.FILE))
.thenThrow(new WebDriverException())
.thenReturn(mockFile);
final long waitTime = 1000;
long before = System.currentTimeMillis();
WebDriverExtensions.takeScreenshot(mockWebDriver, waitTime);
long after = System.currentTimeMillis();
//checking that the method took at least 1 second to execute since the waitTime is 1 second.
assertThat(after - before).isGreaterThanOrEqualTo(waitTime);
}
}<｜fim▁end｜> | |
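The screenshot tests in the Java row above pin down a retry contract for `WebDriverExtensions.takeScreenshot`: attempt once, on `WebDriverException` wait the given time and retry once, and let a second failure propagate. The shape of that helper, sketched generically in Python (the driver object and `get_screenshot_as_png` in the usage line are assumptions, not part of the row):

```python
import time


def retry_once(action, wait_ms=1000):
    """Run action(); on failure, sleep wait_ms milliseconds and retry exactly
    once. A failure on the second attempt propagates to the caller."""
    try:
        return action()
    except Exception:
        time.sleep(wait_ms / 1000.0)
        return action()  # a second failure is not caught


# e.g. png = retry_once(lambda: driver.get_screenshot_as_png(), wait_ms=1000)
```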
<|file_name|>system.hpp<|end_file_name|><｜fim▁begin｜>//
// Author: Vladimir Migashko <[email protected]>, (C) 2007
//
// Copyright: See COPYING file that comes with this distribution
//
#ifndef FAS_SYSTEM_SYSTEM_H
#define FAS_SYSTEM_SYSTEM_H
#include <fas/unp.h>
#include <fas/system/types.hpp>
#include <errno.h>
#include <string>
#include <cstdlib>
#include <stdexcept>
namespace fas{ namespace system {
inline int error_code()
{
#ifdef WIN32
return ::GetLastError();
#else
return errno;
#endif
}
inline std::string strerror(int lasterror)
{
#ifdef WIN32
LPVOID lpMsgBuf;
FormatMessageA(
FORMAT_MESSAGE_ALLOCATE_BUFFER |
FORMAT_MESSAGE_FROM_SYSTEM |
FORMAT_MESSAGE_IGNORE_INSERTS,
NULL,
lasterror,
0, // Default language
(LPSTR) &lpMsgBuf,
0,
NULL
);
char errbuf[256] = {0};
_snprintf(errbuf, 255, "%d - %s", lasterror, reinterpret_cast<char*>(lpMsgBuf));
LocalFree( lpMsgBuf );
return errbuf;
/*
std::string message;
message = reinterpret_cast<char*>(lpMsgBuf);
LocalFree( lpMsgBuf );
return message;
*/
#else
return ::strerror(lasterror);
#endif
}
struct system_error
: public std::runtime_error
{
explicit system_error(const std::string& msg)
: std::runtime_error(msg + strerror(error_code()))
{
}
};
inline ssize_t read(const descriptor_t& d, char* buff, size_t s)
{
#ifndef WIN32
ssize_t ret = ::read(d, buff, s);
#else
ssize_t ret = ::_read(d, buff, static_cast<unsigned int>(s));
#endif
if ( ret < 0 )
{
#ifndef WIN32
int err = error_code();
if (err==EWOULDBLOCK || err==EAGAIN || err == EINTR )
return ret;
else if (err==EBADF || err == EFAULT || err==EINVAL ||
err == ENOMEM || err == ENOTCONN || err == ENOTSOCK)
throw system_error("fas::system::read/_read: ");
else
return 0;
#endif
throw system_error("fas::system::read/_read: ");
}
return ret;
}
inline ssize_t write(const descriptor_t& d, const char* buff, size_t s)
{
#ifndef WIN32
ssize_t ret = ::write(d, buff, s);
#else
ssize_t ret = ::_write(d, buff, static_cast<unsigned int>(s) );
#endif
if ( ret < 0 )
{
#ifndef WIN32
int err = error_code();
if ( err==EWOULDBLOCK || err==EAGAIN || err == EINTR )return ret;
else if (err==EBADF || err == EFAULT || err==EINVAL ||
err == ENOMEM || err == ENOTCONN || err == ENOTSOCK)
{
throw system_error("fas::system::_write/write: ");
}
else
return 0;
#endif
throw system_error("fas::system::write/_write: ");
}
return ret;
}
inline void close(const descriptor_t& d)
{
#ifdef WIN32 <｜fim▁hole｜> if ( -1 == ::close(d))
#endif
throw system_error("fas::system::close: ");;
}
inline void sleep(int ms)
{
#ifdef WIN32
::Sleep(ms);
#else
timeval tv={ms/1000, (ms%1000)*1000};
::select(0, 0, 0, 0, &tv);
#endif
}
inline int dumpable()
{
#if HAVE_SYS_PRCTL_H
rlimit core = { RLIM_INFINITY, RLIM_INFINITY };
return ::prctl(PR_SET_DUMPABLE, 1) || ::setrlimit(RLIMIT_CORE, &core) ? -1 : 0;
#endif
return -1;
}
inline void daemonize()
{
#ifdef WIN32
return ;
#else
int null = ::open("/dev/null", O_RDWR);
if(-1 == null)
{
::perror("/dev/null");
::exit(EXIT_FAILURE);
}
switch(::fork())
{
case 0:
::setsid();
::umask(0);
::close(0);
::close(1);
::close(2);
::dup2(null, 0);
::dup2(null, 1);
::dup2(null, 2);
break;
case -1:
::perror("fork()");
::exit(EXIT_FAILURE);
default:
::exit(EXIT_SUCCESS);
}
#endif
}
}}
#endif // FAS_SYSTEM_SYSTEM_H<｜fim▁end｜> | if ( -1 == ::_close(d))
#else |
<|file_name|>cases.py<|end_file_name|><｜fim▁begin｜>import contextlib
import functools
import socket
import ssl
import tempfile
import time
from typing import (
Any,
Callable,
Container,
Dict,
Generic,
Hashable,
Iterable,
Iterator,
List,
Optional,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
)
import pytest
from . import basecontrollers, client_mock, patma, runner, tls
from .authentication import Authentication
from .basecontrollers import TestCaseControllerConfig
from .exceptions import ConnectionClosed
from .irc_utils import capabilities, message_parser
from .irc_utils.message_parser import Message
from .irc_utils.sasl import sasl_plain_blob
from .numerics import (
ERR_BADCHANNELKEY,
ERR_BANNEDFROMCHAN,
ERR_INVITEONLYCHAN,
ERR_NEEDREGGEDNICK,
ERR_NOSUCHCHANNEL,
ERR_TOOMANYCHANNELS,
RPL_HELLO,
)
from .specifications import Capabilities, IsupportTokens, Specifications
__tracebackhide__ = True # Hide from pytest tracebacks on test failure.
CHANNEL_JOIN_FAIL_NUMERICS = frozenset(
[
ERR_NOSUCHCHANNEL,
ERR_TOOMANYCHANNELS,
ERR_BADCHANNELKEY,
ERR_INVITEONLYCHAN,
ERR_BANNEDFROMCHAN,
ERR_NEEDREGGEDNICK,
]
)
# typevar for decorators
TCallable = TypeVar("TCallable", bound=Callable)
TClass = TypeVar("TClass", bound=Type)
# typevar for the client name used by tests (usually int or str)
TClientName = TypeVar("TClientName", bound=Union[Hashable, int])
TController = TypeVar("TController", bound=basecontrollers._BaseController)
# general-purpose typevar
T = TypeVar("T")
class ChannelJoinException(Exception):
def __init__(self, code: str, params: List[str]):
super().__init__(f"Failed to join channel ({code}): {params}")
self.code = code
self.params = params
class _IrcTestCase(Generic[TController]):
"""Base class for test cases.
It implements various `assert*` methods that look like unittest's,
but they are actually based on the `assert` statement, so derived classes are
pytest-style rather than unittest-style.
It also calls setUp() and tearDown() like unittest would."""
# Will be set by __main__.py
controllerClass: Type[TController]
show_io: bool
controller: TController
__new__ = object.__new__ # pytest won't collect Generic subclasses otherwise
@staticmethod
def config() -> TestCaseControllerConfig:
"""Some configuration to pass to the controllers.
For example, Oragono only enables its MySQL support if
config()["chathistory"]=True.
"""
return TestCaseControllerConfig()
def setUp(self) -> None:
if self.controllerClass is not None:
self.controller = self.controllerClass(self.config())
if self.show_io:
print("---- new test ----")
def tearDown(self) -> None:
pass
def setup_method(self, method: Callable) -> None:
self.setUp()
def teardown_method(self, method: Callable) -> None:
self.tearDown()
def assertMessageMatch(self, msg: Message, **kwargs: Any) -> None:
"""Helper for partially comparing a message.
Takes the message as first arguments, and comparisons to be made
as keyword arguments.
Uses patma.match_list on the params argument.
"""
error = self.messageDiffers(msg, **kwargs)
if error:
raise AssertionError(error)
def messageEqual(self, msg: Message, **kwargs: Any) -> bool:
"""Boolean negation of `messageDiffers` (returns a boolean,
not an optional string)."""
return not self.messageDiffers(msg, **kwargs)
def messageDiffers(
self,
msg: Message,
params: Optional[List[Union[str, None, patma.Operator]]] = None,
target: Optional[str] = None,
tags: Optional[
Dict[Union[str, patma.Operator], Union[str, patma.Operator, None]]
] = None,
nick: Optional[str] = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
**kwargs: Any,
) -> Optional[str]:
"""Returns an error message if the message doesn't match the given arguments,
or None if it matches."""
for (key, value) in kwargs.items():
if getattr(msg, key) != value:
fail_msg = (
fail_msg or "expected {param} to be {expects}, got {got}: {msg}"
)
return fail_msg.format(
*extra_format,
got=getattr(msg, key),
expects=value,
param=key,
msg=msg,
)
if params and not patma.match_list(list(msg.params), params):
fail_msg = (
fail_msg or "expected params to match {expects}, got {got}: {msg}"
)
return fail_msg.format(
*extra_format, got=msg.params, expects=params, msg=msg
)
if tags and not patma.match_dict(msg.tags, tags):
fail_msg = fail_msg or "expected tags to match {expects}, got {got}: {msg}"
return fail_msg.format(*extra_format, got=msg.tags, expects=tags, msg=msg)
if nick:
got_nick = msg.prefix.split("!")[0] if msg.prefix else None
if nick != got_nick:
fail_msg = (
fail_msg
or "expected nick to be {expects}, got {got} instead: {msg}"
)
return fail_msg.format(
*extra_format, got=got_nick, expects=nick, param=key, msg=msg
)
return None
def assertIn(
self,
member: Any,
container: Union[Iterable[Any], Container[Any]],
msg: Optional[str] = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
) -> None:
if fail_msg:
msg = fail_msg.format(*extra_format, item=member, list=container, msg=msg)
assert member in container, msg # type: ignore
def assertNotIn(
self,
member: Any,
container: Union[Iterable[Any], Container[Any]],
msg: Optional[str] = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
) -> None:
if fail_msg:
msg = fail_msg.format(*extra_format, item=member, list=container, msg=msg)
assert member not in container, msg # type: ignore
def assertEqual(
self,
got: T,
expects: T,
msg: Any = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
) -> None:
if fail_msg:
msg = fail_msg.format(*extra_format, got=got, expects=expects, msg=msg)
assert got == expects, msg
def assertNotEqual(
self,
got: T,
expects: T,
msg: Any = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
) -> None:
if fail_msg:
msg = fail_msg.format(*extra_format, got=got, expects=expects, msg=msg)
assert got != expects, msg
def assertGreater(
self,
got: T,
expects: T,
msg: Any = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
) -> None:
if fail_msg:
msg = fail_msg.format(*extra_format, got=got, expects=expects, msg=msg)
assert got > expects, msg  # type: ignore
def assertGreaterEqual(
self,
got: T,
expects: T,
msg: Any = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
) -> None:
if fail_msg:
msg = fail_msg.format(*extra_format, got=got, expects=expects, msg=msg)
assert got >= expects, msg # type: ignore
def assertLess(
self,
got: T,
expects: T,
msg: Any = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
) -> None:
if fail_msg:
msg = fail_msg.format(*extra_format, got=got, expects=expects, msg=msg)
assert got < expects, msg # type: ignore
def assertLessEqual(
self,
got: T,
expects: T,
msg: Any = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
) -> None:
if fail_msg:
msg = fail_msg.format(*extra_format, got=got, expects=expects, msg=msg)
assert got <= expects, msg # type: ignore
def assertTrue(
self,
got: T,
msg: Any = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
) -> None:
if fail_msg:
msg = fail_msg.format(*extra_format, got=got, msg=msg)
assert got, msg
def assertFalse(
self,
got: T,
msg: Any = None,
fail_msg: Optional[str] = None,
extra_format: Tuple = (),
) -> None:
if fail_msg:
msg = fail_msg.format(*extra_format, got=got, msg=msg)
assert not got, msg
@contextlib.contextmanager
def assertRaises(self, exception: Type[Exception]) -> Iterator[None]:
with pytest.raises(exception):
yield
class BaseClientTestCase(_IrcTestCase[basecontrollers.BaseClientController]):
"""Basic class for client tests. Handles spawning a client and exchanging
messages with it."""
conn: Optional[socket.socket]
nick: Optional[str] = None
user: Optional[List[str]] = None
server: socket.socket
protocol_version: Optional[int] = None
acked_capabilities: Optional[Set[str]] = None
__new__ = object.__new__ # pytest won't collect Generic[] subclasses otherwise
def setUp(self) -> None:
super().setUp()
self.conn = None
self._setUpServer()
def tearDown(self) -> None:
if self.conn:
try:
self.conn.sendall(b"QUIT :end of test.")
except BrokenPipeError:
pass # client already disconnected
except OSError:
pass # the conn was already closed by the test, or something
self.controller.kill()
if self.conn:
self.conn_file.close()
self.conn.close()
self.server.close()
def _setUpServer(self) -> None:
"""Creates the server and make it listen."""
self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server.bind(("", 0)) # Bind any free port
self.server.listen(1)
# Used to check if the client is alive from time to time
self.server.settimeout(1)
def acceptClient(
self,
tls_cert: Optional[str] = None,
tls_key: Optional[str] = None,
server: Optional[socket.socket] = None,
) -> None:
"""Make the server accept a client connection. Blocking."""
server = server or self.server
assert server
# Wait for the client to connect
while True:
try:
(self.conn, addr) = server.accept()
except socket.timeout:
self.controller.check_is_alive()
else:
break
if tls_cert is None and tls_key is None:
pass
else:
assert (
tls_cert and tls_key
), "tls_cert must be provided if and only if tls_key is."
with tempfile.NamedTemporaryFile(
"at"
) as certfile, tempfile.NamedTemporaryFile("at") as keyfile:
certfile.write(tls_cert)
certfile.seek(0)
keyfile.write(tls_key)
keyfile.seek(0)
context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
context.load_cert_chain(certfile=certfile.name, keyfile=keyfile.name)
self.conn = context.wrap_socket(self.conn, server_side=True)
self.conn_file = self.conn.makefile(newline="\r\n", encoding="utf8")
def getLine(self) -> str:
line = self.conn_file.readline()
if self.show_io:
print("{:.3f} C: {}".format(time.time(), line.strip()))
return line
def getMessage(
self, *args: Any, filter_pred: Optional[Callable[[Message], bool]] = None
) -> Message:
"""Gets a message and returns it. If a filter predicate is given,
fetches messages until the predicate returns a False on a message,
and returns this message."""
while True:
line = self.getLine(*args)
if not line:
raise ConnectionClosed()
msg = message_parser.parse_message(line)
if not filter_pred or filter_pred(msg):
return msg
def sendLine(self, line: str) -> None:
assert self.conn
self.conn.sendall(line.encode())
if not line.endswith("\r\n"):
self.conn.sendall(b"\r\n")
if self.show_io:
print("{:.3f} S: {}".format(time.time(), line.strip()))
def readCapLs(
self, auth: Optional[Authentication] = None, tls_config: tls.TlsConfig = None<｜fim▁hole｜>
(hostname, port) = self.server.getsockname()
self.controller.run(
hostname=hostname, port=port, auth=auth, tls_config=tls_config
)
self.acceptClient()
m = self.getMessage()
self.assertEqual(m.command, "CAP", "First message is not CAP LS.")
if m.params == ["LS"]:
self.protocol_version = 301
elif m.params == ["LS", "302"]:
self.protocol_version = 302
elif m.params == ["END"]:
self.protocol_version = None
else:
raise AssertionError("Unknown CAP params: {}".format(m.params))
def userNickPredicate(self, msg: Message) -> bool:
"""Predicate to be used with getMessage to handle NICK/USER
transparently."""
if msg.command == "NICK":
self.assertEqual(len(msg.params), 1, msg=msg)
self.nick = msg.params[0]
return False
elif msg.command == "USER":
self.assertEqual(len(msg.params), 4, msg=msg)
self.user = msg.params
return False
else:
return True
def negotiateCapabilities(
self,
caps: List[str],
cap_ls: bool = True,
auth: Optional[Authentication] = None,
) -> Optional[Message]:
"""Performes a complete capability negociation process, without
ending it, so the caller can continue the negociation."""
if cap_ls:
self.readCapLs(auth)
if not self.protocol_version:
# No negotiation.
return None
self.sendLine("CAP * LS :{}".format(" ".join(caps)))
capability_names = frozenset(capabilities.cap_list_to_dict(caps))
self.acked_capabilities = set()
while True:
m = self.getMessage(filter_pred=self.userNickPredicate)
if m.command != "CAP":
return m
self.assertGreater(len(m.params), 0, m)
if m.params[0] == "REQ":
self.assertEqual(len(m.params), 2, m)
requested = frozenset(m.params[1].split())
if not requested.issubset(capability_names):
self.sendLine(
"CAP {} NAK :{}".format(self.nick or "*", m.params[1][0:100])
)
else:
self.sendLine(
"CAP {} ACK :{}".format(self.nick or "*", m.params[1])
)
self.acked_capabilities.update(requested) # type: ignore
else:
return m
class BaseServerTestCase(
_IrcTestCase[basecontrollers.BaseServerController], Generic[TClientName]
):
"""Basic class for server tests. Handles spawning a server and exchanging
messages with it."""
show_io: bool # set by conftest.py
password: Optional[str] = None
ssl = False
valid_metadata_keys: Set[str] = set()
invalid_metadata_keys: Set[str] = set()
server_support: Optional[Dict[str, Optional[str]]]
run_services = False
__new__ = object.__new__ # pytest won't collect Generic[] subclasses otherwise
def setUp(self) -> None:
super().setUp()
self.server_support = None
(self.hostname, self.port) = self.controller.get_hostname_and_port()
self.controller.run(
self.hostname,
self.port,
password=self.password,
valid_metadata_keys=self.valid_metadata_keys,
invalid_metadata_keys=self.invalid_metadata_keys,
ssl=self.ssl,
run_services=self.run_services,
)
self.clients: Dict[TClientName, client_mock.ClientMock] = {}
def tearDown(self) -> None:
self.controller.kill()
for client in list(self.clients):
self.removeClient(client)
def addClient(
self, name: Optional[TClientName] = None, show_io: Optional[bool] = None
) -> TClientName:
"""Connects a client to the server and adds it to the dict.
If 'name' is not given, uses the lowest unused non-negative integer."""
self.controller.wait_for_port()
if self.run_services:
self.controller.wait_for_services()
if not name:
new_name: int = (
max(
[int(name) for name in self.clients if isinstance(name, (int, str))]
+ [0]
)
+ 1
)
name = cast(TClientName, new_name)
show_io = show_io if show_io is not None else self.show_io
self.clients[name] = client_mock.ClientMock(name=name, show_io=show_io)
self.clients[name].connect(self.hostname, self.port)
return name
def removeClient(self, name: TClientName) -> None:
"""Disconnects the client, without QUIT."""
assert name in self.clients
self.clients[name].disconnect()
del self.clients[name]
def getMessages(self, client: TClientName, **kwargs: Any) -> List[Message]:
return self.clients[client].getMessages(**kwargs)
def getMessage(self, client: TClientName, **kwargs: Any) -> Message:
return self.clients[client].getMessage(**kwargs)
def getRegistrationMessage(self, client: TClientName) -> Message:
"""Filter notices, do not send pings."""
while True:
msg = self.getMessage(
client,
synchronize=False,
filter_pred=lambda m: m.command not in ("NOTICE", RPL_HELLO),
)
if msg.command == "PING":
# Hi Unreal
self.sendLine(client, "PONG :" + msg.params[0])
else:
return msg
def sendLine(self, client: TClientName, line: Union[str, bytes]) -> None:
return self.clients[client].sendLine(line)
def getCapLs(
self, client: TClientName, as_list: bool = False
) -> Union[List[str], Dict[str, Optional[str]]]:
"""Waits for a CAP LS block, parses all CAP LS messages, and return
the dict capabilities, with their values.
If as_list is given, returns the raw list (ie. key/value not split)
in case the order matters (but it shouldn't)."""
caps = []
while True:
m = self.getRegistrationMessage(client)
self.assertMessageMatch(m, command="CAP")
self.assertEqual(m.params[1], "LS", fail_msg="Expected CAP * LS, got {got}")
if m.params[2] == "*":
caps.extend(m.params[3].split())
else:
caps.extend(m.params[2].split())
if not as_list:
return capabilities.cap_list_to_dict(caps)
return caps
def assertDisconnected(self, client: TClientName) -> None:
try:
self.getMessages(client)
self.getMessages(client)
except (socket.error, ConnectionClosed):
del self.clients[client]
return
else:
raise AssertionError("Client not disconnected.")
def skipToWelcome(self, client: TClientName) -> List[Message]:
"""Skip to the point where we are registered
<https://tools.ietf.org/html/rfc2812#section-3.1>
"""
result = []
while True:
m = self.getMessage(client, synchronize=False)
result.append(m)
if m.command == "001":
return result
elif m.command == "PING":
# Hi, Unreal
self.sendLine(client, "PONG :" + m.params[0])
def requestCapabilities(
self,
client: TClientName,
capabilities: List[str],
skip_if_cap_nak: bool = False,
) -> None:
self.sendLine(client, "CAP REQ :{}".format(" ".join(capabilities)))
m = self.getRegistrationMessage(client)
try:
self.assertMessageMatch(
m, command="CAP", fail_msg="Expected CAP ACK, got: {msg}"
)
self.assertEqual(
m.params[1], "ACK", m, fail_msg="Expected CAP ACK, got: {msg}"
)
except AssertionError:
if skip_if_cap_nak:
raise runner.CapabilityNotSupported(" or ".join(capabilities))
else:
raise
def connectClient(
self,
nick: str,
name: TClientName = None,
capabilities: Optional[List[str]] = None,
skip_if_cap_nak: bool = False,
show_io: Optional[bool] = None,
account: Optional[str] = None,
password: Optional[str] = None,
ident: str = "username",
) -> List[Message]:
"""Connections a new client, does the cap negotiation
and connection registration, and skips to the end of the MOTD.
Returns the list of all messages received after registration,
just like `skipToWelcome`."""
client = self.addClient(name, show_io=show_io)
if capabilities:
self.sendLine(client, "CAP LS 302")
m = self.getRegistrationMessage(client)
self.requestCapabilities(client, capabilities, skip_if_cap_nak)
if password is not None:
if "sasl" not in (capabilities or ()):
raise ValueError("Used 'password' option without sasl capbilitiy")
self.sendLine(client, "AUTHENTICATE PLAIN")
m = self.getRegistrationMessage(client)
self.assertMessageMatch(m, command="AUTHENTICATE", params=["+"])
self.sendLine(client, sasl_plain_blob(account or nick, password))
m = self.getRegistrationMessage(client)
self.assertIn(m.command, ["900", "903"], str(m))
self.sendLine(client, "NICK {}".format(nick))
self.sendLine(client, "USER %s * * :Realname" % (ident,))
if capabilities:
self.sendLine(client, "CAP END")
welcome = self.skipToWelcome(client)
self.sendLine(client, "PING foo")
# Skip all that happy welcoming stuff
self.server_support = {}
while True:
m = self.getMessage(client)
if m.command == "PONG":
break
elif m.command == "005":
for param in m.params[1:-1]:
if "=" in param:
(key, value) = param.split("=")
self.server_support[key] = value
else:
self.server_support[param] = None
welcome.append(m)
return welcome
def joinClient(self, client: TClientName, channel: str) -> None:
self.sendLine(client, "JOIN {}".format(channel))
received = {m.command for m in self.getMessages(client)}
self.assertIn(
"366",
received,
fail_msg="Join to {} failed, {item} is not in the set of "
"received responses: {list}",
extra_format=(channel,),
)
def joinChannel(self, client: TClientName, channel: str) -> None:
self.sendLine(client, "JOIN {}".format(channel))
# wait until we see them join the channel
joined = False
while not joined:
for msg in self.getMessages(client):
if (
msg.command == "JOIN"
and 0 < len(msg.params)
and msg.params[0].lower() == channel.lower()
):
joined = True
break
elif msg.command in CHANNEL_JOIN_FAIL_NUMERICS:
raise ChannelJoinException(msg.command, msg.params)
_TSelf = TypeVar("_TSelf", bound="OptionalityHelper")
_TReturn = TypeVar("_TReturn")
class OptionalityHelper(Generic[TController]):
controller: TController
def checkSaslSupport(self) -> None:
if self.controller.supported_sasl_mechanisms:
return
raise runner.NotImplementedByController("SASL")
def checkMechanismSupport(self, mechanism: str) -> None:
if mechanism in self.controller.supported_sasl_mechanisms:
return
raise runner.OptionalSaslMechanismNotSupported(mechanism)
@staticmethod
def skipUnlessHasMechanism(
mech: str,
) -> Callable[[Callable[[_TSelf], _TReturn]], Callable[[_TSelf], _TReturn]]:
# Just a function returning a function that takes functions and
# returns functions, nothing to see here.
# If Python didn't have such an awful syntax for callables, it would be:
# str -> ((TSelf -> TReturn) -> (TSelf -> TReturn))
def decorator(f: Callable[[_TSelf], _TReturn]) -> Callable[[_TSelf], _TReturn]:
@functools.wraps(f)
def newf(self: _TSelf) -> _TReturn:
self.checkMechanismSupport(mech)
return f(self)
return newf
return decorator
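# A hedged usage sketch -- the test class and SASL mechanism name are
# illustrative, not taken from this file:
#
#     class SaslScramTestCase(OptionalityHelper, ...):
#         @OptionalityHelper.skipUnlessHasMechanism("SCRAM-SHA-256")
#         def testScram(self):
#             ...  # only runs when the controller supports the mechanism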
@staticmethod
def skipUnlessHasSasl(
f: Callable[[_TSelf], _TReturn]
) -> Callable[[_TSelf], _TReturn]:
@functools.wraps(f)
def newf(self: _TSelf) -> _TReturn:
self.checkSaslSupport()
return f(self)
return newf
def mark_services(cls: TClass) -> TClass:
cls.run_services = True
return pytest.mark.services(cls) # type: ignore
def mark_specifications(
*specifications_str: str, deprecated: bool = False, strict: bool = False
) -> Callable[[TCallable], TCallable]:
specifications = frozenset(
Specifications.from_name(s) if isinstance(s, str) else s
for s in specifications_str
)
if None in specifications:
raise ValueError("Invalid set of specifications: {}".format(specifications))
def decorator(f: TCallable) -> TCallable:
for specification in specifications:
f = getattr(pytest.mark, specification.value)(f)
if strict:
f = pytest.mark.strict(f)
if deprecated:
f = pytest.mark.deprecated(f)
return f
return decorator
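# A hedged usage sketch; the specification name is illustrative and must
# name a member of the `Specifications` enum. `mark_capabilities` and
# `mark_isupport` below follow the same pattern:
#
#     @mark_specifications("RFC1459", strict=True)
#     def testNickInUse(self):
#         ...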
def mark_capabilities(
*capabilities_str: str, deprecated: bool = False, strict: bool = False
) -> Callable[[TCallable], TCallable]:
capabilities = frozenset(
Capabilities.from_name(c) if isinstance(c, str) else c for c in capabilities_str
)
if None in capabilities:
raise ValueError("Invalid set of capabilities: {}".format(capabilities))
def decorator(f: TCallable) -> TCallable:
for capability in capabilities:
f = getattr(pytest.mark, capability.value)(f)
# Support for any capability implies IRCv3
f = pytest.mark.IRCv3(f)
return f
return decorator
def mark_isupport(
*tokens_str: str, deprecated: bool = False, strict: bool = False
) -> Callable[[TCallable], TCallable]:
tokens = frozenset(
IsupportTokens.from_name(c) if isinstance(c, str) else c for c in tokens_str
)
if None in tokens:
raise ValueError("Invalid set of isupport tokens: {}".format(tokens))
def decorator(f: TCallable) -> TCallable:
for token in tokens:
f = getattr(pytest.mark, token.value)(f)
return f
return decorator<๏ฝfimโend๏ฝ> | |
<|file_name|>services-mechanism.py<|end_file_name|><๏ฝfimโbegin๏ฝ>#!/usr/bin/python
import sys
sys.path.append('/usr/share/mandriva/')
<๏ฝfimโhole๏ฝ>from mcc2.backends.services.service import Services
if __name__ == '__main__':
Services.main()<๏ฝfimโend๏ฝ> | |
<|file_name|>job.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>// FIXME: stolen from cargo. Should be extracted into a common crate.
//! Job management (mostly for windows)
//!
//! Most of the time when you're running cargo you expect Ctrl-C to actually
//! terminate the entire tree of processes in play, not just the one at the top
//! (cargo). This currently works "by default" on Unix platforms because Ctrl-C
//! actually sends a signal to the *process group* rather than the parent
//! process, so everything will get torn down. On Windows, however, this does
//! not happen and Ctrl-C just kills cargo.
//!
//! To achieve the same semantics on Windows we use Job Objects to ensure that
//! all processes die at the same time. Job objects have a mode of operation
//! where when all handles to the object are closed it causes all child
//! processes associated with the object to be terminated immediately.
//! Conveniently whenever a process in the job object spawns a new process the
//! child will be associated with the job object as well. This means if we add
//! ourselves to the job object we create then everything will get torn down!
#![allow(clippy::missing_safety_doc)]
pub use self::imp::Setup;
pub fn setup() -> Option<Setup> {
unsafe { imp::setup() }
}
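// A hedged usage sketch, assuming this file is mounted as a `job` module:
// keep the returned guard alive for the whole process, so the job-object
// handle is not closed (killing the children) prematurely.
//
//     fn main() {
//         let _guard = job::setup(); // `None` when unsupported; that's fine
//         // ... spawn and wait on child processes ...
//     }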
#[cfg(unix)]
mod imp {
pub type Setup = ();
pub unsafe fn setup() -> Option<()> {
Some(())
}
}
#[cfg(windows)]
mod imp {
use std::io;
use std::mem;
use std::ptr;
use winapi::shared::minwindef::*;
use winapi::um::handleapi::*;
use winapi::um::jobapi2::*;
use winapi::um::processthreadsapi::*;
use winapi::um::winnt::HANDLE;
use winapi::um::winnt::*;
pub struct Setup {
job: Handle,
}
pub struct Handle {
inner: HANDLE,
}
fn last_err() -> io::Error {
io::Error::last_os_error()
}
pub unsafe fn setup() -> Option<Setup> {
// Creates a new job object for us to use and then adds ourselves to it.
// Note that all errors are basically ignored in this function,
// intentionally. Job objects are "relatively new" in Windows,
// particularly the ability to support nested job objects. Older
// Windows installs don't support this ability. We probably don't want
// to force Cargo to abort in this situation or force others to *not*
// use job objects, so we instead just ignore errors and assume that
// we're otherwise part of someone else's job object in this case.
let job = CreateJobObjectW(ptr::null_mut(), ptr::null());
if job.is_null() {
return None;
}
let job = Handle { inner: job };
// Indicate that when all handles to the job object are gone that all
// process in the object should be killed. Note that this includes our
// entire process tree by default because we've added ourselves and
// our children will reside in the job once we spawn a process.
let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION = mem::zeroed();
info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
let r = SetInformationJobObject(
job.inner,
JobObjectExtendedLimitInformation,
&mut info as *mut _ as LPVOID,
mem::size_of_val(&info) as DWORD,
);
if r == 0 {
return None;
}
// Assign our process to this job object, meaning that our children will
// now live or die based on our existence.
let me = GetCurrentProcess();
let r = AssignProcessToJobObject(job.inner, me);
if r == 0 {
return None;
}
Some(Setup { job })
}
impl Drop for Setup {
fn drop(&mut self) {
// On normal exits (not ctrl-c), we don't want to kill any child
// processes. The destructor here configures our job object to
// *not* kill everything on close, then closes the job object.
unsafe {
let mut info: JOBOBJECT_EXTENDED_LIMIT_INFORMATION = mem::zeroed();
let r = SetInformationJobObject(
self.job.inner,
JobObjectExtendedLimitInformation,
&mut info as *mut _ as LPVOID,
mem::size_of_val(&info) as DWORD,
);
if r == 0 {
info!("failed to configure job object to defaults: {}", last_err());
}
}
}
}
impl Drop for Handle {
fn drop(&mut self) {
unsafe {<๏ฝfimโhole๏ฝ> CloseHandle(self.inner);
}
}
}
}<๏ฝfimโend๏ฝ> | |
<|file_name|>game_base.cpp<|end_file_name|><๏ฝfimโbegin๏ฝ>/*
* Copyright (C) 2015, 2016 Simon Boyé
*
* This file is part of lair.
*
* lair is free software: you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* lair is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with lair. If not, see <http://www.gnu.org/licenses/>.
*
*/
#include <iostream>
#include <functional>
#include <SDL_mixer.h>
#include <lair/ldl/read.h>
#include <lair/ldl/write.h>
#include <lair/utils/game_state.h>
#include "lair/utils/game_base.h"
#define DEFAULT_LOG_LEVEL LogLevel::Debug
namespace lair {
// This function is declared in an auto-generated file
void registerLairResource(MemoryFileSystem& fs);
GameConfigBase::GameConfigBase()
: fullscreen(false)
, vSync (true)
, soundVolume(.25)
, musicVolume(.35)
, windowSize(1280, 720)
, debugGl(false)
{
}
void GameConfigBase::setFromArgs(int& argc, char** argv) {
int last = 1;
for(int argi = 1; argi < argc; ++ argi) {
char* arg = argv[argi];
if(std::strcmp(arg, "-f") == 0 || std::strcmp(arg, "--fullscreen") == 0) {
fullscreen = true;
}
else if(std::strcmp(arg, "--no-fullscreen") == 0) {
fullscreen = false;
}
else if(std::strcmp(arg, "--vsync") == 0) {
vSync = true;
}
else if(std::strcmp(arg, "--no-vsync") == 0) {
vSync = false;
}
else if(std::strcmp(arg, "--debug-gl") == 0) {
debugGl = true;
}
else {
argv[last++] = arg;
}
}
argv[last] = 0;
argc = last;
}
const PropertyList& GameConfigBase::properties() const {
return GameConfigBase::staticProperties();
}
const PropertyList& GameConfigBase::staticProperties() {
static PropertyList props;
if(!props.nProperties()) {
props.addProperty("fullscreen",
&GameConfigBase::fullscreen);
props.addProperty("vsync",
&GameConfigBase::vSync);
props.addProperty("sound_volume",
&GameConfigBase::soundVolume);
props.addProperty("music_volume",
&GameConfigBase::musicVolume);
props.addProperty("window_size",
&GameConfigBase::windowSize);
props.addProperty("debug_gl",
&GameConfigBase::debugGl);
}
return props;
}
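// A hedged sketch of the config.ldl this produces, assuming the LDL
// writer emits one `key = value` pair per property above (the exact
// value syntax, e.g. for window_size, depends on the LDL format):
//
//     fullscreen = false
//     vsync = true
//     sound_volume = 0.25
//     music_volume = 0.35
//     window_size = Vector(1280, 720)
//     debug_gl = false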
GameBase::GameBase(int argc, char** argv)<๏ฝfimโhole๏ฝ> _stdlogBackend(std::clog, true),
//#endif
_fileBackend(_logStream, false),
_logger("game", &_mlogger, DEFAULT_LOG_LEVEL),
_argc(argc),
_argv(argv),
_dataPath(),
_sys(nullptr),
_window(nullptr),
_renderModule(nullptr),
_renderer(nullptr),
_audio(nullptr),
_assets(nullptr),
_loader(nullptr),
_nextState(nullptr),
_currentState(nullptr) {
//#ifndef _WIN32
_mlogger.addBackend(&_stdlogBackend);
//#endif
_mlogger.addBackend(&_fileBackend);
dbgLogger.setMaster(&_mlogger);
dbgLogger.setDefaultModuleName("DEBUG");
dbgLogger.setLevel(LogLevel::Debug);
}
GameBase::~GameBase() {
log().log("Stopping game...");
}
Path GameBase::dataPath() const {
return _dataPath;
}
FileSystemSP GameBase::fileSystem() const {
return _fileSystem;
}
PropertySerializer& GameBase::serializer() {
return _serializer;
}
SysModule* GameBase::sys() {
return _sys.get();
}
Window* GameBase::window() {
return _window;
}
AssetManager* GameBase::assets() {
return _assets.get();
}
LoaderManager* GameBase::loader() {
return _loader.get();
}
RenderModule* GameBase::renderModule() {
return _renderModule.get();
}
Renderer* GameBase::renderer() {
return _renderer;
}
AudioModule* GameBase::audio() {
return _audio.get();
}
int GameBase::argc() const {
return _argc;
}
char** GameBase::argv() const {
return _argv;
}
void GameBase::initialize(GameConfigBase& config) {
log().log("Starting game...");
_sys.reset(new SysModule(&_mlogger, LogLevel::Log));
_sys->initialize();
_sys->onQuit = std::bind(&GameBase::quit, this);
// Config
Path configLogicPath = "config.ldl";
Path configRealPath = sys()->basePath() / configLogicPath;
Path::IStream in(configRealPath.native().c_str());
if(in.good()) {
log().info("Read config \"", configRealPath, "\"...");
Variant confVar;
parseLdl(confVar, configRealPath, configLogicPath, log());
_serializer.read(config, confVar, log());
}
else {
log().info("Config not found, create one: \"", configLogicPath, "\"...");
ErrorList errors;
Path::OStream out(configRealPath.native().c_str());
if(out.good()) {
LdlWriter writer(&out, configLogicPath.utf8String(), &errors);
Variant confVar;
_serializer.write(confVar, config, log());
ldlWrite(writer, confVar);
errors.log(log());
}
else {
log().warning("Unable to create the config file \"", configRealPath, "\".");
}
}
// Command-line arguments override config files.
config.setFromArgs(_argc, _argv);
// Data path
const char* envPath = std::getenv("LAIR_DATA_DIR");
if(envPath) {
_dataPath = envPath;
} else {
_dataPath = _sys->basePath() / "assets";
}
log().log("Data directory: ", _dataPath);
// File systems
_realFs.reset(new RealFileSystem(_dataPath));
_memoryFs.reset(new MemoryFileSystem);
registerLairResource(*_memoryFs);
_fileSystem = std::make_shared<FileSystem>();
_fileSystem->addFileSystem("/", _realFs);
_fileSystem->addFileSystem("/", _memoryFs);
// Assets
_assets = make_unique(new AssetManager);
_loader = make_unique(new LoaderManager(_assets.get(), 1, _logger));
_loader->setFileSystem(fileSystem());
// Window
_window = _sys->createWindow("Lair", config.windowSize(0), config.windowSize(1));
_window->setFullscreen(config.fullscreen);
_sys->setVSyncEnabled(config.vSync);
log().info("VSync: ", _sys->isVSyncEnabled()? "on": "off");
// Render
_renderModule.reset(new RenderModule(sys(), assets(), &_mlogger, DEFAULT_LOG_LEVEL));
_renderModule->initialize(config.debugGl);
_renderer = _renderModule->createRenderer();
// Audio
_audio.reset(new AudioModule(&_mlogger));
_audio->initialize();
_audio->setMusicVolume(config.musicVolume);
}
void GameBase::shutdown() {
_assets->releaseAll();
_audio->shutdown();
_renderModule->shutdown();
_renderModule.reset();
_window->destroy();
_sys->shutdown();
_sys.reset();
}
void GameBase::setNextState(GameState* state) {
if(_nextState) {
log().warning("Setting next state while an other state is enqueued.");
}
_nextState = state;
}
void GameBase::run() {
while(_nextState) {
_currentState = _nextState;
_nextState = nullptr;
_currentState->run();
}
}
void GameBase::quit() {
if(_currentState) {
_currentState->quit();
}
}
}<๏ฝfimโend๏ฝ> | : _mlogger(),
_logStream("log.txt"),
//#ifndef _WIN32 |
<|file_name|>general.js<|end_file_name|><๏ฝfimโbegin๏ฝ>/*
* @author Jaime P. Bravo
*/
$(document).ready(function () {
// General form helpers
function sendDataWithAjax(type, url, data) {
return $.ajax({
type: type,
url: url,
data: data,
dataType: 'json',
beforeSend: function () {
console.log('Sending...');
},
success: function (response) {
},
error: function (jqXHR, textStatus, errorThrown) {
console.log('Status code: ' + jqXHR.status);
console.log('AJAX error: ' + textStatus);
console.log('Error type: ' + errorThrown);
}
});
}<๏ฝfimโhole๏ฝ> e.preventDefault();
var url = $(this).attr('action');
var data = $(this).serialize();
var torre = sendDataWithAjax('POST', url, data);
torre.success(function (response) {
if (response.type === 'error') {
generateNoty('bottomLeft', response.message, 'error');
}
else {
generateNoty('bottomLeft', response.message, 'success');
$('.form-group.input-type').removeClass('has-error');
$('.error-icon').hide();
resetForms('create-form-view');
if (response.login === 'si') {
window.location.href = '/matters/index.php/inicio';
}
}
});
});
$('.dropdown.logo').click(function () {
window.location.href = '/matters/index.php/inicio';
});
//Noty Master function
function generateNoty(layout, text, type) {
var n = noty({
text: text,
type: type,
dismissQueue: true,
layout: layout,
theme: 'relax',
timeout: 4000
});
}
//Datatables
$('.table-datatable').DataTable({
"bStateSave": true
});
}); //End jQuery
function refreshTable(table) {
$('.' + table + '').DataTable().ajax.reload();
}
//Helper functions
function resetForms(form_class) {
$('.' + form_class + '').get(0).reset();
}
//Hover submenus
$(function () {
$(".dropdown").hover(
function () {
$('.dropdown-menu', this).stop(true, true).fadeIn("fast");
$(this).toggleClass('open');
$('b', this).toggleClass("caret caret-up");
$('b', this).hover().toggleClass("caret caret-reversed");
},
function () {
$('.dropdown-menu', this).stop(true, true).fadeOut("fast");
$(this).toggleClass('open');
$('b', this).toggleClass("caret caret-up");
$('b', this).hover().toggleClass("caret caret-reversed");
});
});<๏ฝfimโend๏ฝ> | // Form create
$('.create-form-view').submit(function (e) { |
<|file_name|>buttons-child-attribute.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>/*
* Copyright (c) 2017-2020 Boucher, Antoni <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
use gtk::{
Inhibit,
PackType,
prelude::BoxExt,
prelude::ButtonExt,
prelude::LabelExt,
prelude::OrientableExt,
prelude::WidgetExt,
};
use gtk::Orientation::Vertical;
use gtk::WindowType::Popup;
use relm::Widget;
use relm_derive::{Msg, widget};
use self::Msg::*;
pub struct Model {
counter: i32,
}
<๏ฝfimโhole๏ฝ> Decrement,
Increment,
Quit,
}
#[widget]
impl Widget for Win {
fn model() -> Model {
Model {
counter: 0,
}
}
fn update(&mut self, event: Msg) {
match event {
Decrement => self.model.counter -= 1,
Increment => self.model.counter += 1,
Quit => gtk::main_quit(),
}
}
view! {
gtk::Window(Popup) {
gtk::Box {
orientation: Vertical,
#[name="label"]
gtk::Label {
text: &self.model.counter.to_string(),
},
#[name="dec_button"]
gtk::Button {
clicked => Decrement,
label: "-",
},
#[name="inc_button"]
gtk::Button {
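// The `child:` pseudo-attribute below sets the gtk::Box packing
// properties (expand/fill/pack_type/padding/position) for this button.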
child: {
expand: false,
fill: true,
pack_type: PackType::Start,
padding: 10,
position: 0,
},
clicked => Increment,
label: "+",
},
},
delete_event(_, _) => (Quit, Inhibit(false)),
}
}
}
fn main() {
Win::run(()).expect("Win::run");
}
#[cfg(test)]
mod tests {
use gtk::prelude::WidgetExt;
use crate::Win;
#[test]
fn button_position() {
let (_component, _, widgets) = relm::init_test::<Win>(()).expect("init_test failed");
let inc_button = &widgets.inc_button;
let dec_button = &widgets.dec_button;
let label = &widgets.label;
let inc_allocation = inc_button.allocation();
let dec_allocation = dec_button.allocation();
let label_allocation = label.allocation();
assert!(inc_allocation.y() < dec_allocation.y());
// 10 is the padding.
assert_eq!(
dec_allocation.y(),
inc_allocation.y() + inc_allocation.height() + 10 + label_allocation.height()
);
}
}<๏ฝfimโend๏ฝ> | #[derive(Msg)]
pub enum Msg { |
<|file_name|>worker_stop.py<|end_file_name|><๏ฝfimโbegin๏ฝ>from django.core.management.base import NoArgsCommand, CommandError<๏ฝfimโhole๏ฝ>
class Command(NoArgsCommand):
def handle(self, **options):
daemon_handler = DaemonHandler("daemon.pid", "daemon.sock")
try:
daemon_handler.stop()
sys.stdout.write("Worker is stopped.\n")
except Exception, e:
raise CommandError(e)<๏ฝfimโend๏ฝ> | import sys
from xadrpy.core.workers.daemon import DaemonHandler |
<|file_name|>probe.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use super::{MethodError,Ambiguity,NoMatch};
use super::MethodIndex;
use super::{CandidateSource,ImplSource,TraitSource};
use check;
use check::{FnCtxt, NoPreference};
use middle::fast_reject;
use middle::subst;
use middle::subst::Subst;
use middle::traits;
use middle::ty::{mod, Ty};
use middle::ty::{MethodObject};
use middle::ty_fold::HigherRankedFoldable;
use middle::infer;
use middle::infer::InferCtxt;
use syntax::ast;
use syntax::codemap::{Span, DUMMY_SP};
use std::collections::HashSet;
use std::rc::Rc;
use util::ppaux::Repr;
use self::CandidateKind::*;
pub use self::PickAdjustment::*;
pub use self::PickKind::*;
struct ProbeContext<'a, 'tcx:'a> {
fcx: &'a FnCtxt<'a, 'tcx>,
span: Span,
method_name: ast::Name,
steps: Rc<Vec<CandidateStep<'tcx>>>,
opt_simplified_steps: Option<Vec<fast_reject::SimplifiedType>>,
inherent_candidates: Vec<Candidate<'tcx>>,
extension_candidates: Vec<Candidate<'tcx>>,
impl_dups: HashSet<ast::DefId>,
static_candidates: Vec<CandidateSource>,
}
struct CandidateStep<'tcx> {
self_ty: Ty<'tcx>,
adjustment: PickAdjustment,
}<๏ฝfimโhole๏ฝ> xform_self_ty: Ty<'tcx>,
method_ty: Rc<ty::Method<'tcx>>,
kind: CandidateKind<'tcx>,
}
enum CandidateKind<'tcx> {
InherentImplCandidate(/* Impl */ ast::DefId, subst::Substs<'tcx>),
ObjectCandidate(MethodObject<'tcx>),
ExtensionImplCandidate(/* Impl */ ast::DefId, Rc<ty::TraitRef<'tcx>>,
subst::Substs<'tcx>, MethodIndex),
UnboxedClosureCandidate(/* Trait */ ast::DefId, MethodIndex),
WhereClauseCandidate(Rc<ty::TraitRef<'tcx>>, MethodIndex),
}
pub struct Pick<'tcx> {
pub method_ty: Rc<ty::Method<'tcx>>,
pub adjustment: PickAdjustment,
pub kind: PickKind<'tcx>,
}
#[deriving(Clone,Show)]
pub enum PickKind<'tcx> {
InherentImplPick(/* Impl */ ast::DefId),
ObjectPick(/* Trait */ ast::DefId, /* method_num */ uint, /* real_index */ uint),
ExtensionImplPick(/* Impl */ ast::DefId, MethodIndex),
TraitPick(/* Trait */ ast::DefId, MethodIndex),
WhereClausePick(/* Trait */ Rc<ty::TraitRef<'tcx>>, MethodIndex),
}
pub type PickResult<'tcx> = Result<Pick<'tcx>, MethodError>;
// This is a kind of "abstracted" version of ty::AutoAdjustment. The
// difference is that it doesn't embed any regions or other
// specifics. The "confirmation" step recreates those details as
// needed.
#[deriving(Clone,Show)]
pub enum PickAdjustment {
// Indicates that the source expression should be autoderef'd N times
//
// A = expr | *expr | **expr
AutoDeref(uint),
// Indicates that the source expression should be autoderef'd N
// times and then "unsized". This should probably eventually go
// away in favor of just coercing method receivers.
//
// A = unsize(expr | *expr | **expr)
AutoUnsizeLength(/* number of autoderefs */ uint, /* length*/ uint),
// Indicates that an autoref is applied after some number of other adjustments
//
// A = &A | &mut A
AutoRef(ast::Mutability, Box<PickAdjustment>),
}
pub fn probe<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span: Span,
method_name: ast::Name,
self_ty: Ty<'tcx>,
call_expr_id: ast::NodeId)
-> PickResult<'tcx>
{
debug!("probe(self_ty={}, method_name={}, call_expr_id={})",
self_ty.repr(fcx.tcx()),
method_name,
call_expr_id);
// FIXME(#18741) -- right now, creating the steps involves evaluating the
// `*` operator, which registers obligations that then escape into
// the global fulfillment context and thus has global
// side-effects. This is a bit of a pain to refactor. So just let
// it ride, although it's really not great, and in fact could I
// think cause spurious errors. Really though this part should
// take place in the `fcx.infcx().probe` below.
let steps = create_steps(fcx, span, self_ty);
// Create a list of simplified self types, if we can.
let mut simplified_steps = Vec::new();
for step in steps.iter() {
match fast_reject::simplify_type(fcx.tcx(), step.self_ty, true) {
None => { break; }
Some(simplified_type) => { simplified_steps.push(simplified_type); }
}
}
let opt_simplified_steps =
if simplified_steps.len() < steps.len() {
None // failed to convert at least one of the steps
} else {
Some(simplified_steps)
};
debug!("ProbeContext: steps for self_ty={} are {}",
self_ty.repr(fcx.tcx()),
steps.repr(fcx.tcx()));
// this creates one big transaction so that all type variables etc
// that we create during the probe process are removed later
let mut dummy = Some((steps, opt_simplified_steps)); // FIXME(#18101) need once closures
fcx.infcx().probe(|| {
let (steps, opt_simplified_steps) = dummy.take().unwrap();
let mut probe_cx = ProbeContext::new(fcx, span, method_name, steps, opt_simplified_steps);
probe_cx.assemble_inherent_candidates();
probe_cx.assemble_extension_candidates_for_traits_in_scope(call_expr_id);
probe_cx.pick()
})
}
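// A worked sketch of the output: for a receiver of type `&&[int, ..3]`,
// create_steps would yield roughly
//
//     { self_ty: &&[int, ..3], adjustment: AutoDeref(0) }
//     { self_ty: &[int, ..3],  adjustment: AutoDeref(1) }
//     { self_ty: [int, ..3],   adjustment: AutoDeref(2) }
//     { self_ty: [int],        adjustment: AutoUnsizeLength(2, 3) }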
fn create_steps<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
span: Span,
self_ty: Ty<'tcx>)
-> Vec<CandidateStep<'tcx>> {
let mut steps = Vec::new();
let (fully_dereferenced_ty, dereferences, _) =
check::autoderef(
fcx, span, self_ty, None, NoPreference,
|t, d| {
let adjustment = AutoDeref(d);
steps.push(CandidateStep { self_ty: t, adjustment: adjustment });
None::<()> // keep iterating until we can't anymore
});
match fully_dereferenced_ty.sty {
ty::ty_vec(elem_ty, Some(len)) => {
steps.push(CandidateStep {
self_ty: ty::mk_vec(fcx.tcx(), elem_ty, None),
adjustment: AutoUnsizeLength(dereferences, len),
});
}
_ => {
}
}
return steps;
}
impl<'a,'tcx> ProbeContext<'a,'tcx> {
fn new(fcx: &'a FnCtxt<'a,'tcx>,
span: Span,
method_name: ast::Name,
steps: Vec<CandidateStep<'tcx>>,
opt_simplified_steps: Option<Vec<fast_reject::SimplifiedType>>)
-> ProbeContext<'a,'tcx>
{
ProbeContext {
fcx: fcx,
span: span,
method_name: method_name,
inherent_candidates: Vec::new(),
extension_candidates: Vec::new(),
impl_dups: HashSet::new(),
steps: Rc::new(steps),
opt_simplified_steps: opt_simplified_steps,
static_candidates: Vec::new(),
}
}
fn tcx(&self) -> &'a ty::ctxt<'tcx> {
self.fcx.tcx()
}
fn infcx(&self) -> &'a InferCtxt<'a, 'tcx> {
self.fcx.infcx()
}
///////////////////////////////////////////////////////////////////////////
// CANDIDATE ASSEMBLY
fn assemble_inherent_candidates(&mut self) {
let steps = self.steps.clone();
for step in steps.iter() {
self.assemble_probe(step.self_ty);
}
}
fn assemble_probe(&mut self, self_ty: Ty<'tcx>) {
debug!("assemble_probe: self_ty={}",
self_ty.repr(self.tcx()));
match self_ty.sty {
ty::ty_trait(box ty::TyTrait { ref principal, bounds, .. }) => {
self.assemble_inherent_candidates_from_object(self_ty, &*principal, bounds);
self.assemble_inherent_impl_candidates_for_type(principal.def_id);
}
ty::ty_enum(did, _) |
ty::ty_struct(did, _) |
ty::ty_unboxed_closure(did, _, _) => {
self.assemble_inherent_impl_candidates_for_type(did);
}
ty::ty_param(p) => {
self.assemble_inherent_candidates_from_param(self_ty, p);
}
_ => {
}
}
}
fn assemble_inherent_impl_candidates_for_type(&mut self, def_id: ast::DefId) {
// Read the inherent implementation candidates for this type from the
// metadata if necessary.
ty::populate_implementations_for_type_if_necessary(self.tcx(), def_id);
for impl_infos in self.tcx().inherent_impls.borrow().get(&def_id).iter() {
for &impl_def_id in impl_infos.iter() {
self.assemble_inherent_impl_probe(impl_def_id);
}
}
}
fn assemble_inherent_impl_probe(&mut self, impl_def_id: ast::DefId) {
if !self.impl_dups.insert(impl_def_id) {
return; // already visited
}
let method = match impl_method(self.tcx(), impl_def_id, self.method_name) {
Some(m) => m,
None => { return; } // No method with correct name on this impl
};
if !self.has_applicable_self(&*method) {
// No receiver declared. Not a candidate.
return self.record_static_candidate(ImplSource(impl_def_id));
}
let impl_substs = self.impl_substs(impl_def_id);
// Determine the receiver type that the method itself expects.
let xform_self_ty =
self.xform_self_ty(&method, &impl_substs);
self.inherent_candidates.push(Candidate {
xform_self_ty: xform_self_ty,
method_ty: method,
kind: InherentImplCandidate(impl_def_id, impl_substs)
});
}
fn assemble_inherent_candidates_from_object(&mut self,
self_ty: Ty<'tcx>,
principal: &ty::TraitRef<'tcx>,
_bounds: ty::ExistentialBounds) {
debug!("assemble_inherent_candidates_from_object(self_ty={})",
self_ty.repr(self.tcx()));
let tcx = self.tcx();
// It is illegal to invoke a method on a trait instance that
// refers to the `Self` type. An error will be reported by
// `enforce_object_limitations()` if the method refers to the
// `Self` type anywhere other than the receiver. Here, we use
// a substitution that replaces `Self` with the object type
// itself. Hence, a `&self` method will wind up with an
// argument type like `&Trait`.
let rcvr_substs = principal.substs.clone().with_self_ty(self_ty);
let trait_ref = Rc::new(ty::TraitRef {
def_id: principal.def_id,
substs: rcvr_substs.clone()
});
self.elaborate_bounds(&[trait_ref.clone()], |this, new_trait_ref, m, method_num| {
let vtable_index =
get_method_index(tcx, &*new_trait_ref,
trait_ref.clone(), method_num);
// FIXME Hacky. By-value `self` methods in objects ought to be
// just a special case of passing ownership of a DST value
// as a parameter. *But* we currently hack them in and tie them to
// the particulars of the `Box` type. So basically for a `fn foo(self,...)`
// method invoked on an object, we don't want the receiver type to be
// `TheTrait`, but rather `Box<TheTrait>`. Yuck.
let mut m = m;
match m.explicit_self {
ty::ByValueExplicitSelfCategory => {
let mut n = (*m).clone();
let self_ty = n.fty.sig.inputs[0];
n.fty.sig.inputs[0] = ty::mk_uniq(tcx, self_ty);
m = Rc::new(n);
}
_ => { }
}
let xform_self_ty =
this.xform_self_ty(&m, &new_trait_ref.substs);
this.inherent_candidates.push(Candidate {
xform_self_ty: xform_self_ty,
method_ty: m,
kind: ObjectCandidate(MethodObject {
trait_ref: new_trait_ref,
object_trait_id: principal.def_id,
method_num: method_num,
real_index: vtable_index
})
});
});
}
fn assemble_inherent_candidates_from_param(&mut self,
_rcvr_ty: Ty<'tcx>,
param_ty: ty::ParamTy) {
// FIXME -- Do we want to commit to this behavior for param bounds?
let ty::ParamTy { space, idx: index, .. } = param_ty;
let bounds =
self.fcx.inh.param_env.bounds.get(space, index).trait_bounds
.as_slice();
self.elaborate_bounds(bounds, |this, trait_ref, m, method_num| {
let xform_self_ty =
this.xform_self_ty(&m, &trait_ref.substs);
debug!("found match: trait_ref={} substs={} m={}",
trait_ref.repr(this.tcx()),
trait_ref.substs.repr(this.tcx()),
m.repr(this.tcx()));
assert_eq!(m.generics.types.get_slice(subst::TypeSpace).len(),
trait_ref.substs.types.get_slice(subst::TypeSpace).len());
assert_eq!(m.generics.regions.get_slice(subst::TypeSpace).len(),
trait_ref.substs.regions().get_slice(subst::TypeSpace).len());
assert_eq!(m.generics.types.get_slice(subst::SelfSpace).len(),
trait_ref.substs.types.get_slice(subst::SelfSpace).len());
assert_eq!(m.generics.regions.get_slice(subst::SelfSpace).len(),
trait_ref.substs.regions().get_slice(subst::SelfSpace).len());
// Because this trait derives from a where-clause, it
// should not contain any inference variables or other
// artifacts. This means it is safe to put into the
// `WhereClauseCandidate` and (eventually) into the
// `WhereClausePick`.
assert!(trait_ref.substs.types.iter().all(|&t| !ty::type_needs_infer(t)));
this.inherent_candidates.push(Candidate {
xform_self_ty: xform_self_ty,
method_ty: m,
kind: WhereClauseCandidate(trait_ref, method_num)
});
});
}
// Do a search through a list of bounds, using a callback to actually
// create the candidates.
fn elaborate_bounds(
&mut self,
bounds: &[Rc<ty::TraitRef<'tcx>>],
mk_cand: for<'a> |this: &mut ProbeContext<'a, 'tcx>,
tr: Rc<ty::TraitRef<'tcx>>,
m: Rc<ty::Method<'tcx>>,
method_num: uint|)
{
let tcx = self.tcx();
let mut cache = HashSet::new();
for bound_trait_ref in traits::transitive_bounds(tcx, bounds) {
// Already visited this trait, skip it.
if !cache.insert(bound_trait_ref.def_id) {
continue;
}
let (pos, method) = match trait_method(tcx, bound_trait_ref.def_id, self.method_name) {
Some(v) => v,
None => { continue; }
};
if !self.has_applicable_self(&*method) {
self.record_static_candidate(TraitSource(bound_trait_ref.def_id));
} else {
mk_cand(self, bound_trait_ref, method, pos);
}
}
}
fn assemble_extension_candidates_for_traits_in_scope(&mut self,
expr_id: ast::NodeId)
{
let mut duplicates = HashSet::new();
let opt_applicable_traits = self.fcx.ccx.trait_map.get(&expr_id);
for applicable_traits in opt_applicable_traits.into_iter() {
for &trait_did in applicable_traits.iter() {
if duplicates.insert(trait_did) {
self.assemble_extension_candidates_for_trait(trait_did);
}
}
}
}
fn assemble_extension_candidates_for_trait(&mut self,
trait_def_id: ast::DefId) {
debug!("assemble_extension_candidates_for_trait: trait_def_id={}",
trait_def_id.repr(self.tcx()));
// Check whether `trait_def_id` defines a method with suitable name:
let trait_items =
ty::trait_items(self.tcx(), trait_def_id);
let matching_index =
trait_items.iter()
.position(|item| item.name() == self.method_name);
let matching_index = match matching_index {
Some(i) => i,
None => { return; }
};
let method = match (&*trait_items)[matching_index].as_opt_method() {
Some(m) => m,
None => { return; }
};
// Check whether the method has an applicable receiver (`self`) type:
if !self.has_applicable_self(&*method) {
debug!("method has inapplicable self");
return self.record_static_candidate(TraitSource(trait_def_id));
}
self.assemble_extension_candidates_for_trait_impls(trait_def_id,
method.clone(),
matching_index);
self.assemble_unboxed_closure_candidates(trait_def_id,
method,
matching_index);
}
fn assemble_extension_candidates_for_trait_impls(&mut self,
trait_def_id: ast::DefId,
method: Rc<ty::Method<'tcx>>,
method_index: uint)
{
ty::populate_implementations_for_trait_if_necessary(self.tcx(),
trait_def_id);
let trait_impls = self.tcx().trait_impls.borrow();
let impl_def_ids = match trait_impls.get(&trait_def_id) {
None => { return; }
Some(impls) => impls,
};
for &impl_def_id in impl_def_ids.borrow().iter() {
debug!("assemble_extension_candidates_for_trait_impl: trait_def_id={} impl_def_id={}",
trait_def_id.repr(self.tcx()),
impl_def_id.repr(self.tcx()));
if !self.impl_can_possibly_match(impl_def_id) {
continue;
}
let impl_substs = self.impl_substs(impl_def_id);
debug!("impl_substs={}", impl_substs.repr(self.tcx()));
let impl_trait_ref =
ty::impl_trait_ref(self.tcx(), impl_def_id)
.unwrap() // we know this is a trait impl
.subst(self.tcx(), &impl_substs);
debug!("impl_trait_ref={}", impl_trait_ref.repr(self.tcx()));
// Determine the receiver type that the method itself expects.
let xform_self_ty =
self.xform_self_ty(&method, &impl_trait_ref.substs);
debug!("xform_self_ty={}", xform_self_ty.repr(self.tcx()));
self.extension_candidates.push(Candidate {
xform_self_ty: xform_self_ty,
method_ty: method.clone(),
kind: ExtensionImplCandidate(impl_def_id, impl_trait_ref, impl_substs, method_index)
});
}
}
fn impl_can_possibly_match(&self, impl_def_id: ast::DefId) -> bool {
let simplified_steps = match self.opt_simplified_steps {
Some(ref simplified_steps) => simplified_steps,
None => { return true; }
};
let impl_type = ty::lookup_item_type(self.tcx(), impl_def_id);
let impl_simplified_type =
match fast_reject::simplify_type(self.tcx(), impl_type.ty, false) {
Some(simplified_type) => simplified_type,
None => { return true; }
};
simplified_steps.contains(&impl_simplified_type)
}
fn assemble_unboxed_closure_candidates(&mut self,
trait_def_id: ast::DefId,
method_ty: Rc<ty::Method<'tcx>>,
method_index: uint)
{
// Check if this is one of the Fn,FnMut,FnOnce traits.
let tcx = self.tcx();
let kind = if Some(trait_def_id) == tcx.lang_items.fn_trait() {
ty::FnUnboxedClosureKind
} else if Some(trait_def_id) == tcx.lang_items.fn_mut_trait() {
ty::FnMutUnboxedClosureKind
} else if Some(trait_def_id) == tcx.lang_items.fn_once_trait() {
ty::FnOnceUnboxedClosureKind
} else {
return;
};
// Check if there is an unboxed-closure self-type in the list of receivers.
// If so, add "synthetic impls".
let steps = self.steps.clone();
for step in steps.iter() {
let (closure_def_id, _, _) = match step.self_ty.sty {
ty::ty_unboxed_closure(a, b, ref c) => (a, b, c),
_ => continue,
};
let unboxed_closures = self.fcx.inh.unboxed_closures.borrow();
let closure_data = match unboxed_closures.get(&closure_def_id) {
Some(data) => data,
None => {
self.tcx().sess.span_bug(
self.span,
format!("No entry for unboxed closure: {}",
closure_def_id.repr(self.tcx())).as_slice());
}
};
// this closure doesn't implement the right kind of `Fn` trait
if closure_data.kind != kind {
continue;
}
// create some substitutions for the argument/return type;
// for the purposes of our method lookup, we only take
// receiver type into account, so we can just substitute
// fresh types here to use during substitution and subtyping.
let trait_def = ty::lookup_trait_def(self.tcx(), trait_def_id);
let substs = self.infcx().fresh_substs_for_trait(self.span,
&trait_def.generics,
step.self_ty);
let xform_self_ty = self.xform_self_ty(&method_ty, &substs);
self.inherent_candidates.push(Candidate {
xform_self_ty: xform_self_ty,
method_ty: method_ty.clone(),
kind: UnboxedClosureCandidate(trait_def_id, method_index)
});
}
}
///////////////////////////////////////////////////////////////////////////
// THE ACTUAL SEARCH
fn pick(mut self) -> PickResult<'tcx> {
let steps = self.steps.clone();
for step in steps.iter() {
match self.pick_step(step) {
Some(r) => {
return r;
}
None => { }
}
}
Err(NoMatch(self.static_candidates))
}
fn pick_step(&mut self, step: &CandidateStep<'tcx>) -> Option<PickResult<'tcx>> {
debug!("pick_step: step={}", step.repr(self.tcx()));
if ty::type_is_error(step.self_ty) {
return None;
}
match self.pick_by_value_method(step) {
Some(result) => return Some(result),
None => {}
}
match self.pick_autorefd_method(step) {
Some(result) => return Some(result),
None => {}
}
// FIXME -- Super hack. For DST types, we will convert to
// &&[T] or &&str, as part of a kind of legacy lookup scheme.
match step.self_ty.sty {
ty::ty_str | ty::ty_vec(_, None) => self.pick_autorefrefd_method(step),
_ => None
}
}
fn pick_by_value_method(&mut self,
step: &CandidateStep<'tcx>)
-> Option<PickResult<'tcx>>
{
/*!
* For each type `T` in the step list, this attempts to find a
* method where the (transformed) self type is exactly `T`. We do,
* however, apply one transformation to the adjustment: if we
* are passing a region pointer in, we will potentially
* *reborrow* it to a shorter lifetime. This allows us to
* transparently pass `&mut` pointers, in particular, without
* consuming them for their entire lifetime.
*/
let adjustment = match step.adjustment {
AutoDeref(d) => consider_reborrow(step.self_ty, d),
AutoUnsizeLength(..) | AutoRef(..) => step.adjustment.clone(),
};
return self.pick_method(step.self_ty).map(|r| self.adjust(r, adjustment.clone()));
fn consider_reborrow(ty: Ty, d: uint) -> PickAdjustment {
// Insert a `&*` or `&mut *` if this is a reference type:
match ty.sty {
ty::ty_rptr(_, ref mt) => AutoRef(mt.mutbl, box AutoDeref(d+1)),
_ => AutoDeref(d),
}
}
}
fn pick_autorefd_method(&mut self,
step: &CandidateStep<'tcx>)
-> Option<PickResult<'tcx>>
{
let tcx = self.tcx();
self.search_mutabilities(
|m| AutoRef(m, box step.adjustment.clone()),
|m,r| ty::mk_rptr(tcx, r, ty::mt {ty:step.self_ty, mutbl:m}))
}
fn pick_autorefrefd_method(&mut self,
step: &CandidateStep<'tcx>)
-> Option<PickResult<'tcx>>
{
let tcx = self.tcx();
self.search_mutabilities(
|m| AutoRef(m, box AutoRef(m, box step.adjustment.clone())),
|m,r| ty::mk_rptr(tcx, r, ty::mt { ty: ty::mk_rptr(tcx, r, ty::mt { ty:step.self_ty,
mutbl:m}),
mutbl: m }))
}
fn search_mutabilities(&mut self,
mk_adjustment: |ast::Mutability| -> PickAdjustment,
mk_autoref_ty: |ast::Mutability, ty::Region| -> Ty<'tcx>)
-> Option<PickResult<'tcx>>
{
// In general, during probing we erase regions. See
// `impl_self_ty()` for an explanation.
let region = ty::ReStatic;
// Search through mutabilities in order to find one where pick works:
[ast::MutImmutable, ast::MutMutable]
.iter()
.flat_map(|&m| {
let autoref_ty = mk_autoref_ty(m, region);
self.pick_method(autoref_ty)
.map(|r| self.adjust(r, mk_adjustment(m)))
.into_iter()
})
.nth(0)
}
fn adjust(&mut self,
result: PickResult<'tcx>,
adjustment: PickAdjustment)
-> PickResult<'tcx> {
match result {
Err(e) => Err(e),
Ok(mut pick) => {
pick.adjustment = adjustment;
Ok(pick)
}
}
}
fn pick_method(&mut self, self_ty: Ty<'tcx>) -> Option<PickResult<'tcx>> {
debug!("pick_method(self_ty={})", self.infcx().ty_to_string(self_ty));
debug!("searching inherent candidates");
match self.consider_candidates(self_ty, self.inherent_candidates[]) {
None => {}
Some(pick) => {
return Some(pick);
}
}
debug!("searching extension candidates");
self.consider_candidates(self_ty, self.extension_candidates[])
}
fn consider_candidates(&self,
self_ty: Ty<'tcx>,
probes: &[Candidate<'tcx>])
-> Option<PickResult<'tcx>> {
let mut applicable_candidates: Vec<_> =
probes.iter()
.filter(|&probe| self.consider_probe(self_ty, probe))
.collect();
debug!("applicable_candidates: {}", applicable_candidates.repr(self.tcx()));
if applicable_candidates.len() > 1 {
match self.collapse_candidates_to_trait_pick(applicable_candidates[]) {
Some(pick) => { return Some(Ok(pick)); }
None => { }
}
}
if applicable_candidates.len() > 1 {
let sources = probes.iter().map(|p| p.to_source()).collect();
return Some(Err(Ambiguity(sources)));
}
applicable_candidates.pop().map(|probe| {
let pick = probe.to_unadjusted_pick();
Ok(pick)
})
}
fn consider_probe(&self, self_ty: Ty<'tcx>, probe: &Candidate<'tcx>) -> bool {
debug!("consider_probe: self_ty={} probe={}",
self_ty.repr(self.tcx()),
probe.repr(self.tcx()));
self.infcx().probe(|| {
// First check that the self type can be related.
match self.make_sub_ty(self_ty, probe.xform_self_ty) {
Ok(()) => { }
Err(_) => {
debug!("--> cannot relate self-types");
return false;
}
}
// If so, impls may carry other conditions (e.g., where
// clauses) that must be considered. Make sure that those
// match as well (or at least may match, sometimes we
// don't have enough information to fully evaluate).
match probe.kind {
InherentImplCandidate(impl_def_id, ref substs) |
ExtensionImplCandidate(impl_def_id, _, ref substs, _) => {
// Check whether the impl imposes obligations we have to worry about.
let impl_generics = ty::lookup_item_type(self.tcx(), impl_def_id).generics;
let impl_bounds = impl_generics.to_bounds(self.tcx(), substs);
// Erase any late-bound regions bound in the impl
// which appear in the bounds.
let impl_bounds = self.erase_late_bound_regions(&ty::bind(impl_bounds)).value;
// Convert the bounds into obligations.
let obligations =
traits::obligations_for_generics(
self.tcx(),
traits::ObligationCause::misc(self.span),
&impl_bounds,
&substs.types);
debug!("impl_obligations={}", obligations.repr(self.tcx()));
// Evaluate those obligations to see if they might possibly hold.
let mut selcx = traits::SelectionContext::new(self.infcx(),
&self.fcx.inh.param_env,
self.fcx);
obligations.all(|o| selcx.evaluate_obligation(o))
}
ObjectCandidate(..) |
UnboxedClosureCandidate(..) |
WhereClauseCandidate(..) => {
// These have no additional conditions to check.
true
}
}
})
}
/// Sometimes we get in a situation where we have multiple probes that are all impls of the
/// same trait, but we don't know which impl to use. In this case, since in all cases the
/// external interface of the method can be determined from the trait, it's ok not to decide.
/// We can basically just collapse all of the probes for various impls into one where-clause
/// probe. This will result in a pending obligation so when more type-info is available we can
/// make the final decision.
///
/// Example (`src/test/run-pass/method-two-trait-defer-resolution-1.rs`):
///
/// ```
/// trait Foo { ... }
/// impl Foo for Vec<int> { ... }
/// impl Foo for Vec<uint> { ... }
/// ```
///
/// Now imagine the receiver is `Vec<_>`. It doesn't really matter at this time which impl we
/// use, so it's ok to just commit to "using the method from the trait Foo".
fn collapse_candidates_to_trait_pick(&self,
probes: &[&Candidate<'tcx>])
-> Option<Pick<'tcx>> {
// Do all probes correspond to the same trait?
let trait_data = match probes[0].to_trait_data() {
Some(data) => data,
None => return None,
};
if probes[1..].iter().any(|p| p.to_trait_data() != Some(trait_data)) {
return None;
}
// If so, just use this trait and call it a day.
let (trait_def_id, method_num) = trait_data;
let method_ty = probes[0].method_ty.clone();
Some(Pick {
method_ty: method_ty,
adjustment: AutoDeref(0),
kind: TraitPick(trait_def_id, method_num)
})
}
///////////////////////////////////////////////////////////////////////////
// MISCELLANY
fn make_sub_ty(&self, sub: Ty<'tcx>, sup: Ty<'tcx>) -> infer::ures<'tcx> {
self.infcx().sub_types(false, infer::Misc(DUMMY_SP), sub, sup)
}
fn has_applicable_self(&self, method: &ty::Method) -> bool {
// "fast track" -- check for usage of sugar
match method.explicit_self {
ty::StaticExplicitSelfCategory => {
// fallthrough
}
ty::ByValueExplicitSelfCategory |
ty::ByReferenceExplicitSelfCategory(..) |
ty::ByBoxExplicitSelfCategory => {
return true;
}
}
// FIXME -- check for types that deref to `Self`,
// like `Rc<Self>` and so on.
//
// Note also that the current code will break if this type
// includes any of the type parameters defined on the method
// -- but this could be overcome.
return false;
}
fn record_static_candidate(&mut self, source: CandidateSource) {
self.static_candidates.push(source);
}
fn xform_self_ty(&self,
method: &Rc<ty::Method<'tcx>>,
substs: &subst::Substs<'tcx>)
-> Ty<'tcx> {
debug!("xform_self_ty(self_ty={}, substs={})",
method.fty.sig.inputs[0].repr(self.tcx()),
substs.repr(self.tcx()));
// It is possible for type parameters or early-bound lifetimes
// to appear in the signature of `self`. The substitutions we
// are given do not include type/lifetime parameters for the
// method yet. So create fresh variables here for those too,
// if there are any.
assert_eq!(substs.types.len(subst::FnSpace), 0);
assert_eq!(substs.regions().len(subst::FnSpace), 0);
let mut substs = substs;
let placeholder;
if
!method.generics.types.is_empty_in(subst::FnSpace) ||
!method.generics.regions.is_empty_in(subst::FnSpace)
{
let method_types =
self.infcx().next_ty_vars(
method.generics.types.len(subst::FnSpace));
// In general, during probe we erase regions. See
// `impl_self_ty()` for an explanation.
let method_regions =
method.generics.regions.get_slice(subst::FnSpace)
.iter()
.map(|_| ty::ReStatic)
.collect();
placeholder = (*substs).clone().with_method(method_types, method_regions);
substs = &placeholder;
}
// Replace early-bound regions and types.
let xform_self_ty = method.fty.sig.inputs[0].subst(self.tcx(), substs);
// Replace late-bound regions bound in the impl or
// where-clause (2 levels of binding).
let xform_self_ty =
self.erase_late_bound_regions(&ty::bind(ty::bind(xform_self_ty))).value.value;
// Replace late-bound regions bound in the method (1 level of binding).
self.erase_late_bound_regions(&ty::bind(xform_self_ty)).value
}
fn impl_substs(&self,
impl_def_id: ast::DefId)
-> subst::Substs<'tcx>
{
let impl_pty = ty::lookup_item_type(self.tcx(), impl_def_id);
let type_vars =
impl_pty.generics.types.map(
|_| self.infcx().next_ty_var());
let region_placeholders =
impl_pty.generics.regions.map(
|_| ty::ReStatic); // see erase_late_bound_regions() for an explanation of why 'static
subst::Substs::new(type_vars, region_placeholders)
}
/// Replace late-bound-regions bound by `value` with `'static` using
/// `ty::erase_late_bound_regions`.
///
/// This is only a reasonable thing to do during the *probe* phase, not the *confirm* phase, of
/// method matching. It is reasonable during the probe phase because we don't consider region
/// relationships at all. Therefore, we can just replace all the region variables with 'static
/// rather than creating fresh region variables. This is nice for two reasons:
///
/// 1. Because the numbers of the region variables would otherwise be fairly unique to this
/// particular method call, it winds up creating fewer types overall, which helps for memory
/// usage. (Admittedly, this is a rather small effect, though measureable.)
///
/// 2. It makes it easier to deal with higher-ranked trait bounds, because we can replace any
/// late-bound regions with 'static. Otherwise, if we were going to replace late-bound
/// regions with actual region variables as is proper, we'd have to ensure that the same
/// region got replaced with the same variable, which requires a bit more coordination
///    and/or tracking of the substitution, and so forth.
fn erase_late_bound_regions<T>(&self, value: &T) -> T
where T : HigherRankedFoldable<'tcx>
{
ty::erase_late_bound_regions(self.tcx(), value)
}
}
fn impl_method<'tcx>(tcx: &ty::ctxt<'tcx>,
impl_def_id: ast::DefId,
method_name: ast::Name)
-> Option<Rc<ty::Method<'tcx>>>
{
let impl_items = tcx.impl_items.borrow();
let impl_items = impl_items.get(&impl_def_id).unwrap();
impl_items
.iter()
.map(|&did| ty::impl_or_trait_item(tcx, did.def_id()))
.find(|m| m.name() == method_name)
.and_then(|item| item.as_opt_method())
}
/// Find method with name `method_name` defined in `trait_def_id` and return it, along with its
/// index (or `None`, if no such method).
fn trait_method<'tcx>(tcx: &ty::ctxt<'tcx>,
trait_def_id: ast::DefId,
method_name: ast::Name)
-> Option<(uint, Rc<ty::Method<'tcx>>)>
{
let trait_items = ty::trait_items(tcx, trait_def_id);
trait_items
.iter()
.enumerate()
.find(|&(_, ref item)| item.name() == method_name)
.and_then(|(idx, item)| item.as_opt_method().map(|m| (idx, m)))
}
// Determine the index of a method in the list of all methods belonging
// to a trait and its supertraits.
fn get_method_index<'tcx>(tcx: &ty::ctxt<'tcx>,
trait_ref: &ty::TraitRef<'tcx>,
subtrait: Rc<ty::TraitRef<'tcx>>,
n_method: uint) -> uint {
// We need to figure the "real index" of the method in a
// listing of all the methods of an object. We do this by
// iterating down the supertraits of the object's trait until
// we find the trait the method came from, counting up the
// methods from them.
let mut method_count = 0;
ty::each_bound_trait_and_supertraits(tcx, &[subtrait], |bound_ref| {
if bound_ref.def_id == trait_ref.def_id {
false
} else {
let trait_items = ty::trait_items(tcx, bound_ref.def_id);
for trait_item in trait_items.iter() {
match *trait_item {
ty::MethodTraitItem(_) => method_count += 1,
ty::TypeTraitItem(_) => {}
}
}
true
}
});
method_count + n_method
}
impl<'tcx> Candidate<'tcx> {
fn to_unadjusted_pick(&self) -> Pick<'tcx> {
Pick {
method_ty: self.method_ty.clone(),
adjustment: AutoDeref(0),
kind: match self.kind {
InherentImplCandidate(def_id, _) => {
InherentImplPick(def_id)
}
ObjectCandidate(ref data) => {
ObjectPick(data.trait_ref.def_id, data.method_num, data.real_index)
}
ExtensionImplCandidate(def_id, _, _, index) => {
ExtensionImplPick(def_id, index)
}
UnboxedClosureCandidate(trait_def_id, index) => {
TraitPick(trait_def_id, index)
}
WhereClauseCandidate(ref trait_ref, index) => {
// Only trait derived from where-clauses should
// appear here, so they should not contain any
// inference variables or other artifacts. This
// means they are safe to put into the
// `WhereClausePick`.
assert!(trait_ref.substs.types.iter().all(|&t| !ty::type_needs_infer(t)));
WhereClausePick((*trait_ref).clone(), index)
}
}
}
}
fn to_source(&self) -> CandidateSource {
match self.kind {
InherentImplCandidate(def_id, _) => ImplSource(def_id),
ObjectCandidate(ref obj) => TraitSource(obj.trait_ref.def_id),
ExtensionImplCandidate(def_id, _, _, _) => ImplSource(def_id),
UnboxedClosureCandidate(trait_def_id, _) => TraitSource(trait_def_id),
WhereClauseCandidate(ref trait_ref, _) => TraitSource(trait_ref.def_id),
}
}
fn to_trait_data(&self) -> Option<(ast::DefId,MethodIndex)> {
match self.kind {
InherentImplCandidate(..) |
ObjectCandidate(..) => {
None
}
UnboxedClosureCandidate(trait_def_id, method_num) => {
Some((trait_def_id, method_num))
}
ExtensionImplCandidate(_, ref trait_ref, _, method_num) |
WhereClauseCandidate(ref trait_ref, method_num) => {
Some((trait_ref.def_id, method_num))
}
}
}
}
impl<'tcx> Repr<'tcx> for Candidate<'tcx> {
fn repr(&self, tcx: &ty::ctxt<'tcx>) -> String {
format!("Candidate(xform_self_ty={}, kind={})",
self.xform_self_ty.repr(tcx),
self.kind.repr(tcx))
}
}
impl<'tcx> Repr<'tcx> for CandidateKind<'tcx> {
fn repr(&self, tcx: &ty::ctxt<'tcx>) -> String {
match *self {
InherentImplCandidate(ref a, ref b) =>
format!("InherentImplCandidate({},{})", a.repr(tcx), b.repr(tcx)),
ObjectCandidate(ref a) =>
format!("ObjectCandidate({})", a.repr(tcx)),
ExtensionImplCandidate(ref a, ref b, ref c, ref d) =>
format!("ExtensionImplCandidate({},{},{},{})", a.repr(tcx), b.repr(tcx),
c.repr(tcx), d),
UnboxedClosureCandidate(ref a, ref b) =>
format!("UnboxedClosureCandidate({},{})", a.repr(tcx), b),
WhereClauseCandidate(ref a, ref b) =>
format!("WhereClauseCandidate({},{})", a.repr(tcx), b),
}
}
}
impl<'tcx> Repr<'tcx> for CandidateStep<'tcx> {
fn repr(&self, tcx: &ty::ctxt<'tcx>) -> String {
format!("CandidateStep({},{})",
self.self_ty.repr(tcx),
self.adjustment)
}
}
impl<'tcx> Repr<'tcx> for PickAdjustment {
fn repr(&self, _tcx: &ty::ctxt) -> String {
format!("{}", self)
}
}
impl<'tcx> Repr<'tcx> for PickKind<'tcx> {
fn repr(&self, _tcx: &ty::ctxt) -> String {
format!("{}", self)
}
}
impl<'tcx> Repr<'tcx> for Pick<'tcx> {
fn repr(&self, tcx: &ty::ctxt<'tcx>) -> String {
format!("Pick(method_ty={}, adjustment={}, kind={})",
self.method_ty.repr(tcx),
self.adjustment,
self.kind)
}
}<๏ฝfimโend๏ฝ> |
struct Candidate<'tcx> { |
<|file_name|>NoFollow.java<|end_file_name|><๏ฝfimโbegin๏ฝ>package com.suscipio_solutions.consecro_mud.Commands;
import java.util.Vector;
import com.suscipio_solutions.consecro_mud.MOBS.interfaces.MOB;
import com.suscipio_solutions.consecro_mud.core.CMParms;
<๏ฝfimโhole๏ฝ>
private final String[] access=I(new String[]{"NOFOLLOW","NOFOL"});
@Override public String[] getAccessWords(){return access;}
@Override
public boolean execute(MOB mob, Vector commands, int metaFlags)
throws java.io.IOException
{
if((commands.size()>1)&&(commands.elementAt(0) instanceof String))
{
if(((String)commands.elementAt(0)).equalsIgnoreCase("UNFOLLOW"))
{
unfollow(mob,((commands.size()>1)&&(commands.elementAt(1) instanceof String)&&(((String)commands.elementAt(1)).equalsIgnoreCase("QUIETLY"))));
return false;
}
MOB M=mob.fetchFollower(CMParms.combine(commands,1));
if((M==null)&&(mob.location()!=null))
{
M=mob.location().fetchInhabitant(CMParms.combine(commands,1));
if(M!=null)
mob.tell(L("@x1 is not following you!",M.name(mob)));
else
mob.tell(L("There is noone here called '@x1' following you!",CMParms.combine(commands,1)));
return false;
}
if((mob.location()!=null)&&(M!=null)&&(M.amFollowing()==mob))
{
nofollow(M,true,false);
return true;
}
mob.tell(L("There is noone called '@x1' following you!",CMParms.combine(commands,1)));
return false;
}
if(!mob.isAttribute(MOB.Attrib.NOFOLLOW))
{
mob.setAttribute(MOB.Attrib.NOFOLLOW,true);
//unfollow(mob,false);
mob.tell(L("You are no longer accepting new followers."));
}
else
{
mob.setAttribute(MOB.Attrib.NOFOLLOW,false);
mob.tell(L("You are now accepting new followers."));
}
return false;
}
@Override public boolean canBeOrdered(){return true;}
}<๏ฝfimโend๏ฝ> | @SuppressWarnings("rawtypes")
public class NoFollow extends Follow
{
public NoFollow(){} |
<|file_name|>qgsmaplayer.py<|end_file_name|><๏ฝfimโbegin๏ฝ># The following has been generated automatically from src/core/qgsmaplayer.h
QgsMapLayer.LayerType = QgsMapLayerType
# monkey patching scope-based enum
QgsMapLayer.VectorLayer = QgsMapLayerType.VectorLayer
QgsMapLayer.VectorLayer.__doc__ = ""
QgsMapLayer.RasterLayer = QgsMapLayerType.RasterLayer
QgsMapLayer.RasterLayer.__doc__ = ""
QgsMapLayer.PluginLayer = QgsMapLayerType.PluginLayer
QgsMapLayer.PluginLayer.__doc__ = ""
QgsMapLayer.MeshLayer = QgsMapLayerType.MeshLayer
QgsMapLayer.MeshLayer.__doc__ = "Added in 3.2"
QgsMapLayer.VectorTileLayer = QgsMapLayerType.VectorTileLayer
QgsMapLayer.VectorTileLayer.__doc__ = "Added in 3.14"
QgsMapLayer.AnnotationLayer = QgsMapLayerType.AnnotationLayer<๏ฝfimโhole๏ฝ>QgsMapLayer.LayerFlag.baseClass = QgsMapLayer
QgsMapLayer.LayerFlags.baseClass = QgsMapLayer
LayerFlags = QgsMapLayer # dirty hack since SIP seems to introduce the flags in module
QgsMapLayer.StyleCategory.baseClass = QgsMapLayer
QgsMapLayer.StyleCategories.baseClass = QgsMapLayer
StyleCategories = QgsMapLayer # dirty hack since SIP seems to introduce the flags in module<๏ฝfimโend๏ฝ> | QgsMapLayer.AnnotationLayer.__doc__ = "Contains freeform, georeferenced annotations. Added in QGIS 3.16"
QgsMapLayerType.__doc__ = 'Types of layers that can be added to a map\n\n.. versionadded:: 3.8\n\n' + '* ``VectorLayer``: ' + QgsMapLayerType.VectorLayer.__doc__ + '\n' + '* ``RasterLayer``: ' + QgsMapLayerType.RasterLayer.__doc__ + '\n' + '* ``PluginLayer``: ' + QgsMapLayerType.PluginLayer.__doc__ + '\n' + '* ``MeshLayer``: ' + QgsMapLayerType.MeshLayer.__doc__ + '\n' + '* ``VectorTileLayer``: ' + QgsMapLayerType.VectorTileLayer.__doc__ + '\n' + '* ``AnnotationLayer``: ' + QgsMapLayerType.AnnotationLayer.__doc__
# -- |
<|file_name|>f32prefixes.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>//! Constants for all SI prefixes as `f32`s
/// The SI prefix for 10^24
pub const YOTTA: f32 = 1e24;
/// The SI prefix for 10^21
pub const ZETTA: f32 = 1e21;
/// The SI prefix for 10^18
pub const EXA: f32 = 1e18;
/// The SI prefix for 10^15
pub const PETA: f32 = 1e15;
/// The SI prefix for 10^12
pub const TERA: f32 = 1e12;
/// The SI prefix for 10^9
pub const GIGA: f32 = 1e9;
/// The SI prefix for 10^6
pub const MEGA: f32 = 1e6;
/// The SI prefix for 10^3
pub const KILO: f32 = 1e3;
/// The SI prefix for 10^2
pub const HECTO: f32 = 1e2;
/// The SI prefix for 10^1
pub const DECA: f32 = 1e1;
/// The SI prefix for 10^-1
pub const DECI: f32 = 1e-1;
/// The SI prefix for 10^-2
pub const CENTI: f32 = 1e-2;
/// The SI prefix for 10^-3
pub const MILLI: f32 = 1e-3;
/// The SI prefix for 10^-6<｜fim▁hole｜>/// The SI prefix for 10^-9
pub const NANO: f32 = 1e-9;
/// The SI prefix for 10^-12
pub const PICO: f32 = 1e-12;
/// The SI prefix for 10^-15
pub const FEMTO: f32 = 1e-15;
/// The SI prefix for 10^-18
pub const ATTO: f32 = 1e-18;
/// The SI prefix for 10^-21
pub const ZEPTO: f32 = 1e-21;
/// The SI prefix for 10^-24
pub const YOCTO: f32 = 1e-24;<｜fim▁end｜> | pub const MICRO: f32 = 1e-6; |
<|file_name|>index.test.js<|end_file_name|><｜fim▁begin｜>'use strict';
// ------------------------------------------------------------------------------------------ Test Dependencies
var fs = require('fs');
var path = require('path');
var should = require('chai').should();
var nconf = require('nconf');
nconf.argv()
.env()
.file('local', { file: path.join(__dirname, '../../../local.json') })<๏ฝfimโhole๏ฝ> .file({ file: path.join(__dirname, '../../../config.json') });
var settings = require('../../../lib/settings');
describe('Index View', function() {
var sandbox;
before(function () {
return new Promise(function(resolve, reject) {
settings.get(function(err, settings) {
should.exist(settings);
should.not.exist(err);
sandbox = settings;
resolve();
});
});
});
beforeEach(function *() {
yield browser.url(sandbox.general.baseUrl);
});
it('should have the window title based on user settings', function *() {
var title = yield browser.getTitle()
title.should.be.equal(sandbox.general.title);
});
it('should have the header title based on user settings', function *() {
var header = yield browser.getText('#hp header h1')
header.should.be.equal(sandbox.general.title);
});
it('should have the header subtitle based on user settings', function *() {
var subtitle = yield browser.getText('#hp header h2')
subtitle.should.be.equal(sandbox.general.subtitle);
});
it('should have a link to the home page', function *() {
var link = yield browser.isExisting('#hp header a[href="/"]');
link.should.be.equal(true);
var linkText = yield browser.getText('#hp header a[href="/"]');
linkText.should.be.equal(sandbox.general.title);
});
it('should have a link to the settings page', function *() {
var link = yield browser.isExisting('ul.nav > li > a');
link.should.be.equal(true);
var linkText = yield browser.getText('ul.nav > li > a')
linkText.should.be.equal('Settings');
});
it('should have a form to enable downloading a file', function *() {
var form = yield browser.isExisting('.download form');
form.should.be.equal(true);
var action = yield browser.getAttribute('.download form', 'action');
should.exist(action);
action.should.be.equal(sandbox.general.baseUrl + '/download');
var submit = yield browser.isExisting('.download form button[type=submit]');
submit.should.be.equal(true);
});
it('should not have a form to enable downloading a file if this feature is disabled', function *() {
var currentValue = sandbox.general.enableDownload;
if(currentValue) {
var enableDownload = yield browser.click('ul.nav > li > a')
.click('input#enableDownload')
.submitForm('.tab-pane.active form')
.waitForExist('.tab-pane.active .alert strong', 5000)
.isExisting('input#enableDownload:checked');
enableDownload.should.be.equal(false);
}
var download = yield browser.url('/')
.isExisting('.download');
download.should.be.equal(false);
var form = yield browser.url('/')
.isExisting('.download form');
form.should.be.equal(false);
var submit = yield browser.url('/')
.isExisting('.download form button[type=submit]');
submit.should.be.equal(false);
if(currentValue) {
var enableDownload = yield browser.click('ul.nav > li > a')
.click('input#enableDownload')
.submitForm('.tab-pane.active form')
.waitForExist('.tab-pane.active .alert strong', 5000)
.isExisting('input#enableDownload:checked');
enableDownload.should.be.equal(true);
}
});
it('should have a form to enable downloading a file which throws an error when trying to exploit path traversal', function *() {
var form = yield browser.isExisting('.download form');
form.should.be.equal(true);
var action = yield browser.getAttribute('.download form', 'action');
should.exist(action);
action.should.be.equal(sandbox.general.baseUrl + '/download');
var submit = yield browser.isExisting('.download form button[type=submit]');
submit.should.be.equal(true);
var error = yield browser.setValue('input[name="token"]', '../config')
.submitForm('.download form button[type=submit]')
.waitForExist('.download form span.help-block span.text-danger')
.getText('.download form span.help-block span.text-danger');
error.should.equals('Oh my... something went terribly wrong!');
});
it('should have a dropzone', function *() {
var dropzone = yield browser.isExisting('.dz-action-add.dz-clickable.dropzone');
dropzone.should.be.equal(true);
var previewTemplate = yield browser.isExisting('.dropzone .dz-preview-template');
previewTemplate.should.be.equal(true);
var message = yield browser.isExisting('.dropzone .dz-default.dz-message');
message.should.be.equal(true);
});
it('should have a fallback to dropzone', function *() {
var currentValue = sandbox.dropzone.fallback;
var fallback = yield browser.click('ul.nav > li > a')
.click('a[href="/settings/transfer"]')
.click('input#forceFallback')
.submitForm('.tab-pane.active form')
.url('/')
.isExisting('.fallback');
fallback.should.be.equal(true);
if(!currentValue) {
fallback = yield browser.click('ul.nav > li > a')
.click('a[href="/settings/transfer"]')
.click('input#forceFallback')
.submitForm('.tab-pane.active form')
.url('/')
.isExisting('.fallback');
fallback.should.be.equal(false);
}
});
it('should be possible to upload file and retrieve token', function *() {
var currentValue = sandbox.dropzone.fallback;
var fallback = yield browser.click('ul.nav > li > a')
.click('a[href="/settings/transfer"]')
.click('input#forceFallback')
.submitForm('.tab-pane.active form')
.url('/')
.isExisting('.fallback');
fallback.should.be.equal(true);
if(sandbox.storage.location !== 'local') {
var alert = yield browser.click('ul.nav > li > a')
.click('a[href="/settings/storage"]')
.selectByVisibleText('select#StorageLocation', 'Local file system')
.submitForm('.tab-pane.active form')
.waitForExist('.tab-pane.active .alert strong', 5000)
.getText('.tab-pane.active .alert strong');
alert.should.be.equal('Success!');
}
var preview = yield browser.url('/')
.waitForExist('input#payload')
.execute(function() {
// The WebDriverIO chooseFile() method cannot target an invisible input
// It also does not work well with multiple file input
jQuery("input#payload").removeAttr('multiple')
.show();
})
.waitForVisible('input#payload')
.chooseFile('input#payload', path.join(__dirname, '../../../README.md'))
.submitForm('.fallback form')
.waitForExist('.dz-preview-template')
.getText('.dz-preview-template .dz-preview-description span[data-dz-name]')
preview.should.be.equal('README.md');
var token = yield browser.getText('.dz-preview-item .dz-preview-description .dz-preview-result .text-success a')
should.exist(token);
var tokenLink = yield browser.getAttribute('.dz-preview-item .dz-preview-description .dz-preview-result .text-success a', 'href')
tokenLink.should.be.equal(sandbox.general.baseUrl + '/download/' + token + '/');
var emailHeader = yield browser.getText('#hp .dz-completed-container .dz-upload-complete h2');
should.exist(emailHeader);
var emailForm = yield browser.isExisting('#hp .dz-completed-container .dz-upload-complete form');
emailForm.should.be.equal(true);
var emailFrom = yield browser.isExisting('#hp .dz-completed-container .dz-upload-complete form input#from');
emailFrom.should.be.equal(true);
var emailTo = yield browser.isExisting('#hp .dz-completed-container .dz-upload-complete form input#to');
emailTo.should.be.equal(true);
var emailBody = yield browser.isExisting('#hp .dz-completed-container .dz-upload-complete form textarea[name="email[body]"]');
emailBody.should.be.equal(true);
var submit = yield browser.isExisting('#hp .dz-completed-container .dz-upload-complete form button[type="submit"]');
submit.should.be.equal(true);
if(!currentValue) {
fallback = yield browser.click('ul.nav > li > a')
.click('a[href="/settings/transfer"]')
.click('input#forceFallback')
.submitForm('.tab-pane.active form')
.url('/')
.isExisting('.fallback');
fallback.should.be.equal(false);
}
});
it('should be possible to upload encrypted file and retrieve token', function *() {
var currentValue = sandbox.dropzone.fallback;
if(!currentValue) {
var fallback = yield browser.click('ul.nav > li > a')
.click('a[href="/settings/transfer"]')
.click('input#forceFallback')
.submitForm('.tab-pane.active form')
.url('/')
.isExisting('.fallback');
fallback.should.be.equal(true);
}
if(sandbox.storage.location !== 'local') {
var alert = yield browser.url('/')
.click('ul.nav > li > a')
.click('a[href="/settings/storage"]')
.selectByVisibleText('select#StorageLocation', 'Local file system')
.submitForm('.tab-pane.active form')
.waitForExist('.tab-pane.active .alert strong', 5000)
.getText('.tab-pane.active .alert strong');
alert.should.be.equal('Success!');
}
var encrypted = yield browser.url('/')
.click('ul.nav > li > a')
.click('a[href="/settings/security"]')
.click('input#encryptionEnabled')
.setValue('input#encryptionKey', 'MySecretEncryptionKey')
.submitForm('.tab-pane.active form')
.waitForExist('.tab-pane.active .alert strong', 5000)
.isExisting('input#encryptionEnabled:checked');
encrypted.should.be.equal(true);
var preview = yield browser.url('/')
.waitForExist('input#payload')
.execute(function() {
// The WebDriverIO chooseFile() method cannot target an invisible input
// It also does not work well with multiple file input
jQuery("input#payload").removeAttr('multiple')
.show();
})
.waitForVisible('input#payload')
.chooseFile('input#payload', path.join(__dirname, '../../../README.md'))
.submitForm('.fallback form')
.waitForExist('.dz-preview-template')
.getText('.dz-preview-template .dz-preview-description span[data-dz-name]')
preview.should.be.equal('README.md');
var token = yield browser.getText('.dz-preview-item .dz-preview-description .dz-preview-result .text-success a')
should.exist(token);
var tokenLink = yield browser.getAttribute('.dz-preview-item .dz-preview-description .dz-preview-result .text-success a', 'href')
tokenLink.should.be.equal(sandbox.general.baseUrl + '/download/' + token + '/');
var emailHeader = yield browser.getText('#hp .dz-completed-container .dz-upload-complete h2');
should.exist(emailHeader);
var emailForm = yield browser.isExisting('#hp .dz-completed-container .dz-upload-complete form');
emailForm.should.be.equal(true);
var emailFrom = yield browser.isExisting('#hp .dz-completed-container .dz-upload-complete form input#from');
emailFrom.should.be.equal(true);
var emailTo = yield browser.isExisting('#hp .dz-completed-container .dz-upload-complete form input#to');
emailTo.should.be.equal(true);
var emailBody = yield browser.isExisting('#hp .dz-completed-container .dz-upload-complete form textarea[name="email[body]"]');
emailBody.should.be.equal(true);
var submit = yield browser.isExisting('#hp .dz-completed-container .dz-upload-complete form button[type="submit"]');
submit.should.be.equal(true);
if(!currentValue) {
fallback = yield browser.click('ul.nav > li > a')
.click('a[href="/settings/transfer"]')
.click('input#forceFallback')
.submitForm('.tab-pane.active form')
.url('/')
.isExisting('.fallback');
fallback.should.be.equal(false);
}
});
});<๏ฝfimโend๏ฝ> | |
<|file_name|>375. Guess Number Higher or Lower II.py<|end_file_name|><｜fim▁begin｜>"""
We are playing the Guess Game. The game is as follows:
I pick a number from 1 to n. You have to guess which number I picked.
Every time you guess wrong, I'll tell you whether the number I picked is higher or lower.
However, when you guess a particular number x, and you guess wrong, you pay $x. You win the game when you guess the number I picked.
Example:
n = 10, I pick 8.<｜fim▁hole｜>Third round: You guess 9, I tell you that it's lower. You pay $9.
Game over. 8 is the number I picked.
You end up paying $5 + $7 + $9 = $21.
Given a particular n โฅ 1, find out how much money you need to have to guarantee a win.
"""
class Solution(object):
def getMoneyAmount(self, n):
"""
:type n: int
:rtype: int
"""
self.dp = [[0] * (n + 1) for _ in range(n + 1)]
return self.helper(1, n)
def helper(self, s, e):
if s >= e:
return 0
if self.dp[s][e] != 0:
return self.dp[s][e]
res = float('inf')
for i in range(s, e + 1):
res = min(res, i + max(self.helper(s, i - 1), self.helper(i + 1, e)))
self.dp[s][e] = res
return res<๏ฝfimโend๏ฝ> |
First round: You guess 5, I tell you that it's higher. You pay $5.
Second round: You guess 7, I tell you that it's higher. You pay $7. |
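A quick sanity check of the minimax DP in this row; the driver below is hypothetical (not part of the dataset) and the expected values follow from working the recurrence by hand for small n:
s = Solution()  # the class defined in the snippet above
print(s.getMoneyAmount(1))   # 0  -- a single candidate cannot be guessed wrong
print(s.getMoneyAmount(2))   # 1  -- guess 1; if wrong, the answer must be 2
print(s.getMoneyAmount(3))   # 2  -- guess 2; either reply pins the answer
print(s.getMoneyAmount(10))  # 16 -- the $21 sequence in the story is not optimal play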
<|file_name|>rodi.py<|end_file_name|><｜fim▁begin｜>#!/usr/bin/python
import sys, signal, logging, time, subprocess, RPi.GPIO as GPIO
<｜fim▁hole｜>WATER_VALVE = 10 # GPIO port for the Water Electro valve, High by default after boot
VALVE_CHGSTATE_TIMER = 25 # Electro valve needs roughly 20 seconds to switch from open to close and vice versa
logger = None
def Setup():
global logger
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
handler = logging.FileHandler('/var/log/rodi.log')
handler.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(message)s',"%Y-%m-%d %H:%M:%S")
handler.setFormatter(formatter)
logger.addHandler(handler)
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(WATER_VALVE, GPIO.OUT)
GPIO.setup(FLOATSW_HIGH_WL, GPIO.IN, pull_up_down=GPIO.PUD_UP) #, initial = GPIO.HIGH)
if not sys.stdout.isatty():
sys.stderr = open('/var/log/rodi_stderr.log', 'a')
sys.stdout = open('/var/log/rodi_stdout.log', 'a')
def Alert(message):
global logger
logger.info(message) # log the event
print(message)
logger.handlers[0].flush()
def Close_valve():
GPIO.output(WATER_VALVE, False)
Alert("Closing the RO/DI valve")
def Open_valve():
if GPIO.input(WATER_VALVE) == True:
Alert("RO/DI Valve already opened")
sys.exit(5)
else:
Alert("Opening the RO/DI valve")
GPIO.output(WATER_VALVE, True)
time.sleep(VALVE_CHGSTATE_TIMER)
def Refilling():
if GPIO.input(WATER_VALVE) == True:
return True
else:
return False
class GracefulKiller:
kill_now = False
def __init__(self):
signal.signal(signal.SIGINT, self.exit_gracefully)
signal.signal(signal.SIGTERM, self.exit_gracefully)
def exit_gracefully(self,signum, frame):
self.kill_now = True
if not len(sys.argv) > 1:
print("You must provide one numerical argument to this function (duration in seconds). Exiting.")
sys.exit(1)
if sys.argv[1] != "close" and sys.argv[1] != "stop" and not sys.argv[1].isdigit():
print("Value is neither 'close', 'stop' or a refill duration expressed in seconds")
sys.exit(1)
i = 0
killer = GracefulKiller()
Setup()
if sys.argv[1] == "close" or sys.argv[1] == "stop":
Close_valve()
if str.count(subprocess.check_output(["ps", "aux"]), "rodi") > 1:
Alert("Warning, we were called while another instance of rodi.py was already in Memory")
sys.exit(1)
if GPIO.input(FLOATSW_HIGH_WL) == 0:
Alert("Water level in sump already high, refilling would be dangerous, exiting")
if GPIO.input(WATER_VALVE) == True:
Alert("RO/DI Valve already opened while high water in the sump, closing.")
Close_valve()
sys.exit(3)
if sys.argv[1].isdigit():
Alert("Not already refilling, sump water level normal, proceeding.")
Alert("Refilling for " + sys.argv[1] + " seconds")
try:
Open_valve()
while i<VALVE_CHGSTATE_TIMER+int(sys.argv[1]):
time.sleep(1)
i=i+1
if GPIO.input(FLOATSW_HIGH_WL) == 0:
Alert("Water level in sump is now high, stopping the refill")
Close_valve()
sys.exit(3)
break
if killer.kill_now:
Alert("Caught a Sigterm, Sigkill or CTRL+C, exiting.")
Close_valve()
sys.exit(2)
break
Alert("Refill done, exiting.")
Close_valve()
sys.exit(0)
except (RuntimeError, IOError):
Alert("Caught an exception, exiting.")
Close_valve()
sys.exit(4)
# Exit code :
# 5 : already refilling or cannot create lock file
# 4 : Caught an exception
# 3 : water is high either at start or during the refill
# 2 : a sigkill, sigterm or keyboard CTRL+C signal was received
# 1 : incorrect parameter received
# 0 : all went fine<๏ฝfimโend๏ฝ> | FLOATSW_HIGH_WL = 26 # high water level float switch |
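The exit-code table above amounts to a small protocol for supervising scripts. A hedged sketch of a caller that maps the codes back to messages (the script path and refill duration are assumptions):
import subprocess
RODI_EXIT = {
    0: "refill finished normally",
    1: "bad or missing argument",
    2: "stopped by SIGTERM/SIGKILL/CTRL+C",
    3: "sump water level high",
    4: "runtime/IO exception",
    5: "already refilling or lock error",
}
rc = subprocess.call(["python", "rodi.py", "30"])  # refill for 30 seconds
print(RODI_EXIT.get(rc, "unknown exit code %d" % rc))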
<|file_name|>matches2021.rs<|end_file_name|><｜fim▁begin｜>// run-pass
// edition:2021
// regression test for https://github.com/rust-lang/rust/pull/85678
<｜fim▁hole｜>#![feature(assert_matches)]
use std::assert_matches::assert_matches;
fn main() {
assert!(matches!((), ()));
assert_matches!((), ());
}<๏ฝfimโend๏ฝ> | |
<|file_name|>test_mqtt.py<|end_file_name|><｜fim▁begin｜>"""Test MQTT connections."""
import unittest
from infopanel import mqtt
from infopanel.tests import load_test_config
class TestMqtt(unittest.TestCase):
"""Test connectivity with MQTT."""
@classmethod
def setUpClass(cls):
cls.conf = load_test_config()
def setUp(self):<๏ฝfimโhole๏ฝ> @unittest.skip(
"Something wrong with the test.mosquitto.org connection from travis ci"
)
def test_connect(self):
"""
Make sure we can connect.
This relies on the test.mosquitto.org test server.
"""
self.client.start()
self.client.stop()
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testName']
unittest.main()<๏ฝfimโend๏ฝ> | """Set up each test."""
data = {}
self.client = mqtt.MQTTClient(data, self.conf["mqtt"])
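For context, the skipped test above only needs a reachable broker. A standalone connectivity probe against the same public server could look like this paho-mqtt sketch (illustrative only, not part of the infopanel suite):
import paho.mqtt.client as paho
probe = paho.Client()
probe.connect("test.mosquitto.org", 1883, keepalive=60)  # public test broker
probe.loop_start()   # spin the network loop briefly
probe.loop_stop()
probe.disconnect()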
|
<|file_name|>forms.py<|end_file_name|><｜fim▁begin｜>''' forms, mostly used for simple tastypie validation '''
from django.contrib.gis import forms
class MeetingForm(forms.Form):
''' form for meetings '''<๏ฝfimโhole๏ฝ> day_of_week = forms.IntegerField(min_value=1, max_value=7)
start_time = forms.TimeField()
end_time = forms.TimeField()
name = forms.CharField(max_length=100)
description = forms.CharField(max_length=255, required=False)
address = forms.CharField(max_length=300)<๏ฝfimโend๏ฝ> | |
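Since this row is a plain Django form used for tastypie validation, a short hypothetical usage example (all field values invented) shows how it would be driven:
data = {"day_of_week": 3, "start_time": "18:00", "end_time": "19:30",
        "name": "Weekly sync", "address": "123 Main St"}
form = MeetingForm(data)
print(form.is_valid())                  # True: description is the only optional field
print(form.cleaned_data["start_time"])  # datetime.time(18, 0)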
<|file_name|>SystemValue.java<|end_file_name|><｜fim▁begin｜>package net.stickycode.configuration.value;
import net.stickycode.configuration.ConfigurationValue;
public class SystemValue
implements ConfigurationValue {
private String value;
public SystemValue(String value) {
this.value = value;
}
@Override
public String get() {<๏ฝfimโhole๏ฝ> public boolean hasPrecedence(ConfigurationValue v) {
if (ApplicationValue.class.isAssignableFrom(v.getClass()))
return false;
if (SystemValue.class.isAssignableFrom(v.getClass()))
return false;
return true;
}
@Override
public String toString() {
return getClass().getSimpleName() + "{" + value + "}";
}
}<๏ฝfimโend๏ฝ> | return value;
}
@Override |
<|file_name|>redis.go<|end_file_name|><｜fim▁begin｜>package kvstore
import (
"context"
"errors"
"log"
"strconv"
"strings"
"fmt"
"github.com/gomodule/redigo/redis"
"os"
)
var (
errRedisNewHandler = "kvstore(redis): new handler error"
errRedisConnectionNotExists = "kvstore(redis): '%s' handler not exists"
errRedisZscanCursorTypeError = "kvstore(redis): zscan cursor type error"
errRedisZscanValueTypeError = "kvstore(redis): zscan value type error"
errRedisZscanValueLengthError = "kvstore(redis): zscan value length error"
)
type RedisHandler struct {
pools map[string]*RedisPool
}
func NewRedisHandler() Handler {
return &RedisHandler{}
}
func (this *RedisHandler) Initiate(ctx context.Context) error {
this.pools = make(map[string]*RedisPool)
return nil
}
func (this *RedisHandler) NewHandler(name string, config map[string]interface{}) (KvstoreHandler, error) {
pool := &RedisPool{Pool: &redis.Pool{}}
configHost := config["KVSTORE_REDIS_HOST"]
if configHost != nil {
host, ok := configHost.(string)
if ok {
pool.Host = host
} else {
return nil, errors.New(errRedisNewHandler + ": KVSTORE_REDIS_HOST data type must be string")
}
}
configPort := config["KVSTORE_REDIS_PORT"]
if configPort != nil {
port, ok := configPort.(string)
if ok {
pool.Port = port
} else {
return nil, errors.New(errRedisNewHandler + ": KVSTORE_REDIS_PORT data type must be string")
}
}
configPassword := config["KVSTORE_REDIS_PASSWORD"]
if configPassword != nil {
password, ok := configPassword.(string)
if ok {
pool.Password = password
} else {
return nil, errors.New(errRedisNewHandler + ": KVSTORE_REDIS_PASSWORD data type must be string")
}
}
configDatabase := config["KVSTORE_REDIS_DATABASE"]
if configDatabase != nil {
database, ok := configDatabase.(int)
if ok {
pool.Database = database
} else {
return nil, errors.New(errRedisNewHandler + ": KVSTORE_REDIS_DATABASE data type must be int")
}
}
configKeyPrefix := config["KVSTORE_REDIS_KEY_PREFIX"]
if configKeyPrefix != nil {
keyPrefix, ok := configKeyPrefix.(string)
if ok {
pool.KeyPrefix = keyPrefix
} else {
return nil, errors.New(errRedisNewHandler + ": KVSTORE_REDIS_KEY_PREFIX data type must be string")
}
}
configMaxActive := config["KVSTORE_REDIS_MAX_ACTIVE"]
if configMaxActive != nil {
maxActive, ok := configMaxActive.(int)
if ok {
pool.MaxActive = maxActive
} else {
return nil, errors.New(errRedisNewHandler + ": KVSTORE_REDIS_MAX_ACTIVE data type must be int")
}
}
configMaxIdle := config["KVSTORE_REDIS_MAX_IDLE"]
if configMaxIdle != nil {
maxIdle, ok := configMaxIdle.(int)
if ok {
pool.MaxIdle = maxIdle
} else {
return nil, errors.New(errRedisNewHandler + ": KVSTORE_REDIS_MAX_IDLE data type must be int")
}
}
configDebug := config["KVSTORE_REDIS_DEBUG"]
if configDebug != nil {
debug, ok := configDebug.(bool)
if ok {
pool.Debug = debug
} else {
return nil, errors.New(errRedisNewHandler + ": KVSTORE_REDIS_DEBUG data type must be bool")
}
}
if pool.Host != "" {
pool.Pool = &redis.Pool{
MaxIdle: pool.MaxIdle,
MaxActive: pool.MaxActive,
Dial: func() (conn redis.Conn, err error) {
c, err := redis.Dial("tcp",
pool.Host+":"+pool.Port,
redis.DialPassword(pool.Password),
redis.DialDatabase(pool.Database),
)
if err != nil {
return nil, err
}
return c, nil
},
}
if this.pools == nil {
this.pools = make(map[string]*RedisPool)
}
this.pools[name] = pool
return pool, nil
}
return nil, errors.New(errRedisNewHandler + ": pool.Host is empty")
}
func (this *RedisHandler) GetHandler(name string) (KvstoreHandler, error) {
if this.pools == nil {
return nil, errors.New(fmt.Sprintf(errRedisConnectionNotExists, name))
}
handlerPool, ok := this.pools[name]
if !ok {
return nil, errors.New(fmt.Sprintf(errRedisConnectionNotExists, name))
}
return handlerPool, nil
}
type RedisPool struct {
*redis.Pool
Host string
Port string
Password string
Database int
KeyPrefix string
Debug bool
}
func (this *RedisPool) GetConfig() map[string]interface{} {
return map[string]interface{}{
"host": this.Host,
"port": this.Port,
"password": this.Password,
"database": this.Database,
"max_idle": this.MaxIdle,
"max_active": this.MaxActive,
"prefix": this.KeyPrefix,
"debug": this.Debug,
}
}
func (this *RedisPool) Get(key interface{}) (interface{}, error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return nil, err
}
value, err := redis.String(c.Do("GET", sKey))
if err != nil {
return nil, err
}
return value, nil
}
func (this *RedisPool) Set(key interface{}, value interface{}) error {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return err
}
_, err = c.Do("SET", sKey, value)
return err
}
func (this *RedisPool) Del(key interface{}) (interface{}, error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return nil, err
}
value, err := c.Do("DEL", sKey)
if err != nil {
return nil, err
}
return value, nil
}
func (this *RedisPool) Keys(key interface{}) (interface{}, error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return nil, err
}
value, err := redis.Strings(c.Do("KEYS", sKey))
if err != nil {
return nil, err
}
return value, nil
}
func (this *RedisPool) Expire(key interface{}, expire int64) error {
c := this.Pool.Get()
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return err
}
_, err = c.Do("EXPIRE", sKey, expire)
return err
}
func (this *RedisPool) Hmset(key interface{}, args ...interface{}) error {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return err
}
redisArgs := redis.Args{}.Add(sKey)
for i := 0; i < len(args); i = i + 2 {
redisArgs = redisArgs.Add(args[i]).Add(args[i+1])
}
_, err = c.Do("HMSET", redisArgs...)
return err
}
func (this *RedisPool) Hmget(key interface{}, args ...interface{}) (interface{}, error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return nil, err
}
redisArgs := redis.Args{}
redisArgs = redisArgs.Add(sKey)
for i := 0; i < len(args); i = i + 1 {
redisArgs = redisArgs.Add(args[i])
}
values, err := c.Do("HMGET", redisArgs...)
if err != nil {
return nil, err
}
return values, nil
}
func (this *RedisPool) Hset(key interface{}, field interface{}, value interface{}) error {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return err
}
_, err = c.Do("HSET", sKey, field, value)
return err
}
func (this *RedisPool) Hget(key interface{}, field interface{}) (interface{}, error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return nil, err
}
value, err := c.Do("HGET", sKey, field)
if err != nil {
return nil, err
}
return value, nil
}
func (this *RedisPool) Hlen(key interface{}) (interface{}, error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return nil, err
}
count, err := c.Do("HLEN", sKey)
if err != nil {
return nil, err
}
return count, nil
}
func (this *RedisPool) Hdel(key interface{}, field interface{}) (interface{}, error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return nil, err
}
value, err := c.Do("HDEL", sKey, field)
if err != nil {
return nil, err
}
return value, nil
}
func (this *RedisPool) Hgetall(key interface{}) (interface{}, error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return nil, err
}
value, err := redis.Values(c.Do("HGETALL", sKey))
if err != nil {
return nil, err
}
return value, nil
}
func (this *RedisPool) ScanStruct(src []interface{}, dest interface{}) error {
return redis.ScanStruct(src, dest)
}
func (this *RedisPool) Exists(key interface{}) (bool, error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return false, err
}
value, err := redis.Int(c.Do("EXISTS", sKey))
if err != nil {
return false, err
}
if value == 1 {
return true, nil
} else {
return false, nil
}
}
func (this *RedisPool) Sadd(key interface{}, args ...interface{}) error {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return err
}
redisArgs := redis.Args{}.Add(sKey)
for _, arg := range args {
redisArgs = redisArgs.AddFlat(arg)
}
_, err = c.Do("SADD", redisArgs...)
return err
}
func (this *RedisPool) Scard(key interface{}) (int64, error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return 0, err
}
return redis.Int64(c.Do("SCARD", sKey))
}
func (this *RedisPool) Zadd(key interface{}, value ...interface{}) error {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return err
}
var valueLen int
for _, v := range value {
if v == nil {
continue
}
valueLen++
}
var inputs []interface{}
if valueLen > 0 {
inputs = append(inputs, sKey)
inputs = append(inputs, value...)
_, err = c.Do("ZADD", inputs...)
return err
}
return nil
}
func (this *RedisPool) Zcard(key interface{}) (int64, error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return 0, err
}
return redis.Int64(c.Do("ZCARD", sKey))
//return count, err
}
func (this *RedisPool) Zscore(key interface{}, field interface{}) (interface{}, error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return nil, err
}
value, err := c.Do("ZSCORE", sKey, field)
if err != nil {
return nil, err
}
return value, nil
}
func (this *RedisPool) Zrem(key interface{}, field interface{}) (interface{}, error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return nil, err
}
res, err := c.Do("ZREM", sKey, field)
if err != nil {
return nil, err
}
return res, nil
}
func (this *RedisPool) Zrange(key interface{}, start, end interface{}) (interface{}, error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return nil, err
}
value, err := c.Do("ZRANGE", sKey, start, end)
if err != nil {
return nil, err
}
return value, nil
}
func (this *RedisPool) Zrevrange(key interface{}, start, end interface{}) (interface{}, error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return nil, err
}
value, err := c.Do("ZREVRANGE", sKey, start, end)
if err != nil {
return nil, err
}
return value, nil
}
func (this *RedisPool) ZrangeByScore(key interface{}, min, max interface{}, limits ...interface{}) (value interface{}, err error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return nil, err
}
args := redis.Args{}.Add(sKey).Add(min).Add(max).Add("WITHSCORES")
if len(limits) > 0 {
args = args.Add("LIMIT").Add(limits...)
}
value, err = c.Do("ZRANGEBYSCORE", args...)
if err != nil {
return nil, err
}
return value, nil
}
func (this *RedisPool) ZrevrangeByScore(key interface{}, min, max interface{}, limits ...interface{}) (value interface{}, err error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return nil, err
}
args := redis.Args{}.Add(sKey).Add(max).Add(min).Add("WITHSCORES")
if len(limits) > 0 {
args = args.Add("LIMIT").Add(limits...)
}
value, err = c.Do("ZREVRANGEBYSCORE", args...)
if err != nil {
return nil, err
}
return value, nil
}
func (this *RedisPool) Zscan(key interface{}, cursor string, match string, count int64) (nextCursor string, keys []string, err error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return "", nil, err
}
args := redis.Args{}.Add(sKey).Add(cursor)
if match != "" {
args = args.Add("MATCH", match)
}
if count == 0 {<๏ฝfimโhole๏ฝ> count = 1000
}
if count > 0 {
args = args.Add("COUNT", count)
}
values, err := redis.Values(c.Do("ZSCAN", args...))
if err != nil {
return "", nil, err
}
if len(values) == 2 {
nextCursor = ""
if cursorByte, ok := values[0].([]uint8); ok {
nextCursor = string(cursorByte)
} else {
return "", nil, errors.New(errRedisZscanCursorTypeError)
}
keys = make([]string, 0)
if keysArr, ok := values[1].([]interface{}); ok {
for _, keyInterface := range keysArr {
if keyByte, ok := keyInterface.([]uint8); ok {
keys = append(keys, string(keyByte))
} else {
return "", nil, errors.New(errRedisZscanCursorTypeError)
}
}
return nextCursor, keys, nil
} else {
return "", nil, errors.New(errRedisZscanValueTypeError)
}
} else {
return "", nil, errors.New(errRedisZscanValueLengthError)
}
}
func (this *RedisPool) Sscan(key interface{}, cursor string, match string, count int64) (interface{}, error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return nil, err
}
args := redis.Args{}.Add(sKey).Add(cursor)
if match != "" {
args = args.Add("MATCH", match)
}
if count == 0 {
count = 1000
}
if count > 0 {
args = args.Add("COUNT", count)
}
return c.Do("SSCAN", args...)
}
func (this *RedisPool) Hscan(key interface{}, cursor string, match string, count int64) (interface{}, error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return nil, err
}
args := redis.Args{}.Add(sKey).Add(cursor)
if match != "" {
args = args.Add("MATCH", match)
}
if count == 0 {
count = 1000
}
if count > 0 {
args = args.Add("COUNT", count)
}
return c.Do("HSCAN", args...)
}
func (this *RedisPool) Scan(cursor string, match string, count int64) (nextCursor string, keys []string, err error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
args := redis.Args{}.Add(cursor)
if match != "" {
args = args.Add("MATCH", match)
}
if count > 0 {
args = args.Add("COUNT", count)
}
values, err := redis.Values(c.Do("SCAN", args...))
if err != nil {
return "", nil, err
}
if len(values) == 2 {
nextCursor = ""
if cursorByte, ok := values[0].([]uint8); ok {
nextCursor = string(cursorByte)
} else {
return "", nil, errors.New(errRedisZscanCursorTypeError)
}
keys = make([]string, 0)
if keysArr, ok := values[1].([]interface{}); ok {
for _, keyInterface := range keysArr {
if keyByte, ok := keyInterface.([]uint8); ok {
keys = append(keys, string(keyByte))
} else {
return "", nil, errors.New(errRedisZscanCursorTypeError)
}
}
return nextCursor, keys, nil
} else {
return "", nil, errors.New(errRedisZscanValueTypeError)
}
} else {
return "", nil, errors.New(errRedisZscanValueLengthError)
}
}
func (this *RedisPool) Sort(key interface{}, by interface{}, offest int64, count int64, desc *bool, alpha *bool, gets ...interface{}) ([]string, error) {
c := this.Pool.Get()
if this.Debug {
c = redis.NewLoggingConn(c, log.New(os.Stdout, "", log.LstdFlags), "")
}
defer c.Close()
sKey, err := this.generateKey(key)
if err != nil {
return nil, err
}
args := redis.Args{}.Add(sKey)
if by != nil {
args = args.Add("BY", by)
}
if len(gets) > 0 {
for _, get := range gets {
if get != "" {
args = args.Add("GET", get)
}
}
}
if count == 0 {
count = 1000
}
args = append(args, "LIMIT", offest, count)
if desc != nil && *desc {
args = args.Add("DESC")
}
if alpha != nil && *alpha {
args = args.Add("ALPHA")
}
return redis.Strings(c.Do("SORT", args...))
}
func (this *RedisPool) generateKey(key interface{}) (rKey string, err error) {
switch key.(type) {
case string:
rKey = this.KeyPrefix + key.(string)
case int:
rKey = this.KeyPrefix + strconv.Itoa(key.(int))
case int64:
rKey = this.KeyPrefix + strconv.FormatInt(key.(int64), 10)
case []byte:
rKey = this.KeyPrefix + string(key.([]byte))
default:
return "", errors.New("key type not support")
}
return rKey, nil
}
func RedisErrNil(err error) bool {
return strings.Contains(err.Error(), redis.ErrNil.Error())
}<๏ฝfimโend๏ฝ> | |
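Zscan/Sscan/Hscan/Scan above follow Redis's cursor contract: call repeatedly, feed the returned cursor back in, and stop when the server answers with cursor 0. The same loop in Python with redis-py, shown to illustrate the contract rather than this Go package:
import redis
r = redis.Redis()
cursor = 0
while True:
    cursor, keys = r.scan(cursor=cursor, match="prefix:*", count=1000)
    for key in keys:
        print(key)
    if cursor == 0:  # Redis signals a finished iteration with cursor 0
        break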
<|file_name|>opc6emu.py<|end_file_name|><๏ฝfimโbegin๏ฝ>import sys, re
mnemonics="mov,and,or,xor,add,adc,sto,ld,ror,jsr,sub,sbc,inc,lsr,dec,asr,halt,bswp,putpsr,getpsr,rti,not,out,in,push,pop,cmp,cmpc".split(",")
op = dict([(opcode,mnemonics.index(opcode)) for opcode in mnemonics])
dis = dict([(mnemonics.index(opcode),opcode) for opcode in mnemonics])
pred_dict = {0:"",1:"0.",2:"z.",3:"nz.",4:"c.",5:"nc.",6:"mi.",7:"pl."}
if len(sys.argv) > 3:
with open(sys.argv[3],"r") as f:
input_text = iter(''.join(f.readlines()))
else:
input_text = iter([chr(0)]*100000)
def print_memory_access( type, address, data):
ch = '%s' % chr(data) if ( 0x1F < data < 0x7F) else '.'
print( "%5s: Address : 0x%04x (%5d) : Data : 0x%04x (%5d) %s" % (type,address,address,data,data,ch))
with open(sys.argv[1],"r") as f:
wordmem = [ (int(x,16) & 0xFFFF) for x in f.read().split() ]
(regfile, acc, c, z, pcreg, c_save, s, ei, swiid, interrupt, iomem) = ([0]*16,0,0,0,15,0,0,0,0,0, [0]*65536) # initialise machine state inc PC = reg[15]
print ("PC : Mem : Instruction : SWI I S C Z : %s\n%s" % (''.join([" r%2d " % d for d in range(0,16)]), '-'*130))
while True:
(pc_save,flag_save,regfile[0],preserve_flag) = (regfile[pcreg],(swiid,ei,s,c,z),0,False) # always overwrite regfile location 0 and then dont care about assignments
instr_word = wordmem[regfile[pcreg] & 0xFFFF ] & 0xFFFF
(p0, p1, p2) = ( (instr_word & 0x8000) >> 15, (instr_word & 0x4000) >> 14, (instr_word & 0x2000)>>13)
(opcode, source, dest) = (((instr_word & 0xF00) >> 8) | (0x10 if (p0,p1,p2)==(0,0,1) else 0x00), (instr_word & 0xF0) >>4, instr_word & 0xF)
(instr_len, rdmem, preserve_flag) = (2 if (instr_word & 0x1000) else 1, (opcode in(op["ld"],op["in"],op["pop"])), (dest==pcreg))
operand = wordmem[regfile[pcreg]+1] if (instr_len==2) else (source if opcode in [op["dec"],op["inc"]] else ((opcode==op["pop"])-(opcode==op["push"])))
instr_str = "%s%s r%d," % ((pred_dict[p0<<2 | p1<<1 | p2] if (p0,p1,p2)!=(0,0,1) else ""),dis[opcode],dest)
instr_str += ("%s%d%s" % (("r" if opcode not in (op["inc"],op["dec"]) else ""),source, (",0x%04x" % operand) if instr_len==2 else ''))
instr_str = re.sub("r0","psr",instr_str,1) if (opcode in (op["putpsr"],op["getpsr"])) else instr_str
(mem_str, source) = (" %04x %4s " % (instr_word, "%04x" % (operand) if instr_len==2 else ''), (0 if opcode in (op["dec"],op["inc"]) else source))
regfile[15] += instr_len
eff_addr = (regfile[source] + operand*(opcode!=op["pop"]))&0xFFFF # EA_ED must be computed after PC is brought up to date
ea_ed = wordmem[eff_addr] if (opcode in(op["ld"],op["pop"])) else iomem[eff_addr] if rdmem else eff_addr
if opcode == op["in"]:
try:
ea_ed = ord(input_text.__next__())
except:
ea_ed = 0
if interrupt : # software interrupts dont care about EI bit
(interrupt, regfile[pcreg], pc_int, psr_int , ei) = (0, 0x0002, pc_save, (swiid,ei,s,c,z), 0)
else:
print ("%04x :%s: %-22s : %1X %d %d %d %d : %s" % (pc_save, mem_str, instr_str, swiid ,ei, s, c, z, ' '.join(["%04x" % i for i in regfile])))
if ( ( (p0,p1,p2)==(0,0,1) ) or (bool(p2) ^ (bool(s if p0==1 else z) if p1==1 else bool(c if p0==1 else 1)))):
if opcode == (op["halt"]):
print("Stopped on halt instruction at %04x with halt number 0x%04x" % (regfile[15]-(instr_len), operand) )
break
elif opcode == (op["rti"]) and (dest==15):
(regfile[pcreg], flag_save, preserve_flag ) = (pc_int, (0,psr_int[1],psr_int[2],psr_int[3],psr_int[4]), True )
elif opcode in (op["and"], op["or"]):
regfile[dest] = ((regfile[dest] & ea_ed) if opcode==op["and"] else (regfile[dest] | ea_ed))& 0xFFFF
elif opcode == op["xor"]:
regfile[dest] = (regfile[dest] ^ ea_ed) & 0xFFFF
elif opcode in (op["ror"],op["asr"],op["lsr"]):
(c, regfile[dest]) = (ea_ed & 0x1, ( ((c<<15) if opcode==op["ror"] else (ea_ed&0x8000 if opcode==op["asr"] else 0)) | ((ea_ed&0xFFFF) >> 1)))
elif opcode in (op["add"], op["adc"], op["inc"]) :
res = (regfile[dest] + ea_ed + (c if opcode==op["adc"] else 0)) & 0x1FFFF
(c, regfile[dest]) = ( (res>>16) & 1, res & 0xFFFF)
elif opcode in (op["mov"], op["ld"], op["not"], op["in"], op["pop"]):
(regfile[source],regfile[dest]) = (regfile[source] if opcode !=op["pop"] else ((regfile[source]+operand)&0xFFFF), (~ea_ed if opcode==op["not"] else ea_ed) & 0xFFFF)
if opcode in (op["ld"],op["in"],op["pop"]):
print_memory_access( "IN" if opcode==op["in"] else "LOAD" , eff_addr, ea_ed)
elif opcode in (op["sub"], op["sbc"], op["cmp"], op["cmpc"], op["dec"]) :
res = (regfile[dest] + ((~ea_ed)&0xFFFF) + (c if (opcode in (op["cmpc"],op["sbc"])) else 1)) & 0x1FFFF
dest = 0 if opcode in( op["cmp"], op["cmpc"]) else dest # retarget r0 with result of comparison
(c, regfile[dest]) = ( (res>>16) & 1, res & 0xFFFF)
elif opcode == op["bswp"]:
regfile[dest] = (((ea_ed&0xFF00)>>8)|((ea_ed&0x00FF)<<8)) & 0xFFFF
elif opcode == op["jsr"]:
(preserve_flag,regfile[dest],regfile[pcreg]) = (True,regfile[pcreg],ea_ed)
elif opcode == op["putpsr"]:
(preserve_flag, flag_save, interrupt) = (True, ((ea_ed&0xF0)>>4,(ea_ed&0x8)>>3,(ea_ed&0x4)>>2,(ea_ed&0x2)>>1,(ea_ed)&1), (ea_ed&0xF0)!=0)
elif opcode == op["getpsr"]:
regfile[dest] = ((swiid&0xF)<<4) | (ei<<3) | (s<<2) | (c<<1) | z
elif opcode in (op["sto"],op["push"]):
(regfile[source],preserve_flag,wordmem[ea_ed]) = (ea_ed if opcode==op["push"] else regfile[source], True,regfile[dest])
print_memory_access("STORE",ea_ed,regfile[dest])<๏ฝfimโhole๏ฝ> elif opcode == op["out"]:
(preserve_flag,iomem[ea_ed], ch) = (True, regfile[dest], '%s' % chr(regfile[dest]) if ( 0x1F < regfile[dest] < 0x7F) else '.')
print_memory_access("OUT",ea_ed,regfile[dest])
(swiid,ei,s,c,z) = flag_save if (preserve_flag or dest==0xF ) else (swiid,ei, (regfile[dest]>>15) & 1, c, 1 if (regfile[dest]==0) else 0)
if len(sys.argv) > 2: # Dump memory for inspection if required
with open(sys.argv[2],"w" ) as f:
f.write( '\n'.join([''.join("%04x " % d for d in wordmem[j:j+16]) for j in [i for i in range(0,len(wordmem),16)]]))<๏ฝfimโend๏ฝ> | |
<|file_name|>0005_auto_20150420_1747.py<|end_file_name|><｜fim▁begin｜># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('travelogue', '0004_auto_20150418_1655'),
]
operations = [
migrations.AddField(
model_name='tripnote',
name='date_taken',
field=models.DateTimeField(verbose_name='date note captured by user on the field', null=True, editable=False, blank=True),
preserve_default=True,
),
migrations.AddField(<๏ฝfimโhole๏ฝ> field=models.PositiveIntegerField(default=0, verbose_name='view count', editable=False),
preserve_default=True,
),
]<๏ฝfimโend๏ฝ> | model_name='tripnote',
name='view_count', |
<|file_name|>http.go<|end_file_name|><｜fim▁begin｜>/*
* Copyright 2016 Robin Engel
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package commons
import "net/http"
func HttpNoContent(w http.ResponseWriter) {
HttpError(w, http.StatusNoContent)
}
func HttpUnauthorized(w http.ResponseWriter) {<๏ฝfimโhole๏ฝ> HttpError(w, http.StatusUnauthorized)
}
func HttpBadRequest(w http.ResponseWriter) {
HttpError(w, http.StatusBadRequest)
}
func HttpError(w http.ResponseWriter, code int) {
http.Error(w, http.StatusText(code), code)
}
func HttpCheckError(err error, status int, w http.ResponseWriter) {
if err != nil {
HttpError(w, status)
}
}<๏ฝfimโend๏ฝ> | |
<|file_name|>RoleRepository.java<|end_file_name|><｜fim▁begin｜>package de.chandre.admintool.security.dbuser.repo;
import java.util.List;
import java.util.Set;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.stereotype.Repository;
import de.chandre.admintool.security.dbuser.domain.ATRole;
/**
*
* @author Andrรฉ
* @since 1.1.7
*/
@Repository
public interface RoleRepository extends JpaRepository<ATRole, String> {
ATRole findByName(String name);
<๏ฝfimโhole๏ฝ> List<String> findAllRoleNames();
List<ATRole> findByNameIn(Set<String> ids);
List<ATRole> findByIdIn(Set<String> ids);
void deleteByName(String name);
}<๏ฝfimโend๏ฝ> | @Query("SELECT r.name FROM ATRole r")
|
<|file_name|>fake_driver.go<|end_file_name|><｜fim▁begin｜>// Code generated by counterfeiter. DO NOT EDIT.
package fakesqldriverfakes
import (
"database/sql/driver"
"sync"
"code.cloudfoundry.org/bbs/db/sqldb/fakesqldriver"
)
type FakeDriver struct {
OpenStub func(string) (driver.Conn, error)
openMutex sync.RWMutex
openArgsForCall []struct {
arg1 string
}
openReturns struct {
result1 driver.Conn
result2 error
}
openReturnsOnCall map[int]struct {
result1 driver.Conn
result2 error
}
invocations map[string][][]interface{}
invocationsMutex sync.RWMutex
}
func (fake *FakeDriver) Open(arg1 string) (driver.Conn, error) {
fake.openMutex.Lock()
ret, specificReturn := fake.openReturnsOnCall[len(fake.openArgsForCall)]
fake.openArgsForCall = append(fake.openArgsForCall, struct {
arg1 string
}{arg1})
stub := fake.OpenStub
fakeReturns := fake.openReturns
fake.recordInvocation("Open", []interface{}{arg1})
fake.openMutex.Unlock()
if stub != nil {
return stub(arg1)
}
if specificReturn {
return ret.result1, ret.result2
}
return fakeReturns.result1, fakeReturns.result2
}
func (fake *FakeDriver) OpenCallCount() int {
fake.openMutex.RLock()
defer fake.openMutex.RUnlock()
return len(fake.openArgsForCall)
}
func (fake *FakeDriver) OpenCalls(stub func(string) (driver.Conn, error)) {
fake.openMutex.Lock()
defer fake.openMutex.Unlock()
fake.OpenStub = stub
}
func (fake *FakeDriver) OpenArgsForCall(i int) string {
fake.openMutex.RLock()
defer fake.openMutex.RUnlock()
argsForCall := fake.openArgsForCall[i]
return argsForCall.arg1
}
func (fake *FakeDriver) OpenReturns(result1 driver.Conn, result2 error) {
fake.openMutex.Lock()
defer fake.openMutex.Unlock()
fake.OpenStub = nil
fake.openReturns = struct {
result1 driver.Conn
result2 error
}{result1, result2}
}
func (fake *FakeDriver) OpenReturnsOnCall(i int, result1 driver.Conn, result2 error) {
fake.openMutex.Lock()
defer fake.openMutex.Unlock()
fake.OpenStub = nil
if fake.openReturnsOnCall == nil {
fake.openReturnsOnCall = make(map[int]struct {
result1 driver.Conn<๏ฝfimโhole๏ฝ> result1 driver.Conn
result2 error
}{result1, result2}
}
func (fake *FakeDriver) Invocations() map[string][][]interface{} {
fake.invocationsMutex.RLock()
defer fake.invocationsMutex.RUnlock()
fake.openMutex.RLock()
defer fake.openMutex.RUnlock()
copiedInvocations := map[string][][]interface{}{}
for key, value := range fake.invocations {
copiedInvocations[key] = value
}
return copiedInvocations
}
func (fake *FakeDriver) recordInvocation(key string, args []interface{}) {
fake.invocationsMutex.Lock()
defer fake.invocationsMutex.Unlock()
if fake.invocations == nil {
fake.invocations = map[string][][]interface{}{}
}
if fake.invocations[key] == nil {
fake.invocations[key] = [][]interface{}{}
}
fake.invocations[key] = append(fake.invocations[key], args)
}
var _ fakesqldriver.Driver = new(FakeDriver)<๏ฝfimโend๏ฝ> | result2 error
})
}
fake.openReturnsOnCall[i] = struct { |
<|file_name|>adapter.js<|end_file_name|><｜fim▁begin｜>/**
* Copyright (c) 2014-2015, CKSource - Frederico Knabben. All rights reserved.
* Licensed under the terms of the MIT License (see LICENSE.md).
*/
( function( QUnit, bender ) {
var total = 0,
failed = 0,
passed = 0,
ignored = 0,
errors = 0,
result = {
success: true,
errors: []
};
// prevent QUnit from starting
QUnit.config.autostart = false;
bender.removeListener( window, 'load', QUnit.load );
function start() {
QUnit.testStart( function() {
total++;
} );
QUnit.testDone( function( details ) {
details.success = result.success;
details.error = result.errors.length ? result.errors.join( '\n' ) : undefined;
details.duration = details.runtime;
details.fullName = details.module + ' ' + details.name;
bender.result( details );
if ( details.success ) {
if ( details.ignored ) {
ignored++;
} else {
passed++;
}
} else {
failed++;
errors++;
}
result.success = true;
result.errors = [];
} );
QUnit.done( function( details ) {
details.duration = details.runtime;
bender.next( {
coverage: window.__coverage__,
duration: details.runtime,
passed: passed,
failed: failed,
errors: errors,
ignored: ignored,
total: total
} );
} );
QUnit.log( function( details ) {
// add detailed error message to test result
if ( !details.result ) {
result.success = false;
result.errors.push( [
details.message,
'Expected: ' + details.expected,
'Actual: ' + details.actual,
details.source
].join( '\n' ) );
}
} );
// manually start the runner
QUnit.load();
QUnit.start();
}
function stopRunner() {
QUnit.stop();
}
function isSingle( name ) {
return name === decodeURIComponent( window.location.hash.substr( 1 ) );
}
var oldTest = QUnit.test;
QUnit.test = function( name ) {
var module = this.config.currentModule,<๏ฝfimโhole๏ฝ>
if ( window.location.hash && window.location.hash !== '#child' && !isSingle( fullName ) ) {
return;
}
oldTest.apply( this, arguments );
};
window.assert = bender.assert = QUnit.assert;
bender.runner = QUnit;
bender.start = start;
bender.stopRunner = stopRunner;
} )( window.QUnit || {}, bender );<๏ฝfimโend๏ฝ> | fullName = module ? module + ' ' + name : name; |
<|file_name|>login.py<|end_file_name|><｜fim▁begin｜>"""
LICENCE
-------
Copyright 2013 by Kitware, Inc. All Rights Reserved. Please refer to
KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
"""
import flask
from functools import wraps
from flask import Blueprint, redirect, render_template, request, session, flash, url_for, current_app
#from flask_openid import OpenID
#from flask_oauth import OAuth
mod = Blueprint('login', __name__)
#oid = OpenID()
# Load users
import os
thispath = os.path.dirname(os.path.abspath(__file__))
from WebUI import app
fin = open(os.path.join(app.config['ETC_DIR'], 'users.json'))
import json
USERS = json.loads(fin.read())
# Decorator for urls that require login
def login_required(f):
"""Checks whether user is logged in or redirects to login"""
@wraps(f)
def decorator(*args, **kwargs):
if not 'user' in flask.session:
flask.flash("Login required !", "error")
return flask.redirect(url_for("login.login") + "?next=" + flask.request.url)
else:
return f(*args, **kwargs)
return decorator
# Decorator for urls that require specific role
def role_required(role):
"""Checks whether user is logged in or redirects to login"""
def decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if not 'user' in flask.session:
flask.flash("this Login required !", "error")
return flask.redirect(url_for("login.login") + "?next=" + flask.request.url)
else:
if 'roles' in flask.session["user"]:
if role in flask.session["user"]["roles"]:
# flask.flash("Found access for \"" + role + "\" group :(", "success")
return f(*args, **kwargs)
flask.flash("Access restricted only to login group \"" + role + "\" group :(", "error")
return flask.redirect(url_for("home"))
return decorated_function
return decorator
@mod.route('/login', methods=["get"])
def login():
return render_template("login.html", next=flask.request.args.get("next","/home"))
@mod.route('/login.passwd', methods=['post'])
def login_passwd():
# Try to find the user
userid = request.form["login"]
app = flask.current_app
if userid in USERS:
# Load user
user = USERS[userid]<๏ฝfimโhole๏ฝ> flash('Authentication Error for: ' + userid, "error")
return redirect('/login')
flask.flash("Loading user: "+userid, "success")
return do_user_login(user, next=flask.request.form["next"])
else:
flash('Unknown user: ' + request.form['login'], "error")
return redirect('/login')
def do_user_login(user, next="/home"):
session['user'] = {
'fullname': user["fullname"],
'roles' : user["roles"],
}
flash('Successfully logged in user: ' + user["fullname"], 'success')
return redirect(next)
@mod.route('/logout', methods=['GET', 'POST'])
def logout():
"""Does the login via OpenID. Has to call into `oid.try_login`
to start the OpenID machinery.
"""
# if we are already logged in, go back to were we came from
flask.g.logged_in = False
session.clear()
return redirect(url_for('home'))<๏ฝfimโend๏ฝ> |
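Typical usage of the two decorators defined earlier in this file, sketched as a hypothetical route (the endpoint and role name are invented):
@mod.route('/admin')
@login_required
@role_required('admin')
def admin_dashboard():
    return render_template('admin.html')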
if user["passwd"] != request.form['passwd']: |
<|file_name|>tests.py<|end_file_name|><｜fim▁begin｜>from allauth.socialaccount.tests import OAuth2TestsMixin
from allauth.tests import MockedResponse, TestCase
from .provider import MicrosoftGraphProvider
<๏ฝfimโhole๏ฝ> provider_id = MicrosoftGraphProvider.id
def get_mocked_response(self):
response_data = """
{
"@odata.context": "https://graph.microsoft.com/v1.0/$metadata#users/$entity",
"id": "16f5a7b6-5a15-4568-aa5a-31bb117e9967",
"businessPhones": [],
"displayName": "Anne Weiler",
"givenName": "Anne",
"jobTitle": "Manufacturing Lead",
"mail": "[email protected]",
"mobilePhone": "+1 3528700812",
"officeLocation": null,
"preferredLanguage": "en-US",
"surname": "Weiler",
"userPrincipalName": "[email protected]"
}
""" # noqa
return MockedResponse(200, response_data)<๏ฝfimโend๏ฝ> | class MicrosoftGraphTests(OAuth2TestsMixin, TestCase): |
<|file_name|>const-err-multi.rs<|end_file_name|><｜fim▁begin｜>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![deny(const_err)]<๏ฝfimโhole๏ฝ>pub const A: i8 = -std::i8::MIN;
//~^ ERROR const_err
pub const B: i8 = A;
//~^ ERROR const_err
pub const C: u8 = A as u8;
//~^ ERROR const_err
pub const D: i8 = 50 - A;
//~^ ERROR const_err
fn main() {
let _ = (A, B, C, D);
}<๏ฝfimโend๏ฝ> | |
<|file_name|>fakevtworkerclient.go<|end_file_name|><｜fim▁begin｜>/*
Copyright 2017 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
<๏ฝfimโhole๏ฝ>WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package fakevtworkerclient contains a fake for the vtworkerclient interface.
package fakevtworkerclient
import (
"time"
"golang.org/x/net/context"
"github.com/youtube/vitess/go/vt/logutil"
"github.com/youtube/vitess/go/vt/vtctl/fakevtctlclient"
"github.com/youtube/vitess/go/vt/worker/vtworkerclient"
)
// FakeVtworkerClient is a fake which implements the vtworkerclient interface.
// The fake can be used to return a specific result for a given command.
// If the command is not registered, an error will be thrown.
type FakeVtworkerClient struct {
*fakevtctlclient.FakeLoggerEventStreamingClient
}
// NewFakeVtworkerClient creates a FakeVtworkerClient struct.
func NewFakeVtworkerClient() *FakeVtworkerClient {
return &FakeVtworkerClient{fakevtctlclient.NewFakeLoggerEventStreamingClient()}
}
// FakeVtworkerClientFactory returns the current instance and stores the
// dialed server address in an outer struct.
func (f *FakeVtworkerClient) FakeVtworkerClientFactory(addr string, dialTimeout time.Duration) (vtworkerclient.Client, error) {
return &perAddrFakeVtworkerClient{f, addr}, nil
}
// perAddrFakeVtworkerClient is a client instance which captures the server
// address which was dialed by the client.
type perAddrFakeVtworkerClient struct {
*FakeVtworkerClient
addr string
}
// ExecuteVtworkerCommand is part of the vtworkerclient interface.
func (c *perAddrFakeVtworkerClient) ExecuteVtworkerCommand(ctx context.Context, args []string) (logutil.EventStream, error) {
return c.FakeLoggerEventStreamingClient.StreamResult(c.addr, args)
}
// Close is part of the vtworkerclient interface.
func (c *perAddrFakeVtworkerClient) Close() {}<๏ฝfimโend๏ฝ> | Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, |
<|file_name|>monopolygon.cpp<|end_file_name|><๏ฝfimโbegin๏ฝ>#include "test2D.hpp"<๏ฝfimโhole๏ฝ>
namespace boom {
namespace test2d {
MonoPolygon::MonoPolygon(DistV&& dL, DistV&& dR, const Vec2& ori, const Vec2& dir, float wofs):
_vOrigin(ori),
_vDir(dir),
_distL(std::move(dL)),
_distR(std::move(dR)),
_widthOffset(wofs)
{}
MonoPolygon MonoPolygon::Random(const FRandF& rff, const FRandI& rfi, const spn::RangeF& rV, const spn::RangeF& rLen, int nV) {
nV = std::max(3, nV);
		// Randomly pick the sweep direction
auto dir = Vec2::RandomDir(rff);
		// Origin coordinates
auto origin = Vec2::Random(rff, rV);
		// Length
float length = rff(rLen);
int nLeft = rfi({1, nV-2}),
nRight = rfi({0, nV-2-nLeft});
auto fnMakeLengthList = [&rff](const int n, const float len) {
std::vector<Vec2> distL(n+1);
float sumL = 0;
for(auto& d : distL) {
float num = rff({1e-2f, 1e1f});
sumL += num;
d.x = num;
d.y = rff({0, 1e1f});
}
distL.back().y = 0;
for(auto& d : distL) {
d.x /= sumL;
d.x *= len;
}
return distL;
};
float width_offset = rff(rLen * 1e-1f);
auto distL = fnMakeLengthList(nLeft, length),
distR = fnMakeLengthList(nRight, length);
return MonoPolygon(std::move(distL),
std::move(distR),
origin,
dir,
width_offset);
}
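	// Builds the polygon outline: the start point, the left-side vertices,
	// the end point, then the right-side vertices walked back toward the origin.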
geo2d::PointL MonoPolygon::getPoints() const {
int nLeft = _distL.size()-1,
nRight = _distR.size()-1;
geo2d::PointL pts(2 + nLeft + nRight);
auto* ptr = pts.data();
spn::Vec2 dir90{-_vDir.y, _vDir.x};
		// Add the start point
*ptr++ = _vOrigin + dir90*_widthOffset;
float cur = 0;
		// A random number of vertices on the left side (at least 1)
for(int i=0 ; i<nLeft ; i++) {
auto& dist = _distL[i];
cur += dist.x;
*ptr++ = _vOrigin + _vDir*cur + dir90*(dist.y + _widthOffset);
}
cur += _distL.back().x;
		// Place a vertex at the end point
*ptr++ = _vOrigin + _vDir*cur + dir90*_widthOffset;
		// A random number of vertices on the right side
cur -= _distR.back().x;
for(int i=nRight-1 ; i>=0 ; i--) {
auto& dist = _distR[i];
*ptr++ = _vOrigin + _vDir*cur - dir90*(dist.y - _widthOffset);
cur -= dist.x;
}
Assert(Trap, pts.data()+pts.size() == ptr)
return pts;
}
const spn::Vec2& MonoPolygon::getDir() const {
return _vDir;
}
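	// Hit test specialized for a monotone polygon: project the point onto the
	// sweep direction, then compare its lateral offset against the left/right
	// boundary widths interpolated at that depth.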
bool MonoPolygon::hit(const spn::Vec2& p, float threshold) const {
Vec2 dir90(-_vDir.y, _vDir.x);
auto toP = p - (_vOrigin + dir90*_widthOffset);
float d_vert = _vDir.dot(toP),
d_horz = std::sqrt(toP.len_sq() - spn::Square(d_vert));
if(d_vert < 0)
return false;
auto fnGetHDist = [](const auto& distV, float d_vert, float invalid){
int nL = distV.size();
float cur_y = 0;
for(int i=0 ; i<nL ; i++) {
auto &dist = distV[i];
if(d_vert <= dist.x)
return spn::Lerp(cur_y, dist.y, d_vert / dist.x);
d_vert -= dist.x;
cur_y = dist.y;
}
return invalid;
};
if(dir90.dot(toP) < 0)
d_horz *= -1;
float dL = fnGetHDist(_distL, d_vert, -1.f),
dR = fnGetHDist(_distR, d_vert, -1.f);
return spn::IsInRange(d_horz, -dR-threshold, dL+threshold);
}
}
}<๏ฝfimโend๏ฝ> | |
<|file_name|>slide.js<|end_file_name|><๏ฝfimโbegin๏ฝ>$('section').horizon();
$(".menu-list").find("li").each(function(){<๏ฝfimโhole๏ฝ>
})<๏ฝfimโend๏ฝ> | var menuIndex = $(this).index()+1;
$(this).click(function(){
$(document).stop().horizon('scrollTo', menuIndex);
}) |
<|file_name|>Attr.java<|end_file_name|><๏ฝfimโbegin๏ฝ>/*
* Attr.java
* Copyright (c) Radim Kocman
*/
package org.fit.cssbox.jsdombox.global.core;
<๏ฝfimโhole๏ฝ>
/**
* DOM Interface Attr Adapter
*
* @author Radim Kocman
*/
public class Attr extends Node
{
protected org.w3c.dom.Attr source;
public Attr(org.w3c.dom.Attr source, JSAdapterFactory jsaf)
{
super(source, jsaf);
this.source = source;
}
// DOM Level 1 Implementation
public String getName()
{
return source.getName().toLowerCase();
}
public boolean getSpecified()
{
return source.getSpecified();
}
public String getValue()
{
return source.getValue();
}
public void setValue(String value)
{
source.setValue(value);
jsaf.cssEvent.recomputeStyles(source);
}
// DOM Level 2 Implementation
public JSAdapter getOwnerElement()
{
Object result = source.getOwnerElement();
return jsaf.create(result, JSAdapterType.ELEMENT);
}
}<๏ฝfimโend๏ฝ> | import org.fit.cssbox.jsdombox.global.misc.JSAdapterFactory;
import org.fit.cssbox.jsdombox.global.misc.JSAdapter;
import org.fit.cssbox.jsdombox.global.misc.JSAdapterType;
|
<|file_name|>Parameterized.java<|end_file_name|><๏ฝfimโbegin๏ฝ>package org.junit.runners.fix.v411;
import java.lang.annotation.Annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Field;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.runner.Runner;
import org.junit.runner.notification.RunNotifier;
import org.junit.runners.BlockJUnit4ClassRunner;
import org.junit.runners.Suite;
import org.junit.runners.model.FrameworkField;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.InitializationError;
import org.junit.runners.model.Statement;
/**
* <p>
* The custom runner <code>Parameterized</code> implements parameterized tests.
* When running a parameterized test class, instances are created for the
* cross-product of the test methods and the test data elements.
* </p>
*
* For example, to test a Fibonacci function, write:
*
* <pre>
* @RunWith(Parameterized.class)
* public class FibonacciTest {
* @Parameters(name= "{index}: fib({0})={1}")
* public static Iterable<Object[]> data() {
* return Arrays.asList(new Object[][] { { 0, 0 }, { 1, 1 }, { 2, 1 },
* { 3, 2 }, { 4, 3 }, { 5, 5 }, { 6, 8 } });
* }
*
* private int fInput;
*
* private int fExpected;
*
* public FibonacciTest(int input, int expected) {
* fInput= input;
* fExpected= expected;
* }
*
* @Test
* public void test() {
* assertEquals(fExpected, Fibonacci.compute(fInput));
* }
* }
* </pre>
*
* <p>
* Each instance of <code>FibonacciTest</code> will be constructed using the
* two-argument constructor and the data values in the
* <code>@Parameters</code> method.
*
* <p>
* In order that you can easily identify the individual tests, you may provide a
* name for the <code>@Parameters</code> annotation. This name is allowed
* to contain placeholders, which are replaced at runtime. The placeholders are
* <dl>
* <dt>{index}</dt>
* <dd>the current parameter index</dd>
* <dt>{0}</dt>
* <dd>the first parameter value</dd>
* <dt>{1}</dt>
* <dd>the second parameter value</dd>
* <dt>...</dt>
* <dd></dd>
* </dl>
* In the example given above, the <code>Parameterized</code> runner creates
* names like <code>[1: fib(3)=2]</code>. If you don't use the name parameter,
* then the current parameter index is used as name.
* </p>
*
* You can also write:
*
* <pre>
* @RunWith(Parameterized.class)
* public class FibonacciTest {
* @Parameters
* public static Iterable<Object[]> data() {
* return Arrays.asList(new Object[][] { { 0, 0 }, { 1, 1 }, { 2, 1 },
* { 3, 2 }, { 4, 3 }, { 5, 5 }, { 6, 8 } });
* }
* @Parameter(0)
* public int fInput;
*
* @Parameter(1)
* public int fExpected;
*
* @Test
* public void test() {
* assertEquals(fExpected, Fibonacci.compute(fInput));
* }
* }
* </pre>
*
* <p>
* Each instance of <code>FibonacciTest</code> will be constructed with the default constructor
* and fields annotated by <code>@Parameter</code> will be initialized
* with the data values in the <code>@Parameters</code> method.
* </p>
*
* @since 4.0
*/
public class Parameterized extends Suite {
/**
* Annotation for a method which provides parameters to be injected into the
* test class constructor by <code>Parameterized</code>
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public static @interface Parameters {
/**
* <p>
* Optional pattern to derive the test's name from the parameters. Use
* numbers in braces to refer to the parameters or the additional data
* as follows:
* </p>
*
* <pre>
* {index} - the current parameter index
* {0} - the first parameter value
* {1} - the second parameter value
* etc...
* </pre>
* <p>
* Default value is "{index}" for compatibility with previous JUnit
* versions.
* </p>
*
* @return {@link MessageFormat} pattern string, except the index
* placeholder.
* @see MessageFormat
*/
String name() default "{index}";
}
/**
* Annotation for fields of the test class which will be initialized by the
* method annotated by <code>Parameters</code><br/>
     * By using this annotation directly, the test class constructor isn't needed.<br/>
* Index range must start at 0.
* Default value is 0.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public static @interface Parameter {
/**
* Method that returns the index of the parameter in the array
* returned by the method annotated by <code>Parameters</code>.<br/>
* Index range must start at 0.
* Default value is 0.
*
* @return the index of the parameter.
*/
int value() default 0;
}
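    // Control characters that would garble a generated test name are replaced
    // by their escaped textual form (e.g. a real newline becomes the text "\n").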
protected static final Map<Character, String> ESCAPE_SEQUENCES = new HashMap<Character, String>();
static {
ESCAPE_SEQUENCES.put('\0', "\\\\0");
ESCAPE_SEQUENCES.put('\t', "\\\\t");
ESCAPE_SEQUENCES.put('\b', "\\\\b");
ESCAPE_SEQUENCES.put('\n', "\\\\n");
ESCAPE_SEQUENCES.put('\r', "\\\\r");
ESCAPE_SEQUENCES.put('\f', "\\\\f");
}
private class TestClassRunnerForParameters extends BlockJUnit4ClassRunner {
private final Object[] fParameters;
private final String fName;
TestClassRunnerForParameters(final Class<?> type, final Object[] parameters, final String name) throws InitializationError {
super(type);
this.fParameters = parameters;
this.fName = name;
}
@Override
public Object createTest() throws Exception {
if (fieldsAreAnnotated()) {
return createTestUsingFieldInjection();
} else {
return createTestUsingConstructorInjection();
}
}
private Object createTestUsingConstructorInjection() throws Exception {
return getTestClass().getOnlyConstructor().newInstance(this.fParameters);
}
private Object createTestUsingFieldInjection() throws Exception {
List<FrameworkField> annotatedFieldsByParameter = getAnnotatedFieldsByParameter();
if (annotatedFieldsByParameter.size() != this.fParameters.length) {
throw new Exception(
"Wrong number of parameters and @Parameter fields." + " @Parameter fields counted: " + annotatedFieldsByParameter.size() + ", available parameters: " + this.fParameters.length + ".");
}
Object testClassInstance = getTestClass().getJavaClass().newInstance();
for (FrameworkField each : annotatedFieldsByParameter) {
Field field = each.getField();
Parameter annotation = field.getAnnotation(Parameter.class);
int index = annotation.value();
try {
field.set(testClassInstance, this.fParameters[index]);
} catch (IllegalArgumentException iare) {
throw new Exception(
getTestClass().getName() + ": Trying to set " + field.getName() + " with the value " + this.fParameters[index] + " that is not the right type (" + this.fParameters[index].getClass()
.getSimpleName() + " instead of " + field.getType().getSimpleName() + ").", iare);
}
}
return testClassInstance;
}
@Override
protected String getName() {
return this.fName;
}
@Override
protected String testName(final FrameworkMethod method) {
return method.getName() + getName();
}
@Override
protected void validateConstructor(final List<Throwable> errors) {
validateOnlyOneConstructor(errors);
if (fieldsAreAnnotated()) {
validateZeroArgConstructor(errors);
}
}
@Override
protected void validateFields(final List<Throwable> errors) {
super.validateFields(errors);
if (fieldsAreAnnotated()) {
List<FrameworkField> annotatedFieldsByParameter = getAnnotatedFieldsByParameter();
int[] usedIndices = new int[annotatedFieldsByParameter.size()];
for (FrameworkField each : annotatedFieldsByParameter) {
int index = each.getField().getAnnotation(Parameter.class).value();
if ((index < 0) || (index > (annotatedFieldsByParameter.size() - 1))) {
errors.add(new Exception(
"Invalid @Parameter value: " + index + ". @Parameter fields counted: " + annotatedFieldsByParameter.size() + ". Please use an index between 0 and " + (annotatedFieldsByParameter.size() - 1) + "."));
} else {
usedIndices[index]++;
}
}
for (int index = 0; index < usedIndices.length; index++) {
int numberOfUse = usedIndices[index];
if (numberOfUse == 0) {
errors.add(new Exception("@Parameter(" + index + ") is never used."));
} else if (numberOfUse > 1) {
errors.add(new Exception("@Parameter(" + index + ") is used more than once (" + numberOfUse + ")."));
}
}
}
}
@Override
protected Statement classBlock(final RunNotifier notifier) {
return childrenInvoker(notifier);
}
@Override
protected Annotation[] getRunnerAnnotations() {
return new Annotation[0];
}
}
private static final List<Runner> NO_RUNNERS = Collections.<Runner> emptyList();
private final ArrayList<Runner> runners = new ArrayList<Runner>();
/**
* Only called reflectively. Do not use programmatically.
*/
public Parameterized(final Class<?> klass) throws Throwable {
super(klass, NO_RUNNERS);
Parameters parameters = getParametersMethod().getAnnotation(Parameters.class);
createRunnersForParameters(allParameters(), parameters.name());
}
@Override
protected List<Runner> getChildren() {
return this.runners;
}
@SuppressWarnings("unchecked")
private Iterable<Object[]> allParameters() throws Throwable {
Object parameters = getParametersMethod().invokeExplosively(null);
if (parameters instanceof Iterable) {
return (Iterable<Object[]>) parameters;
} else {
throw parametersMethodReturnedWrongType();
}
}
private FrameworkMethod getParametersMethod() throws Exception {<๏ฝfimโhole๏ฝ> if (each.isStatic() && each.isPublic()) {
return each;
}
}
throw new Exception("No public static parameters method on class " + getTestClass().getName());
}
private void createRunnersForParameters(final Iterable<Object[]> allParameters, final String namePattern) throws InitializationError, Exception {
try {
int i = 0;
for (Object[] parametersOfSingleTest : allParameters) {
String name = nameFor(namePattern, i, parametersOfSingleTest);
TestClassRunnerForParameters runner = new TestClassRunnerForParameters(getTestClass().getJavaClass(), parametersOfSingleTest, name);
this.runners.add(runner);
++i;
}
} catch (ClassCastException e) {
throw parametersMethodReturnedWrongType();
}
}
private String nameFor(final String namePattern, final int index, final Object[] parameters) {
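        // Replace the {index} placeholder first, then let MessageFormat fill in
        // the positional placeholders {0}, {1}, ... from the parameter values.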
String finalPattern = namePattern.replaceAll("\\{index\\}", Integer.toString(index));
String name = MessageFormat.format(finalPattern, parameters);
return "[" + sanitizeEscapeSequencesWithName(name) + "]";
}
private String sanitizeEscapeSequencesWithName(final String name) {
String result = name;
for (Map.Entry<Character, String> currentSequence : ESCAPE_SEQUENCES.entrySet()) {
result = result.replaceAll("" + currentSequence.getKey(), currentSequence.getValue());
}
return result;
}
private Exception parametersMethodReturnedWrongType() throws Exception {
String className = getTestClass().getName();
String methodName = getParametersMethod().getName();
String message = MessageFormat.format("{0}.{1}() must return an Iterable of arrays.", className, methodName);
return new Exception(message);
}
private List<FrameworkField> getAnnotatedFieldsByParameter() {
return getTestClass().getAnnotatedFields(Parameter.class);
}
private boolean fieldsAreAnnotated() {
return !getAnnotatedFieldsByParameter().isEmpty();
}
}<๏ฝfimโend๏ฝ> | List<FrameworkMethod> methods = getTestClass().getAnnotatedMethods(Parameters.class);
for (FrameworkMethod each : methods) { |
<|file_name|>WpApiParser.py<|end_file_name|><๏ฝfimโbegin๏ฝ>from cse.util import Util
from collections import OrderedDict
from cse.pipeline import Handler
class WpApiParser(Handler):
def __init__(self):
        super().__init__()
def parse(self, comments, url, assetId, parentId):
data = self.__buildDataSkeleton(url, assetId)
data["comments"] = self.__iterateComments(comments, parentId)
return data
def __buildDataSkeleton(self, url, assetId):
return {
"article_url" : url,
"article_id" : assetId,
"comments" : None
}
def __iterateComments(self, comments, parentId=None):
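        """Flatten the nested comment tree into an OrderedDict keyed by comment id."""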
commentList = OrderedDict()
for comment in comments:
votes = 0
for action_summary in comment["action_summaries"]:
if action_summary["__typename"] == "LikeActionSummary":<๏ฝfimโhole๏ฝ> commentObject = {
"comment_author": comment["user"]["username"],
"comment_text" : comment["body"],
"timestamp" : comment["created_at"],
"parent_comment_id" : parentId,
"upvotes" : votes,
"downvotes": 0
}
commentList[comment["id"]] = commentObject
try:
commentReplies = self.__iterateComments(comment["replies"]["nodes"], comment["id"])
except KeyError: # There may be a limit of the nesting level of comments on wp
commentReplies = {}
commentList.update(commentReplies)
return commentList
# inherited from cse.pipeline.Handler
def registeredAt(self, ctx):
pass
def process(self, ctx, data):
result = self.parse(
comments=data["comments"],
url=data["url"],
assetId=data["assetId"],
parentId=data["parentId"]
)
ctx.write(result)<๏ฝfimโend๏ฝ> | votes = action_summary["count"]
|
<|file_name|>http.py<|end_file_name|><๏ฝfimโbegin๏ฝ># -*- coding: utf-8 -*-
import logging
import odoo
from odoo.tools.func import lazy_property
<๏ฝfimโhole๏ฝ>
class RootTkobr(odoo.http.Root):
@lazy_property
def session_store(self):
# Setup http sessions
_logger.debug('HTTP sessions stored in Postgres')
return PostgresSessionStore(session_class=odoo.http.OpenERPSession)
root = RootTkobr()
odoo.http.root.session_store = root.session_store<๏ฝfimโend๏ฝ> | from .sessionstore import PostgresSessionStore
_logger = logging.getLogger(__name__)
|
<|file_name|>topojson.js<|end_file_name|><๏ฝfimโbegin๏ฝ>topojson = (function() {
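  // Stitches a set of arc indices into maximal connected line fragments by
  // matching arc endpoints; a negative index (~i) denotes a reversed arc.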
function merge(topology, arcs) {
var arcsByEnd = {},
fragmentByStart = {},
fragmentByEnd = {};
arcs.forEach(function(i) {
var e = ends(i);
(arcsByEnd[e[0]] || (arcsByEnd[e[0]] = [])).push(i);
(arcsByEnd[e[1]] || (arcsByEnd[e[1]] = [])).push(~i);
});
arcs.forEach(function(i) {
var e = ends(i),
start = e[0],
end = e[1],
f, g;
if (f = fragmentByEnd[start]) {
delete fragmentByEnd[f.end];
f.push(i);
f.end = end;
if (g = fragmentByStart[end]) {
delete fragmentByStart[g.start];
var fg = g === f ? f : f.concat(g);
fragmentByStart[fg.start = f.start] = fragmentByEnd[fg.end = g.end] = fg;
} else if (g = fragmentByEnd[end]) {
delete fragmentByStart[g.start];
delete fragmentByEnd[g.end];
var fg = f.concat(g.map(function(i) { return ~i; }).reverse());
fragmentByStart[fg.start = f.start] = fragmentByEnd[fg.end = g.start] = fg;
} else {
fragmentByStart[f.start] = fragmentByEnd[f.end] = f;
}
} else if (f = fragmentByStart[end]) {
delete fragmentByStart[f.start];
f.unshift(i);
f.start = start;
if (g = fragmentByEnd[start]) {
delete fragmentByEnd[g.end];
var gf = g === f ? f : g.concat(f);
fragmentByStart[gf.start = g.start] = fragmentByEnd[gf.end = f.end] = gf;
} else if (g = fragmentByStart[start]) {
delete fragmentByStart[g.start];
delete fragmentByEnd[g.end];
var gf = g.map(function(i) { return ~i; }).reverse().concat(f);
fragmentByStart[gf.start = g.end] = fragmentByEnd[gf.end = f.end] = gf;
} else {
fragmentByStart[f.start] = fragmentByEnd[f.end] = f;
}
} else if (f = fragmentByStart[start]) {
delete fragmentByStart[f.start];
f.unshift(~i);
f.start = end;
if (g = fragmentByEnd[end]) {
delete fragmentByEnd[g.end];
var gf = g === f ? f : g.concat(f);
fragmentByStart[gf.start = g.start] = fragmentByEnd[gf.end = f.end] = gf;
} else if (g = fragmentByStart[end]) {
delete fragmentByStart[g.start];
delete fragmentByEnd[g.end];
var gf = g.map(function(i) { return ~i; }).reverse().concat(f);
fragmentByStart[gf.start = g.end] = fragmentByEnd[gf.end = f.end] = gf;
} else {
fragmentByStart[f.start] = fragmentByEnd[f.end] = f;
}
} else if (f = fragmentByEnd[end]) {
delete fragmentByEnd[f.end];
f.push(~i);
f.end = start;
if (g = fragmentByEnd[start]) {
delete fragmentByStart[g.start];
var fg = g === f ? f : f.concat(g);
fragmentByStart[fg.start = f.start] = fragmentByEnd[fg.end = g.end] = fg;
} else if (g = fragmentByStart[start]) {
delete fragmentByStart[g.start];
delete fragmentByEnd[g.end];
var fg = f.concat(g.map(function(i) { return ~i; }).reverse());
fragmentByStart[fg.start = f.start] = fragmentByEnd[fg.end = g.start] = fg;
} else {
fragmentByStart[f.start] = fragmentByEnd[f.end] = f;
}
} else {
f = [i];
fragmentByStart[f.start = start] = fragmentByEnd[f.end = end] = f;
}
});
function ends(i) {
var arc = topology.arcs[i], p0 = arc[0], p1 = [0, 0];
arc.forEach(function(dp) { p1[0] += dp[0], p1[1] += dp[1]; });
return [p0, p1];
}
var fragments = [];
for (var k in fragmentByEnd) fragments.push(fragmentByEnd[k]);
return fragments;
}
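  // Builds a MultiLineString mesh from the topology's arcs; when an object and
  // filter are given, an arc is kept only if the filter approves the first and
  // last geometries sharing it.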
function mesh(topology, o, filter) {
var arcs = [];
if (arguments.length > 1) {<๏ฝfimโhole๏ฝ> var geomsByArc = [],
geom;
function arc(i) {
if (i < 0) i = ~i;
(geomsByArc[i] || (geomsByArc[i] = [])).push(geom);
}
function line(arcs) {
arcs.forEach(arc);
}
function polygon(arcs) {
arcs.forEach(line);
}
function geometry(o) {
if (o.type === "GeometryCollection") o.geometries.forEach(geometry);
else if (o.type in geometryType) {
geom = o;
geometryType[o.type](o.arcs);
}
}
var geometryType = {
LineString: line,
MultiLineString: polygon,
Polygon: polygon,
MultiPolygon: function(arcs) { arcs.forEach(polygon); }
};
geometry(o);
geomsByArc.forEach(arguments.length < 3
? function(geoms, i) { arcs.push([i]); }
: function(geoms, i) { if (filter(geoms[0], geoms[geoms.length - 1])) arcs.push([i]); });
} else {
for (var i = 0, n = topology.arcs.length; i < n; ++i) arcs.push([i]);
}
return object(topology, {type: "MultiLineString", arcs: merge(topology, arcs)});
}
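  // Decodes a quantized, delta-encoded TopoJSON geometry back into absolute
  // GeoJSON-style coordinates using the topology's scale/translate transform.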
function object(topology, o) {
var tf = topology.transform,
kx = tf.scale[0],
ky = tf.scale[1],
dx = tf.translate[0],
dy = tf.translate[1],
arcs = topology.arcs;
function arc(i, points) {
if (points.length) points.pop();
for (var a = arcs[i < 0 ? ~i : i], k = 0, n = a.length, x = 0, y = 0, p; k < n; ++k) points.push([
(x += (p = a[k])[0]) * kx + dx,
(y += p[1]) * ky + dy
]);
if (i < 0) reverse(points, n);
}
function point(coordinates) {
return [coordinates[0] * kx + dx, coordinates[1] * ky + dy];
}
function line(arcs) {
var points = [];
for (var i = 0, n = arcs.length; i < n; ++i) arc(arcs[i], points);
if (points.length < 2) points.push(points[0]);
return points;
}
function ring(arcs) {
var points = line(arcs);
while (points.length < 4) points.push(points[0]);
return points;
}
function polygon(arcs) {
return arcs.map(ring);
}
function geometry(o) {
var t = o.type, g = t === "GeometryCollection" ? {type: t, geometries: o.geometries.map(geometry)}
: t in geometryType ? {type: t, coordinates: geometryType[t](o)}
: {type: null};
if ("id" in o) g.id = o.id;
if ("properties" in o) g.properties = o.properties;
return g;
}
var geometryType = {
Point: function(o) { return point(o.coordinates); },
MultiPoint: function(o) { return o.coordinates.map(point); },
LineString: function(o) { return line(o.arcs); },
MultiLineString: function(o) { return o.arcs.map(line); },
Polygon: function(o) { return polygon(o.arcs); },
MultiPolygon: function(o) { return o.arcs.map(polygon); }
};
return geometry(o);
}
function reverse(array, n) {
var t, j = array.length, i = j - n; while (i < --j) t = array[i], array[i++] = array[j], array[j] = t;
}
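  // Binary search: returns the insertion index of x in the sorted array a.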
function bisect(a, x) {
var lo = 0, hi = a.length;
while (lo < hi) {
var mid = lo + hi >>> 1;
if (a[mid] < x) lo = mid + 1;
else hi = mid;
}
return lo;
}
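  // For each input object, computes the sorted list of indices of the other
  // objects that share at least one arc with it.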
function neighbors(objects) {
var objectsByArc = [],
neighbors = objects.map(function() { return []; });
function line(arcs, i) {
arcs.forEach(function(a) {
if (a < 0) a = ~a;
var o = objectsByArc[a] || (objectsByArc[a] = []);
if (!o[i]) o.forEach(function(j) {
var n, k;
k = bisect(n = neighbors[i], j); if (n[k] !== j) n.splice(k, 0, j);
k = bisect(n = neighbors[j], i); if (n[k] !== i) n.splice(k, 0, i);
}), o[i] = i;
});
}
function polygon(arcs, i) {
arcs.forEach(function(arc) { line(arc, i); });
}
function geometry(o, i) {
if (o.type === "GeometryCollection") o.geometries.forEach(function(o) { geometry(o, i); });
else if (o.type in geometryType) geometryType[o.type](o.arcs, i);
}
var geometryType = {
LineString: line,
MultiLineString: polygon,
Polygon: polygon,
MultiPolygon: function(arcs, i) { arcs.forEach(function(arc) { polygon(arc, i); }); }
};
objects.forEach(geometry);
return neighbors;
}
return {
version: "0.0.28",
mesh: mesh,
object: object,
neighbors: neighbors
};
})();<๏ฝfimโend๏ฝ> | |
<|file_name|>codenvy-api-builder.factory.ts<|end_file_name|><๏ฝfimโbegin๏ฝ>/*
* Copyright (c) [2015] - [2017] Red Hat, Inc.<๏ฝfimโhole๏ฝ> * are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Red Hat, Inc. - initial API and implementation
*/
'use strict';
import {CodenvyResourceBuilder} from './codenvy-resource-builder';
/**
* This class is providing the entry point for accessing the builders
* @author Florent Benoit
* @author Oleksii Orel
*/
export class CodenvyAPIBuilder {
/**
* Default constructor
* @ngInject for Dependency injection
*/
constructor () {
}
/**
* The Codenvy Resources builder
*
* @return {CodenvyResourceBuilder}
*/
getResourceBuilder() {
return new CodenvyResourceBuilder();
}
}<๏ฝfimโend๏ฝ> | * All rights reserved. This program and the accompanying materials |
<|file_name|>test_universe.py<|end_file_name|><๏ฝfimโbegin๏ฝ>import xml.etree.ElementTree as ET
import numpy as np
import openmc
import pytest
from tests.unit_tests import assert_unbounded
def test_basic():
c1 = openmc.Cell()
c2 = openmc.Cell()
c3 = openmc.Cell()
u = openmc.Universe(name='cool', cells=(c1, c2, c3))
assert u.name == 'cool'
cells = set(u.cells.values())
assert not (cells ^ {c1, c2, c3})
# Test __repr__
repr(u)
with pytest.raises(TypeError):
u.add_cell(openmc.Material())
with pytest.raises(TypeError):
u.add_cells(c1)
u.remove_cell(c3)
cells = set(u.cells.values())
assert not (cells ^ {c1, c2})
u.clear_cells()
assert not set(u.cells)
def test_bounding_box():
cyl1 = openmc.ZCylinder(r=1.0)
cyl2 = openmc.ZCylinder(r=2.0)
c1 = openmc.Cell(region=-cyl1)
c2 = openmc.Cell(region=+cyl1 & -cyl2)
<๏ฝfimโhole๏ฝ> assert ll == pytest.approx((-2., -2., -np.inf))
assert ur == pytest.approx((2., 2., np.inf))
u = openmc.Universe()
assert_unbounded(u)
def test_plot(run_in_tmpdir, sphere_model):
m = sphere_model.materials[0]
univ = sphere_model.geometry.root_universe
colors = {m: 'limegreen'}
for basis in ('xy', 'yz', 'xz'):
univ.plot(
basis=basis,
pixels=(10, 10),
color_by='material',
colors=colors,
)
def test_get_nuclides(uo2):
c = openmc.Cell(fill=uo2)
univ = openmc.Universe(cells=[c])
nucs = univ.get_nuclides()
assert nucs == ['U235', 'O16']
def test_cells():
cells = [openmc.Cell() for i in range(5)]
cells2 = [openmc.Cell() for i in range(3)]
cells[0].fill = openmc.Universe(cells=cells2)
u = openmc.Universe(cells=cells)
assert not (set(u.cells.values()) ^ set(cells))
all_cells = set(u.get_all_cells().values())
assert not (all_cells ^ set(cells + cells2))
def test_get_all_materials(cell_with_lattice):
cells, mats, univ, lattice = cell_with_lattice
test_mats = set(univ.get_all_materials().values())
assert not (test_mats ^ set(mats))
def test_get_all_universes():
c1 = openmc.Cell()
u1 = openmc.Universe(cells=[c1])
c2 = openmc.Cell()
u2 = openmc.Universe(cells=[c2])
c3 = openmc.Cell(fill=u1)
c4 = openmc.Cell(fill=u2)
u3 = openmc.Universe(cells=[c3, c4])
univs = set(u3.get_all_universes().values())
assert not (univs ^ {u1, u2})
def test_create_xml(cell_with_lattice):
cells = [openmc.Cell() for i in range(5)]
u = openmc.Universe(cells=cells)
geom = ET.Element('geom')
u.create_xml_subelement(geom)
cell_elems = geom.findall('cell')
assert len(cell_elems) == len(cells)
assert all(c.get('universe') == str(u.id) for c in cell_elems)
assert not (set(c.get('id') for c in cell_elems) ^
set(str(c.id) for c in cells))<๏ฝfimโend๏ฝ> | u = openmc.Universe(cells=[c1, c2])
ll, ur = u.bounding_box |
<|file_name|>help.go<|end_file_name|><๏ฝfimโbegin๏ฝ>/*
* MinIO Cloud Storage, (C) 2019 MinIO, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package openid
import "github.com/minio/minio/cmd/config"
// Help template for OpenID identity feature.
var (
Help = config.HelpKVS{
config.HelpKV{
Key: ConfigURL,<๏ฝfimโhole๏ฝ> },
config.HelpKV{
Key: ClientID,
Description: `unique public identifier for apps e.g. "292085223830.apps.googleusercontent.com"`,
Type: "string",
Optional: true,
},
config.HelpKV{
Key: ClaimName,
Description: `JWT canned policy claim name, defaults to "policy"`,
Optional: true,
Type: "string",
},
config.HelpKV{
Key: ClaimPrefix,
Description: `JWT claim namespace prefix e.g. "customer1/"`,
Optional: true,
Type: "string",
},
config.HelpKV{
Key: Scopes,
Description: `Comma separated list of OpenID scopes for server, defaults to advertised scopes from discovery document e.g. "email,admin"`,
Optional: true,
Type: "csv",
},
config.HelpKV{
Key: config.Comment,
Description: config.DefaultComment,
Optional: true,
Type: "sentence",
},
}
)<๏ฝfimโend๏ฝ> | Description: `openid discovery document e.g. "https://accounts.google.com/.well-known/openid-configuration"`,
Type: "url", |
<|file_name|>files.filters.js<|end_file_name|><๏ฝfimโbegin๏ฝ>/*eslint no-console: 1 */
console.warn('You are using the default filter for the fileMeta service. For more information about event filters see https://docs.feathersjs.com/api/events.html#event-filtering'); // eslint-disable-line no-console
<๏ฝfimโhole๏ฝ><๏ฝfimโend๏ฝ> | module.exports = function(data, connection, hook) { // eslint-disable-line no-unused-vars
return data;
}; |
<|file_name|>ChevronDown.js<|end_file_name|><๏ฝfimโbegin๏ฝ>import React from 'react';
import IconBase from './../components/IconBase/IconBase';
export default class ChevronDown extends React.Component {
render() {
if(this.props.bare) {
return <g>
<path d="M256,298.3L256,298.3L256,298.3l174.2-167.2c4.3-4.2,11.4-4.1,15.8,0.2l30.6,29.9c4.4,4.3,4.5,11.3,0.2,15.5L264.1,380.9
c-2.2,2.2-5.2,3.2-8.1,3c-3,0.1-5.9-0.9-8.1-3L35.2,176.7c-4.3-4.2-4.2-11.2,0.2-15.5L66,131.3c4.4-4.3,11.5-4.4,15.8-0.2L256,298.3
z"></path>
</g>;
} return <IconBase>
<path d="M256,298.3L256,298.3L256,298.3l174.2-167.2c4.3-4.2,11.4-4.1,15.8,0.2l30.6,29.9c4.4,4.3,4.5,11.3,0.2,15.5L264.1,380.9<๏ฝfimโhole๏ฝ> }
};ChevronDown.defaultProps = {bare: false}<๏ฝfimโend๏ฝ> | c-2.2,2.2-5.2,3.2-8.1,3c-3,0.1-5.9-0.9-8.1-3L35.2,176.7c-4.3-4.2-4.2-11.2,0.2-15.5L66,131.3c4.4-4.3,11.5-4.4,15.8-0.2L256,298.3
z"></path>
</IconBase>; |
<|file_name|>object.js<|end_file_name|><๏ฝfimโbegin๏ฝ>'use strict';
var ObjectUtil, self;
<๏ฝfimโhole๏ฝ>};
/**
* @method promiseWhile
* @reference http://blog.victorquinn.com/javascript-promise-while-loop
*/
ObjectUtil.prototype.promiseWhile = function () {
};<๏ฝfimโend๏ฝ> | module.exports = ObjectUtil = function () {
self = this; |
<|file_name|>data.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Per-node data used in style calculation.
use context::{SharedStyleContext, StackLimitChecker};
use dom::TElement;
use invalidation::element::invalidator::InvalidationResult;
use invalidation::element::restyle_hints::RestyleHint;
#[cfg(feature = "gecko")]
use malloc_size_of::MallocSizeOfOps;
use properties::ComputedValues;
use rule_tree::StrongRuleNode;
use selector_parser::{EAGER_PSEUDO_COUNT, PseudoElement, RestyleDamage};
use selectors::NthIndexCache;
use servo_arc::Arc;
use shared_lock::StylesheetGuards;
use smallvec::SmallVec;
use std::fmt;
use std::mem;
use std::ops::{Deref, DerefMut};
use style_resolver::{PrimaryStyle, ResolvedElementStyles, ResolvedStyle};
bitflags! {
/// Various flags stored on ElementData.
#[derive(Default)]
pub struct ElementDataFlags: u8 {
/// Whether the styles changed for this restyle.
const WAS_RESTYLED = 1 << 0;
/// Whether the last traversal of this element did not do
/// any style computation. This is not true during the initial
/// styling pass, nor is it true when we restyle (in which case
/// WAS_RESTYLED is set).
///
/// This bit always corresponds to the last time the element was
/// traversed, so each traversal simply updates it with the appropriate
/// value.
const TRAVERSED_WITHOUT_STYLING = 1 << 1;
/// Whether the primary style of this element data was reused from
/// another element via a rule node comparison. This allows us to
/// differentiate between elements that shared styles because they met
/// all the criteria of the style sharing cache, compared to elements
/// that reused style structs via rule node identity.
///
        /// The former gives us stronger transitive guarantees that allow us to
/// apply the style sharing cache to cousins.
const PRIMARY_STYLE_REUSED_VIA_RULE_NODE = 1 << 2;
}
}
/// A lazily-allocated list of styles for eagerly-cascaded pseudo-elements.
///
/// We use an Arc so that sharing these styles via the style sharing cache does
/// not require duplicate allocations. We leverage the copy-on-write semantics of
/// Arc::make_mut(), which is free (i.e. does not require atomic read-modify-write operations)
/// in servo_arc.
#[derive(Clone, Debug, Default)]
pub struct EagerPseudoStyles(Option<Arc<EagerPseudoArray>>);
#[derive(Default)]
struct EagerPseudoArray(EagerPseudoArrayInner);
type EagerPseudoArrayInner = [Option<Arc<ComputedValues>>; EAGER_PSEUDO_COUNT];
impl Deref for EagerPseudoArray {
type Target = EagerPseudoArrayInner;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl DerefMut for EagerPseudoArray {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
// Manually implement `Clone` here because the derived impl of `Clone` for
// array types assumes the value inside is `Copy`.
impl Clone for EagerPseudoArray {
fn clone(&self) -> Self {
let mut clone = Self::default();
for i in 0..EAGER_PSEUDO_COUNT {
clone[i] = self.0[i].clone();
}
clone
}
}
// Override Debug to print which pseudos we have, and substitute the rule node
// for the much-more-verbose ComputedValues stringification.
impl fmt::Debug for EagerPseudoArray {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "EagerPseudoArray {{ ")?;
for i in 0..EAGER_PSEUDO_COUNT {
if let Some(ref values) = self[i] {
write!(f, "{:?}: {:?}, ", PseudoElement::from_eager_index(i), &values.rules)?;
}
}
write!(f, "}}")
}
}
// Can't use [None; EAGER_PSEUDO_COUNT] here because it complains
// about Copy not being implemented for our Arc type.
#[cfg(feature = "gecko")]
const EMPTY_PSEUDO_ARRAY: &'static EagerPseudoArrayInner = &[None, None, None, None];
#[cfg(feature = "servo")]
const EMPTY_PSEUDO_ARRAY: &'static EagerPseudoArrayInner = &[None, None, None];
impl EagerPseudoStyles {
/// Returns whether there are any pseudo styles.
pub fn is_empty(&self) -> bool {
self.0.is_none()
}
/// Grabs a reference to the list of styles, if they exist.
pub fn as_optional_array(&self) -> Option<&EagerPseudoArrayInner> {
match self.0 {
None => None,
Some(ref x) => Some(&x.0),
}
}
/// Grabs a reference to the list of styles or a list of None if
/// there are no styles to be had.
pub fn as_array(&self) -> &EagerPseudoArrayInner {
self.as_optional_array().unwrap_or(EMPTY_PSEUDO_ARRAY)
}
/// Returns a reference to the style for a given eager pseudo, if it exists.
pub fn get(&self, pseudo: &PseudoElement) -> Option<&Arc<ComputedValues>> {
debug_assert!(pseudo.is_eager());
self.0.as_ref().and_then(|p| p[pseudo.eager_index()].as_ref())
}
/// Sets the style for the eager pseudo.
pub fn set(&mut self, pseudo: &PseudoElement, value: Arc<ComputedValues>) {
if self.0.is_none() {
self.0 = Some(Arc::new(Default::default()));
}
let arr = Arc::make_mut(self.0.as_mut().unwrap());
arr[pseudo.eager_index()] = Some(value);
}
}
/// The styles associated with a node, including the styles for any
/// pseudo-elements.
#[derive(Clone, Default)]
pub struct ElementStyles {
/// The element's style.
pub primary: Option<Arc<ComputedValues>>,
/// A list of the styles for the element's eagerly-cascaded pseudo-elements.
pub pseudos: EagerPseudoStyles,
}
impl ElementStyles {
/// Returns the primary style.
pub fn get_primary(&self) -> Option<&Arc<ComputedValues>> {
self.primary.as_ref()
}
/// Returns the primary style. Panic if no style available.
pub fn primary(&self) -> &Arc<ComputedValues> {
self.primary.as_ref().unwrap()
}
/// Whether this element `display` value is `none`.
pub fn is_display_none(&self) -> bool {
self.primary().get_box().clone_display().is_none()
}
#[cfg(feature = "gecko")]
fn size_of_excluding_cvs(&self, _ops: &mut MallocSizeOfOps) -> usize {
// As the method name suggests, we don't measures the ComputedValues
// here, because they are measured on the C++ side.
// XXX: measure the EagerPseudoArray itself, but not the ComputedValues
// within it.
0
}
}
// We manually implement Debug for ElementStyles so that we can avoid the
// verbose stringification of every property in the ComputedValues. We
// substitute the rule node instead.
impl fmt::Debug for ElementStyles {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "ElementStyles {{ primary: {:?}, pseudos: {:?} }}",
self.primary.as_ref().map(|x| &x.rules), self.pseudos)
}
}
/// Style system data associated with an Element.
///
/// In Gecko, this hangs directly off the Element. Servo, this is embedded
/// inside of layout data, which itself hangs directly off the Element. In
/// both cases, it is wrapped inside an AtomicRefCell to ensure thread safety.
#[derive(Debug, Default)]
pub struct ElementData {
/// The styles for the element and its pseudo-elements.
pub styles: ElementStyles,
/// The restyle damage, indicating what kind of layout changes are required
/// afte restyling.
pub damage: RestyleDamage,
/// The restyle hint, which indicates whether selectors need to be rematched
/// for this element, its children, and its descendants.
pub hint: RestyleHint,
/// Flags.
pub flags: ElementDataFlags,
}
/// The kind of restyle that a single element should do.
#[derive(Debug)]
pub enum RestyleKind {
/// We need to run selector matching plus re-cascade, that is, a full
/// restyle.
MatchAndCascade,
/// We need to recascade with some replacement rule, such as the style
/// attribute, or animation rules.
CascadeWithReplacements(RestyleHint),
/// We only need to recascade, for example, because only inherited
/// properties in the parent changed.
CascadeOnly,
}
impl ElementData {
/// Invalidates style for this element, its descendants, and later siblings,
/// based on the snapshot of the element that we took when attributes or
/// state changed.
pub fn invalidate_style_if_needed<'a, E: TElement>(
&mut self,
element: E,
shared_context: &SharedStyleContext,
stack_limit_checker: Option<&StackLimitChecker>,
nth_index_cache: &mut NthIndexCache,
) -> InvalidationResult {
// In animation-only restyle we shouldn't touch snapshot at all.
if shared_context.traversal_flags.for_animation_only() {
return InvalidationResult::empty();
}
use invalidation::element::invalidator::TreeStyleInvalidator;
use invalidation::element::state_and_attributes::StateAndAttrInvalidationProcessor;
debug!("invalidate_style_if_needed: {:?}, flags: {:?}, has_snapshot: {}, \
handled_snapshot: {}, pseudo: {:?}",
element,
shared_context.traversal_flags,
element.has_snapshot(),
element.handled_snapshot(),
element.implemented_pseudo_element());
if !element.has_snapshot() || element.handled_snapshot() {
return InvalidationResult::empty();
}
let mut xbl_stylists = SmallVec::<[_; 3]>::new();
// FIXME(emilio): This is wrong, needs to account for ::slotted rules
// that may apply to elements down the tree.
let cut_off_inheritance =
element.each_applicable_non_document_style_rule_data(|data, quirks_mode| {
xbl_stylists.push((data, quirks_mode))
});
let mut processor = StateAndAttrInvalidationProcessor::new(
shared_context,
&xbl_stylists,
cut_off_inheritance,
element,
self,
nth_index_cache,
);
let invalidator = TreeStyleInvalidator::new(
element,
stack_limit_checker,
&mut processor,
);
let result = invalidator.invalidate();
unsafe { element.set_handled_snapshot() }
debug_assert!(element.handled_snapshot());
result
}
/// Returns true if this element has styles.
#[inline]
pub fn has_styles(&self) -> bool {
self.styles.primary.is_some()
}
/// Returns this element's styles as resolved styles to use for sharing.
pub fn share_styles(&self) -> ResolvedElementStyles {
ResolvedElementStyles {
primary: self.share_primary_style(),
pseudos: self.styles.pseudos.clone(),
}
}
/// Returns this element's primary style as a resolved style to use for sharing.
pub fn share_primary_style(&self) -> PrimaryStyle {
let reused_via_rule_node =
self.flags.contains(ElementDataFlags::PRIMARY_STYLE_REUSED_VIA_RULE_NODE);
PrimaryStyle {
style: ResolvedStyle(self.styles.primary().clone()),
reused_via_rule_node,
}
}
/// Sets a new set of styles, returning the old ones.
pub fn set_styles(&mut self, new_styles: ResolvedElementStyles) -> ElementStyles {
if new_styles.primary.reused_via_rule_node {
self.flags.insert(ElementDataFlags::PRIMARY_STYLE_REUSED_VIA_RULE_NODE);
} else {
self.flags.remove(ElementDataFlags::PRIMARY_STYLE_REUSED_VIA_RULE_NODE);
}
mem::replace(&mut self.styles, new_styles.into())
}
/// Returns the kind of restyling that we're going to need to do on this
/// element, based of the stored restyle hint.
pub fn restyle_kind(
&self,
shared_context: &SharedStyleContext
) -> RestyleKind {
if shared_context.traversal_flags.for_animation_only() {
return self.restyle_kind_for_animation(shared_context);
}
if !self.has_styles() {
return RestyleKind::MatchAndCascade;
}
if self.hint.match_self() {
return RestyleKind::MatchAndCascade;
}
if self.hint.has_replacements() {
debug_assert!(!self.hint.has_animation_hint(),
"Animation only restyle hint should have already processed");
return RestyleKind::CascadeWithReplacements(self.hint & RestyleHint::replacements());
}
debug_assert!(self.hint.has_recascade_self(),
"We definitely need to do something: {:?}!", self.hint);
return RestyleKind::CascadeOnly;
}
/// Returns the kind of restyling for animation-only restyle.
fn restyle_kind_for_animation(
&self,
shared_context: &SharedStyleContext,
) -> RestyleKind {
debug_assert!(shared_context.traversal_flags.for_animation_only());
debug_assert!(self.has_styles(),
"Unstyled element shouldn't be traversed during \
animation-only traversal");
// return either CascadeWithReplacements or CascadeOnly in case of
// animation-only restyle. I.e. animation-only restyle never does
// selector matching.
if self.hint.has_animation_hint() {
return RestyleKind::CascadeWithReplacements(self.hint & RestyleHint::for_animations());
}
return RestyleKind::CascadeOnly;
}
/// Return true if important rules are different.
/// We use this to make sure the cascade of off-main thread animations is correct.<๏ฝfimโhole๏ฝ> /// and transform for now, but it's fine to compare all properties and let the user
    /// check which properties they want.
/// If it costs too much, get_properties_overriding_animations() should return a set
/// containing only opacity and transform properties.
pub fn important_rules_are_different(
&self,
rules: &StrongRuleNode,
guards: &StylesheetGuards
) -> bool {
debug_assert!(self.has_styles());
let (important_rules, _custom) =
self.styles.primary().rules().get_properties_overriding_animations(&guards);
let (other_important_rules, _custom) = rules.get_properties_overriding_animations(&guards);
important_rules != other_important_rules
}
/// Drops any restyle state from the element.
///
/// FIXME(bholley): The only caller of this should probably just assert that
/// the hint is empty and call clear_flags_and_damage().
#[inline]
pub fn clear_restyle_state(&mut self) {
self.hint = RestyleHint::empty();
self.clear_restyle_flags_and_damage();
}
/// Drops restyle flags and damage from the element.
#[inline]
pub fn clear_restyle_flags_and_damage(&mut self) {
self.damage = RestyleDamage::empty();
self.flags.remove(ElementDataFlags::WAS_RESTYLED);
}
/// Returns whether this element is going to be reconstructed.
pub fn reconstructed_self(&self) -> bool {
self.damage.contains(RestyleDamage::reconstruct())
}
/// Mark this element as restyled, which is useful to know whether we need
/// to do a post-traversal.
pub fn set_restyled(&mut self) {
self.flags.insert(ElementDataFlags::WAS_RESTYLED);
self.flags.remove(ElementDataFlags::TRAVERSED_WITHOUT_STYLING);
}
/// Returns true if this element was restyled.
#[inline]
pub fn is_restyle(&self) -> bool {
self.flags.contains(ElementDataFlags::WAS_RESTYLED)
}
/// Mark that we traversed this element without computing any style for it.
pub fn set_traversed_without_styling(&mut self) {
self.flags.insert(ElementDataFlags::TRAVERSED_WITHOUT_STYLING);
}
/// Returns whether the element was traversed without computing any style for
/// it.
pub fn traversed_without_styling(&self) -> bool {
self.flags.contains(ElementDataFlags::TRAVERSED_WITHOUT_STYLING)
}
/// Returns whether this element has been part of a restyle.
#[inline]
pub fn contains_restyle_data(&self) -> bool {
self.is_restyle() || !self.hint.is_empty() || !self.damage.is_empty()
}
/// Returns whether it is safe to perform cousin sharing based on the ComputedValues
/// identity of the primary style in this ElementData. There are a few subtle things
/// to check.
///
/// First, if a parent element was already styled and we traversed past it without
/// restyling it, that may be because our clever invalidation logic was able to prove
/// that the styles of that element would remain unchanged despite changes to the id
/// or class attributes. However, style sharing relies on the strong guarantee that all
/// the classes and ids up the respective parent chains are identical. As such, if we
/// skipped styling for one (or both) of the parents on this traversal, we can't share
/// styles across cousins. Note that this is a somewhat conservative check. We could
/// tighten it by having the invalidation logic explicitly flag elements for which it
    /// elided styling.
///
/// Second, we want to only consider elements whose ComputedValues match due to a hit
/// in the style sharing cache, rather than due to the rule-node-based reuse that
/// happens later in the styling pipeline. The former gives us the stronger guarantees
/// we need for style sharing, the latter does not.
pub fn safe_for_cousin_sharing(&self) -> bool {
!self.flags.intersects(ElementDataFlags::TRAVERSED_WITHOUT_STYLING |
ElementDataFlags::PRIMARY_STYLE_REUSED_VIA_RULE_NODE)
}
/// Measures memory usage.
#[cfg(feature = "gecko")]
pub fn size_of_excluding_cvs(&self, ops: &mut MallocSizeOfOps) -> usize {
let n = self.styles.size_of_excluding_cvs(ops);
// We may measure more fields in the future if DMD says it's worth it.
n
}
}<๏ฝfimโend๏ฝ> | /// Note: Ignore custom properties for now because we only support opacity and transform
/// properties for animations running on compositor. Actually, we only care about opacity |
<|file_name|>twitter_analyse.py<|end_file_name|><๏ฝfimโbegin๏ฝ>#!/usr/bin/env python
"""
File: twitter_analyse.py
Author: Me
Email: 0
Github: 0
Description: Analyse tweets. For details, please refer to the document
```twitter_analyse.notes```
"""
# System lib
from __future__ import division
import json
import os
from math import log
import numpy
# 3-rd party lib
# import nltk
from nltk.classify import NaiveBayesClassifier
from textblob import TextBlob
# Constants
TWEET_DIR = os.path.join('.', 'twitter_data')
OSCAR_DIR = os.path.join(TWEET_DIR, 'oscar')
RAZZIES_DIR = os.path.join(TWEET_DIR, 'razzies')
PREDICT_DIR = os.path.join(TWEET_DIR, 'proof')
CANDIDATE_DIR = os.path.join(TWEET_DIR, 'candidates')
# PREDICT_OSCAR_DIR = os.path.join(PREDICT_DIR, 'oscar')
# PREDICT_RAZZIES_DIR = os.path.join(PREDICT_DIR, 'razzies')
def attribute_to_characteristic(tweet):
"""
Extract attributes from a tweet and form a characteristic of a tweet
@param tweet dict
@return dict
        Characteristic of a tweet
"""
ret = {}
text = tweet['text']
retweets = tweet['retweet_count']
favorites = tweet['favorite_count']
followers = tweet['author_followers']
friends = tweet['author_friends']
publishes = tweet['author_num_of_status']
blob = TextBlob(text)
polarity = blob.sentiment.polarity
ret['scaled_polarity'] = calculate_scaled_polarity(
polarity,
int(retweets),
int(favorites),
int(followers),
int(friends),
int(publishes)
)
ret['retweets'] = retweets
ret['favorites'] = favorites
ret['followers'] = followers
ret['friends'] = friends
ret['publishes'] = publishes
ret['polarity'] = polarity
# print 'p=%.2f re=%d fav=%d, fol=%d, fd=%d, pub=%d' % (
# polarity, retweets, favorites, followers, friends, publishes
# )
return ret
def calculate_scaled_polarity(
polarity, retweets, favorites, followers, friends, publishes):
"""
Return a scaled polarity for a tweet
@param polarity float
@param retweets int
@param favorites int
@param followers int
@param friends int
@param publishes int
@return float
"""
# Avoid zero case and negative value
retweets = retweets if retweets > 0 else 1
favorites = favorites if favorites > 0 else 1
followers = followers if followers > 0 else 1
friends = friends if friends > 0 else 1
publishes = publishes if publishes > 0 else 1
    # Entropy-style weighting: scale the polarity by the sum of the log2 of
    # each engagement metric, so widely shared tweets carry more weight.
ret = polarity * \
(
log(retweets, 2) +
log(favorites, 2) +
log(followers, 2) +
log(friends, 2) +
log(publishes, 2)
)
return round(ret, 2)
def tweets2film(tweet_characteristics):
"""
    Aggregate per-tweet characteristics to form a film's characteristics
@param tweet_characteristics list of dict
@return dict
characteristics of a film
"""
ret = {}
retweets_data = []
favorites_data = []
polarities_data = []
friends_data = []
followers_data = []
for t in tweet_characteristics:
retweets_data.append(t['retweets'])
favorites_data.append(t['favorites'])
polarities_data.append(t['polarity'])
friends_data.append(t['friends'])
followers_data.append(t['followers'])
retweets = numpy.array(retweets_data)
favorites = numpy.array(favorites_data)
polarities = numpy.array(polarities_data)
friends = numpy.array(friends_data)
followers = numpy.array(followers_data)
for data_set in [
('retweets', retweets),
('favorites', favorites),
('polarities', polarities),
('friends', friends),
('followers', followers)
]:
data_name = data_set[0]
data_list = data_set[1]
print '|%s| sd: %f mean: %f min: %d max: %d' % (
data_name,
round(data_list.std(), 2),
round(numpy.average(data_list), 2),
data_list.min(),
data_list.max(),
)
# ret['avg_followers'] = round(numpy.average(followers_data), 2)
# ret['avg_friends'] = round(numpy.average(friends_data), 2)
ret['avg_polarity'] = round(numpy.average(polarities_data), 2)
# ret['avg_retweet'] = round(numpy.average(retweets_data), 2)
# ret['std_friends'] = round(friends.std(), 2)
# ret['std_followers'] = round(followers.std(), 2)
# ret['std_polarity'] = round(polarities.std(), 2)
ret['std_retweet'] = round(retweets.std(), 2)
# ret['log_friends'] = round(log(sum(friends_data)) / log(2), 2)
# ret['log_followers'] = round(log(sum(followers_data)) / log(2), 2)
ret['log_retweets'] = round(log(sum(retweets_data)) / log(2), 2)
ret['log_favorites'] = round(log(sum(favorites_data)) / log(2), 2)
<๏ฝfimโhole๏ฝ>def construct_film_characteristic(film_name, tweet_characteristics):
"""
Construct featuresets for given parameters
@param film_name string
@param tweet_characteristics list of dict
@return featuresets
"""
ret = {}
# Analyze film's attributes
ret['length_of_film'] = len(film_name)
ret['number_of_words'] = len(film_name.split(' '))
# Analyze tweet's characteristics
    aggregated_characteristic = tweets2film(tweet_characteristics)
    # Merge the two characteristic dicts
    ret = dict(ret.items() + aggregated_characteristic.items())
return ret
def predictCandidates():
list_of_files = os.listdir(CANDIDATE_DIR)
for fn in list_of_files:
path = os.path.join(CANDIDATE_DIR, fn)
film_name = os.path.splitext(fn)[0]
with open(path, 'r') as f:
tweets = json.load(f)
tweets = json.loads(tweets)
tweet_characteristics = []
for tweet in tweets:
# Per tweet analyze
characteristic = attribute_to_characteristic(tweet)
tweet_characteristics.append(characteristic)
film_characteristic = construct_film_characteristic(
film_name,
tweet_characteristics
)
result = classifier.classify(film_characteristic)
print 'film: |%s| PREDICT: |%s|\n' % (film_name, result)
features = []
for my_dir in [OSCAR_DIR, RAZZIES_DIR]:
label = os.path.basename(my_dir)
print "=========== Training {0} ============".format(label)
for fn in os.listdir(my_dir):
path = os.path.join(my_dir, fn)
film_name = os.path.splitext(fn)[0]
# print 'dir=%s, film_name=%s, path=%s' % (my_dir, film_name, path)
with open(path, 'r') as f:
tweets = json.load(f)
tweets = json.loads(tweets)
tweet_characteristics = []
for tweet in tweets:
# Per tweet analyze
characteristic = attribute_to_characteristic(tweet)
tweet_characteristics.append(characteristic)
try:
film_characteristic = construct_film_characteristic(
film_name,
tweet_characteristics
)
except Exception as e:
print '{0}: {1}'.format(film_name, e)
else:
# print 'film: |%s|' % film_name
# print film_characteristic
feature = (film_characteristic, label)
features.append(feature)
# Train the classifier
classifier = NaiveBayesClassifier.train(features)
classifier.show_most_informative_features(10)
# Predict the film
report = {}
predict_labels = ['oscar', 'razzies']
for predict_label in predict_labels:
my_dir = os.path.join(PREDICT_DIR, predict_label)
list_of_files = os.listdir(my_dir)
report[predict_label] = {
'number_of_match': 0,
'number_of_films': len(list_of_files)
}
for fn in list_of_files:
path = os.path.join(my_dir, fn)
film_name = os.path.splitext(fn)[0]
with open(path, 'r') as f:
tweets = json.load(f)
tweets = json.loads(tweets)
tweet_characteristics = []
for tweet in tweets:
# Per tweet analyze
characteristic = attribute_to_characteristic(tweet)
tweet_characteristics.append(characteristic)
film_characteristic = construct_film_characteristic(
film_name,
tweet_characteristics
)
result = classifier.classify(film_characteristic)
if result == predict_label:
report[predict_label]['number_of_match'] += 1
print film_characteristic
print 'film: |%s| PREDICT: |%s|\n' % (film_name, result)
report['features'] = film_characteristic.keys()
# classifier.show_most_informative_features()
print "# Features in film's characteristic\n"
for f in report['features']:
print '* %s' % f
print '\n# Prediction\n'
for predict_label in predict_labels:
r = report[predict_label]
print '## %s\n' % predict_label
print 'match %d out of %d, accuracy=%d%%\n' % (
r['number_of_match'],
r['number_of_films'],
round(r['number_of_match'] / r['number_of_films'] * 100)
)
print '## overall\n'
print 'match %d out of %d, accuracy=%d%%\n' % (
sum(
[report[p]['number_of_match'] for p in predict_labels]
),
sum(
[report[p]['number_of_films'] for p in predict_labels]
),
round(
sum(
[report[p]['number_of_match'] for p in predict_labels]
) /
sum(
[report[p]['number_of_films'] for p in predict_labels]
) * 100
)
)
predictCandidates()<๏ฝfimโend๏ฝ> | return ret
|
<|file_name|>_legendgrouptitle.py<|end_file_name|><๏ฝfimโbegin๏ฝ>from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Legendgrouptitle(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "histogram2d"
_path_str = "histogram2d.legendgrouptitle"
_valid_props = {"font", "text"}
# font
# ----
@property
def font(self):
"""
Sets this legend group's title font.
The 'font' property is an instance of Font
that may be specified as:
- An instance of :class:`plotly.graph_objs.histogram2d.legendgrouptitle.Font`
- A dict of string/value properties that will be passed
to the Font constructor
Supported dict properties:
color
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
size
Returns
-------
plotly.graph_objs.histogram2d.legendgrouptitle.Font
"""
return self["font"]
@font.setter
def font(self, val):
self["font"] = val
# text
# ----
@property
def text(self):
"""
Sets the title of the legend group.
The 'text' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["text"]
@text.setter
def text(self, val):
self["text"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
font<๏ฝfimโhole๏ฝ> """
def __init__(self, arg=None, font=None, text=None, **kwargs):
"""
Construct a new Legendgrouptitle object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of
:class:`plotly.graph_objs.histogram2d.Legendgrouptitle`
font
Sets this legend group's title font.
text
Sets the title of the legend group.
Returns
-------
Legendgrouptitle
"""
super(Legendgrouptitle, self).__init__("legendgrouptitle")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.histogram2d.Legendgrouptitle
constructor must be a dict or
an instance of :class:`plotly.graph_objs.histogram2d.Legendgrouptitle`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("font", None)
_v = font if font is not None else _v
if _v is not None:
self["font"] = _v
_v = arg.pop("text", None)
_v = text if text is not None else _v
if _v is not None:
self["text"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False<๏ฝfimโend๏ฝ> | Sets this legend group's title font.
text
Sets the title of the legend group. |
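
# --- Added usage sketch (not part of the generated module) ------------------
# A minimal, hypothetical example of setting the attribute documented above;
# it assumes plotly is installed and mirrors the dict form accepted by the
# Font constructor.
#
#   import plotly.graph_objects as go
#   fig = go.Figure(go.Histogram2d(x=[1, 2, 2], y=[3, 3, 4]))
#   fig.update_traces(legendgrouptitle={"text": "Group A",
#                                       "font": {"family": "Arial", "size": 14}})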
<|file_name|>compareAncestors.py<|end_file_name|><๏ฝfimโbegin๏ฝ>#!/usr/bin/env python
__description__ = \
"""
compareAncestor.py
"""
__author__ = "Michael J. Harms"
__usage__ = "comapreAncestors.py ancestor_file1 ancestor_file2"
__date__ = "100726"
import sys, phyloBase
class CompareAncestorError(Exception):
"""
General error class for this module.
"""
pass
def readAncestorFile(ancestor_file):
"""
"""
f = open(ancestor_file,'r')
lines = f.readlines()
f.close()
# Skip comments and blank lines
lines = [l for l in lines if l.strip() != "" and l[0] != "#"]
out = []
num_states = (len(lines[0].split())-2)/2
for l in lines[1:]:
position = int(l[7:12])
tmp_out = []
for i in range(num_states):
aa = l[12+12*i:18+12*i].strip()
pp = float(l[18+12*i:24+12*i])
tmp_out.append((aa,pp))
out.append((position,tmp_out))
return out
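
# Added note (inferred from the slicing above, not from documentation): the
# parser assumes fixed-width rows in which characters 7-12 hold the site
# position and each ancestral state occupies a 12-character block (6 characters
# for the amino acid, 6 for its posterior probability), e.g. a hypothetical row:
#   "site  123       A  0.97     G  0.02"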
def compareAncestors(ancestor1_file,ancestor2_file,ambiguous_cutoff=0.8):
"""
"""
anc1 = readAncestorFile(ancestor1_file)
anc2 = readAncestorFile(ancestor2_file)
anc1_pos = [p[0] for p in anc1]
anc2_pos = [p[0] for p in anc2]
only_in_anc1 = [p for p in anc1_pos if p not in anc2_pos]
only_in_anc2 = [p for p in anc2_pos if p not in anc1_pos]
if len(only_in_anc1) > 0:
print "# Warning: some sites only in ancestor 1:"
print "".join(["# %i\n" % p for p in only_in_anc1]),
if len(only_in_anc2) > 0:
print "# Warning: some sites only in ancestRr 2:"
print "".join(["# %i\n" % p for p in only_in_anc2]),
all_pos = [p for p in anc1_pos if p not in only_in_anc1]
all_pos.extend([p for p in anc2_pos if p not in only_in_anc2 and p not in all_pos])
anc1_dict = dict([a for a in anc1 if a[0] in anc1_pos])
anc2_dict = dict([a for a in anc2 if a[0] in anc2_pos])
out = []
out.append("# pos new_state old_state same? state_type?")
out.append(" ambiguity pp_new pp_old\n")
out.append("#\n# same?\n")
out.append("# \'*\' -> changed\n")
out.append("# \' \' -> no change\n")
out.append("# flipped_with_alternate?\n")
out.append("# \'*\' -> took new state\n")
out.append("# \'~\' -> took alternate state\n")
out.append("# \' \' -> no change in state\n")
out.append("# ambig_state key:\n")
out.append("# \'~\' -> ambiguous in both\n")
out.append("# \'-\' -> newly ambiguous\n")
out.append("# \'+\' -> newly well supported\n")
out.append("# \' \' -> well suppported in both\n")
for p in all_pos:
s1 = anc1_dict[p]
s2 = anc2_dict[p]
# See if the new reconstruction has the same residue at this position
same = "*"
if s1[0][0] == s2[0][0]:
same = " "
# Check to see if new state existed as less likely state in original
# reconstruction
flipped = " "
if same == "*":
if s1[0] in [a[0] for a in s2[1:]]:
flipped = "~"
else:
flipped = "*"
# Remained ambiguous
if s1[0][1] <= ambiguous_cutoff and s2[0][1] <= ambiguous_cutoff:
ambig_state = "~"
# Newly ambiguous
elif s1[0][1] <= ambiguous_cutoff and s2[0][1] > ambiguous_cutoff:
ambig_state = "+"
# Became well supported
elif s1[0][1] > ambiguous_cutoff and s2[0][1] <= ambiguous_cutoff:
ambig_state = "-"
# Remained well supported
else:
ambig_state = " "
check_me = " "
if ambig_state == "-" or \
(same == "*" and ambig_state == " "):
check_me = "!"
out.append("%5i %s %s %s %s %s %6.2f%6.2f %s\n" % (p,s1[0][0],s2[0][0],
same,flipped,ambig_state,s1[0][1],s2[0][1],check_me))
return "".join(out)
def main(argv=None):
"""<๏ฝfimโhole๏ฝ> """
if argv == None:
argv = sys.argv[1:]
try:
ancestor1_file = argv[0]
ancestor2_file = argv[1]
except IndexError:
err = "Incorrect number of arguments!\n\n%s\n\n" % __usage__
raise CompareAncestorError(err)
out = compareAncestors(ancestor1_file,ancestor2_file)
print out
if __name__ == "__main__":
main()<๏ฝfimโend๏ฝ> | |
<|file_name|>hmm_sandbox.py<|end_file_name|><๏ฝfimโbegin๏ฝ>#!/usr/bin/env python
import tensorflow as tf
import edward as ed
import numpy as np
from numpy import array
from numpy.linalg import norm
from edward.models import Dirichlet, Multinomial, Gamma, Poisson
sess = tf.Session()
def build_toy_dataset(n, p, A, b):
"""
    toy HMM with:
    n = number of timesteps,
    p = length-m array, where m is the number of hidden states and p_i is the
        initial probability of being in state i
    A = m x m transition matrix indexed by (i, j), where element (i, j) is the
        probability of transitioning from state j to state i
    b = length-m array, where b_i holds the Poisson rate for state i
"""
p = array(p)/float(sum(p))
z = [np.random.multinomial(1, p)]
obs = [np.random.poisson(z[-1].dot(b))]
for step in range(n-1):
z += [np.random.multinomial(1, z[-1].dot(A))]
obs += [float(np.random.poisson(z[-1].dot(b)))]
return obs, z
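
# Added sanity-check sketch (illustrative only; these parameters are
# hypothetical, not the ones used in the experiment below):
#   demo_obs, demo_z = build_toy_dataset(10, [0.5, 0.5],
#                                        array([[0.9, 0.1], [0.1, 0.9]]),
#                                        [0.5, 4.0])
#   # demo_obs is a length-10 list of Poisson counts, demo_z the one-hot states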
n = 162
p_true = [.7, .3]
A_true = array([[0.8,0.4],[0.2,0.6]])
b_true = [0.1, 3.]
obs_train, z_train = build_toy_dataset(n, p_true, A_true, b_true)
obs_test, z_test = build_toy_dataset(n, p_true, A_true, b_true)
#obs = tf.placeholder(tf.float32, [n])
def gen_hmm(vd):
z = tf.expand_dims(
tf.transpose(
tf.expand_dims(Multinomial(total_count=1., probs=vd['p']), 0)), 0)
obs = tf.expand_dims(
Poisson(rate=tf.matmul(tf.expand_dims(vd['b'],0), z[-1])), 0)
for t in range(n-1):
z_new = tf.transpose(Multinomial(total_count=1.,
probs=tf.transpose(tf.matmul(tf.transpose(vd['A']),z[-1]),
name='tx_prob')),name='z_new')
z = tf.concat([z,tf.expand_dims(z_new,0)],0)
obs = tf.concat([obs,
tf.expand_dims(
Poisson(rate=tf.matmul(
tf.expand_dims(vd['b'],0), z_new)),0)], 0)
return obs, z
p_p_alpha = [2.,2.]
p_A_alpha = [[2.,1.],[1.,2.]]
p_b_alpha = [0.5,2.0]
p_b_beta = [1.,1.]
q_p_alpha = tf.Variable(p_p_alpha)
q_A_alpha = tf.Variable(p_A_alpha)
q_b_alpha = tf.Variable(p_b_alpha)
q_b_beta = tf.Variable(p_b_beta)<๏ฝfimโhole๏ฝ>A = Dirichlet(p_A_alpha, name='A')
b = Gamma(p_b_alpha, p_b_beta)
qp = Dirichlet(q_p_alpha, name='p')
qA = Dirichlet(q_A_alpha, name='A')
qb = Gamma(q_b_alpha, q_b_beta)
obs, z = gen_hmm({'p':p, 'A':A, 'b':b})
obs_train, z_train = build_toy_dataset(n, p_true, A_true, b_true)
obs_train = tf.expand_dims(tf.expand_dims(obs_train, 0), 0)
latent_vars = {p: qp, A: qA, b: qb}
data = {tf.squeeze(obs): tf.squeeze(obs_train)}
inference = ed.KLqp(latent_vars, data)
inference.run(n_samples=5, n_iter=2500)
print(qp.eval())
print(tf.transpose(qA).eval())
print(qb.eval())
obs_post = ed.copy(obs, {p: qp, A: qA, b: qb})
print("posterior observations")
print(tf.squeeze(obs_post).eval())
print("training observations")
print(tf.squeeze(obs_train).eval())
print("Mean absolute error on training data:")
print(ed.evaluate('mean_absolute_error', data={tf.squeeze(obs_post): tf.squeeze(obs_train)}))
print("test observations")
print(tf.squeeze(obs_test).eval())
print("Mean absolute error on test data:")
print(ed.evaluate('mean_absolute_error', data={tf.squeeze(obs_post): tf.squeeze(obs_test)}))
file_writer = tf.summary.FileWriter('/home/kyjohnso/projects/mlbslice/tb_logs',
tf.get_default_graph())
sess.close()<๏ฝfimโend๏ฝ> |
p = Dirichlet(p_p_alpha, name='p') |
<|file_name|>fundlist.py<|end_file_name|><๏ฝfimโbegin๏ฝ>#!/usr/bin/env python
# -*- coding: utf8 -*-
from __future__ import print_function
__author__ = 'gpanda'
"""References:
[1] easy thread-safe queque, http://pymotw.com/2/Queue/
"""
import argparse
import collections
import fileinput
import logging
import os
import pprint
import re
import string
import sys
import threading
import time
import Queue
from libs import driver
from libs.common import LOG, is_sec_id, AbriskError
config = {}
class Fund(object):
"""Fund data structure
pbr = price / book value (nav), an important index to sort funds
"""
def __init__(self, secId, name=None, time=None, price=float(0),
volume=float(0), nav=float(1)):
"""Initialize Fund object
:param secId: security id
:param name: name
:param time: data timestamp
:param price: security price
:param volume: exchange volume (unit: 0.1 billion)
:param nav: security (fund) net asset value or book value
"""
self.secId = secId
self.name = name
self.time = time
self.price = price
self.volume = volume
self.nav = nav
self.pbr = self.price / self.nav
def __cmp__(self, other):
return cmp(self.pbr, other.pbr)
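
    # Added note: __cmp__ orders funds by their price/NAV ratio (pbr), so a
    # plain sort ranks the most discounted funds first, e.g. (hypothetical
    # values)
    #   sorted([Fund("150168", price=0.90), Fund("150169", price=1.20)])
    # puts the fund trading below NAV ahead of the one trading above it.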
def _initialize_input_parser():
parser = argparse.ArgumentParser(
description="Show me interesting funds."
)
parser.add_argument(
'--fin',
default="default.0",
nargs="*",
metavar="FILE",
help="Security list input file."
)
parser.add_argument(
'--workers',
default=5,
nargs="?",
metavar="COUNT",
help="Working thread count."
)
parser.add_argument(
'--head', '-H',
default=0,
nargs="?",
metavar="COUNT",
help="How many items in the top rank to show."
)
parser.add_argument(
'--tail', '-T',
default=0,
nargs="?",
metavar="COUNT",
help="How many items in the bottom rank to show."
)
parser.add_argument(
'--funds', '-f',
nargs="*",
metavar="FUND INDEX",
help="One or more specified funds."
)
parser.add_argument(
'-v', '--verbose',
action="store_true",
help="Show debug messages."
)
return parser
def _parse_input_0(opts):
global config
global LOG
# retrieve fund list files
files = opts['fin']
if not isinstance(files, list):
files = [files]
config['fin'] = files
workers = int(opts['workers'])
if workers > 0:
config['workers'] = workers
head = int(opts['head'])
if head > 0:
config['head'] = head
tail = int(opts['tail'])
if tail > 0:
config['tail'] = tail
funds = opts['funds']
if not isinstance(funds, list):
funds = [funds]
config['funds'] = funds
if opts['verbose']:
config['debug'] = True
LOG.setLevel(logging.DEBUG)
return config
def _parse_input_1(cfg):
"""
    Build the fund pool (an OrderedDict per category) from the configured list files and any funds given on the command line.
"""
# pprint.pprint(config)
fund_pool = collections.OrderedDict()
files = cfg['fin']
for yaf in files:
if os.path.exists(yaf):
filename = os.path.basename(yaf)
# print("{filename}".format(filename=filename))
fund_pool[filename] = collections.OrderedDict()
for line in fileinput.input(yaf):
if line.startswith("#"):
continue
fields = line.split(',')
sid = string.strip(fields[0])
if is_sec_id(sid):
                    fund_pool[filename][sid] = fields[1:]  # list.extend returns None, so store the slice itself
funds = config['funds']
if funds[0]:
category = 'Quick_show'
fund_pool[category] = collections.OrderedDict()
for fund in funds:
if is_sec_id(fund):
fund_pool[category][fund] = []
return fund_pool
def work_flow(input_queues, output_queues, error_queues):
"""
    Worker-thread body: drain each category's input queue, fetch fund data and push results or errors to the matching output/error queues.
"""
local = threading.local()
local.thread_name = threading.current_thread().getName()
LOG.debug("*** Enters work_flow() >>>")
# print("*** Thread-{0}:{1} *** Enters work_flow >>>"
# .format(local.thread_name, time.time()))
def retrieve_data(sid):
"""
        Fetch raw quote data for one security id and wrap it in a Fund object.
"""
LOG.debug("Retrieving data for %s", sid)
# print("Thread-{0}: Retrieving data for {1}"
# .format(local.thread_name, sid))
fund_raw_data = driver.getpbr(sid)
if not fund_raw_data:
return None
fund = Fund(sid,
name=fund_raw_data[2],
time=fund_raw_data[0],
price=fund_raw_data[4],
volume=fund_raw_data[5],
nav=fund_raw_data[3],
)
# driver.show(fund_raw_data)
return fund
for c, iq in input_queues.items():
sid=None
try:
LOG.debug("Switching to category %s", c)
# print("Thread-{0}: Switching to category {1}"
# .format(local.thread_name, c))
while not iq.empty():
sid = iq.get(False)
fund = retrieve_data(sid)
if fund:
output_queues[c].put(fund)
LOG.debug("Leaving category %s", c)
# print("Thread-{0}: Leaving category {1}"
# .format(local.thread_name, c))
except Queue.Empty as e:
LOG.info("Unexpected Queue.Empty Exception occurs, %s", e)
except Exception as e:
ename = "T:[" + local.thread_name + "]C:[" + c + "]S:[" + sid + "]"
error_queues[c].put(AbriskError(ename, e))
LOG.debug("*** Exits from work_flow() <<<")
# print("*** Thread-{0} *** Exits from work_flow <<<"<๏ฝfimโhole๏ฝ> # .format(local.thread_name))
def sync(fund_pool):
"""Central controller of fund data synchronization.
** Preparing working queue (FIFO) and workers for funds of interest.
** Preparing data queue (Heap) for storing and sorting collected data.
** Retrieving fund data, refining and sorting them.
"""
input_queues = {}
output_queues = {}
error_queues = {}
for category, pool in fund_pool.items():
input_queues[category] = Queue.Queue(len(pool))
for sid in sorted(pool.keys()):
input_queues[category].put(sid)
output_queues[category] = Queue.PriorityQueue(len(pool))
error_queues[category] = Queue.Queue(len(pool))
workers = {}
worker_number = config['workers']
for i in range(worker_number):
workers[i] = threading.Thread(
target=work_flow,
name=str(i),
args=[input_queues, output_queues, error_queues],
)
workers[i].start()
for worker in workers.values():
worker.join()
rc = 0
for c, eq in error_queues.items():
if not eq.empty():
rc = 1
break
if rc == 0:
LOG.debug("All jobs have been done without errors.")
else:
LOG.debug("All jobs have been done, but there are errors.")
return output_queues, error_queues, rc
def report_fund_list(out_put_queues):
for category, priority_queue in out_put_queues.items():
LOG.debug("Category-%s", category)
# print("Category-{0}".format(category))
driver.setup_output(0, LOG)
driver.print_header()
while not priority_queue.empty():
fund = priority_queue.get()
driver.print_row((fund.time, fund.secId, fund.name,
fund.nav, fund.price, fund.volume,
fund.pbr))
def show_fund_pool(fund_pool):
for category, pool in fund_pool.items():
LOG.debug("Category %s", category)
# print("Category {category}".format(category=category))
for sid, extras in pool.items():
LOG.debug("%s, %s", sid, extras)
# print("{0}, {1}".format(sid, extras))
def main():
"""
    Entry point: parse arguments, fetch fund data concurrently and print the ranked report.
"""
parser = _initialize_input_parser()
opts = vars(parser.parse_args(sys.argv[1:]))
cfg = _parse_input_0(opts)
fund_pool = _parse_input_1(cfg)
# show_fund_pool(fund_pool)
begin = time.time()
funds, errors, rc = sync(fund_pool)
if rc != 0:
for c, eq in errors.items():
print(c, file=sys.stderr)
while not eq.empty():
print(eq.get().name, file=sys.stderr)
sys.exit(1)
end = time.time()
report_fund_list(funds)
LOG.debug("Time usage: %s seconds; Workers: %s",
end - begin, config['workers'])
# print("Time usage: {0} seconds; Workers: {1}"
# .format(end - begin, config['workers']))
if __name__ == '__main__':
main()<๏ฝfimโend๏ฝ> | |
<|file_name|>extractConstant_StatementInsertionPosition5.ts<|end_file_name|><๏ฝfimโbegin๏ฝ>// ==ORIGINAL==
function F0() {
function F1() {
function F2(x = /*[#|*/2 + 1/*|]*/) {
}
}
}
// ==SCOPE::Extract to constant in enclosing scope==
function F0() {
function F1() {
const newLocal = 2 + 1;
function F2(x = /*RENAME*/newLocal) {
}
}
}
// ==SCOPE::Extract to constant in function 'F0'==
function F0() {
const newLocal = 2 + 1;<๏ฝfimโhole๏ฝ> function F2(x = /*RENAME*/newLocal) {
}
}
}
// ==SCOPE::Extract to constant in global scope==
const newLocal = 2 + 1;
function F0() {
function F1() {
function F2(x = /*RENAME*/newLocal) {
}
}
}<๏ฝfimโend๏ฝ> |
function F1() { |
<|file_name|>scrollbar.py<|end_file_name|><๏ฝfimโbegin๏ฝ># DFF -- An Open Source Digital Forensics Framework
# Copyright (C) 2009-2010 ArxSys
# This program is free software, distributed under the terms of
# the GNU General Public License Version 2. See the LICENSE file
# at the top of the source tree.
#
# See http://www.digital-forensic.org for more information about this
# project. Please do not directly contact any of the maintainers of
# DFF for assistance; the project provides a web site, mailing lists
# and IRC channels for your use.
#
# Author(s):
# Jeremy Mounier <[email protected]>
#
#import sys
#Is digit
#import re
import os
from PyQt4.QtCore import Qt, SIGNAL
from PyQt4.QtGui import QScrollBar, QAbstractSlider
class hexScrollBar(QScrollBar):
def __init__(self, whex):
QScrollBar.__init__(self)
self.init(whex)
self.initCallBacks()
# self.setValues()
def init(self, whex):
self.whex = whex
self.heditor = self.whex.heditor
self.filesize = self.heditor.filesize
self.min = 0
self.single = 1
#Initialized in Whex with LFMOD
self.page = self.heditor.pageSize
self.max = 0
#Long File Mode
# self.lfmod = False
###### LFMOD ######
###################
# self.maxint = 2147483647
# self.lines = self.filesize / self.heditor.bytesPerLine
# self.restlines = self.filesize % 16
# if self.isInt(self.lines):
# self.max = self.lines - 1
# self.page = self.heditor.pageSize / 16
# else:
# self.lfmod = True
# self.max = self.maxint - 1
# self.page = self.heditor.pageSize
####################
####################
def initCallBacks(self):
self.connect(self, SIGNAL("sliderMoved(int)"), self.moved)
self.connect(self, SIGNAL("actionTriggered(int)"), self.triggered)
def setValues(self):
self.setMinimum(self.min)
self.setMaximum(self.max)
self.setSingleStep(self.single)
self.setPageStep(self.page)
self.setRange(self.min, self.max)
# def isLFMOD(self):
# return self.lfmod
# def isInt(self, val):
# try:
# res = int(val)
# if res < 2147483647:
# return True
# else:
# return False<๏ฝfimโhole๏ฝ>
# LFMOD #
# def valueToOffset(self, value):
# return ((self.filesize * value) / self.maxint)
# def offsetToValue(self, offset):
# if self.isLFMOD():
# return ((self.maxint * offset) / self.filesize)
# else:
# return (offset / self.heditor.bytesPerLine)
########################################
# Navigation Operations #
########################################
def triggered(self, action):
if action == QAbstractSlider.SliderSingleStepAdd:
self.whex.view.move(self.singleStep(), 1)
elif action == QAbstractSlider.SliderSingleStepSub:
self.whex.view.move(self.singleStep(), 0)
elif action == QAbstractSlider.SliderPageStepSub:
self.whex.view.move(self.pageStep(), 0)
elif action == QAbstractSlider.SliderPageStepAdd:
self.whex.view.move(self.pageStep(), 1)
# def oldtriggered(self, action):
# offset = self.heditor.currentOffset
#######################
# LINES #
#######################
#LINE DOWN
# if action == QAbstractSlider.SliderSingleStepAdd:
# if offset + 16 <= (self.filesize - 5 * 16):
# self.heditor.readOffset(offset + 16)
#Update value
# if self.isLFMOD():
# self.setValue(self.offsetToValue(offset + 16))
# else:
# self.setValue(self.value() + 1)
#LINE UP
# elif action == QAbstractSlider.SliderSingleStepSub:
# if offset - 16 >= 0:
# self.heditor.readOffset(offset - 16)
# #Update value
# if self.isLFMOD():
# self.setValue(self.offsetToValue(offset - 16))
# else:
# self.setValue(self.value() - 1)
#######################
# PAGES #
#######################
#PAGE UP
# elif action == QAbstractSlider.SliderPageStepSub:
# if offset - (self.page * 16) >= 0:
# self.heditor.readOffset(offset - (self.page * 16))
# #Update value
# if self.isLFMOD():
# self.setValue(self.offsetToValue(offset - (self.page * 16)))
# else:
# self.setValue(self.value() - self.page)
#PAGE DOWN
# elif action == QAbstractSlider.SliderPageStepAdd:
# if offset + (self.page * 16) <= self.filesize - (5* 16):
# self.heditor.readOffset(offset + (self.page * 16))
# #Update value
# if self.isLFMOD():
# self.setValue(self.offsetToValue(offset + (self.page * 16)))
# else:
# self.setValue(self.value() + self.page)
def moved(self, value):
if self.whex.isLFMOD():
if value <= self.max:
offset = (self.filesize * value) / self.whex.maxint
self.heditor.readOffset(offset)
else:
if value <= self.max:
if value == self.max:
offset = self.filesize - (5 * self.heditor.bytesPerLine)
else:
offset = value * self.heditor.bytesPerLine
self.heditor.readOffset(offset)<๏ฝfimโend๏ฝ> | # except ValueError, TypeError:
# return False
# else:
# return False |
<|file_name|>mod.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Networking primitives for TCP/UDP communication.
#![stable(feature = "rust1", since = "1.0.0")]
use prelude::v1::*;
use io::{self, Error, ErrorKind};
use sys_common::net as net_imp;
pub use self::ip::{IpAddr, Ipv4Addr, Ipv6Addr, Ipv6MulticastScope};
pub use self::addr::{SocketAddr, SocketAddrV4, SocketAddrV6, ToSocketAddrs};
pub use self::tcp::{TcpStream, TcpListener, Incoming};
pub use self::udp::UdpSocket;
pub use self::parser::AddrParseError;
mod ip;
mod addr;
mod tcp;
mod udp;
mod parser;
#[cfg(test)] mod test;
/// Possible values which can be passed to the `shutdown` method of `TcpStream`.
#[derive(Copy, Clone, PartialEq, Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub enum Shutdown {
/// Indicates that the reading portion of this stream/socket should be shut
/// down. All currently blocked and future reads will return `Ok(0)`.
#[stable(feature = "rust1", since = "1.0.0")]
Read,
/// Indicates that the writing portion of this stream/socket should be shut
/// down. All currently blocked and future writes will return an error.
#[stable(feature = "rust1", since = "1.0.0")]
Write,
/// Shut down both the reading and writing portions of this stream.
///
/// See `Shutdown::Read` and `Shutdown::Write` for more information.
#[stable(feature = "rust1", since = "1.0.0")]
Both,
}
#[doc(hidden)]
trait NetInt {
fn from_be(i: Self) -> Self;
fn to_be(&self) -> Self;
}
macro_rules! doit {
($($t:ident)*) => ($(impl NetInt for $t {
fn from_be(i: Self) -> Self { <$t>::from_be(i) }
fn to_be(&self) -> Self { <$t>::to_be(*self) }
})*)
}
doit! { i8 i16 i32 i64 isize u8 u16 u32 u64 usize }
fn hton<I: NetInt>(i: I) -> I { i.to_be() }
fn ntoh<I: NetInt>(i: I) -> I { I::from_be(i) }
fn each_addr<A: ToSocketAddrs, F, T>(addr: A, mut f: F) -> io::Result<T>
where F: FnMut(&SocketAddr) -> io::Result<T>
{
let mut last_err = None;
for addr in try!(addr.to_socket_addrs()) {
match f(&addr) {
Ok(l) => return Ok(l),
Err(e) => last_err = Some(e),
}
}
Err(last_err.unwrap_or_else(|| {
Error::new(ErrorKind::InvalidInput,
"could not resolve to any addresses")
}))
}
/// An iterator over `SocketAddr` values returned from a host lookup operation.
#[unstable(feature = "lookup_host", reason = "unsure about the returned \
iterator and returning socket \
addresses")]
pub struct LookupHost(net_imp::LookupHost);
#[unstable(feature = "lookup_host", reason = "unsure about the returned \
iterator and returning socket \
addresses")]
impl Iterator for LookupHost {
type Item = io::Result<SocketAddr>;
fn next(&mut self) -> Option<io::Result<SocketAddr>> { self.0.next() }
}
/// Resolve the host specified by `host` as a number of `SocketAddr` instances.
///
/// This method may perform a DNS query to resolve `host` and may also inspect
/// system configuration to resolve the specified hostname.
///
/// # Examples
///
/// ```no_run
/// # #![feature(lookup_host)]
/// use std::net;
///
/// # fn foo() -> std::io::Result<()> {
/// for host in try!(net::lookup_host("rust-lang.org")) {
/// println!("found address: {}", try!(host));
/// }<๏ฝfimโhole๏ฝ> iterator and returning socket \
addresses")]
pub fn lookup_host(host: &str) -> io::Result<LookupHost> {
net_imp::lookup_host(host).map(LookupHost)
}
/// Resolve the given address to a hostname.
///
/// This function may perform a DNS query to resolve `addr` and may also inspect
/// system configuration to resolve the specified address. If the address
/// cannot be resolved, it is returned in string format.
#[unstable(feature = "lookup_addr", reason = "recent addition")]
pub fn lookup_addr(addr: &IpAddr) -> io::Result<String> {
net_imp::lookup_addr(addr)
}<๏ฝfimโend๏ฝ> | /// # Ok(())
/// # }
/// ```
#[unstable(feature = "lookup_host", reason = "unsure about the returned \ |
<|file_name|>rootReducer.ts<|end_file_name|><๏ฝfimโbegin๏ฝ>import { appReducer } from './modules/app';
import { savedQueryReducer } from './modules/savedQuery';<๏ฝfimโhole๏ฝ>import { queriesReducer } from './modules/queries';
import { projectReducer } from './modules/project';
import { schemasReducer } from './modules/schemas';
import { dataExportReducer } from './modules/dataExport';
import { editorReducer } from './modules/editor';
export default {
app: appReducer,
queries: queriesReducer,
savedQuery: savedQueryReducer,
project: projectReducer,
schemas: schemasReducer,
dataExport: dataExportReducer,
editor: editorReducer,
};<๏ฝfimโend๏ฝ> | |
<|file_name|>rem.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>#![feature(core, core_simd)]
extern crate core;
#[cfg(test)]
mod tests {
use core::simd::u16x8;
// #[simd]
// #[derive(Copy, Clone, Debug)]
// #[repr(C)]
// pub struct u16x8(pub u16, pub u16, pub u16, pub u16,
// pub u16, pub u16, pub u16, pub u16);
#[test]
fn rem_test1() {
let x: u16x8 = u16x8(
0, 1, 2, 3, 4, 5, 6, 7
);
let y: u16x8 = u16x8(
2, 2, 2, 2, 2, 2, 2, 2
);
let z: u16x8 = x % y;
let result: String = format!("{:?}", z);
assert_eq!(result, "u16x8(\
0, 1, 0, 1, 0, 1, 0, 1\<๏ฝfimโhole๏ฝ><๏ฝfimโend๏ฝ> | )".to_string());
}
} |
<|file_name|>detruire_sortie.py<|end_file_name|><๏ฝfimโbegin๏ฝ># -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant l'action detruire_sortie."""
from primaires.scripting.action import Action
from primaires.scripting.instruction import ErreurExecution
class ClasseAction(Action):
"""Dรฉtruit une sortie d'une salle."""
@classmethod
def init_types(cls):
cls.ajouter_types(cls.detruire_sortie, "Salle", "str")
@staticmethod
def detruire_sortie(salle, direction):
"""Dรฉtruit une sortie de salle et sa rรฉciproque de la destination.
La direction est ร choisir parmi est, ouest, nord, sud, nord-est,
nord-ouest, sud-est, sud-ouest, haut et bas.
"""
try:
direction = salle.sorties.get_nom_long(direction)
except KeyError:
raise ErreurExecution("direction {} inconnue".format(direction))
if not salle.sorties.sortie_existe(direction):
raise ErreurExecution("sortie {} non dรฉfinie".format(direction))
d_salle = salle.sorties[direction].salle_dest<๏ฝfimโhole๏ฝ> d_salle.sorties.supprimer_sortie(dir_opposee)
salle.sorties.supprimer_sortie(direction)<๏ฝfimโend๏ฝ> | dir_opposee = salle.sorties.get_nom_oppose(direction) |
<|file_name|>cython_demo.py<|end_file_name|><๏ฝfimโbegin๏ฝ>import numpy as np
# Set the random seed for reproducibility
seed = np.random.randint(2**16)
print "Seed: ", seed
np.random.seed(seed)
import matplotlib.pyplot as plt
from matplotlib.patches import Polygon
from optofit.cneuron.compartment import Compartment, SquidCompartment
from optofit.cneuron.channels import LeakChannel, NaChannel, KdrChannel
from optofit.cneuron.simulate import forward_euler
from hips.inference.particle_mcmc import *
from optofit.cinference.pmcmc import *
# Make a simple compartment
hypers = {
'C' : 1.0,
'V0' : -60.0,
'g_leak' : 0.3,
'E_leak' : -65.0,
'g_na' : 120.0,
'E_na' : 50.0,
'g_kdr' : 36.0,
'E_kdr' : -77.0
}
def sample_model():
# # Add a few channels
# body = Compartment(name='body', hypers=hypers)
# leak = LeakChannel(name='leak', hypers=hypers)
# na = NaChannel(name='na', hypers=hypers)
# kdr = KdrChannel(name='kdr', hypers=hypers)
#
# body.add_child(leak)
# body.add_child(na)
# body.add_child(kdr)
# Initialize the model
# body.initialize_offsets()
squid_body = SquidCompartment(name='body', hypers=hypers)
# Initialize the model
D, I = squid_body.initialize_offsets()
# Set the recording duration
t_start = 0
t_stop = 100.
dt = 0.01
t = np.arange(t_start, t_stop, dt)
T = len(t)
# Make input with an injected current from 500-600ms
inpt = np.zeros((T, I))
inpt[50/dt:60/dt,:] = 7.
inpt += np.random.randn(T, I)
# Set the initial distribution to be Gaussian around the steady state
z0 = np.zeros(D)
squid_body.steady_state(z0)
init = GaussianInitialDistribution(z0, 0.1**2 * np.eye(D))
# Set the proposal distribution using Hodgkin Huxley dynamics
# TODO: Fix the hack which requires us to know the number of particles
N = 100
sigmas = 0.0001*np.ones(D)
# Set the voltage transition dynamics to be a bit noisier
sigmas[squid_body.x_offset] = 0.25
prop = HodgkinHuxleyProposal(T, N, D, squid_body, sigmas, t, inpt)
# Set the observation model to observe only the voltage
etas = np.ones(1)
observed_dims = np.array([squid_body.x_offset]).astype(np.int32)
lkhd = PartialGaussianLikelihood(observed_dims, etas)
# Initialize the latent state matrix to sample N=1 particle
z = np.zeros((T,N,D))
z[0,0,:] = init.sample()
# Initialize the output matrix
x = np.zeros((T,D))
# Sample the latent state sequence
for i in np.arange(0,T-1):
# The interface kinda sucks. We have to tell it that
# the first particle is always its ancestor
prop.sample_next(z, i, np.array([0], dtype=np.int32))
# Sample observations
for i in np.arange(0,T):
lkhd.sample(z,x,i,0)
# Extract the first (and in this case only) particle
z = z[:,0,:].copy(order='C')
# Plot the first particle trajectory
plt.ion()
fig = plt.figure()
# fig.add_subplot(111, aspect='equal')
plt.plot(t, z[:,observed_dims[0]], 'k')
plt.plot(t, x[:,0], 'r')
plt.show()
plt.pause(0.01)
return t, z, x, init, prop, lkhd<๏ฝfimโhole๏ฝ>
# Now run the pMCMC inference
def sample_z_given_x(t, z_curr, x,
init, prop, lkhd,
N_particles=100,
plot=False):
T,D = z_curr.shape
T,O = x.shape
# import pdb; pdb.set_trace()
pf = ParticleGibbsAncestorSampling(T, N_particles, D)
pf.initialize(init, prop, lkhd, x, z_curr)
S = 100
z_smpls = np.zeros((S,T,D))
l = plt.plot(t, z_smpls[0,:,0], 'b')
for s in range(S):
print "Iteration %d" % s
# Reinitialize with the previous particle
pf.initialize(init, prop, lkhd, x, z_smpls[s,:,:])
z_smpls[s,:,:] = pf.sample()
l[0].set_data(t, z_smpls[s,:,0])
plt.pause(0.01)
z_mean = z_smpls.mean(axis=0)
z_std = z_smpls.std(axis=0)
z_env = np.zeros((T*2,2))
z_env[:,0] = np.concatenate((t, t[::-1]))
z_env[:,1] = np.concatenate((z_mean[:,0] + z_std[:,0], z_mean[::-1,0] - z_std[::-1,0]))
if plot:
plt.gca().add_patch(Polygon(z_env, facecolor='b', alpha=0.25, edgecolor='none'))
plt.plot(t, z_mean[:,0], 'b', lw=1)
# Plot a few random samples
# for s in range(10):
# si = np.random.randint(S)
# plt.plot(t, z_smpls[si,:,0], '-b', lw=0.5)
plt.ioff()
plt.show()
return z_smpls
t, z, x, init, prop, lkhd = sample_model()
sample_z_given_x(t, z, x, init, prop, lkhd, plot=True)<๏ฝfimโend๏ฝ> | |
<|file_name|>renderers.py<|end_file_name|><๏ฝfimโbegin๏ฝ>from rest_framework import renderers
from django.utils.xmlutils import SimplerXMLGenerator
from django.utils.six.moves import StringIO
from django.utils.encoding import smart_text
from rest_framework.compat import six
from rest_framework import negotiation
import json
"""
@author: Jon Nordling
@date: 06/19/2016
XFormListRenderer is a custom Django REST Framework renderer that,
given a data object, serializes the data to XML for the views.
"""
class MediaFileContentNegotiation(negotiation.DefaultContentNegotiation):
def filter_renderers(self, renderers, format):
"""
If there is a '.json' style format suffix, filter the renderers
so that we only negotiation against those that accept that format.
If there is no renderer available, we use MediaFileRenderer.
"""
renderers = [renderer for renderer in renderers
if renderer.format == format]
if not renderers:
renderers = [MediaFileRenderer()]
return renderers
class MediaFileRenderer(renderers.BaseRenderer):
media_type = '*/*'
format = None
charset = None
render_style = 'binary'
def render(self, data, accepted_media_type=None, renderer_context=None):
return data
class XFormListRenderer(renderers.BaseRenderer):
"""
Renderer which serializes to XML.
"""
media_type = 'text/xml'
format = 'xml'
charset = 'utf-8'
root_node = 'xforms'
element_node = 'xform'
xmlns = "http://openrosa.org/xforms/xformsList"
def render(self, data, accepted_media_type=None, renderer_context=None):
"""
Renders *obj* into serialized XML.
"""
if data is None:
return ''
elif isinstance(data, six.string_types):
return data
stream = StringIO()
xml = SimplerXMLGenerator(stream, self.charset)
xml.startDocument()
xml.startElement(self.root_node, {'xmlns': self.xmlns})
self._to_xml(xml, data)
xml.endElement(self.root_node)
xml.endDocument()
return stream.getvalue()
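
    # Added note (illustrative): for data like [{"formID": "f1"}] the method
    # above produces, on a single line,
    #   <?xml version="1.0" encoding="utf-8"?>
    #   <xforms xmlns="http://openrosa.org/xforms/xformsList">
    #     <xform><formID>f1</formID></xform>
    #   </xforms>
    # (whitespace added here for readability only).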
def _to_xml(self, xml, data):
if isinstance(data, (list, tuple)):
for item in data:
xml.startElement(self.element_node, {})
self._to_xml(xml, item)
xml.endElement(self.element_node)
elif isinstance(data, dict):
for key, value in six.iteritems(data):
xml.startElement(key, {})
self._to_xml(xml, value)
xml.endElement(key)
<๏ฝfimโhole๏ฝ> else:
xml.characters(smart_text(data))<๏ฝfimโend๏ฝ> | elif data is None:
# Don't output any value
pass
|
<|file_name|>cpNbnet.py<|end_file_name|><๏ฝfimโbegin๏ฝ>#!/usr/bin/env python
# coding:utf-8
import select
import socket
from nbNetUtils import DEBUG, dbgPrint
__all__ = ["nbNet"]
class STATE:
def __init__(self):
self.state = "accept" # ๅฎไน็ถๆ
self.have_read = 0 # ่ฎฐๅฝ่ฏปไบ็ๅญ่
self.need_read = 10 # ๅคดๆไปถ้่ฆ่ฏปๅ10ไธชๅญ่
self.have_write = 0 # ่ฎฐๅฝ่ฏปไบ็ๅญ่
self.need_write = 0 # ้่ฆๅ็ๅญ่
self.buff_read = "" # ่ฏป็ผๅญ
self.buff_write = "" # ๅ็ผๅญ
self.sock_obj = "" # sockๅฏน่ฑก
def printState(self):
if DEBUG:
dbgPrint('\n - current state of fd: %d' % self.sock_obj.fileno())
dbgPrint(" - - state: %s" % self.state)
dbgPrint(" - - have_read: %s" % self.have_read)
dbgPrint(" - - need_read: %s" % self.need_read)
dbgPrint(" - - have_write: %s" % self.have_write)
dbgPrint(" - - need_write: %s" % self.need_write)
dbgPrint(" - - buff_read: %s" % self.buff_read)
dbgPrint(" - - buff_write: %s" % self.buff_write)
dbgPrint(" - - sock_obj: %s" % self.sock_obj)
class nbNetBase:
def setFd(self, sock):
dbgPrint("\n setFd start")
        tmp_state = STATE()  # instantiate a fresh state object
        tmp_state.sock_obj = sock  # attach the socket to the state
        self.conn_state[sock.fileno()] = tmp_state  # register the state in the connection dict
self.conn_state[sock.fileno()].printState()
dbgPrint("\n setFd end")
def accept(self, fd):
dbgPrint("\n accept start!")
        sock_state = self.conn_state[fd]  # state for the listening fd
        sock = sock_state.sock_obj  # the listening socket
        conn, addr = sock.accept()  # accept the pending connection
        conn.setblocking(0)  # switch the new socket to non-blocking mode
        return conn  # hand the connection back
def close(self, fd):
try:
            sock = self.conn_state[fd].sock_obj  # the socket behind this fd
            sock.close()  # close the socket
        except:
            dbgPrint("Close fd: %s" % fd)
        finally:
            self.epoll_sock.unregister(fd)  # unregister the fd from epoll
            self.conn_state.pop(fd)  # drop the fd from the connection dict
def read(self, fd):
try:
            sock_state = self.conn_state[fd]  # state for this fd
            conn = sock_state.sock_obj  # the socket behind this fd
            if sock_state.need_read <= 0:  # nothing left to read means a protocol error
raise socket.error<๏ฝfimโhole๏ฝ> (fd, one_read, sock_state.need_read))
            if len(one_read) == 0:  # zero bytes read means the peer closed the connection
                raise socket.error
            sock_state.buff_read += one_read  # append the chunk to the read buffer
            sock_state.have_read += len(one_read)  # bytes read so far
            sock_state.need_read -= len(one_read)  # bytes still expected
            sock_state.printState()
            if sock_state.have_read == 10:  # the first 10 bytes are the length header
                header_said_need_read = int(sock_state.buff_read)  # payload length announced by the ASCII header
                if header_said_need_read <= 0:  # a non-positive length is a protocol error
                    raise socket.error
                sock_state.need_read += header_said_need_read  # now expect the announced payload
                sock_state.buff_read = ''  # clear the read buffer for the payload
                sock_state.printState()
                return "readcontent"  # keep reading the payload
            elif sock_state.need_read == 0:
                return "process"  # reading finished, switch state
            else:
                return "readmore"  # more data still expected
except (socket.error, ValueError), msg:
try:
                if msg.errno == 11:  # EAGAIN: no data available right now, retry later
                    dbgPrint("errno 11 on fd %d, retrying" % fd)
return "retry"
except:
pass
return "closing"
def write(self, fd):
        sock_state = self.conn_state[fd]  # state for this fd
        conn = sock_state.sock_obj  # the socket behind this fd
        last_have_send = sock_state.have_write  # bytes already sent
        try:
            have_send = conn.send(
                sock_state.buff_write[last_have_send:])  # send whatever is left
            sock_state.have_write += have_send  # bytes sent so far
            sock_state.need_write -= have_send  # bytes still to send
            if sock_state.need_write == 0 and sock_state.have_write != 0:  # everything sent
                sock_state.printState()
                dbgPrint("\n write data end")
                return "writecomplete"  # the whole response went out
            else:
                return "writemore"  # more data still to send
except socket.error, msg:
return "closing"
def run(self):
while True:
            epoll_list = self.epoll_sock.poll()  # events reported by poll()
            for fd, events in epoll_list:
                sock_state = self.conn_state[fd]  # state for this fd
                if select.EPOLLHUP & events:  # the peer hung up
                    dbgPrint("EPOLLHUP")
                    sock_state.state = "closing"  # mark the fd for closing
                elif select.EPOLLERR & events:
                    dbgPrint("EPOLLERR")  # error on the descriptor
                    sock_state.state = "closing"  # mark the fd for closing
                self.state_machine(fd)  # dispatch on the current state

    def state_machine(self, fd):
        sock_state = self.conn_state[fd]  # state for this fd
        self.sm[sock_state.state](fd)  # look up and call the handler for the current state
class nbNet(nbNetBase):
def __init__(self, addr, port, logic):
dbgPrint('\n__init__: start!')
        self.conn_state = {}  # dict holding the state of every connection
        self.listen_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
        self.listen_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.listen_sock.bind((addr, port))
        self.listen_sock.listen(10)  # listen backlog length
        self.setFd(self.listen_sock)  # register the listening socket in conn_state
        self.epoll_sock = select.epoll()  # create the epoll object for all fds
        self.epoll_sock.register(
            self.listen_sock.fileno(), select.EPOLLIN)  # watch the listening socket for reads
        self.logic = logic  # business-logic callback
        self.sm = {
            "accept": self.accept2read,
            "read": self.read2process,
            "write": self.write2read,
            "process": self.process,
            "closing": self.close,
        }  # state-machine dispatch table
dbgPrint('\n__init__: end, register no: %s' %
self.listen_sock.fileno())
def process(self, fd):
sock_state = self.conn_state[fd]
        response = self.logic(sock_state.buff_read)  # run the business-logic callback
        sock_state.buff_write = "%010d%s" % (len(response), response)  # 10-digit length header plus payload
        sock_state.need_write = len(sock_state.buff_write)  # total bytes to send
        sock_state.state = "write"  # switch the fd to the write state
        self.epoll_sock.modify(fd, select.EPOLLOUT)  # switch epoll to write mode
sock_state.printState()
def accept2read(self, fd):
conn = self.accept(fd)
        self.epoll_sock.register(
            conn.fileno(), select.EPOLLIN)  # watch the new fd for reads
        self.setFd(conn)  # create the state object for the new fd
        self.conn_state[conn.fileno()].state = "read"  # start in the read state
dbgPrint("\n -- accept end!")
def read2process(self, fd):
read_ret = ""
        # state transition
        try:
            read_ret = self.read(fd)  # result reported by read()
        except (Exception), msg:
            dbgPrint(msg)
            read_ret = "closing"
        if read_ret == "process":  # reading finished, move on to processing
            self.process(fd)
        elif read_ret == "readcontent":  # readcontent/readmore/retry: keep reading
            pass
        elif read_ret == "readmore":
            pass
        elif read_ret == "retry":
            pass
        elif read_ret == "closing":
            self.conn_state[fd].state = 'closing'  # mark the state closing so the connection gets shut
self.state_machine(fd)
else:
raise Exception("impossible state returned by self.read")
def write2read(self, fd):
try:
            write_ret = self.write(fd)  # result reported by write()
        except socket.error, msg:  # on error, close the connection
            write_ret = "closing"
        if write_ret == "writemore":  # keep writing
            pass
        elif write_ret == "writecomplete":  # response fully sent
            sock_state = self.conn_state[fd]
            conn = sock_state.sock_obj
            self.setFd(conn)  # reset the state object for this fd
            self.conn_state[fd].state = "read"  # go back to reading the next request
            self.epoll_sock.modify(fd, select.EPOLLIN)  # watch for reads again
        elif write_ret == "closing":  # an error occurred while writing, close
            dbgPrint("write error on fd %s, closing" % fd)  # msg may be undefined here, so log the fd instead
self.conn_state[fd].state = 'closing'
self.state_machine(fd)
if __name__ == '__main__':
def logic(d_in):
return(d_in[::-1])
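
    # Added client sketch (illustrative; follows the 10-digit ASCII length
    # header protocol implemented by process() and read() above):
    #   import socket
    #   cli = socket.socket()
    #   cli.connect(("127.0.0.1", 9060))
    #   payload = "hello"
    #   cli.sendall("%010d%s" % (len(payload), payload))
    #   print cli.recv(1024)[10:]  # the reply payload, header stripped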
reverseD = nbNet('0.0.0.0', 9060, logic)
reverseD.run()<๏ฝfimโend๏ฝ> | one_read = conn.recv(sock_state.need_read) # ่ฏปๅไผ ่พ็ๅญ็ฌฆ
dbgPrint("\n func fd: %d, one_read: %s, need_read: %d" % |
<|file_name|>hfractal.py<|end_file_name|><๏ฝfimโbegin๏ฝ>#!/usr/bin/python
"""
Visualizing H fractal with tkinter.
=======
License
=======
Copyright (c) 2017 Thomas Lehmann
Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
without restriction, including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies
or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
if __name__ == "__main__":
import math
import Tkinter as tk
from concept.math.point import Point2d
from concept.math.vector import Vector2d
from concept.math.hfractal import hfractal
class Application(tk.Frame):
"""Simple tk application displaying a H fractal."""
def __init__(self):
"""Init canvas and display fractal."""
tk.Frame.__init__(self, tk.Tk())
self.angle = 0.0
self.scale = 1.0
self.depth = 0
self.master.title("H Fractal")
self.master.geometry("640x480+50+50")
self.canvas = tk.Canvas(self)
self.canvas['bg'] = "#ffffff"
self.canvas.bind("<Configure>", self.on_configure)
self.canvas.pack(fill=tk.BOTH, expand=tk.YES)
self.pack(fill=tk.BOTH, expand=tk.YES)
self.bind("<Left>", self.on_key_left)
self.bind("<Right>", self.on_key_right)
self.bind("<Up>", self.on_key_up)
self.bind("<Down>", self.on_key_down)
self.bind("+", self.on_key_plus)
self.bind("-", self.on_key_minus)
self.focus_set()
def set_title(self, count):
"""Change the title."""
self.master.title("H Fractal (%d H's, angle=%.2f Degree, scale=%.2f)"
% (count, self.angle * 180.0 / math.pi, self.scale))
def on_key_left(self, event):
"""Rotate hfractal to the left."""
self.angle -= 0.05
self.repaint(self.canvas.winfo_width(), self.canvas.winfo_height())
def on_key_right(self, event):
"""Rotate hfractal to the right."""
self.angle += 0.05
self.repaint(self.canvas.winfo_width(), self.canvas.winfo_height())
def on_key_up(self, event):
"""Scale hfractal (increase)."""
self.scale += 0.05
self.repaint(self.canvas.winfo_width(), self.canvas.winfo_height())<๏ฝfimโhole๏ฝ> """Scale hfractal (decrease)."""
if self.scale >= (0.05 + 0.05):
self.scale -= 0.05
self.repaint(self.canvas.winfo_width(), self.canvas.winfo_height())
def on_key_plus(self, event):
"""Increase hfractal depth."""
if self.depth < 7:
self.depth += 1
self.repaint(self.canvas.winfo_width(), self.canvas.winfo_height())
def on_key_minus(self, event):
"""Decrease hfractal depth."""
if self.depth > 0:
self.depth -= 1
self.repaint(self.canvas.winfo_width(), self.canvas.winfo_height())
def on_configure(self, event):
"""Called to react on changes to width and height."""
self.repaint(event.width, event.height)
def repaint(self, width, height):
"""Repaint hfractal."""
# delete all previous lines
self.canvas.delete(tk.ALL)
center = Point2d(width / 2.0, height / 2.0)
direction = Vector2d(0.0, height / 2.0).scaled(self.scale).rotated(self.angle)
hdefs = hfractal(center, direction, 2.0, self.depth)
self.set_title(len(hdefs))
for hdef in hdefs:
for line in hdef.generate_lines():
self.canvas.create_line(
line[0].x,
line[0].y,
line[0].x + line[1].x,
line[0].y + line[1].y
)
def mainloop(self):
"""Application mainloop when called."""
self.master.mainloop()
def main():
"""Main function."""
app = Application()
app.mainloop()
main()<๏ฝfimโend๏ฝ> |
def on_key_down(self, event): |
<|file_name|>router.py<|end_file_name|><๏ฝfimโbegin๏ฝ>from django.apps import apps
from django.conf import settings
from django_sharding_library.fields import BasePostgresShardGeneratedIDField
from django_sharding_library.exceptions import DjangoShardingException, InvalidMigrationException
from django_sharding_library.utils import (
is_model_class_on_database,
get_database_for_model_instance,
get_possible_databases_for_model,
)
class ShardedRouter(object):
"""
A router that is shard-aware and will prevent running migrations on
the wrong database as well as infer, when possible, which database to
read or write from.
"""
def get_shard_for_instance(self, instance):
return instance._state.db or instance.get_shard()
def get_shard_for_id_field(self, model, sharded_by_field_id):
try:
return model.get_shard_from_id(sharded_by_field_id)
except Exception:
# This is overly broad on purpose: we dont know what a user might try to do to get a shard from an ID (our
# example is a .get(), but someone could do anything) and if it excepts, we want to fall back to the next
# attempt at grabbing a shard based on other potential filters
return None
def get_shard_for_postgres_pk_field(self, model, pk_value):
group = getattr(model, 'django_sharding__shard_group', None)
shard_id_to_find = int(bin(pk_value)[-23:-10], 2) # We know where the shard id is stored in the PK's bits.
# We can check the shard id from the PK against the shard ID in the databases config
for alias, db_settings in settings.DATABASES.items():
if db_settings["SHARD_GROUP"] == group and db_settings["SHARD_ID"] == shard_id_to_find:
return alias
return None # Return None if we could not determine the shard so we can fall through to the next shard grab attempt
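
        # Added illustrative note: the loop above expects DATABASES entries of
        # the (hypothetical) shape
        #   "shard_0": {..., "SHARD_GROUP": "default", "SHARD_ID": 0},
        #   "shard_1": {..., "SHARD_GROUP": "default", "SHARD_ID": 1},
        # so the 13 shard bits sliced out of the PK can be matched to an alias.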
def get_read_db_routing_strategy(self, shard_group):
app_config_app_label = getattr(settings, 'DJANGO_SHARDING_SETTINGS', {}).get('APP_CONFIG_APP', 'django_sharding')
return apps.get_app_config(app_config_app_label).get_routing_strategy(shard_group)
def _get_shard(self, model, **hints):
shard = None
model_has_sharded_id_field = getattr(model, 'django_sharding__sharded_by_field', None) is not None
#####
#
# This is setup as multiple IF statements on purpose. If any attempt to get a shard fails, the function that
# tried to get the shard should return None that way the NEXT possible attempt to grab a shard can be run.
# In this way, future ways to automatically get the correct shard can be added, and at any point in the logic,
# and the first one to pick a valid shard will return the valid shard.
#
# Always return None if no valid shard was found so the default Django router will use the using() database if
# these all fail.
#
#####
if hints.get("instance", None):
shard = get_database_for_model_instance(instance=hints["instance"])
if shard is None and model_has_sharded_id_field:
sharded_by_field_id = hints.get('exact_lookups', {}).get(
getattr(model, 'django_sharding__sharded_by_field'), None
)
if sharded_by_field_id:
shard = self.get_shard_for_id_field(model, sharded_by_field_id)
is_pk_postgres_generated_id_field = issubclass(type(getattr(model._meta, 'pk')), BasePostgresShardGeneratedIDField)
lookup_pk = hints.get('exact_lookups', {}).get('pk') or hints.get('exact_lookups', {}).get('id')
if shard is None and is_pk_postgres_generated_id_field and lookup_pk is not None:
return self.get_shard_for_postgres_pk_field(model, lookup_pk)
return shard
def db_for_read(self, model, **hints):
possible_databases = get_possible_databases_for_model(model=model)
if len(possible_databases) == 1:
return possible_databases[0]
shard = self._get_shard(model, **hints)
if shard:
shard_group = getattr(model, 'django_sharding__shard_group', None)
if not shard_group:
raise DjangoShardingException('Unable to identify the shard_group for the {} model'.format(model))
routing_strategy = self.get_read_db_routing_strategy(shard_group)
return routing_strategy.pick_read_db(shard)
return None
def db_for_write(self, model, **hints):
possible_databases = get_possible_databases_for_model(model=model)
if len(possible_databases) == 1:
return possible_databases[0]
shard = self._get_shard(model, **hints)
if shard:
db_config = settings.DATABASES[shard]
return db_config.get('PRIMARY', shard)
return None
def allow_relation(self, obj1, obj2, **hints):
"""
Only allow relationships between two items which are both on only one database or
between sharded items on the same shard.
"""
object1_databases = get_possible_databases_for_model(model=obj1._meta.model)
object2_databases = get_possible_databases_for_model(model=obj2._meta.model)
if (len(object1_databases) == len(object2_databases) == 1) and (object1_databases == object2_databases):
return True
return self.get_shard_for_instance(obj1) == self.get_shard_for_instance(obj2)
def allow_migrate(self, db, app_label, model_name=None, **hints):
if settings.DATABASES[db].get('PRIMARY', None):
return False
        # Since the API for this function is limited in a sharded environment,
# we provide an override to specify which databases to run the migrations
# on.
if hints.get("force_migrate_on_databases", None):
return db in hints["force_migrate_on_databases"]
model_name = model_name or hints.get('model_name')
model = hints.get('model')
if model:
model_name = model.__name__
# Return true if any model in the app is on this database.
if not model_name:
app = apps.get_app_config(app_label)
for model in app.get_models():
if is_model_class_on_database(model=model, database=db):
return True
return False
        # Sometimes, when extending models from another app (e.g. the User model), the app
        # label is the app label of the app where the change is defined, but the app with the
        # model is passed in with the model name.
try:<๏ฝfimโhole๏ฝ> app_label = model_name.split('.')[0]
app = apps.get_app_config(app_label)
model = app.get_model(model_name[len(app_label) + 1:])
try:
return is_model_class_on_database(model=model, database=db)
except DjangoShardingException as e:
raise InvalidMigrationException(
e.args[0]
)<๏ฝfimโend๏ฝ> | app = apps.get_app_config(app_label)
model = app.get_model(model_name)
except LookupError: |
<|file_name|>quote.ts<|end_file_name|><๏ฝfimโbegin๏ฝ>namespace $ {
const regular_regex = /^\w+$/
export function $mol_view_tree2_prop_quote(name: $mol_tree2) {<๏ฝfimโhole๏ฝ>}<๏ฝfimโend๏ฝ> | if (regular_regex.test(name.value)) return name
return name.data(JSON.stringify(name.value))
} |
<|file_name|>backends.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>use std::rc::Rc;
use std::cell::RefCell;
use std::net::SocketAddr;
use std::collections::HashMap;
use mio::net::TcpStream;
use sozu_command::proxy::{self, LoadBalancingAlgorithms};
use super::{AppId,Backend,ConnectionError,load_balancing::*};
use server::push_event;
#[derive(Debug)]
pub struct BackendMap {
pub backends: HashMap<AppId, BackendList>,
pub max_failures: usize,
pub available: bool,
}
impl BackendMap {
pub fn new() -> BackendMap {
BackendMap {
backends: HashMap::new(),
max_failures: 3,
available: true,
}
}
pub fn import_configuration_state(&mut self, backends: &HashMap<AppId, Vec<proxy::Backend>>) {
self.backends.extend(backends.iter().map(|(ref app_id, ref backend_vec)| {
(app_id.to_string(), BackendList::import_configuration_state(backend_vec))
}));
}
pub fn add_backend(&mut self, app_id: &str, backend: Backend) {
self.backends.entry(app_id.to_string()).or_insert_with(BackendList::new).add_backend(backend);
}
pub fn remove_backend(&mut self, app_id: &str, backend_address: &SocketAddr) {
if let Some(backends) = self.backends.get_mut(app_id) {
backends.remove_backend(backend_address);
} else {
error!("Backend was already removed: app id {}, address {:?}", app_id, backend_address);
}
}
pub fn close_backend_connection(&mut self, app_id: &str, addr: &SocketAddr) {
if let Some(app_backends) = self.backends.get_mut(app_id) {
if let Some(ref mut backend) = app_backends.find_backend(addr) {
(*backend.borrow_mut()).dec_connections();
}
}
}
pub fn has_backend(&self, app_id: &str, backend: &Backend) -> bool {
self.backends.get(app_id).map(|backends| {
backends.has_backend(&backend.address)
}).unwrap_or(false)
}
pub fn backend_from_app_id(&mut self, app_id: &str) -> Result<(Rc<RefCell<Backend>>,TcpStream),ConnectionError> {
if let Some(ref mut app_backends) = self.backends.get_mut(app_id) {
if app_backends.backends.is_empty() {
self.available = false;
return Err(ConnectionError::NoBackendAvailable);
}
if let Some(ref mut b) = app_backends.next_available_backend() {
let ref mut backend = *b.borrow_mut();
debug!("Connecting {} -> {:?}", app_id, (backend.address, backend.active_connections, backend.failures));
let conn = backend.try_connect();
let res = conn.map(|c| {
(b.clone(), c)
}).map_err(|e| {<๏ฝfimโhole๏ฝ> e
});
if res.is_ok() {
self.available = true;
}
return res;
} else {
if self.available {
error!("no more available backends for app {}", app_id);
self.available = false;
push_event(proxy::ProxyEvent::NoAvailableBackends(app_id.to_string()));
}
return Err(ConnectionError::NoBackendAvailable);
}
} else {
Err(ConnectionError::NoBackendAvailable)
}
}
pub fn backend_from_sticky_session(&mut self, app_id: &str, sticky_session: &str) -> Result<(Rc<RefCell<Backend>>,TcpStream),ConnectionError> {
let sticky_conn: Option<Result<(Rc<RefCell<Backend>>,TcpStream),ConnectionError>> = self.backends
.get_mut(app_id)
.and_then(|app_backends| app_backends.find_sticky(sticky_session))
.map(|b| {
let ref mut backend = *b.borrow_mut();
let conn = backend.try_connect();
conn.map(|c| (b.clone(), c)).map_err(|e| {
error!("could not connect {} to {:?} using session {}ย ({} failures)",
app_id, backend.address, sticky_session, backend.failures);
e
})
});
if let Some(res) = sticky_conn {
return res;
} else {
debug!("Couldn't find a backend corresponding to sticky_session {} for app {}", sticky_session, app_id);
return self.backend_from_app_id(app_id);
}
}
pub fn set_load_balancing_policy_for_app(&mut self, app_id: &str, lb_algo: proxy::LoadBalancingAlgorithms, metric: Option<proxy::LoadMetric>) {
// The application can be created before the backends were registered because of the async config messages.
// So when we set the load balancing policy, we have to create the backend list if if it doesn't exist yet.
let app_backends = self.get_or_create_backend_list_for_app(app_id);
app_backends.set_load_balancing_policy(lb_algo, metric);
}
pub fn get_or_create_backend_list_for_app(&mut self, app_id: &str) -> &mut BackendList {
self.backends.entry(app_id.to_string()).or_insert_with(BackendList::new)
}
}
#[derive(Debug)]
pub struct BackendList {
pub backends: Vec<Rc<RefCell<Backend>>>,
pub next_id: u32,
pub load_balancing: Box<dyn LoadBalancingAlgorithm>,
}
impl BackendList {
pub fn new() -> BackendList {
BackendList {
backends: Vec::new(),
next_id: 0,
load_balancing: Box::new(Random),
}
}
pub fn import_configuration_state(backend_vec: &Vec<proxy::Backend>) -> BackendList {
let mut list = BackendList::new();
for ref backend in backend_vec {
let backend = Backend::new(&backend.backend_id, backend.address, backend.sticky_id.clone(), backend.load_balancing_parameters.clone(), backend.backup);
list.add_backend(backend);
}
list
}
pub fn add_backend(&mut self, backend: Backend) {
match self.backends.iter_mut().find(|b| {
(*b.borrow()).address == backend.address
&& (*b.borrow()).backend_id == backend.backend_id
}) {
None => {
let backend = Rc::new(RefCell::new(backend));
self.backends.push(backend);
self.next_id += 1;
},
// the backend already exists, update the configuration while
// keeping connection retry state
Some(old_backend) => {
let mut b = old_backend.borrow_mut();
b.sticky_id = backend.sticky_id.clone();
b.load_balancing_parameters = backend.load_balancing_parameters.clone();
b.backup = backend.backup;
}
}
}
pub fn remove_backend(&mut self, backend_address: &SocketAddr) {
self.backends.retain(|backend| &(*backend.borrow()).address != backend_address);
}
pub fn has_backend(&self, backend_address: &SocketAddr) -> bool {
self.backends.iter().any(|backend| &(*backend.borrow()).address == backend_address)
}
pub fn find_backend(&mut self, backend_address: &SocketAddr) -> Option<&mut Rc<RefCell<Backend>>> {
self.backends.iter_mut().find(|backend| &(*backend.borrow()).address == backend_address)
}
pub fn find_sticky(&mut self, sticky_session: &str) -> Option<&mut Rc<RefCell<Backend>>> {
self.backends.iter_mut()
.find(|b| b.borrow().sticky_id.as_ref().map(|s| s.as_str()) == Some(sticky_session) )
.and_then(|b| {
if b.borrow().can_open() {
Some(b)
} else {
None
}
})
}
pub fn available_backends(&mut self, backup: bool) -> Vec<Rc<RefCell<Backend>>> {
self.backends.iter()
.filter(|backend| (*backend.borrow()).backup == backup && (*backend.borrow()).can_open())
.map(|backend| (*backend).clone())
.collect()
}
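  // Delegates backend choice to the configured load-balancing algorithm,
  // trying regular backends first and falling back to backup backends only
  // when no regular backend can open a connection.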
pub fn next_available_backend(&mut self) -> Option<Rc<RefCell<Backend>>> {
let mut backends = self.available_backends(false);
if backends.is_empty() {
backends = self.available_backends(true);
}
if backends.is_empty() {
None
} else {
self.load_balancing.next_available_backend(&mut backends)
}
}
pub fn set_load_balancing_policy(&mut self, load_balancing_policy: LoadBalancingAlgorithms, metric: Option<proxy::LoadMetric>) {
match load_balancing_policy {
LoadBalancingAlgorithms::RoundRobin => self.load_balancing = Box::new(RoundRobin::new()),
LoadBalancingAlgorithms::Random => self.load_balancing = Box::new(Random{}),
LoadBalancingAlgorithms::LeastLoaded => self.load_balancing = Box::new(LeastLoaded{ metric: metric.clone().unwrap_or(proxy::LoadMetric::Connections) }),
LoadBalancingAlgorithms::PowerOfTwo => self.load_balancing = Box::new(PowerOfTwo{ metric: metric.clone().unwrap_or(proxy::LoadMetric::Connections) }),
}
}
}
#[cfg(test)]
mod backends_test {
use super::*;
use std::{thread,sync::mpsc::*,net::TcpListener};
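  // Minimal TCP server used by the tests below: it accepts connections until
  // a message arrives on the `stopper` channel.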
fn run_mock_tcp_server(addr: &str, stopper: Receiver<()>) {
let mut run = true;
let listener = TcpListener::bind(addr).unwrap();
thread::spawn(move || {
while run {
for _stream in listener.incoming() {
// accept connections
if let Ok(()) = stopper.try_recv() {
run = false;
}
}
}
});
}
#[test]
fn it_should_retrieve_a_backend_from_app_id_when_backends_have_been_recorded() {
let mut backend_map = BackendMap::new();
let app_id = "myapp";
let backend_addr = "127.0.0.1:1236";
let (sender, receiver) = channel();
run_mock_tcp_server(backend_addr, receiver);
backend_map.add_backend(app_id, Backend::new(&format!("{}-1", app_id), backend_addr.parse().unwrap(), None, None, None));
assert!(backend_map.backend_from_app_id(app_id).is_ok());
sender.send(()).unwrap();
}
#[test]
fn it_should_not_retrieve_a_backend_from_app_id_when_backend_has_not_been_recorded() {
let mut backend_map = BackendMap::new();
let app_not_recorded = "not";
backend_map.add_backend("foo", Backend::new("foo-1", "127.0.0.1:9001".parse().unwrap(), None, None, None));
assert!(backend_map.backend_from_app_id(app_not_recorded).is_err());
}
#[test]
fn it_should_not_retrieve_a_backend_from_app_id_when_backend_list_is_empty() {
let mut backend_map = BackendMap::new();
assert!(backend_map.backend_from_app_id("dumb").is_err());
}
#[test]
fn it_should_retrieve_a_backend_from_sticky_session_when_the_backend_has_been_recorded() {
let mut backend_map = BackendMap::new();
let app_id = "myapp";
let sticky_session = "server-2";
let backend_addr = "127.0.0.1:3456";
let (sender, receiver) = channel();
run_mock_tcp_server(backend_addr, receiver);
backend_map.add_backend(app_id, Backend::new(&format!("{}-1", app_id), "127.0.0.1:9001".parse().unwrap(), Some("server-1".to_string()), None, None));
backend_map.add_backend(app_id, Backend::new(&format!("{}-2", app_id), "127.0.0.1:9000".parse().unwrap(), Some("server-2".to_string()), None, None));
// sticky backend
backend_map.add_backend(app_id, Backend::new(&format!("{}-3", app_id), backend_addr.parse().unwrap(), Some("server-3".to_string()), None, None));
assert!(backend_map.backend_from_sticky_session(app_id, sticky_session).is_ok());
sender.send(()).unwrap();
}
#[test]
fn it_should_not_retrieve_a_backend_from_sticky_session_when_the_backend_has_not_been_recorded() {
let mut backend_map = BackendMap::new();
let app_id = "myapp";
let sticky_session = "test";
assert!(backend_map.backend_from_sticky_session(app_id, sticky_session).is_err());
}
#[test]
fn it_should_not_retrieve_a_backend_from_sticky_session_when_the_backend_list_is_empty() {
let mut backend_map = BackendMap::new();
let myapp_not_recorded = "myapp";
let sticky_session = "test";
assert!(backend_map.backend_from_sticky_session(myapp_not_recorded, sticky_session).is_err());
}
#[test]
fn it_should_add_a_backend_when_he_doesnt_already_exist() {
let backend_id = "myback";
let mut backends_list = BackendList::new();
backends_list.add_backend(Backend::new(backend_id, "127.0.0.1:80".parse().unwrap(), None, None, None));
assert_eq!(1, backends_list.backends.len());
}
#[test]
fn it_should_not_add_a_backend_when_he_already_exist() {
let backend_id = "myback";
let mut backends_list = BackendList::new();
backends_list.add_backend(Backend::new(backend_id, "127.0.0.1:80".parse().unwrap(), None, None, None));
    // same backend id
backends_list.add_backend(Backend::new(backend_id, "127.0.0.1:80".parse().unwrap(), None, None, None));
assert_eq!(1, backends_list.backends.len());
}
}<๏ฝfimโend๏ฝ> | error!("could not connect {} to {:?} ({} failures)", app_id, backend.address, backend.failures); |
<|file_name|>associated-types-projection-in-where-clause.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>// run-pass
#![allow(dead_code)]
#![allow(unused_variables)]
// Test a where clause that uses a non-normalized projection type.
// pretty-expanded FIXME #23616
trait Int
{
type T;
fn dummy(&self) { }
}
trait NonZero
{
fn non_zero(self) -> bool;
}
fn foo<I:Int<T=J>,J>(t: I) -> bool
where <I as Int>::T : NonZero
// ^~~~~~~~~~~~~ canonical form is just J
{
bar::<J>()
}
fn bar<NZ:NonZero>() -> bool { true }<๏ฝfimโhole๏ฝ>}<๏ฝfimโend๏ฝ> |
fn main ()
{ |
<|file_name|>p_2_1_2_02.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>// P_2_1_2_02
//
// Generative Gestaltung – Creative Coding im Web
// ISBN: 978-3-87439-902-9, First Edition, Hermann Schmidt, Mainz, 2018
// Benedikt Groß, Hartmut Bohnacker, Julia Laub, Claudius Lazzeroni
// with contributions by Joey Lee and Niels Poldervaart
// Copyright 2018
//
// http://www.generative-gestaltung.de
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* changing module color and positions in a grid
*
* MOUSE
* position x : offset x
* position y : offset y
* left click : random position
*
* KEYS
* 1-3 : different sets of colors
* 0 : default
* arrow up/down : background module size
* arrow left/right : foreground module size
* s : save png
*/
use nannou::prelude::*;
use nannou::rand::rngs::StdRng;
use nannou::rand::{Rng, SeedableRng};
fn main() {
nannou::app(model).run();
}
struct Model {
tile_count: u32,
act_random_seed: u64,
module_color_background: Hsva,
module_color_foreground: Hsva,
module_alpha_background: f32,
module_alpha_foreground: f32,
module_radius_background: f32,
module_radius_foreground: f32,
}
fn model(app: &App) -> Model {
let _window = app
.new_window()
.size(600, 600)
.view(view)
.mouse_pressed(mouse_pressed)
.key_pressed(key_pressed)
.key_released(key_released)
.build()
.unwrap();
let module_alpha_background = 1.0;
let module_alpha_foreground = 1.0;
Model {
tile_count: 20,
act_random_seed: 0,
module_color_background: hsva(0.0, 0.0, 0.0, module_alpha_background),
module_color_foreground: hsva(0.0, 0.0, 1.0, module_alpha_foreground),
module_alpha_background,
module_alpha_foreground,
module_radius_background: 15.0,
module_radius_foreground: 7.5,
}
}
fn view(app: &App, model: &Model, frame: Frame) {
let draw = app.draw();
let win = app.window_rect();
draw.background().color(WHITE);
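    // Re-seed the RNG each frame so the jittered grid stays stable; a mouse
    // click stores a new seed and therefore a new arrangement.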
let mut rng = StdRng::seed_from_u64(model.act_random_seed);
let mx = clamp(win.right() + app.mouse.x, 0.0, win.w());
let my = clamp(win.top() - app.mouse.y, 0.0, win.h());
for grid_y in 0..model.tile_count {
for grid_x in 0..model.tile_count {
let tile_w = win.w() / model.tile_count as f32;
let tile_h = win.h() / model.tile_count as f32;
let pos_x = (win.left() + (tile_w / 2.0)) + tile_w * grid_x as f32;
let pos_y = (win.top() - (tile_h / 2.0)) - tile_h * grid_y as f32;
let shift_x = rng.gen_range(-mx, mx + 1.0) / 20.0;
let shift_y = rng.gen_range(-my, my + 1.0) / 20.0;
draw.ellipse()
.x_y(pos_x + shift_x, pos_y + shift_y)
.radius(model.module_radius_background)
.color(model.module_color_background);
}
}
for grid_y in 0..model.tile_count {
for grid_x in 0..model.tile_count {
let tile_w = win.w() / model.tile_count as f32;
let tile_h = win.h() / model.tile_count as f32;
let pos_x = (win.left() + (tile_w / 2.0)) + tile_w * grid_x as f32;
let pos_y = (win.top() - (tile_h / 2.0)) - tile_h * grid_y as f32;
draw.ellipse()
.x_y(pos_x, pos_y)
.radius(model.module_radius_foreground)<๏ฝfimโhole๏ฝ>
// Write to the window frame.
draw.to_frame(app, &frame).unwrap();
}
fn mouse_pressed(_app: &App, model: &mut Model, _button: MouseButton) {
model.act_random_seed = (random_f32() * 100000.0) as u64;
}
fn key_pressed(app: &App, _model: &mut Model, key: Key) {
if key == Key::S {
app.main_window()
.capture_frame(app.exe_name().unwrap() + ".png");
}
}
fn key_released(_app: &App, model: &mut Model, key: Key) {
match key {
Key::Key1 => {
if model
.module_color_background
.eq(&hsva(0.0, 0.0, 0.0, model.module_alpha_background))
{
model.module_color_background =
hsva(0.758, 0.73, 0.51, model.module_alpha_background);
} else {
model.module_color_background = hsva(0.0, 0.0, 0.0, model.module_alpha_background);
}
}
Key::Key2 => {
if model
.module_color_foreground
.eq(&hsva(1.0, 1.0, 1.0, model.module_alpha_foreground))
{
model.module_color_foreground =
hsva(0.89, 1.0, 0.77, model.module_alpha_foreground);
} else {
model.module_color_foreground = hsva(1.0, 1.0, 1.0, model.module_alpha_foreground);
}
}
Key::Key3 => {
if model.module_alpha_background == 1.0 {
model.module_alpha_background = 0.5;
model.module_alpha_foreground = 0.5;
} else {
model.module_alpha_background = 1.0;
model.module_alpha_foreground = 1.0;
}
model.module_color_background.alpha = model.module_alpha_background;
model.module_color_foreground.alpha = model.module_alpha_foreground;
}
Key::Key0 => {
model.module_radius_background = 15.0;
model.module_radius_foreground = 7.5;
model.module_alpha_background = 1.0;
model.module_alpha_foreground = 1.0;
model.module_color_background = hsva(0.0, 0.0, 0.0, model.module_alpha_background);
model.module_color_foreground = hsva(0.0, 0.0, 1.0, model.module_alpha_foreground);
}
Key::Up => {
model.module_radius_background += 2.0;
}
Key::Down => {
model.module_radius_background = 5.0.max(model.module_radius_background - 2.0);
}
Key::Left => {
model.module_radius_foreground = 2.5.max(model.module_radius_foreground - 2.0);
}
Key::Right => {
model.module_radius_foreground += 2.0;
}
_other_key => {}
}
}<๏ฝfimโend๏ฝ> | .color(model.module_color_foreground);
}
} |
<|file_name|>tn_local_check.py<|end_file_name|><๏ฝfimโbegin๏ฝ>#!/usr/bin/env python
""" This does mostly the same as the review token commit hook,
but is designed to run locally.
It will echo to standard out a fixed up version of the<๏ฝfimโhole๏ฝ> found (some of which will have been corrected in the
echoed output).
The basic idea is thus:
$ tn_local_check.py <token>.tn > foo
Check if you're happy with it all and then:
$ mv foo <token>.tn
"""
import sys
import os
from tn_lib import parse_tn, write_tn
# Deal with a bad command line
if len(sys.argv) != 2:
print >> sys.stderr, "You will need to specify a file to parse."
sys.exit(1)
# Parse TN
tmp = parse_tn(os.path.basename(sys.argv[1]), open(sys.argv[1]).read())
# Print out corrected TN
print write_tn(tmp).rstrip()
# Report on any errors
if len(tmp["errors"]):
print >> sys.stderr, "-" * 80
print >> sys.stderr, "The review token %s contains "\
"the following errors:" % tmp["ticket"]
for e in tmp["errors"]:
print >> sys.stderr, " - %s" % e<๏ฝfimโend๏ฝ> | review token given to it; this can be used to quickly
apply any format changes to a token (such as new fields).
It will then echo to standard error a list of problems |
<|file_name|>form_fields.py<|end_file_name|><๏ฝfimโbegin๏ฝ>import json
from decimal import Decimal
from django import forms
class MoneyField(forms.DecimalField):
def __init__(self, **kwargs):
kwargs["decimal_places"] = 2
for f in ["min_value", "max_value"]:
if f in kwargs:
kwargs[f] = Decimal(kwargs[f]) / 100
super().__init__(**kwargs)
def prepare_value(self, value):
if isinstance(value, int):
return Decimal(value) / 100
return value
def clean(self, value):
value = super().clean(value)
return value and int(value * 100)
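    # Example: a submitted value of "12.50" cleans to the integer 1250; amounts
    # are handled internally as cents and exposed to the form as decimals.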
class AskAmountField(forms.DecimalField):
def __init__(
self, *, amount_choices=None, show_tax_credit=True, by_month=False, **kwargs
):
self.show_tax_credit = show_tax_credit
self.by_month = by_month
self._amount_choices = amount_choices
super().__init__(**kwargs)
if self.min_value is not None:
self.widget.attrs.setdefault(
"data-min-amount-error", self.error_messages["min_value"]
)
if self.max_value is not None:
self.widget.attrs.setdefault(
"data-max-amount-error", self.error_messages["max_value"]
)
self.widget.attrs.setdefault("data-by-month", self.by_month)
@property
def amount_choices(self):
return self._amount_choices
@amount_choices.setter
def amount_choices(self, amount_choices):
self._amount_choices = amount_choices
if self.widget:
self.widget.attrs["data-amount-choices"] = json.dumps(self._amount_choices)
def widget_attrs(self, widget):
attrs = super().widget_attrs(widget)<๏ฝfimโhole๏ฝ> if not self.show_tax_credit:
attrs.setdefault("data-hide-tax-credit", "Y")
if self.amount_choices is not None:
attrs.setdefault("data-amount-choices", json.dumps(self.amount_choices))
return attrs<๏ฝfimโend๏ฝ> | |
<|file_name|>complicajda.py<|end_file_name|><๏ฝfimโbegin๏ฝ># Part 1: functions
#gender: female = 2, male = 0
def calculate_score_for_gender(gender):
if gender == "male":
return 0
else: return 2
#age: 0-100 if age < 10 --> 0, 11 < age < 20 --> 5, 21 < age < 35 --> 2, 36 < age < 50 --> 4, 50+ --> 1
def calculate_score_for_age(age):
if (age > 11 and age <= 20) or (age > 36 and age <= 50):
return 5
elif age > 20 and age <= 35:
return 2
elif age < 10:
return 0
else:
return 1
#status: 0 = single, 1 = relationship, 2 = in open relationship, 3 = it's complicated, 4 = I'm a pizza, 5 = depends who's asking
def calculate_score_for_status(status):
if status == "single":
return 0
elif status == "in a relationship":
return 1
elif status == "in an open relationship":
return 2
elif status == "it's complicated":
return 3
elif status == "I'm a pizza":
return 0
else:
return 5
# ignorance: 0 = problem is my challenge, 2 = who gives a fuck, 4 = I'm an angel
def calculate_score_for_ignorance(ignorance):
    if ignorance == "problem is my challenge":
        return 0
    elif ignorance == "who gives a fuck":
        return 2
    else:
        return 4
# money_have: <= -10000 = 8, -10000 to -5000 = 5, -5000 to 0 = 4, 0 to 500 = 3, 500 to 3000 = 2, above 3000 = 0
def calculate_score_for_money_have(money_have):
if money_have <= (-10000.0):
return 8.0
elif money_have > (-10000.0) and money_have <= (-5000.0):
return 5.0
elif money_have > (-5000.0) and money_have <= 0.0:
return 4.0
elif money_have > 0.0 and money_have <= 500.0:
return 3.0
elif money_have > 500.0 and money_have <= 3000.0:
return 2.0
else:
return 0.0
# ---WHY DOESN'T IT RECOGNIZE POSITIVE FLOATING-POINT NUMBERS, OR ANY NEGATIVE ONES (INTEGER OR FLOAT), AS A NUMBER?
# -->YOU HAVE TO PUT FLOAT BEFORE RAW_INPUT IF THE VALUE ISN'T A WHOLE NUMBER, AND REMOVE .ISDIGIT, BECAUSE .ISDIGIT IS ONLY FOR WHOLE NUMBERS!
# money_want: 0 = 0, 0-1000 = 1, 1000-5000 = 3, 5000-10000 = 4, 10000+ = 5
def caluculate_score_for_money_want(money_want):
if money_want == 0:
return 0
elif money_want > 0.0 and money_want <= 1000.0:
return 1
elif money_want > 1000.0 and money_want <= 5000.0:
return 3
elif money_want > 5000.0 and money_want <= 10000.0:
return 4
else:
return 5
#real friends: 0 = 5, 1-3 = 1, 4-6 = 2, 7-9 = 3, 10+ = 4
def calculate_score_for_rl_friends(rl_friends):
if rl_friends == 0:
return 5
elif rl_friends >= 1 and rl_friends <= 3:
return 1
elif rl_friends >= 4 and rl_friends <= 6:
return 2
elif rl_friends >= 7 and rl_friends <= 9:
return 3
else:
return 4
#children: 0 = 1, 1-2 = 2, 3 = 3, 4 = 4, 5+ = 5
def calculate_score_for_children(children):
if children == 0:
return 1
    elif children == 1 or children == 2:
return 2
elif children == 3:
return 3
elif children == 4:
return 4
else:
return 5
<๏ฝfimโhole๏ฝ>def calculate_score(gender, age, status, ignorance, money_have, money_want, rl_friends, children):
result = calculate_score_for_gender(gender)
result += calculate_score_for_age(age)
result += calculate_score_for_status(status)
result += calculate_score_for_ignorance(ignorance)
result += calculate_score_for_money_have(money_have)
result += caluculate_score_for_money_want(money_want)
    result += calculate_score_for_rl_friends(int(rl_friends))  # raw_input returns strings, so convert first
    result += calculate_score_for_children(int(children))
return result
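# Example: with the scoring above, higher totals mean a more complicated life;
# the result is printed on a 0-40 scale at the end of the script.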
# Part 3: ------------- output for the user
#gender
print "Are you male or female?"
gender = raw_input(">> ")
#note to self: "while" means it keeps checking in a loop, "if" checks only once
while (gender != "male") and (gender != "female"):
gender = raw_input("Check your gender again: ")
#age
print "How old are you?"
age = raw_input(">> ")
while not age.isdigit():
age = raw_input("Admit it, you're old. Now write your real age: ")
#status
print "What is your marital status?"
status = raw_input(">> ")
while (status != "single") and (status != "in a relationship") and (status != "in an open relationship") and (status != "it's complicated") and (status != "I'm a pizza"):
status = raw_input("Yeah, right... Think again: ")
#ignorance
print "How ignorant are you?"
ignorance = raw_input(">> ")
while (ignorance != "problem is my challenge") and (ignorance != "who gives a fuck") and (ignorance != "I'm an angel"):
ignorance = raw_input("You can't be that ignorant. Try again: ")
#money_have
print "How much money have you got?"
money_have = float(raw_input(">> "))
while not money_have:
money_have = float(raw_input("We aren't tax collectors, so be honest: "))
# YOU HAVE TO PUT FLOAT BEFORE RAW_INPUT IF THE VALUE ISN'T A WHOLE NUMBER, AND REMOVE .ISDIGIT, BECAUSE .ISDIGIT IS ONLY FOR WHOLE NUMBERS!
#money_want
print "In addition to the money you've got, how much money do you want to have?"
money_want = float(raw_input(">> "))
while money_want < 0: #----> so that it's a positive number!
money_want = float(raw_input("I didn't ask for apples and peaches. So, how much money do you want? "))
#rl_friends
print "How many real friends have you got?"
rl_friends = raw_input(">> ")
while not rl_friends.isdigit():
rl_friends = raw_input("Spock doesn't count. Think again - how many? ")
#children
print "How many children have you got?"
children = raw_input(">> ")
while not children.isdigit():
children = raw_input("No aliens, just humans, please: ")
# Part 4: the total score
print "On a scale from 0 to 40, your life complication is : ", calculate_score(gender, int(age), status, ignorance, money_have, money_want, rl_friends, children)<๏ฝfimโend๏ฝ> |
# 2. del: sestevek funkcij
|
<|file_name|>app_softhangup.py<|end_file_name|><๏ฝfimโbegin๏ฝ># AsteriskLint -- an Asterisk PBX config syntax checker
# Copyright (C) 2019 Walter Doekes, OSSO B.V.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from ..base import AppBase
class SoftHangup(AppBase):<๏ฝfimโhole๏ฝ>
def register(app_loader):
app_loader.register(SoftHangup())<๏ฝfimโend๏ฝ> | pass |
<|file_name|>SqlSessionTemplate.java<|end_file_name|><๏ฝfimโbegin๏ฝ>/**
* Copyright 2010-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mybatis.spring;
import static java.lang.reflect.Proxy.newProxyInstance;
import static org.apache.ibatis.reflection.ExceptionUtil.unwrapThrowable;
import static org.mybatis.spring.SqlSessionUtils.closeSqlSession;
import static org.mybatis.spring.SqlSessionUtils.getSqlSession;
import static org.mybatis.spring.SqlSessionUtils.isSqlSessionTransactional;<๏ฝfimโhole๏ฝ>import java.sql.Connection;
import java.util.List;
import java.util.Map;
import org.apache.ibatis.cursor.Cursor;
import org.apache.ibatis.exceptions.PersistenceException;
import org.apache.ibatis.executor.BatchResult;
import org.apache.ibatis.session.Configuration;
import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.ResultHandler;
import org.apache.ibatis.session.RowBounds;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.dao.support.PersistenceExceptionTranslator;
/**
* Thread safe, Spring managed, {@code SqlSession} that works with Spring
* transaction management to ensure that that the actual SqlSession used is the
* one associated with the current Spring transaction. In addition, it manages
* the session life-cycle, including closing, committing or rolling back the
* session as necessary based on the Spring transaction configuration.
* <p>
* The template needs a SqlSessionFactory to create SqlSessions, passed as a
* constructor argument. It also can be constructed indicating the executor type
* to be used, if not, the default executor type, defined in the session factory
* will be used.
* <p>
* This template converts MyBatis PersistenceExceptions into unchecked
* DataAccessExceptions, using, by default, a {@code MyBatisExceptionTranslator}.
* <p>
* Because SqlSessionTemplate is thread safe, a single instance can be shared
* by all DAOs; there should also be a small memory savings by doing this. This
* pattern can be used in Spring configuration files as follows:
*
* <pre class="code">
* {@code
* <bean id="sqlSessionTemplate" class="org.mybatis.spring.SqlSessionTemplate">
* <constructor-arg ref="sqlSessionFactory" />
* </bean>
* }
* </pre>
*
* @author Putthibong Boonbong
* @author Hunter Presnall
* @author Eduardo Macarron
*
* @see SqlSessionFactory
* @see MyBatisExceptionTranslator
* @version $Id$
*/
public class SqlSessionTemplate implements SqlSession, DisposableBean {
private final SqlSessionFactory sqlSessionFactory;
private final ExecutorType executorType;
private final SqlSession sqlSessionProxy;
private final PersistenceExceptionTranslator exceptionTranslator;
/**
* Constructs a Spring managed SqlSession with the {@code SqlSessionFactory}
* provided as an argument.
*
* @param sqlSessionFactory
*/
public SqlSessionTemplate(SqlSessionFactory sqlSessionFactory) {
this(sqlSessionFactory, sqlSessionFactory.getConfiguration().getDefaultExecutorType());
}
/**
* Constructs a Spring managed SqlSession with the {@code SqlSessionFactory}
* provided as an argument and the given {@code ExecutorType}
* {@code ExecutorType} cannot be changed once the {@code SqlSessionTemplate}
* is constructed.
*
* @param sqlSessionFactory
* @param executorType
*/
public SqlSessionTemplate(SqlSessionFactory sqlSessionFactory, ExecutorType executorType) {
this(sqlSessionFactory, executorType,
new MyBatisExceptionTranslator(
sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(), true));
}
/**
* Constructs a Spring managed {@code SqlSession} with the given
* {@code SqlSessionFactory} and {@code ExecutorType}.
* A custom {@code SQLExceptionTranslator} can be provided as an
* argument so any {@code PersistenceException} thrown by MyBatis
 * can be custom translated to a {@code RuntimeException}.
 * The {@code SQLExceptionTranslator} can also be null, in which case no
 * exception translation is performed and MyBatis exceptions are thrown as-is.
*
* @param sqlSessionFactory
* @param executorType
* @param exceptionTranslator
*/
public SqlSessionTemplate(SqlSessionFactory sqlSessionFactory, ExecutorType executorType,
PersistenceExceptionTranslator exceptionTranslator) {
notNull(sqlSessionFactory, "Property 'sqlSessionFactory' is required");
notNull(executorType, "Property 'executorType' is required");
this.sqlSessionFactory = sqlSessionFactory;
this.executorType = executorType;
this.exceptionTranslator = exceptionTranslator;
this.sqlSessionProxy = (SqlSession) newProxyInstance(
SqlSessionFactory.class.getClassLoader(),
new Class[] { SqlSession.class },
new SqlSessionInterceptor());
}
public SqlSessionFactory getSqlSessionFactory() {
return this.sqlSessionFactory;
}
public ExecutorType getExecutorType() {
return this.executorType;
}
public PersistenceExceptionTranslator getPersistenceExceptionTranslator() {
return this.exceptionTranslator;
}
/**
* {@inheritDoc}
*/
@Override
public <T> T selectOne(String statement) {
return this.sqlSessionProxy.<T> selectOne(statement);
}
/**
* {@inheritDoc}
*/
@Override
public <T> T selectOne(String statement, Object parameter) {
return this.sqlSessionProxy.<T> selectOne(statement, parameter);
}
/**
* {@inheritDoc}
*/
@Override
public <K, V> Map<K, V> selectMap(String statement, String mapKey) {
return this.sqlSessionProxy.<K, V> selectMap(statement, mapKey);
}
/**
* {@inheritDoc}
*/
@Override
public <K, V> Map<K, V> selectMap(String statement, Object parameter, String mapKey) {
return this.sqlSessionProxy.<K, V> selectMap(statement, parameter, mapKey);
}
/**
* {@inheritDoc}
*/
@Override
public <K, V> Map<K, V> selectMap(String statement, Object parameter, String mapKey, RowBounds rowBounds) {
return this.sqlSessionProxy.<K, V> selectMap(statement, parameter, mapKey, rowBounds);
}
/**
* {@inheritDoc}
*/
@Override
public <T> Cursor<T> selectCursor(String statement) {
return this.sqlSessionProxy.selectCursor(statement);
}
/**
* {@inheritDoc}
*/
@Override
public <T> Cursor<T> selectCursor(String statement, Object parameter) {
return this.sqlSessionProxy.selectCursor(statement, parameter);
}
/**
* {@inheritDoc}
*/
@Override
public <T> Cursor<T> selectCursor(String statement, Object parameter, RowBounds rowBounds) {
return this.sqlSessionProxy.selectCursor(statement, parameter, rowBounds);
}
/**
* {@inheritDoc}
*/
@Override
public <E> List<E> selectList(String statement) {
return this.sqlSessionProxy.<E> selectList(statement);
}
/**
* {@inheritDoc}
*/
@Override
public <E> List<E> selectList(String statement, Object parameter) {
return this.sqlSessionProxy.<E> selectList(statement, parameter);
}
/**
* {@inheritDoc}
*/
@Override
public <E> List<E> selectList(String statement, Object parameter, RowBounds rowBounds) {
return this.sqlSessionProxy.<E> selectList(statement, parameter, rowBounds);
}
/**
* {@inheritDoc}
*/
@Override
public void select(String statement, ResultHandler handler) {
this.sqlSessionProxy.select(statement, handler);
}
/**
* {@inheritDoc}
*/
@Override
public void select(String statement, Object parameter, ResultHandler handler) {
this.sqlSessionProxy.select(statement, parameter, handler);
}
/**
* {@inheritDoc}
*/
@Override
public void select(String statement, Object parameter, RowBounds rowBounds, ResultHandler handler) {
this.sqlSessionProxy.select(statement, parameter, rowBounds, handler);
}
/**
* {@inheritDoc}
*/
@Override
public int insert(String statement) {
return this.sqlSessionProxy.insert(statement);
}
/**
* {@inheritDoc}
*/
@Override
public int insert(String statement, Object parameter) {
return this.sqlSessionProxy.insert(statement, parameter);
}
/**
* {@inheritDoc}
*/
@Override
public int update(String statement) {
return this.sqlSessionProxy.update(statement);
}
/**
* {@inheritDoc}
*/
@Override
public int update(String statement, Object parameter) {
return this.sqlSessionProxy.update(statement, parameter);
}
/**
* {@inheritDoc}
*/
@Override
public int delete(String statement) {
return this.sqlSessionProxy.delete(statement);
}
/**
* {@inheritDoc}
*/
@Override
public int delete(String statement, Object parameter) {
return this.sqlSessionProxy.delete(statement, parameter);
}
/**
* {@inheritDoc}
*/
@Override
public <T> T getMapper(Class<T> type) {
return getConfiguration().getMapper(type, this);
}
/**
* {@inheritDoc}
*/
@Override
public void commit() {
throw new UnsupportedOperationException("Manual commit is not allowed over a Spring managed SqlSession");
}
/**
* {@inheritDoc}
*/
@Override
public void commit(boolean force) {
throw new UnsupportedOperationException("Manual commit is not allowed over a Spring managed SqlSession");
}
/**
* {@inheritDoc}
*/
@Override
public void rollback() {
throw new UnsupportedOperationException("Manual rollback is not allowed over a Spring managed SqlSession");
}
/**
* {@inheritDoc}
*/
@Override
public void rollback(boolean force) {
throw new UnsupportedOperationException("Manual rollback is not allowed over a Spring managed SqlSession");
}
/**
* {@inheritDoc}
*/
@Override
public void close() {
throw new UnsupportedOperationException("Manual close is not allowed over a Spring managed SqlSession");
}
/**
* {@inheritDoc}
*/
@Override
public void clearCache() {
this.sqlSessionProxy.clearCache();
}
/**
* {@inheritDoc}
*
*/
@Override
public Configuration getConfiguration() {
return this.sqlSessionFactory.getConfiguration();
}
/**
* {@inheritDoc}
*/
@Override
public Connection getConnection() {
return this.sqlSessionProxy.getConnection();
}
/**
* {@inheritDoc}
*
* @since 1.0.2
*
*/
@Override
public List<BatchResult> flushStatements() {
return this.sqlSessionProxy.flushStatements();
}
/**
   * Allows the bean to be disposed of gently:
* <pre>
* {@code
*
* <bean id="sqlSession" class="org.mybatis.spring.SqlSessionTemplate">
* <constructor-arg index="0" ref="sqlSessionFactory" />
* </bean>
* }
*</pre>
*
* The implementation of {@link DisposableBean} forces spring context to use {@link DisposableBean#destroy()} method instead of {@link SqlSessionTemplate#close()} to shutdown gently.
*
* @see SqlSessionTemplate#close()
* @see org.springframework.beans.factory.support.DisposableBeanAdapter#inferDestroyMethodIfNecessary
* @see org.springframework.beans.factory.support.DisposableBeanAdapter#CLOSE_METHOD_NAME
*/
@Override
public void destroy() throws Exception {
//This method forces spring disposer to avoid call of SqlSessionTemplate.close() which gives UnsupportedOperationException
}
/**
   * Proxy needed to route MyBatis method calls to the proper SqlSession obtained
   * from Spring's Transaction Manager.
* It also unwraps exceptions thrown by {@code Method#invoke(Object, Object...)} to
* pass a {@code PersistenceException} to the {@code PersistenceExceptionTranslator}.
*/
private class SqlSessionInterceptor implements InvocationHandler {
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
SqlSession sqlSession = getSqlSession(
SqlSessionTemplate.this.sqlSessionFactory,
SqlSessionTemplate.this.executorType,
SqlSessionTemplate.this.exceptionTranslator);
try {
Object result = method.invoke(sqlSession, args);
if (!isSqlSessionTransactional(sqlSession, SqlSessionTemplate.this.sqlSessionFactory)) {
// force commit even on non-dirty sessions because some databases require
// a commit/rollback before calling close()
sqlSession.commit(true);
}
return result;
} catch (Throwable t) {
Throwable unwrapped = unwrapThrowable(t);
if (SqlSessionTemplate.this.exceptionTranslator != null && unwrapped instanceof PersistenceException) {
          // release the connection to avoid a deadlock if the translator is not loaded. See issue #22
closeSqlSession(sqlSession, SqlSessionTemplate.this.sqlSessionFactory);
sqlSession = null;
Throwable translated = SqlSessionTemplate.this.exceptionTranslator.translateExceptionIfPossible((PersistenceException) unwrapped);
if (translated != null) {
unwrapped = translated;
}
}
throw unwrapped;
} finally {
if (sqlSession != null) {
closeSqlSession(sqlSession, SqlSessionTemplate.this.sqlSessionFactory);
}
}
}
}
}<๏ฝfimโend๏ฝ> | import static org.springframework.util.Assert.notNull;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method; |
<|file_name|>test_parser.py<|end_file_name|><๏ฝfimโbegin๏ฝ>import unittest
from ctauto.exceptions import CTAutoMissingEndOfMetablockError, \
CTAutoBrokenEndOfMetablockError, \
CTAutoInvalidMetablockError, \
CTAutoInvalidIdError, \
CTAutoMissingEndOfStringError, \
CTAutoInvalidStringError, \
CTAutoIncompleteEscapeSequence, \
CTAutoInvalidEscapeSequence, \
CTAutoTrailingCharacterAfterQuotedText, \
CTAutoInvalidNumberError
from ctauto.blocks import Block, MetaBlock
from ctauto.tokens import SimpleTextToken, QuotedTextToken, NumericToken, \
DotToken, LeftSquareBracketToken, RightSquareBracketToken
from ctauto.parser import EndOfFileCharacter, Parser, TemplateParser
_TEST_CONTENT = "<% metacode 1 %>\n" \
"#include <stdio.h>\n" \
"\n" \
"int main(void)\n" \
"{\n" \
" <% metacode 2 %>\n" \
" // <% metacode 3 %>\n" \
" return 0;\n" \
" <% metacode 4 . [ 1 ] %>\n" \
"}\n"
class TestParser(unittest.TestCase):
def test_parse(self):
class TestParser(Parser):
def reset(self, content, source):
self.source = source
self.content = content
self.indexes = []
self.characters = []
return self.first
def finalize(self):
return self.indexes, self.characters
def first(self, index, character):
self.indexes.append(index)
self.characters.append(character)
return self.second
def second(self, index, character):
self.indexes.append(index)
self.characters.append(character)
return self.third
def third(self, index, character):
if character is EndOfFileCharacter:
self.indexes.append(index)
self.characters.append(character)
return
self.indexes[-1] = index
self.characters[-1] = character
return self.third
parser = TestParser()
indexes, characters = parser.parse(_TEST_CONTENT, "test")
self.assertEqual(parser.source, "test")
self.assertEqual(parser.content, _TEST_CONTENT)
length = len(_TEST_CONTENT)
self.assertEqual(indexes, [0, length-1, length])
self.assertEqual(characters, ['<', '\n', EndOfFileCharacter])
class TestTemplateParser(unittest.TestCase):
def test_template_parse(self):
parser = TemplateParser()
blocks = parser.parse(_TEST_CONTENT, "test")
self.assertEqual(parser.source, "test")
self.assertEqual(parser.content, _TEST_CONTENT)
self.assertEqual(len(blocks), 8)
block = blocks[0]
self.assertIsInstance(block, MetaBlock)
self.assertEqual(block.content, " metacode 1 ")
self.assertEqual(block.tokens,
[SimpleTextToken(1, "metacode"),
NumericToken(1, "1")])
block = blocks[1]
self.assertIsInstance(block, Block)
self.assertEqual(block.content, "\n"
"#include <stdio.h>\n"
"\n"
"int main(void)\n"
"{\n"
" ")
block = blocks[2]
self.assertIsInstance(block, MetaBlock)
self.assertEqual(block.content, " metacode 2 ")
self.assertEqual(block.tokens,
[SimpleTextToken(6, "metacode"),
NumericToken(6, "2")])
block = blocks[3]
self.assertIsInstance(block, Block)
self.assertEqual(block.content, "\n"
" // ")
block = blocks[4]
self.assertIsInstance(block, MetaBlock)
self.assertEqual(block.content, " metacode 3 ")
self.assertEqual(block.tokens,
[SimpleTextToken(7, "metacode"),
NumericToken(7, "3")])
block = blocks[5]<๏ฝfimโhole๏ฝ> self.assertIsInstance(block, Block)
self.assertEqual(block.content, "\n"
" return 0;\n"
" ")
block = blocks[6]
self.assertIsInstance(block, MetaBlock)
self.assertEqual(block.content, " metacode 4 . [ 1 ] ")
self.assertEqual(block.tokens,
[SimpleTextToken(9, "metacode"),
NumericToken(9, "4"),
DotToken(9),
LeftSquareBracketToken(9),
NumericToken(9, "1"),
RightSquareBracketToken(9)])
block = blocks[7]
self.assertIsInstance(block, Block)
self.assertEqual(block.content, "\n"
"}\n")
def test_invalid_ends_of_metablock(self):
parser = TemplateParser()
with self.assertRaises(CTAutoMissingEndOfMetablockError):
parser.parse("<% %", "test")
with self.assertRaises(CTAutoBrokenEndOfMetablockError):
parser.parse("<% %!", "test")
def test_invalid_metablock(self):
parser = TemplateParser()
with self.assertRaises(CTAutoInvalidMetablockError):
parser.parse("<% ! %>", "test")
def test_end_of_metablock_while_skipping_whitespaces(self):
parser = TemplateParser()
with self.assertRaises(CTAutoMissingEndOfMetablockError):
parser.parse(" <% ", "test")
def test_multiline_metablock(self):
parser = TemplateParser()
blocks = parser.parse("<%\tx\n\ty\n\tz\n\tt%>", "test")
self.assertEqual(blocks[0].tokens,
[SimpleTextToken(1, "x"),
SimpleTextToken(2, "y"),
SimpleTextToken(3, "z"),
SimpleTextToken(4, "t")])
def test_simple_text_token(self):
parser = TemplateParser()
blocks = parser.parse("<%test%>", "test")
self.assertEqual(blocks[0].tokens, [SimpleTextToken(1, "test")])
blocks = parser.parse("<% test %>", "test")
self.assertEqual(blocks[0].tokens, [SimpleTextToken(1, "test")])
with self.assertRaises(CTAutoMissingEndOfMetablockError):
parser.parse("<%s test", "test")
with self.assertRaises(CTAutoInvalidIdError):
parser.parse("<%s test! %>", "test")
def test_quoted_text_token(self):
parser = TemplateParser()
blocks = parser.parse("<%\"test\"%>", "test")
self.assertEqual(blocks[0].tokens, [QuotedTextToken(1, "test")])
blocks = parser.parse("<% \"test \\\\ \\\"test\\\" \\n \\t \\r \\a\" %>", "test")
self.assertEqual(blocks[0].tokens, [QuotedTextToken(1, "test \\ \"test\" \n \t \r \\a")])
with self.assertRaises(CTAutoMissingEndOfStringError):
parser.parse("<%\"test%>", "test")
with self.assertRaises(CTAutoInvalidStringError):
parser.parse("<%\"test\n%>", "test")
with self.assertRaises(CTAutoIncompleteEscapeSequence):
parser.parse("<% \"test \\", "test")
with self.assertRaises(CTAutoInvalidEscapeSequence):
parser.parse("<% \"test \\\n test\" %>", "test")
with self.assertRaises(CTAutoMissingEndOfMetablockError):
parser.parse("<% \"test\"", "test")
with self.assertRaises(CTAutoTrailingCharacterAfterQuotedText):
parser.parse("<% \"test\"test %>", "test")
def test_numeric_token(self):
parser = TemplateParser()
blocks = parser.parse("<% 1234567890 %>", "test")
self.assertEqual(blocks[0].tokens, [NumericToken(1, "1234567890")])
blocks = parser.parse("<%1234567890%>", "test")
self.assertEqual(blocks[0].tokens, [NumericToken(1, "1234567890")])
with self.assertRaises(CTAutoMissingEndOfMetablockError):
parser.parse("<%1234567890", "test")
with self.assertRaises(CTAutoInvalidNumberError):
parser.parse("<% 1234567890test %>", "test")
def test_simple_token_as_terminator(self):
parser = TemplateParser()
blocks = parser.parse("<% test.test %>", "test")
self.assertEqual(blocks[0].tokens,
[SimpleTextToken(1, "test"),
DotToken(1),
SimpleTextToken(1, "test")])
blocks = parser.parse("<% 1234567890[test %>", "test")
self.assertEqual(blocks[0].tokens,
[NumericToken(1, "1234567890"),
LeftSquareBracketToken(1),
SimpleTextToken(1, "test")])
blocks = parser.parse("<% \"test\"]test %>", "test")
self.assertEqual(blocks[0].tokens,
[QuotedTextToken(1, "test"),
RightSquareBracketToken(1),
SimpleTextToken(1, "test")])
test_suite = unittest.TestSuite([unittest.defaultTestLoader.loadTestsFromTestCase(TestParser),
unittest.defaultTestLoader.loadTestsFromTestCase(TestTemplateParser)])
if __name__ == '__main__':
unittest.main()<๏ฝfimโend๏ฝ> | |
<|file_name|>webintent.js<|end_file_name|><๏ฝfimโbegin๏ฝ>/**
* cordova Web Intent plugin
* Copyright (c) Boris Smus 2010
*
*/
var WebIntent = function() {
};
WebIntent.prototype.ACTION_SEND = "android.intent.action.SEND";
WebIntent.prototype.ACTION_VIEW= "android.intent.action.VIEW";
WebIntent.prototype.EXTRA_TEXT = "android.intent.extra.TEXT";
WebIntent.prototype.EXTRA_SUBJECT = "android.intent.extra.SUBJECT";
WebIntent.prototype.EXTRA_STREAM = "android.intent.extra.STREAM";
WebIntent.prototype.EXTRA_EMAIL = "android.intent.extra.EMAIL";
WebIntent.prototype.startActivity = function(params, success, fail) {
return cordova.exec(function(args) {
success(args);
}, function(args) {
fail(args);
}, 'WebIntent', 'startActivity', [params]);
};
WebIntent.prototype.hasExtra = function(params, success, fail) {
return cordova.exec(function(args) {
success(args);
}, function(args) {
fail(args);
}, 'WebIntent', 'hasExtra', [params]);
};
WebIntent.prototype.saveImage = function(b64String, params, win, fail) {
return cordova.exec(win, fail, "WebIntent", "saveImage", [b64String, params]);
};
WebIntent.prototype.createAccount = function(params, win, fail) {
return cordova.exec(win, fail, "WebIntent", "createAccount", [params]);
};
WebIntent.prototype.getUri = function(success, fail) {
return cordova.exec(function(args) {
success(args);
}, function(args) {
fail(args);
}, 'WebIntent', 'getUri', []);
};
WebIntent.prototype.getExtra = function(params, success, fail) {
return cordova.exec(function(args) {
success(args);
}, function(args) {
fail(args);
}, 'WebIntent', 'getExtra', [params]);
};
WebIntent.prototype.getAccounts = function(params, success, fail) {
return cordova.exec(function(args) {
success(args);
}, function(args) {
fail(args);
}, 'WebIntent', 'getAccounts', [params]);
};
WebIntent.prototype.clearCookies = function(params, success, fail) {
return cordova.exec(function(args) {
success(args);
}, function(args) {
fail(args);
}, 'WebIntent', 'clearCookies', [params]);
};
WebIntent.prototype.onNewIntent = function(callback) {
return cordova.exec(function(args) {
callback(args);
}, function(args) {
}, 'WebIntent', 'onNewIntent', []);
};
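// Example usage from application code (a sketch, assuming the plugin is
// installed and `deviceready` has fired; exact params depend on the native side):
// window.webintent.startActivity(
//     { action: window.webintent.ACTION_VIEW, url: 'http://example.com' },
//     function() {}, function() { console.log('intent failed'); });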
WebIntent.prototype.sendBroadcast = function(params, success, fail) {
return cordova.exec(function(args) {
success(args);
}, function(args) {
fail(args);
}, 'WebIntent', 'sendBroadcast', [params]);
};<๏ฝfimโhole๏ฝ>
// backwards compatibility
window.plugins = window.plugins || {};
window.plugins.webintent = window.webintent;
});<๏ฝfimโend๏ฝ> |
cordova.addConstructor(function() {
window.webintent = new WebIntent(); |
<|file_name|>items.js<|end_file_name|><๏ฝfimโbegin๏ฝ>function Item(pl, st, pr, n) {
this._place = pl;
this._stats = st;
this._price = pr;
this._name = n;
}
ItemManager = {
//TODO: spawn items that aren't equipment
_qualities: [
//Bad
{ name: "Rotten", p: 0.3, minlevel: 0, maxlevel: 2 },
{ name: "Cracked", p: 0.7, minlevel: 0, maxlevel: 5 },
{ name: "Old", p: 0.9, minlevel: 0, maxlevel: 10 },
//Good
{ name: "Superior", p: 1.3, minlevel: 8, maxlevel: 100 },
{ name: "Master", p: 1.7, minlevel: 18, maxlevel: 100 },
{ name: "Legendary", p: 2.4, minlevel: 25, maxlevel: 100 }
],
_bases: [
<๏ฝfimโhole๏ฝ> { name: "Pendant", stat: 0, min: 0, max: 0, level: 10, price: 25 },
{ name: "Necklace", stat: 0, min: 0, max: 0, level: 25, price: 50 },
{ name: "Chain", stat: 0, min: 0, max: 0, level: 50, price: 100 } ],
[ { name: "Helmet", stat: 5, min: 1, max: 2, level: 0, price: 1 },
{ name: "Headgear", stat: 5, min: 1, max: 10, level: 10, price: 10 },
{ name: "Hat", stat: 5, min: 5, max: 20, level: 30, price: 25 },
{ name: "Skullcap", stat: 5, min: 18, max: 26, level: 60, price: 100 } ],
[ { name: "Ring", stat: 0, min: 0, max: 0, level: 0, price: 15 },
{ name: "Loop", stat: 0, min: 0, max: 0, level: 10, price: 25 },
{ name: "Band", stat: 0, min: 0, max: 0, level: 25, price: 50 } ],
[ { name: "Ring", stat: 0, min: 0, max: 0, level: 0, price: 15 },
{ name: "Loop", stat: 0, min: 0, max: 0, level: 10, price: 25 },
{ name: "Band", stat: 0, min: 0, max: 0, level: 25, price: 50 } ],
[ { name: "Pauldrons", stat: 5, min: 1, max: 2, level: 0, price: 1 },
{ name: "Spaulders", stat: 5, min: 1, max: 10, level: 10, price: 10 },
{ name: "Shoulder Pads", stat: 5, min: 5, max: 20, level: 30, price: 25 } ],
[ { name: "Cuirass", stat: 5, min: 1, max: 2, level: 0, price: 1 },
{ name: "Breastplate", stat: 5, min: 1, max: 5, level: 2, price: 5 },
{ name: "Plate", stat: 5, min: 4, max: 8, level: 5, price: 12 },
{ name: "Armour", stat: 5, min: 5, max: 12, level: 10, price: 17 },
{ name: "Vest", stat: 5, min: 5, max: 20, level: 30, price: 25 } ],
[ { name: "Gloves", stat: 5, min: 1, max: 2, level: 0, price: 1 },
{ name: "Mittens", stat: 5, min: 1, max: 10, level: 10, price: 10 },
{ name: "Bracers", stat: 5, min: 5, max: 20, level: 30, price: 25 } ],
[ { name: "Belt", stat: 5, min: 1, max: 2, level: 0, price: 1 },
{ name: "Girdle", stat: 5, min: 1, max: 10, level: 10, price: 10 },
{ name: "Cincture", stat: 5, min: 5, max: 20, level: 30, price: 25 },
{ name: "Waistband", stat: 5, min: 18, max: 26, level: 60, price: 100 } ],
[ { name: "Pants", stat: 5, min: 1, max: 2, level: 0, price: 1 },
{ name: "Leggings", stat: 5, min: 1, max: 10, level: 10, price: 10 } ],
[ { name: "Boots", stat: 5, min: 1, max: 2, level: 0, price: 1 },
{ name: "Greaves", stat: 5, min: 1, max: 10, level: 10, price: 10 },
{ name: "Shoes", stat: 5, min: 5, max: 20, level: 30, price: 25 } ],
[ { name: "Sword", stat: 6, min: 1, max: 2, level: 0, price: 1 },
{ name: "Axe", stat: 6, min: 1, max: 5, level: 2, price: 5 },
{ name: "Bastard Sword", stat: 6, min: 4, max: 8, level: 5, price: 12 },
{ name: "Dagger", stat: 6, min: 5, max: 12, level: 7, price: 14 },
{ name: "Club", stat: 6, min: 7, max: 15, level: 8, price: 17 },
{ name: "Mace", stat: 6, min: 9, max: 16, level: 10, price: 18 },
{ name: "Staff", stat: 6, min: 11, max: 18, level: 15, price: 20 },
{ name: "Falchion", stat: 6, min: 15, max: 20, level: 30, price: 35 } ],
[ { name: "Shield", stat: 5, min: 1, max: 2, level: 0, price: 1 },
{ name: "Aegis", stat: 5, min: 1, max: 10, level: 10, price: 10 },
{ name: "Bulwark", stat: 5, min: 5, max: 20, level: 30, price: 25 },
{ name: "Buckler", stat: 5, min: 18, max: 26, level: 60, price: 100 } ]
],
    //TODO: Need a fn which takes input variables and spits out a valid item (location, level, source (monster, chest), ...)
//TODO: Resistance items
_prefices: [
//DEF prefices
{ name: "Lined", stat: 5, min: 1, max: 4, level: 0, price: 2 },
{ name: "Reinforced", stat: 5, min: 3, max: 7, level: 10, price: 10 },
{ name: "Plated", stat: 5, min: 7, max: 12, level: 25, price: 200 },
//DMG prefices
{ name: "Lined", stat: 6, min: 1, max: 4, level: 0, price: 5 },
{ name: "Reinforced", stat: 6, min: 3, max: 7, level: 10, price: 20 },
{ name: "Plated", stat: 6, min: 7, max: 12, level: 25, price: 500 },
//HP prefices
{ name: "Healthy", stat: 3, min: 1, max: 10, level: 0, price: 5 },
{ name: "Enduring", stat: 3, min: 3, max: 17, level: 10, price: 15 },
{ name: "Hearty", stat: 3, min: 20, max: 40, level: 25, price: 750 },
//MP prefices
{ name: "Magic", stat: 4, min: 1, max: 20, level: 0, price: 2 },
{ name: "Wise", stat: 4, min: 5, max: 25, level: 10, price: 12 },
{ name: "Wizard's", stat: 4, min: 22, max: 50, level: 25, price: 300 }
],
_suffices: [
//STR suffices
{ name: "of the Harpy", stat: 0, min: 1, max: 4, level: 0, price: 1 },
{ name: "of the Boar", stat: 0, min: 3, max: 7, level: 10, price: 10 },
{ name: "of the Titan", stat: 0, min: 7, max: 12, level: 25, price: 200 },
//DEX suffices
{ name: "of Budding", stat: 1, min: 1, max: 4, level: 0, price: 1 },
{ name: "of Thorns", stat: 1, min: 3, max: 7, level: 2, price: 10 },
{ name: "of Spikes", stat: 1, min: 5, max: 10, level: 8, price: 200 },
//INT suffices
{ name: "of Dampness", stat: 2, min: 1, max: 4, level: 0, price: 1 },
{ name: "of Moisture", stat: 2, min: 2, max: 7, level: 8, price: 10 },
{ name: "of Juciness", stat: 2, min: 9, max: 21, level: 35, price: 200 },
//Speed suffices
{ name: "of Hitching", stat: 7, min: 1, max: 4, level: 0, price: 1 },
{ name: "of Running", stat: 7, min: 2, max: 7, level: 8, price: 10 },
{ name: "of Riding", stat: 7, min: 9, max: 14, level: 15, price: 100 }
],
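    // Rolls a random base, prefix, suffix and quality permitted by the
    // character level, sums their stat bonuses into `st`, then scales the
    // stats and price by the quality multiplier `p`.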
getItem: function(m_lvl, c_lvl) {
var st = [0,0,0,0,0,0,0,0,0,0,0,0];
var pr = 0;
var pl = Math.floor(Math.random() * 12 + 1) - 1; //Random item type.
var rand = Math.floor(Math.random() * this._bases[pl].length + 1) - 1;
        while(this._bases[pl][rand].level > c_lvl) rand = Math.floor(Math.random() * this._bases[pl].length + 1) - 1;
var b = this._bases[pl][rand].name;
st[this._bases[pl][rand].stat] += Math.floor(Math.random() * (this._bases[pl][rand].max - this._bases[pl][rand].min + 1)) + this._bases[pl][rand].min;
pr += this._bases[pl][rand].price;
rand = Math.floor(Math.random() * this._prefices.length + 1) - 1;
while(this._prefices[rand].level > c_lvl) rand = Math.floor(Math.random() * this._prefices.length + 1) - 1;
var p = this._prefices[rand].name;
st[this._prefices[rand].stat] += Math.floor(Math.random() * (this._prefices[rand].max - this._prefices[rand].min + 1)) + this._prefices[rand].min;
pr += this._prefices[rand].price;
rand = Math.floor(Math.random() * this._suffices.length + 1) - 1;
while(this._suffices[rand].level > c_lvl) rand = Math.floor(Math.random() * this._suffices.length + 1) - 1;
var s = this._suffices[rand].name;
st[this._suffices[rand].stat] += Math.floor(Math.random() * (this._suffices[rand].max - this._suffices[rand].min + 1)) + this._suffices[rand].min;
pr += this._suffices[rand].price;
//TODO: Not always have a quality
rand = Math.floor(Math.random() * this._qualities.length + 1) - 1;
while(this._qualities[rand].minlevel > c_lvl || this._qualities[rand].maxlevel <= c_lvl) rand = Math.floor(Math.random() * this._qualities.length + 1) - 1;
var q = this._qualities[rand].name;
//adjust mods with quality
for(ss in st) {
st[ss] = Math.round(st[ss] * this._qualities[rand].p);
}
pr = Math.round(pr * this._qualities[rand].p);
if(q != "") n = q+" "+p+" "+b+" "+s;
else n = p+" "+b+" "+s;
return new Item(pl, st, pr, n);
},
}<๏ฝfimโend๏ฝ> | //Amulets
[ { name: "Amulet", stat: 0, min: 0, max: 0, level: 0, price: 15 },
|
<|file_name|>multidispatch2.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license<๏ฝfimโhole๏ฝ>
use std::fmt::Debug;
use std::default::Default;
trait MyTrait<T> {
fn get(&self) -> T;
}
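// Blanket impl: every type with a Default value implements MyTrait for itself
// by returning that default.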
impl<T> MyTrait<T> for T
where T : Default
{
fn get(&self) -> T {
Default::default()
}
}
#[derive(Copy)]
struct MyType {
dummy: uint
}
impl MyTrait<uint> for MyType {
fn get(&self) -> uint { self.dummy }
}
fn test_eq<T,M>(m: M, v: T)
where T : Eq + Debug,
M : MyTrait<T>
{
assert_eq!(m.get(), v);
}
pub fn main() {
test_eq(22_usize, 0_usize);
let value = MyType { dummy: 256 + 22 };
test_eq(value, value.dummy);
}<๏ฝfimโend๏ฝ> | // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms. |
<|file_name|>new_v1_formatted.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>#![feature(core, collections)]
extern crate core;
extern crate collections;
#[cfg(test)]
mod tests {
use core::fmt::write;
use core::fmt::Result;
use core::fmt::Error;
use core::fmt::Write;
use core::fmt::ArgumentV1;
use core::fmt::Arguments;
use core::fmt::Display;
use collections::vec::Vec;
use core::fmt::rt::v1::Argument;
use core::fmt::rt::v1::Position;
use core::fmt::rt::v1::FormatSpec;
use core::fmt::rt::v1::Alignment;
use core::fmt::rt::v1::Count;
struct A {
buf: Vec<u8>
}
impl A {
fn new() -> A {
A { buf: vec!() }
}
}
impl Write for A {
fn write_str(&mut self, s: &str) -> Result {
for b in s.bytes() {
self.buf.push(b);
}
Ok(())
}
// fn write_char(&mut self, c: char) -> Result {
// let mut utf_8 = [0u8; 4];
// let bytes_written = c.encode_utf8(&mut utf_8).unwrap_or(0);
// self.write_str(unsafe { mem::transmute(&utf_8[..bytes_written]) })
// }
// fn write_fmt(&mut self, args: Arguments) -> Result {
// // This Adapter is needed to allow `self` (of type `&mut
// // Self`) to be cast to a Write (below) without
// // requiring a `Sized` bound.
// struct Adapter<'a,T: ?Sized +'a>(&'a mut T);
//
// impl<'a, T: ?Sized> Write for Adapter<'a, T>
// where T: Write
// {
// fn write_str(&mut self, s: &str) -> Result {
// self.0.write_str(s)
// }
//
// fn write_fmt(&mut self, args: Arguments) -> Result {
// self.0.write_fmt(args)
// }
// }
//
// write(&mut Adapter(self), args)
// }
}
// pub struct Arguments<'a> {
// // Format string pieces to print.
// pieces: &'a [&'a str],
//
// // Placeholder specs, or `None` if all specs are default (as in "{}{}").
// fmt: Option<&'a [rt::v1::Argument]>,
//
// // Dynamic arguments for interpolation, to be interleaved with string
// // pieces. (Every argument is preceded by a string piece.)
// args: &'a [ArgumentV1<'a>],
// }
// impl<'a> Arguments<'a> {
// /// When using the format_args!() macro, this function is used to generate the<๏ฝfimโhole๏ฝ> // pub fn new_v1(pieces: &'a [&'a str],
// args: &'a [ArgumentV1<'a>]) -> Arguments<'a> {
// Arguments {
// pieces: pieces,
// fmt: None,
// args: args
// }
// }
//
// /// This function is used to specify nonstandard formatting parameters.
// /// The `pieces` array must be at least as long as `fmt` to construct
// /// a valid Arguments structure. Also, any `Count` within `fmt` that is
// /// `CountIsParam` or `CountIsNextParam` has to point to an argument
// /// created with `argumentusize`. However, failing to do so doesn't cause
// /// unsafety, but will ignore invalid .
// #[doc(hidden)] #[inline]
// #[unstable(feature = "core", reason = "internal to format_args!")]
// pub fn new_v1_formatted(pieces: &'a [&'a str],
// args: &'a [ArgumentV1<'a>],
// fmt: &'a [rt::v1::Argument]) -> Arguments<'a> {
// Arguments {
// pieces: pieces,
// fmt: Some(fmt),
// args: args
// }
// }
// }
// #[derive(Copy, Clone)]
// pub struct Argument {
// pub position: Position,
// pub format: FormatSpec,
// }
#[test]
fn new_v1_test1() {
let mut a: A = A::new();
let pieces: &[&'static str] = &[ "" ];
let arg1: &'static str = "Hello, World!";
let argumentv1: ArgumentV1 = ArgumentV1::new(&arg1, Display::fmt);
let args: &[ArgumentV1] = &[ argumentv1 ];
let argument: Argument = Argument {
position: Position::At(0),
format: FormatSpec {
fill: ' ',
align: Alignment::Unknown,
flags: 0,
precision: Count::NextParam,
width: Count::NextParam
}
};
let fmt: &[Argument] = &[ argument ];
let args: Arguments = Arguments::new_v1_formatted(pieces, args, fmt);
let result: Result = write(&mut a, args);
match result {
Ok(()) => assert!(true),
Err(Error) => assert!(false)
}
assert_eq!(a.buf, "Hello, World!". as_bytes());
}
}<๏ฝfimโend๏ฝ> | // /// Arguments structure.
// #[doc(hidden)] #[inline]
// #[unstable(feature = "core", reason = "internal to format_args!")] |
<|file_name|>logistic_reg.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>//! Logistic Regression module
//!
//! Contains implemention of logistic regression using<๏ฝfimโhole๏ฝ>//! The regressor will automatically add the intercept term
//! so you do not need to format the input matrices yourself.
//!
//! # Usage
//!
//! ```
//! use rusty_machine::learning::logistic_reg::LogisticRegressor;
//! use rusty_machine::learning::SupModel;
//! use rusty_machine::linalg::Matrix;
//! use rusty_machine::linalg::Vector;
//!
//! let inputs = Matrix::new(4,1,vec![1.0,3.0,5.0,7.0]);
//! let targets = Vector::new(vec![0.,0.,1.,1.]);
//!
//! let mut log_mod = LogisticRegressor::default();
//!
//! // Train the model
//! log_mod.train(&inputs, &targets);
//!
//! // Now we'll predict a new point
//! let new_point = Matrix::new(1,1,vec![10.]);
//! let output = log_mod.predict(&new_point);
//!
//! // Hopefully we classified our new point correctly!
//! assert!(output[0] > 0.5, "Our classifier isn't very good!");
//! ```
//!
//! We could have been more specific about the learning of the model
//! by using the `new` constructor instead. This allows us to provide
//! a `GradientDesc` object with custom parameters.
use learning::SupModel;
use linalg::Matrix;
use linalg::Vector;
use learning::toolkit::activ_fn::{ActivationFunc, Sigmoid};
use learning::toolkit::cost_fn::{CostFunc, CrossEntropyError};
use learning::optim::grad_desc::GradientDesc;
use learning::optim::{OptimAlgorithm, Optimizable};
/// Logistic Regression Model.
///
/// Contains option for optimized parameter.
#[derive(Debug)]
pub struct LogisticRegressor<A>
where A: OptimAlgorithm<BaseLogisticRegressor>
{
base: BaseLogisticRegressor,
alg: A,
}
/// Constructs a default Logistic Regression model
/// using standard gradient descent.
impl Default for LogisticRegressor<GradientDesc> {
fn default() -> LogisticRegressor<GradientDesc> {
LogisticRegressor {
base: BaseLogisticRegressor::new(),
alg: GradientDesc::default(),
}
}
}
impl<A: OptimAlgorithm<BaseLogisticRegressor>> LogisticRegressor<A> {
/// Constructs untrained logistic regression model.
///
/// # Examples
///
/// ```
/// use rusty_machine::learning::logistic_reg::LogisticRegressor;
/// use rusty_machine::learning::optim::grad_desc::GradientDesc;
///
/// let gd = GradientDesc::default();
/// let mut logistic_mod = LogisticRegressor::new(gd);
/// ```
pub fn new(alg: A) -> LogisticRegressor<A> {
LogisticRegressor {
base: BaseLogisticRegressor::new(),
alg: alg,
}
}
/// Get the parameters from the model.
///
/// Returns an option that is None if the model has not been trained.
pub fn parameters(&self) -> Option<&Vector<f64>> {
self.base.parameters()
}
}
impl<A> SupModel<Matrix<f64>, Vector<f64>> for LogisticRegressor<A>
where A: OptimAlgorithm<BaseLogisticRegressor>
{
/// Train the logistic regression model.
///
/// Takes training data and output values as input.
///
/// # Examples
///
/// ```
/// use rusty_machine::learning::logistic_reg::LogisticRegressor;
/// use rusty_machine::linalg::Matrix;
/// use rusty_machine::linalg::Vector;
/// use rusty_machine::learning::SupModel;
///
/// let mut logistic_mod = LogisticRegressor::default();
/// let inputs = Matrix::new(3,2, vec![1.0, 2.0, 1.0, 3.0, 1.0, 4.0]);
/// let targets = Vector::new(vec![5.0, 6.0, 7.0]);
///
/// logistic_mod.train(&inputs, &targets);
/// ```
fn train(&mut self, inputs: &Matrix<f64>, targets: &Vector<f64>) {
let ones = Matrix::<f64>::ones(inputs.rows(), 1);
let full_inputs = ones.hcat(inputs);
let initial_params = vec![0.5; full_inputs.cols()];
let optimal_w = self.alg.optimize(&self.base, &initial_params[..], &full_inputs, targets);
self.base.set_parameters(Vector::new(optimal_w));
}
/// Predict output value from input data.
///
/// Model must be trained before prediction can be made.
fn predict(&self, inputs: &Matrix<f64>) -> Vector<f64> {
if let Some(v) = self.base.parameters() {
let ones = Matrix::<f64>::ones(inputs.rows(), 1);
let full_inputs = ones.hcat(inputs);
(full_inputs * v).apply(&Sigmoid::func)
} else {
panic!("Model has not been trained.");
}
}
}
/// The Base Logistic Regression model.
///
/// This struct cannot be instantianated and is used internally only.
#[derive(Debug)]
pub struct BaseLogisticRegressor {
parameters: Option<Vector<f64>>,
}
impl BaseLogisticRegressor {
/// Construct a new BaseLogisticRegressor
/// with parameters set to None.
fn new() -> BaseLogisticRegressor {
BaseLogisticRegressor { parameters: None }
}
}
impl BaseLogisticRegressor {
/// Returns a reference to the parameters.
fn parameters(&self) -> Option<&Vector<f64>> {
self.parameters.as_ref()
}
/// Set the parameters to `Some` vector.
fn set_parameters(&mut self, params: Vector<f64>) {
self.parameters = Some(params);
}
}
/// Computing the gradient of the underlying Logistic
/// Regression model.
///
/// The gradient is given by
///
/// X<sup>T</sup>(h(Xb) - y) / m
///
/// where `h` is the sigmoid function and `b` the underlying model parameters.
impl Optimizable for BaseLogisticRegressor {
type Inputs = Matrix<f64>;
type Targets = Vector<f64>;
fn compute_grad(&self,
params: &[f64],
inputs: &Matrix<f64>,
targets: &Vector<f64>)
-> (f64, Vec<f64>) {
let beta_vec = Vector::new(params.to_vec());
let outputs = (inputs * beta_vec).apply(&Sigmoid::func);
let cost = CrossEntropyError::cost(&outputs, targets);
let grad = (inputs.transpose() * (outputs - targets)) / (inputs.rows() as f64);
(cost, grad.into_vec())
}
}<๏ฝfimโend๏ฝ> | //! gradient descent optimization.
//! |
<|file_name|>AnnotationSpecOrBuilder.java<|end_file_name|><๏ฝfimโbegin๏ฝ>/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/automl/v1/annotation_spec.proto
package com.google.cloud.automl.v1;
public interface AnnotationSpecOrBuilder
extends
// @@protoc_insertion_point(interface_extends:google.cloud.automl.v1.AnnotationSpec)
com.google.protobuf.MessageOrBuilder {
/**
*
*
* <pre>
* Output only. Resource name of the annotation spec.
* Form:
* 'projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/annotationSpecs/{annotation_spec_id}'
* </pre>
*
* <code>string name = 1;</code>
*
* @return The name.
*/
java.lang.String getName();
/**
*
*
* <pre>
* Output only. Resource name of the annotation spec.
* Form:
* 'projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/annotationSpecs/{annotation_spec_id}'<๏ฝfimโhole๏ฝ> * </pre>
*
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
com.google.protobuf.ByteString getNameBytes();
/**
*
*
* <pre>
* Required. The name of the annotation spec to show in the interface. The name can be
* up to 32 characters long and must match the regexp `[a-zA-Z0-9_]+`.
* </pre>
*
* <code>string display_name = 2;</code>
*
* @return The displayName.
*/
java.lang.String getDisplayName();
/**
*
*
* <pre>
* Required. The name of the annotation spec to show in the interface. The name can be
* up to 32 characters long and must match the regexp `[a-zA-Z0-9_]+`.
* </pre>
*
* <code>string display_name = 2;</code>
*
* @return The bytes for displayName.
*/
com.google.protobuf.ByteString getDisplayNameBytes();
/**
*
*
* <pre>
* Output only. The number of examples in the parent dataset
* labeled by the annotation spec.
* </pre>
*
* <code>int32 example_count = 9;</code>
*
* @return The exampleCount.
*/
int getExampleCount();
}<๏ฝfimโend๏ฝ> | |
<|file_name|>rtpsources.cpp<|end_file_name|><๏ฝfimโbegin๏ฝ>/*
This file is a part of JRTPLIB
Copyright (c) 1999-2004 Jori Liesenborgs
Contact: [email protected]
This library was developed at the "Expertisecentrum Digitale Media"
(http://www.edm.luc.ac.be), a research center of the "Limburgs Universitair
Centrum" (http://www.luc.ac.be). The library is based upon work done for
my thesis at the School for Knowledge Technology (Belgium/The Netherlands).
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
*/
#include "rtpsources.h"
#include "rtperrors.h"
#include "rtprawpacket.h"
#include "rtpinternalsourcedata.h"
#include "rtptimeutilities.h"
#include "rtpdefines.h"
#include "rtcpcompoundpacket.h"
#include "rtcppacket.h"
#include "rtcpapppacket.h"
#include "rtcpbyepacket.h"
#include "rtcpsdespacket.h"
#include "rtcpsrpacket.h"
#include "rtcprrpacket.h"
#include "rtptransmitter.h"
#ifdef RTPDEBUG
#include <iostream>
#endif // RTPDEBUG
#include "rtpdebug.h"
#ifndef RTP_SUPPORT_INLINETEMPLATEPARAM
int RTPSources_GetHashIndex(const u_int32_t &ssrc) { return ssrc%RTPSOURCES_HASHSIZE; }
#endif // !RTP_SUPPORT_INLINETEMPLATEPARAM
RTPSources::RTPSources()
{
totalcount = 0;
sendercount = 0;
activecount = 0;
owndata = 0;
}
RTPSources::~RTPSources()
{
Clear();
}
void RTPSources::Clear()
{
ClearSourceList();
}
void RTPSources::ClearSourceList()
{
sourcelist.GotoFirstElement();
while (sourcelist.HasCurrentElement())
{
RTPInternalSourceData *sourcedata;
sourcedata = sourcelist.GetCurrentElement();
delete sourcedata;
sourcelist.GotoNextElement();
}
sourcelist.Clear();
owndata = 0;
}
int RTPSources::CreateOwnSSRC(u_int32_t ssrc)
{
if (owndata != 0)
return ERR_RTP_SOURCES_ALREADYHAVEOWNSSRC;
if (GotEntry(ssrc))
return ERR_RTP_SOURCES_SSRCEXISTS;
int status;
bool created;
status = ObtainSourceDataInstance(ssrc,&owndata,&created);
if (status < 0)
{
owndata = 0; // just to make sure
return status;
}
owndata->SetOwnSSRC();
owndata->SetRTPDataAddress(0);
owndata->SetRTCPDataAddress(0);
// we've created a validated ssrc, so we should increase activecount
activecount++;
OnNewSource(owndata);
return 0;
}
int RTPSources::DeleteOwnSSRC()
{
if (owndata == 0)
return ERR_RTP_SOURCES_DONTHAVEOWNSSRC;
u_int32_t ssrc = owndata->GetSSRC();
sourcelist.GotoElement(ssrc);
sourcelist.DeleteCurrentElement();
totalcount--;
if (owndata->IsSender())
sendercount--;
if (owndata->IsActive())
activecount--;
OnRemoveSource(owndata);
delete owndata;
owndata = 0;
return 0;
}
void RTPSources::SentRTPPacket()
{
if (owndata == 0)
return;
bool prevsender = owndata->IsSender();
owndata->SentRTPPacket();
if (!prevsender && owndata->IsSender())
sendercount++;
}
int RTPSources::ProcessRawPacket(RTPRawPacket *rawpack,RTPTransmitter *rtptrans,bool acceptownpackets)
{
RTPTransmitter *transmitters[1];
int num;
transmitters[0] = rtptrans;
if (rtptrans == 0)
num = 0;
else
num = 1;
return ProcessRawPacket(rawpack,transmitters,num,acceptownpackets);
}
int RTPSources::ProcessRawPacket(RTPRawPacket *rawpack,RTPTransmitter *rtptrans[],int numtrans,bool acceptownpackets)
{
int status;
if (rawpack->IsRTP()) // RTP packet
{
RTPPacket *rtppack;
// First, we'll see if the packet can be parsed
rtppack = new RTPPacket(*rawpack);
if (rtppack == 0)
return ERR_RTP_OUTOFMEM;
if ((status = rtppack->GetCreationError()) < 0)
{
if (status == ERR_RTP_PACKET_INVALIDPACKET)
{
delete rtppack;
rtppack = 0;
}
else
{
delete rtppack;
return status;
}
}
// Check if the packet was valid
if (rtppack != 0)
{
bool stored = false;
bool ownpacket = false;
int i;
const RTPAddress *senderaddress = rawpack->GetSenderAddress();
for (i = 0 ; !ownpacket && i < numtrans ; i++)
{
if (rtptrans[i]->ComesFromThisTransmitter(senderaddress))
ownpacket = true;
}
// Check if the packet is our own.
if (ownpacket)
{
// Now it depends on the user's preference
// what to do with this packet:
if (acceptownpackets)
{
// sender addres for own packets has to be NULL!
if ((status = ProcessRTPPacket(rtppack,rawpack->GetReceiveTime(),0,&stored)) < 0)
{
if (!stored)
delete rtppack;
return status;
}
}
}
else
{
if ((status = ProcessRTPPacket(rtppack,rawpack->GetReceiveTime(),senderaddress,&stored)) < 0)
{
if (!stored)
delete rtppack;
return status;
}
}
if (!stored)
delete rtppack;
}
}
else // RTCP packet
{
RTCPCompoundPacket rtcpcomppack(*rawpack);
bool valid = false;
if ((status = rtcpcomppack.GetCreationError()) < 0)
{
if (status != ERR_RTP_RTCPCOMPOUND_INVALIDPACKET)
return status;
}
else
valid = true;
if (valid)
{
bool ownpacket = false;
int i;
const RTPAddress *senderaddress = rawpack->GetSenderAddress();
for (i = 0 ; !ownpacket && i < numtrans ; i++)
{
if (rtptrans[i]->ComesFromThisTransmitter(senderaddress))
ownpacket = true;
}
// First check if it's a packet of this session.
if (ownpacket)
{
if (acceptownpackets)
{
// sender address for own packets has to be NULL
status = ProcessRTCPCompoundPacket(&rtcpcomppack,rawpack->GetReceiveTime(),0);
if (status < 0)
return status;
}
}
else // not our own packet
{
status = ProcessRTCPCompoundPacket(&rtcpcomppack,rawpack->GetReceiveTime(),rawpack->GetSenderAddress());
if (status < 0)
return status;
}
}
}
return 0;
}
int RTPSources::ProcessRTPPacket(RTPPacket *rtppack,const RTPTime &receivetime,const RTPAddress *senderaddress,bool *stored)
{
u_int32_t ssrc;
RTPInternalSourceData *srcdat;
int status;
bool created;
OnRTPPacket(rtppack,receivetime,senderaddress);
*stored = false;
ssrc = rtppack->GetSSRC();
if ((status = ObtainSourceDataInstance(ssrc,&srcdat,&created)) < 0)
return status;
if (created)
{
if ((status = srcdat->SetRTPDataAddress(senderaddress)) < 0)
return status;
}
else // got a previously existing source
{
if (CheckCollision(srcdat,senderaddress,true))
return 0; // ignore packet on collision
}
bool prevsender = srcdat->IsSender();
bool prevactive = srcdat->IsActive();
// The packet comes from a valid source, we can process it further now
// The following function should delete rtppack itself if something goes
// wrong
if ((status = srcdat->ProcessRTPPacket(rtppack,receivetime,stored)) < 0)
return status;
if (!prevsender && srcdat->IsSender())
sendercount++;
if (!prevactive && srcdat->IsActive())
activecount++;
if (created)
OnNewSource(srcdat);
if (srcdat->IsValidated()) // process the CSRCs
{
RTPInternalSourceData *csrcdat;
bool createdcsrc;
int num = rtppack->GetCSRCCount();
int i;
for (i = 0 ; i < num ; i++)
{
if ((status = ObtainSourceDataInstance(rtppack->GetCSRC(i),&csrcdat,&createdcsrc)) < 0)
return status;
if (createdcsrc)
{
csrcdat->SetCSRC();
if (csrcdat->IsActive())
activecount++;
OnNewSource(csrcdat);
}
else // already found an entry, possibly because of RTCP data
{
if (!CheckCollision(csrcdat,senderaddress,true))
csrcdat->SetCSRC();
}
}
}
return 0;
}
int RTPSources::ProcessRTCPCompoundPacket(RTCPCompoundPacket *rtcpcomppack,const RTPTime &receivetime,const RTPAddress *senderaddress)
{
RTCPPacket *rtcppack;
int status;
bool gotownssrc = ((owndata == 0)?false:true);
u_int32_t ownssrc = ((owndata != 0)?owndata->GetSSRC():0);
OnRTCPCompoundPacket(rtcpcomppack,receivetime,senderaddress);
rtcpcomppack->GotoFirstPacket();
while ((rtcppack = rtcpcomppack->GetNextPacket()) != 0)
{
if (rtcppack->IsKnownFormat())
{
switch (rtcppack->GetPacketType())
{
case RTCPPacket::SR:
{
RTCPSRPacket *p = (RTCPSRPacket *)rtcppack;
u_int32_t senderssrc = p->GetSenderSSRC();
status = ProcessRTCPSenderInfo(senderssrc,p->GetNTPTimestamp(),p->GetRTPTimestamp(),
p->GetSenderPacketCount(),p->GetSenderOctetCount(),
receivetime,senderaddress);
if (status < 0)
return status;
bool gotinfo = false;
if (gotownssrc)
{
int i;
int num = p->GetReceptionReportCount();
for (i = 0 ; i < num ; i++)
{
if (p->GetSSRC(i) == ownssrc) // data is meant for us
{
gotinfo = true;
status = ProcessRTCPReportBlock(senderssrc,p->GetFractionLost(i),p->GetLostPacketCount(i),
p->GetExtendedHighestSequenceNumber(i),p->GetJitter(i),p->GetLSR(i),
p->GetDLSR(i),receivetime,senderaddress);
if (status < 0)
return status;
}<๏ฝfimโhole๏ฝ> }
if (!gotinfo)
{
status = UpdateReceiveTime(senderssrc,receivetime,senderaddress);
if (status < 0)
return status;
}
}
break;
case RTCPPacket::RR:
{
RTCPRRPacket *p = (RTCPRRPacket *)rtcppack;
u_int32_t senderssrc = p->GetSenderSSRC();
bool gotinfo = false;
if (gotownssrc)
{
int i;
int num = p->GetReceptionReportCount();
for (i = 0 ; i < num ; i++)
{
if (p->GetSSRC(i) == ownssrc)
{
gotinfo = true;
status = ProcessRTCPReportBlock(senderssrc,p->GetFractionLost(i),p->GetLostPacketCount(i),
p->GetExtendedHighestSequenceNumber(i),p->GetJitter(i),p->GetLSR(i),
p->GetDLSR(i),receivetime,senderaddress);
if (status < 0)
return status;
}
}
}
if (!gotinfo)
{
status = UpdateReceiveTime(senderssrc,receivetime,senderaddress);
if (status < 0)
return status;
}
}
break;
case RTCPPacket::SDES:
{
RTCPSDESPacket *p = (RTCPSDESPacket *)rtcppack;
if (p->GotoFirstChunk())
{
do
{
u_int32_t sdesssrc = p->GetChunkSSRC();
bool updated = false;
if (p->GotoFirstItem())
{
do
{
RTCPSDESPacket::ItemType t;
if ((t = p->GetItemType()) != RTCPSDESPacket::PRIV)
{
updated = true;
status = ProcessSDESNormalItem(sdesssrc,t,p->GetItemLength(),p->GetItemData(),receivetime,senderaddress);
if (status < 0)
return status;
}
#ifdef RTP_SUPPORT_SDESPRIV
else
{
updated = true;
status = ProcessSDESPrivateItem(sdesssrc,p->GetPRIVPrefixLength(),p->GetPRIVPrefixData(),p->GetPRIVValueLength(),
p->GetPRIVValueData(),receivetime,senderaddress);
if (status < 0)
return status;
}
#endif // RTP_SUPPORT_SDESPRIV
} while (p->GotoNextItem());
}
if (!updated)
{
status = UpdateReceiveTime(sdesssrc,receivetime,senderaddress);
if (status < 0)
return status;
}
} while (p->GotoNextChunk());
}
}
break;
case RTCPPacket::BYE:
{
RTCPBYEPacket *p = (RTCPBYEPacket *)rtcppack;
int i;
int num = p->GetSSRCCount();
for (i = 0 ; i < num ; i++)
{
u_int32_t byessrc = p->GetSSRC(i);
status = ProcessBYE(byessrc,p->GetReasonLength(),p->GetReasonData(),receivetime,senderaddress);
if (status < 0)
return status;
}
}
break;
case RTCPPacket::APP:
{
RTCPAPPPacket *p = (RTCPAPPPacket *)rtcppack;
OnAPPPacket(p,receivetime,senderaddress);
}
break;
case RTCPPacket::Unknown:
default:
{
OnUnknownPacketType(rtcppack,receivetime,senderaddress);
}
break;
}
}
else
{
OnUnknownPacketFormat(rtcppack,receivetime,senderaddress);
}
}
return 0;
}
bool RTPSources::GotoFirstSource()
{
sourcelist.GotoFirstElement();
if (sourcelist.HasCurrentElement())
return true;
return false;
}
bool RTPSources::GotoNextSource()
{
sourcelist.GotoNextElement();
if (sourcelist.HasCurrentElement())
return true;
return false;
}
bool RTPSources::GotoPreviousSource()
{
sourcelist.GotoPreviousElement();
if (sourcelist.HasCurrentElement())
return true;
return false;
}
bool RTPSources::GotoFirstSourceWithData()
{
bool found = false;
sourcelist.GotoFirstElement();
while (!found && sourcelist.HasCurrentElement())
{
RTPInternalSourceData *srcdat;
srcdat = sourcelist.GetCurrentElement();
if (srcdat->HasData())
found = true;
else
sourcelist.GotoNextElement();
}
return found;
}
bool RTPSources::GotoNextSourceWithData()
{
bool found = false;
sourcelist.GotoNextElement();
while (!found && sourcelist.HasCurrentElement())
{
RTPInternalSourceData *srcdat;
srcdat = sourcelist.GetCurrentElement();
if (srcdat->HasData())
found = true;
else
sourcelist.GotoNextElement();
}
return found;
}
bool RTPSources::GotoPreviousSourceWithData()
{
bool found = false;
sourcelist.GotoPreviousElement();
while (!found && sourcelist.HasCurrentElement())
{
RTPInternalSourceData *srcdat;
srcdat = sourcelist.GetCurrentElement();
if (srcdat->HasData())
found = true;
else
sourcelist.GotoNextElement();
}
return found;
}
RTPSourceData *RTPSources::GetCurrentSourceInfo()
{
if (!sourcelist.HasCurrentElement())
return 0;
return sourcelist.GetCurrentElement();
}
RTPSourceData *RTPSources::GetSourceInfo(u_int32_t ssrc)
{
if (sourcelist.GotoElement(ssrc) < 0)
return 0;
if (!sourcelist.HasCurrentElement())
return 0;
return sourcelist.GetCurrentElement();
}
bool RTPSources::GotEntry(u_int32_t ssrc)
{
return sourcelist.HasElement(ssrc);
}
RTPPacket *RTPSources::GetNextPacket()
{
if (!sourcelist.HasCurrentElement())
return 0;
RTPInternalSourceData *srcdat = sourcelist.GetCurrentElement();
RTPPacket *pack = srcdat->GetNextPacket();
return pack;
}
int RTPSources::ProcessRTCPSenderInfo(u_int32_t ssrc,const RTPNTPTime &ntptime,u_int32_t rtptime,
u_int32_t packetcount,u_int32_t octetcount,const RTPTime &receivetime,
const RTPAddress *senderaddress)
{
RTPInternalSourceData *srcdat;
bool created;
int status;
status = GetRTCPSourceData(ssrc,senderaddress,&srcdat,&created);
if (status < 0)
return status;
if (srcdat == 0)
return 0;
srcdat->ProcessSenderInfo(ntptime,rtptime,packetcount,octetcount,receivetime);
// Call the callback
if (created)
OnNewSource(srcdat);
return 0;
}
int RTPSources::ProcessRTCPReportBlock(u_int32_t ssrc,u_int8_t fractionlost,int32_t lostpackets,
u_int32_t exthighseqnr,u_int32_t jitter,u_int32_t lsr,
u_int32_t dlsr,const RTPTime &receivetime,const RTPAddress *senderaddress)
{
RTPInternalSourceData *srcdat;
bool created;
int status;
status = GetRTCPSourceData(ssrc,senderaddress,&srcdat,&created);
if (status < 0)
return status;
if (srcdat == 0)
return 0;
srcdat->ProcessReportBlock(fractionlost,lostpackets,exthighseqnr,jitter,lsr,dlsr,receivetime);
// Call the callback
if (created)
OnNewSource(srcdat);
return 0;
}
int RTPSources::ProcessSDESNormalItem(u_int32_t ssrc,RTCPSDESPacket::ItemType t,size_t itemlength,
const void *itemdata,const RTPTime &receivetime,const RTPAddress *senderaddress)
{
RTPInternalSourceData *srcdat;
bool created,cnamecollis;
int status;
u_int8_t id;
bool prevactive;
switch(t)
{
case RTCPSDESPacket::CNAME:
id = RTCP_SDES_ID_CNAME;
break;
case RTCPSDESPacket::NAME:
id = RTCP_SDES_ID_NAME;
break;
case RTCPSDESPacket::EMAIL:
id = RTCP_SDES_ID_EMAIL;
break;
case RTCPSDESPacket::PHONE:
id = RTCP_SDES_ID_PHONE;
break;
case RTCPSDESPacket::LOC:
id = RTCP_SDES_ID_LOCATION;
break;
case RTCPSDESPacket::TOOL:
id = RTCP_SDES_ID_TOOL;
break;
case RTCPSDESPacket::NOTE:
id = RTCP_SDES_ID_NOTE;
break;
default:
return ERR_RTP_SOURCES_ILLEGALSDESTYPE;
}
status = GetRTCPSourceData(ssrc,senderaddress,&srcdat,&created);
if (status < 0)
return status;
if (srcdat == 0)
return 0;
prevactive = srcdat->IsActive();
status = srcdat->ProcessSDESItem(id,(const u_int8_t *)itemdata,itemlength,receivetime,&cnamecollis);
if (!prevactive && srcdat->IsActive())
activecount++;
// Call the callback
if (created)
OnNewSource(srcdat);
if (cnamecollis)
OnCNAMECollision(srcdat,senderaddress,(const u_int8_t *)itemdata,itemlength);
return status;
}
#ifdef RTP_SUPPORT_SDESPRIV
int RTPSources::ProcessSDESPrivateItem(u_int32_t ssrc,size_t prefixlen,const void *prefixdata,
size_t valuelen,const void *valuedata,const RTPTime &receivetime,
const RTPAddress *senderaddress)
{
RTPInternalSourceData *srcdat;
bool created;
int status;
status = GetRTCPSourceData(ssrc,senderaddress,&srcdat,&created);
if (status < 0)
return status;
if (srcdat == 0)
return 0;
status = srcdat->ProcessPrivateSDESItem((const u_int8_t *)prefixdata,prefixlen,(const u_int8_t *)valuedata,valuelen,receivetime);
// Call the callback
if (created)
OnNewSource(srcdat);
return status;
}
#endif //RTP_SUPPORT_SDESPRIV
int RTPSources::ProcessBYE(u_int32_t ssrc,size_t reasonlength,const void *reasondata,
const RTPTime &receivetime,const RTPAddress *senderaddress)
{
RTPInternalSourceData *srcdat;
bool created;
int status;
bool prevactive;
status = GetRTCPSourceData(ssrc,senderaddress,&srcdat,&created);
if (status < 0)
return status;
if (srcdat == 0)
return 0;
// we'll ignore BYE packets for our own ssrc
if (srcdat == owndata)
return 0;
prevactive = srcdat->IsActive();
srcdat->ProcessBYEPacket((const u_int8_t *)reasondata,reasonlength,receivetime);
if (prevactive && !srcdat->IsActive())
activecount--;
// Call the callback
if (created)
OnNewSource(srcdat);
OnBYEPacket(srcdat);
return 0;
}
int RTPSources::ObtainSourceDataInstance(u_int32_t ssrc,RTPInternalSourceData **srcdat,bool *created)
{
RTPInternalSourceData *srcdat2;
int status;
if (sourcelist.GotoElement(ssrc) < 0) // No entry for this source
{
srcdat2 = new RTPInternalSourceData(ssrc);
if (srcdat2 == 0)
return ERR_RTP_OUTOFMEM;
if ((status = sourcelist.AddElement(ssrc,srcdat2)) < 0)
{
delete srcdat2;
return status;
}
*srcdat = srcdat2;
*created = true;
totalcount++;
}
else
{
*srcdat = sourcelist.GetCurrentElement();
*created = false;
}
return 0;
}
int RTPSources::GetRTCPSourceData(u_int32_t ssrc,const RTPAddress *senderaddress,
RTPInternalSourceData **srcdat2,bool *newsource)
{
int status;
bool created;
RTPInternalSourceData *srcdat;
*srcdat2 = 0;
if ((status = ObtainSourceDataInstance(ssrc,&srcdat,&created)) < 0)
return status;
if (created)
{
if ((status = srcdat->SetRTCPDataAddress(senderaddress)) < 0)
return status;
}
else // got a previously existing source
{
if (CheckCollision(srcdat,senderaddress,false))
return 0; // ignore packet on collision
}
*srcdat2 = srcdat;
*newsource = created;
return 0;
}
int RTPSources::UpdateReceiveTime(u_int32_t ssrc,const RTPTime &receivetime,const RTPAddress *senderaddress)
{
RTPInternalSourceData *srcdat;
bool created;
int status;
status = GetRTCPSourceData(ssrc,senderaddress,&srcdat,&created);
if (status < 0)
return status;
if (srcdat == 0)
return 0;
// We got valid SSRC info
srcdat->UpdateMessageTime(receivetime);
// Call the callback
if (created)
OnNewSource(srcdat);
return 0;
}
void RTPSources::Timeout(const RTPTime &curtime,const RTPTime &timeoutdelay)
{
int newtotalcount = 0;
int newsendercount = 0;
int newactivecount = 0;
RTPTime checktime = curtime;
checktime -= timeoutdelay;
sourcelist.GotoFirstElement();
while (sourcelist.HasCurrentElement())
{
RTPInternalSourceData *srcdat = sourcelist.GetCurrentElement();
RTPTime lastmsgtime = srcdat->INF_GetLastMessageTime();
// we don't want to time out ourselves
if ((srcdat != owndata) && (lastmsgtime < checktime)) // timeout
{
totalcount--;
if (srcdat->IsSender())
sendercount--;
if (srcdat->IsActive())
activecount--;
sourcelist.DeleteCurrentElement();
OnTimeout(srcdat);
OnRemoveSource(srcdat);
delete srcdat;
}
else
{
newtotalcount++;
if (srcdat->IsSender())
newsendercount++;
if (srcdat->IsActive())
newactivecount++;
sourcelist.GotoNextElement();
}
}
#ifdef RTPDEBUG
if (newtotalcount != totalcount)
{
std::cout << "New total count " << newtotalcount << " doesnt match old total count " << totalcount << std::endl;
SafeCountTotal();
}
if (newsendercount != sendercount)
{
std::cout << "New sender count " << newsendercount << " doesnt match old sender count " << sendercount << std::endl;
SafeCountSenders();
}
if (newactivecount != activecount)
{
std::cout << "New active count " << newactivecount << " doesnt match old active count " << activecount << std::endl;
SafeCountActive();
}
#endif // RTPDEBUG
totalcount = newtotalcount; // just to play it safe
sendercount = newsendercount;
activecount = newactivecount;
}
void RTPSources::SenderTimeout(const RTPTime &curtime,const RTPTime &timeoutdelay)
{
int newtotalcount = 0;
int newsendercount = 0;
int newactivecount = 0;
RTPTime checktime = curtime;
checktime -= timeoutdelay;
sourcelist.GotoFirstElement();
while (sourcelist.HasCurrentElement())
{
RTPInternalSourceData *srcdat = sourcelist.GetCurrentElement();
newtotalcount++;
if (srcdat->IsActive())
newactivecount++;
if (srcdat->IsSender())
{
RTPTime lastrtppacktime = srcdat->INF_GetLastRTPPacketTime();
if (lastrtppacktime < checktime) // timeout
{
srcdat->ClearSenderFlag();
sendercount--;
}
else
newsendercount++;
}
sourcelist.GotoNextElement();
}
#ifdef RTPDEBUG
if (newtotalcount != totalcount)
{
std::cout << "New total count " << newtotalcount << " doesnt match old total count " << totalcount << std::endl;
SafeCountTotal();
}
if (newsendercount != sendercount)
{
std::cout << "New sender count " << newsendercount << " doesnt match old sender count " << sendercount << std::endl;
SafeCountSenders();
}
if (newactivecount != activecount)
{
std::cout << "New active count " << newactivecount << " doesnt match old active count " << activecount << std::endl;
SafeCountActive();
}
#endif // RTPDEBUG
totalcount = newtotalcount; // just to play it safe
sendercount = newsendercount;
activecount = newactivecount;
}
void RTPSources::BYETimeout(const RTPTime &curtime,const RTPTime &timeoutdelay)
{
int newtotalcount = 0;
int newsendercount = 0;
int newactivecount = 0;
RTPTime checktime = curtime;
checktime -= timeoutdelay;
sourcelist.GotoFirstElement();
while (sourcelist.HasCurrentElement())
{
RTPInternalSourceData *srcdat = sourcelist.GetCurrentElement();
if (srcdat->ReceivedBYE())
{
RTPTime byetime = srcdat->GetBYETime();
if ((srcdat != owndata) && (checktime > byetime))
{
totalcount--;
if (srcdat->IsSender())
sendercount--;
if (srcdat->IsActive())
activecount--;
sourcelist.DeleteCurrentElement();
OnBYETimeout(srcdat);
OnRemoveSource(srcdat);
delete srcdat;
}
else
{
newtotalcount++;
if (srcdat->IsSender())
newsendercount++;
if (srcdat->IsActive())
newactivecount++;
sourcelist.GotoNextElement();
}
}
else
{
newtotalcount++;
if (srcdat->IsSender())
newsendercount++;
if (srcdat->IsActive())
newactivecount++;
sourcelist.GotoNextElement();
}
}
#ifdef RTPDEBUG
if (newtotalcount != totalcount)
{
std::cout << "New total count " << newtotalcount << " doesnt match old total count " << totalcount << std::endl;
SafeCountTotal();
}
if (newsendercount != sendercount)
{
std::cout << "New sender count " << newsendercount << " doesnt match old sender count " << sendercount << std::endl;
SafeCountSenders();
}
if (newactivecount != activecount)
{
std::cout << "New active count " << newactivecount << " doesnt match old active count " << activecount << std::endl;
SafeCountActive();
}
#endif // RTPDEBUG
totalcount = newtotalcount; // just to play it safe
sendercount = newsendercount;
activecount = newactivecount;
}
void RTPSources::NoteTimeout(const RTPTime &curtime,const RTPTime &timeoutdelay)
{
int newtotalcount = 0;
int newsendercount = 0;
int newactivecount = 0;
RTPTime checktime = curtime;
checktime -= timeoutdelay;
sourcelist.GotoFirstElement();
while (sourcelist.HasCurrentElement())
{
RTPInternalSourceData *srcdat = sourcelist.GetCurrentElement();
u_int8_t *note;
size_t notelen;
note = srcdat->SDES_GetNote(¬elen);
if (notelen != 0) // Note has been set
{
RTPTime notetime = srcdat->INF_GetLastSDESNoteTime();
if (checktime > notetime)
{
srcdat->ClearNote();
OnNoteTimeout(srcdat);
}
}
newtotalcount++;
if (srcdat->IsSender())
newsendercount++;
if (srcdat->IsActive())
newactivecount++;
sourcelist.GotoNextElement();
}
#ifdef RTPDEBUG
if (newtotalcount != totalcount)
{
std::cout << "New total count " << newtotalcount << " doesnt match old total count " << totalcount << std::endl;
SafeCountTotal();
}
if (newsendercount != sendercount)
{
std::cout << "New sender count " << newsendercount << " doesnt match old sender count " << sendercount << std::endl;
SafeCountSenders();
}
if (newactivecount != activecount)
{
std::cout << "New active count " << newactivecount << " doesnt match old active count " << activecount << std::endl;
SafeCountActive();
}
#endif // RTPDEBUG
totalcount = newtotalcount; // just to play it safe
sendercount = newsendercount;
activecount = newactivecount;
}
void RTPSources::MultipleTimeouts(const RTPTime &curtime,const RTPTime &sendertimeout,const RTPTime &byetimeout,const RTPTime &generaltimeout,const RTPTime ¬etimeout)
{
int newtotalcount = 0;
int newsendercount = 0;
int newactivecount = 0;
RTPTime senderchecktime = curtime;
RTPTime byechecktime = curtime;
RTPTime generaltchecktime = curtime;
RTPTime notechecktime = curtime;
senderchecktime -= sendertimeout;
byechecktime -= byetimeout;
generaltchecktime -= generaltimeout;
notechecktime -= notetimeout;
sourcelist.GotoFirstElement();
while (sourcelist.HasCurrentElement())
{
RTPInternalSourceData *srcdat = sourcelist.GetCurrentElement();
bool deleted,issender,isactive;
bool byetimeout,normaltimeout,notetimeout;
u_int8_t *note;
size_t notelen;
issender = srcdat->IsSender();
isactive = srcdat->IsActive();
deleted = false;
byetimeout = false;
normaltimeout = false;
notetimeout = false;
note = srcdat->SDES_GetNote(¬elen);
if (notelen != 0) // Note has been set
{
RTPTime notetime = srcdat->INF_GetLastSDESNoteTime();
if (notechecktime > notetime)
{
notetimeout = true;
srcdat->ClearNote();
}
}
if (srcdat->ReceivedBYE())
{
RTPTime byetime = srcdat->GetBYETime();
if ((srcdat != owndata) && (byechecktime > byetime))
{
sourcelist.DeleteCurrentElement();
deleted = true;
byetimeout = true;
}
}
if (!deleted)
{
RTPTime lastmsgtime = srcdat->INF_GetLastMessageTime();
if ((srcdat != owndata) && (lastmsgtime < generaltchecktime))
{
sourcelist.DeleteCurrentElement();
deleted = true;
normaltimeout = true;
}
}
if (!deleted)
{
newtotalcount++;
if (issender)
{
RTPTime lastrtppacktime = srcdat->INF_GetLastRTPPacketTime();
if (lastrtppacktime < senderchecktime)
{
srcdat->ClearSenderFlag();
sendercount--;
}
else
newsendercount++;
}
if (isactive)
newactivecount++;
if (notetimeout)
OnNoteTimeout(srcdat);
sourcelist.GotoNextElement();
}
else // deleted entry
{
if (issender)
sendercount--;
if (isactive)
activecount--;
totalcount--;
if (byetimeout)
OnBYETimeout(srcdat);
if (normaltimeout)
OnTimeout(srcdat);
delete srcdat;
}
}
#ifdef RTPDEBUG
if (newtotalcount != totalcount)
{
SafeCountTotal();
std::cout << "New total count " << newtotalcount << " doesnt match old total count " << totalcount << std::endl;
}
if (newsendercount != sendercount)
{
SafeCountSenders();
std::cout << "New sender count " << newsendercount << " doesnt match old sender count " << sendercount << std::endl;
}
if (newactivecount != activecount)
{
std::cout << "New active count " << newactivecount << " doesnt match old active count " << activecount << std::endl;
SafeCountActive();
}
#endif // RTPDEBUG
totalcount = newtotalcount; // just to play it safe
sendercount = newsendercount;
activecount = newactivecount;
}
#ifdef RTPDEBUG
void RTPSources::Dump()
{
std::cout << "Total count: " << totalcount << std::endl;
std::cout << "Sender count: " << sendercount << std::endl;
std::cout << "Active count: " << activecount << std::endl;
if (GotoFirstSource())
{
do
{
RTPSourceData *s;
s = GetCurrentSourceInfo();
s->Dump();
std::cout << std::endl;
} while (GotoNextSource());
}
}
void RTPSources::SafeCountTotal()
{
int count = 0;
if (GotoFirstSource())
{
do
{
count++;
} while (GotoNextSource());
}
std::cout << "Actual total count: " << count << std::endl;
}
void RTPSources::SafeCountSenders()
{
int count = 0;
if (GotoFirstSource())
{
do
{
RTPSourceData *s;
s = GetCurrentSourceInfo();
if (s->IsSender())
count++;
} while (GotoNextSource());
}
std::cout << "Actual sender count: " << count << std::endl;
}
void RTPSources::SafeCountActive()
{
int count = 0;
if (GotoFirstSource())
{
do
{
RTPSourceData *s;
s = GetCurrentSourceInfo();
if (s->IsActive())
count++;
} while (GotoNextSource());
}
std::cout << "Actual active count: " << count << std::endl;
}
#endif // RTPDEBUG
bool RTPSources::CheckCollision(RTPInternalSourceData *srcdat,const RTPAddress *senderaddress,bool isrtp)
{
bool isset,otherisset;
const RTPAddress *addr,*otheraddr;
if (isrtp)
{
isset = srcdat->IsRTPAddressSet();
addr = srcdat->GetRTPDataAddress();
otherisset = srcdat->IsRTCPAddressSet();
otheraddr = srcdat->GetRTCPDataAddress();
}
else
{
isset = srcdat->IsRTCPAddressSet();
addr = srcdat->GetRTCPDataAddress();
otherisset = srcdat->IsRTPAddressSet();
otheraddr = srcdat->GetRTPDataAddress();
}
if (!isset)
{
if (otherisset) // got other address, can check if it comes from same host
{
if (otheraddr == 0) // other came from our own session
{
if (senderaddress != 0)
{
OnSSRCCollision(srcdat,senderaddress,isrtp);
return true;
}
// Ok, store it
if (isrtp)
srcdat->SetRTPDataAddress(senderaddress);
else
srcdat->SetRTCPDataAddress(senderaddress);
}
else
{
if (!otheraddr->IsFromSameHost(senderaddress))
{
OnSSRCCollision(srcdat,senderaddress,isrtp);
return true;
}
// Ok, comes from same host, store the address
if (isrtp)
srcdat->SetRTPDataAddress(senderaddress);
else
srcdat->SetRTCPDataAddress(senderaddress);
}
}
else // no other address, store this one
{
if (isrtp)
srcdat->SetRTPDataAddress(senderaddress);
else
srcdat->SetRTCPDataAddress(senderaddress);
}
}
else // already got an address
{
if (addr == 0)
{
if (senderaddress != 0)
{
OnSSRCCollision(srcdat,senderaddress,isrtp);
return true;
}
}
else
{
if (!addr->IsSameAddress(senderaddress))
{
OnSSRCCollision(srcdat,senderaddress,isrtp);
return true;
}
}
}
return false;
}<๏ฝfimโend๏ฝ> | } |
<|file_name|>test_ncftp.py<|end_file_name|><๏ฝfimโbegin๏ฝ>import pytest
class TestNcftp:<๏ฝfimโhole๏ฝ>
@pytest.mark.complete("ncftp -", require_cmd=True)
def test_2(self, completion):
assert completion<๏ฝfimโend๏ฝ> | @pytest.mark.complete("ncftp ")
def test_1(self, completion):
assert completion |
<|file_name|>__init__.py<|end_file_name|><๏ฝfimโbegin๏ฝ>"""
Parsing resource files.
See base.py for the ParsedResource base class.
"""
import os.path
from pontoon.sync.formats import (
compare_locales,
ftl,
json_extensions,
lang,
po,
silme,
xliff,
)
# To add support for a new resource format, add an entry to this dict<๏ฝfimโhole๏ฝ>SUPPORTED_FORMAT_PARSERS = {
".dtd": silme.parse_dtd,
".ftl": ftl.parse,
".inc": silme.parse_inc,
".ini": silme.parse_ini,
".json": json_extensions.parse,
".lang": lang.parse,
".po": po.parse,
".pot": po.parse,
".properties": silme.parse_properties,
".xlf": xliff.parse,
".xliff": xliff.parse,
".xml": compare_locales.parse,
}
def are_compatible_formats(extension_a, extension_b):
"""
Return True if given file extensions belong to the same file format.
We test that by comparing parsers used by each file extenion.
Note that some formats (e.g. Gettext, XLIFF) use multiple file extensions.
"""
try:
return (
SUPPORTED_FORMAT_PARSERS[extension_a]
== SUPPORTED_FORMAT_PARSERS[extension_b]
)
# File extension not supported
except KeyError:
return False
def parse(path, source_path=None, locale=None):
"""
Parse the resource file at the given path and return a
ParsedResource with its translations.
:param path:
Path to the resource file to parse.
:param source_path:
Path to the corresponding resource file in the source directory
for the resource we're parsing. Asymmetric formats need this
for saving. Defaults to None.
:param locale:
Object which describes information about currently processed locale.
Some of the formats require information about things like e.g. plural form.
"""
root, extension = os.path.splitext(path)
if extension in SUPPORTED_FORMAT_PARSERS:
return SUPPORTED_FORMAT_PARSERS[extension](
path, source_path=source_path, locale=locale
)
else:
raise ValueError("Translation format {0} is not supported.".format(extension))<๏ฝfimโend๏ฝ> | # where the key is the extension you're parsing and the value is a
# callable returning an instance of a ParsedResource subclass. |
<|file_name|>panel.py<|end_file_name|><๏ฝfimโbegin๏ฝ># vim: tabstop=4 shiftwidth=4 softtabstop=4
<๏ฝfimโhole๏ฝ># Copyright 2011 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import horizon
from horizon.dashboards.nova import dashboard
class InstancesAndVolumes(horizon.Panel):
name = "Instances & Volumes"
slug = 'instances_and_volumes'
dashboard.Nova.register(InstancesAndVolumes)<๏ฝfimโend๏ฝ> | |
<|file_name|>main.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>use std::hash::{Hash, Hasher};
use std::collections::HashSet;
#[derive(Debug, Clone, Copy)]
struct Foo {
elem: usize,
}
impl PartialEq for Foo {
#[inline]
fn eq(&self, other: &Self) -> bool {
self as *const Self == other as *const Self
}
}
impl Eq for Foo {}
impl Hash for Foo {
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
(self as *const Self).hash(state);
}
}
fn main() {
let a = Foo { elem: 5 };
let b = a.clone();
let c = a;
let mut set = HashSet::new();
assert!(a != b);
assert!(a != c);
assert!(b != c);
if !set.insert(a) {
unreachable!();
}
if !set.insert(b) {
unreachable!();
}<๏ฝfimโhole๏ฝ>
if !set.insert(c) {
unreachable!();
}
println!("{:#?}", set);
}<๏ฝfimโend๏ฝ> | |
<|file_name|>status_checks.py<|end_file_name|><๏ฝfimโbegin๏ฝ>#!/usr/bin/python3
#
# Checks that the upstream DNS has been set correctly and that
# TLS certificates have been signed, etc., and if not tells the user
# what to do next.
import sys, os, os.path, re, subprocess, datetime, multiprocessing.pool
import dns.reversename, dns.resolver
import dateutil.parser, dateutil.tz
import idna
import psutil
from dns_update import get_dns_zones, build_tlsa_record, get_custom_dns_config, get_secondary_dns, get_custom_dns_record
from web_update import get_web_domains, get_domains_with_a_records
from ssl_certificates import get_ssl_certificates, get_domain_ssl_files, check_certificate
from mailconfig import get_mail_domains, get_mail_aliases
from utils import shell, sort_domains, load_env_vars_from_file, load_settings
def run_checks(rounded_values, env, output, pool):
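	# `rounded_values` makes the disk and memory checks report rounded figures
	# rather than exact ones; `pool` is a multiprocessing worker pool used to
	# run the slower checks in parallel.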
	# Run system checks.
output.add_heading("System")
# check that services are running
if not run_services_checks(env, output, pool):
# If critical services are not running, stop. If bind9 isn't running,
# all later DNS checks will timeout and that will take forever to
# go through, and if running over the web will cause a fastcgi timeout.
return
# clear bind9's DNS cache so our DNS checks are up to date
# (ignore errors; if bind9/rndc isn't running we'd already report
	# that in run_services_checks.)
shell('check_call', ["/usr/sbin/rndc", "flush"], trap=True)
run_system_checks(rounded_values, env, output)
	# Perform the remaining checks; the domain checks are parallelized across the worker pool.
run_network_checks(env, output)
run_domain_checks(rounded_values, env, output, pool)
def get_ssh_port():
	# Returns the port sshd listens on, or None if sshd is not installed or no port is found.
try:
output = shell('check_output', ['sshd', '-T'])
except FileNotFoundError:
# sshd is not installed. That's ok.
return None
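	# "sshd -T" dumps the effective configuration as "key value" lines; scan
	# the whitespace-separated tokens and return the value that follows "port".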
returnNext = False
for e in output.split():
if returnNext:
return int(e)
if e == "port":
returnNext = True
# Did not find port!
return None
def run_services_checks(env, output, pool):
# Check that system services are running.
services = [
{ "name": "Local DNS (bind9)", "port": 53, "public": False, },
#{ "name": "NSD Control", "port": 8952, "public": False, },
{ "name": "Local DNS Control (bind9/rndc)", "port": 953, "public": False, },
{ "name": "Dovecot LMTP LDA", "port": 10026, "public": False, },
{ "name": "Postgrey", "port": 10023, "public": False, },
{ "name": "Spamassassin", "port": 10025, "public": False, },
{ "name": "OpenDKIM", "port": 8891, "public": False, },
{ "name": "OpenDMARC", "port": 8893, "public": False, },
{ "name": "Memcached", "port": 11211, "public": False, },
{ "name": "Mail-in-a-Box Management Daemon", "port": 10222, "public": False, },
{ "name": "SSH Login (ssh)", "port": get_ssh_port(), "public": True, },
{ "name": "Public DNS (nsd4)", "port": 53, "public": True, },
{ "name": "Incoming Mail (SMTP/postfix)", "port": 25, "public": True, },
{ "name": "Outgoing Mail (SMTP 587/postfix)", "port": 587, "public": True, },
#{ "name": "Postfix/master", "port": 10587, "public": True, },
{ "name": "IMAPS (dovecot)", "port": 993, "public": True, },
{ "name": "Mail Filters (Sieve/dovecot)", "port": 4190, "public": True, },
{ "name": "HTTP Web (nginx)", "port": 80, "public": True, },
{ "name": "HTTPS Web (nginx)", "port": 443, "public": True, },
]
all_running = True
fatal = False
ret = pool.starmap(check_service, ((i, service, env) for i, service in enumerate(services)), chunksize=1)
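	# Each result is tagged with its index in the services list so that the
	# buffered output can be played back in the original order.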
for i, running, fatal2, output2 in sorted(ret):
		if output2 is None: continue # skip this check (e.g. no port was set because sshd is not installed)
all_running = all_running and running
fatal = fatal or fatal2
output2.playback(output)
if all_running:
output.print_ok("All system services are running.")
return not fatal
def check_service(i, service, env):
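	# Returns a tuple (i, running, fatal, output); output is a BufferedOutput
	# that the caller replays once all parallel checks have finished.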
if not service["port"]:
# Skip check (no port, e.g. no sshd).
return (i, None, None, None)
output = BufferedOutput()
running = False
fatal = False
# Helper function to make a connection to the service, since we try
# up to three ways (localhost, IPv4 address, IPv6 address).
def try_connect(ip):
# Connect to the given IP address on the service's port with a one-second timeout.
import socket
s = socket.socket(socket.AF_INET if ":" not in ip else socket.AF_INET6, socket.SOCK_STREAM)
s.settimeout(1)
try:
s.connect((ip, service["port"]))
return True
		except OSError:
# timed out or some other odd error
return False
finally:
s.close()
if service["public"]:
# Service should be publicly accessible.
if try_connect(env["PUBLIC_IP"]):
# IPv4 ok.
if not env.get("PUBLIC_IPV6") or service.get("ipv6") is False or try_connect(env["PUBLIC_IPV6"]):
# No IPv6, or service isn't meant to run on IPv6, or IPv6 is good.
running = True
# IPv4 ok but IPv6 failed. Try the PRIVATE_IPV6 address to see if the service is bound to the interface.
elif service["port"] != 53 and try_connect(env["PRIVATE_IPV6"]):
output.print_error("%s is running (and available over IPv4 and the local IPv6 address), but it is not publicly accessible at %s:%d." % (service['name'], env['PUBLIC_IP'], service['port']))
else:
output.print_error("%s is running and available over IPv4 but is not accessible over IPv6 at %s port %d." % (service['name'], env['PUBLIC_IPV6'], service['port']))
# IPv4 failed. Try the private IP to see if the service is running but not accessible (except DNS because a different service runs on the private IP).
elif service["port"] != 53 and try_connect("127.0.0.1"):
output.print_error("%s is running but is not publicly accessible at %s:%d." % (service['name'], env['PUBLIC_IP'], service['port']))
else:
output.print_error("%s is not running (port %d)." % (service['name'], service['port']))
# Why is nginx not running?
if not running and service["port"] in (80, 443):
output.print_line(shell('check_output', ['nginx', '-t'], capture_stderr=True, trap=True)[1].strip())
else:
# Service should be running locally.
if try_connect("127.0.0.1"):
running = True
else:
output.print_error("%s is not running (port %d)." % (service['name'], service['port']))
# Flag if local DNS is not running.
	if not running and service["port"] == 53 and not service["public"]:
fatal = True
return (i, running, fatal, output)
def run_system_checks(rounded_values, env, output):
check_ssh_password(env, output)
check_software_updates(env, output)
check_miab_version(env, output)
check_system_aliases(env, output)
check_free_disk_space(rounded_values, env, output)
check_free_memory(rounded_values, env, output)
def check_ssh_password(env, output):
# Check that SSH login with password is disabled. The openssh-server
# package may not be installed so check that before trying to access
# the configuration file.
if not os.path.exists("/etc/ssh/sshd_config"):
return
sshd = open("/etc/ssh/sshd_config").read()
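	# The leading \n in each pattern anchors the directive to the start of a
	# line, so commented-out directives like "#PasswordAuthentication yes"
	# are not matched.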
	if re.search(r"\nPasswordAuthentication\s+yes", sshd) \
		or not re.search(r"\nPasswordAuthentication\s+no", sshd):
output.print_error("""The SSH server on this machine permits password-based login. A more secure
way to log in is using a public key. Add your SSH public key to $HOME/.ssh/authorized_keys, check
that you can log in without a password, set the option 'PasswordAuthentication no' in
			/etc/ssh/sshd_config, and then restart the OpenSSH server via 'sudo service ssh restart'.""")
else:
output.print_ok("SSH disallows password-based login.")
def is_reboot_needed_due_to_package_installation():
return os.path.exists("/var/run/reboot-required")
def check_software_updates(env, output):
# Check for any software package updates.
pkgs = list_apt_updates(apt_update=False)
if is_reboot_needed_due_to_package_installation():
output.print_error("System updates have been installed and a reboot of the machine is required.")
elif len(pkgs) == 0:
output.print_ok("System software is up to date.")
else:
output.print_error("There are %d software packages that can be updated." % len(pkgs))
for p in pkgs:
output.print_line("%s (%s)" % (p["package"], p["version"]))
def check_system_aliases(env, output):
# Check that the administrator alias exists since that's where all
# admin email is automatically directed.
check_alias_exists("System administrator address", "administrator@" + env['PRIMARY_HOSTNAME'], env, output)
def check_free_disk_space(rounded_values, env, output):
# Check free disk space.
st = os.statvfs(env['STORAGE_ROOT'])
bytes_total = st.f_blocks * st.f_frsize
bytes_free = st.f_bavail * st.f_frsize
if not rounded_values:
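		# Convert bytes to GB and round to one decimal place.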
disk_msg = "The disk has %s GB space remaining." % str(round(bytes_free/1024.0/1024.0/1024.0*10.0)/10)
else:
disk_msg = "The disk has less than %s%% space left." % str(round(bytes_free/bytes_total/10 + .5)*10)
if bytes_free > .3 * bytes_total:
output.print_ok(disk_msg)
elif bytes_free > .15 * bytes_total:
output.print_warning(disk_msg)
else:
output.print_error(disk_msg)
def check_free_memory(rounded_values, env, output):
# Check free memory.
percent_free = 100 - psutil.virtual_memory().percent
memory_msg = "System memory is %s%% free." % str(round(percent_free))
if percent_free >= 20:
if rounded_values: memory_msg = "System free memory is at least 20%."
output.print_ok(memory_msg)
elif percent_free >= 10:
if rounded_values: memory_msg = "System free memory is below 20%."
output.print_warning(memory_msg)
else:
if rounded_values: memory_msg = "System free memory is below 10%."
output.print_error(memory_msg)
def run_network_checks(env, output):
# Also see setup/network-checks.sh.
output.add_heading("Network")
	# Check that we can make an outbound connection on port 25. Many residential
# networks block outbound port 25 to prevent their network from sending spam.
# See if we can reach one of Google's MTAs with a 5-second timeout.
code, ret = shell("check_call", ["/bin/nc", "-z", "-w5", "aspmx.l.google.com", "25"], trap=True)
if ret == 0:
output.print_ok("Outbound mail (SMTP port 25) is not blocked.")
else:
output.print_error("""Outbound mail (SMTP port 25) seems to be blocked by your network. You
will not be able to send any mail. Many residential networks block port 25 to prevent hijacked
machines from being able to send spam. A quick connection test to Google's mail server on port 25
failed.""")
	# Check whether the IPv4 address is listed in the ZEN Spamhaus Block List.
# The user might have ended up on an IP address that was previously in use
# by a spammer, or the user may be deploying on a residential network. We
# will not be able to reliably send mail in these cases.
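	# DNSBLs are queried by reversing the IP's octets and appending the list's
	# zone, e.g. 1.2.3.4 becomes 4.3.2.1.zen.spamhaus.org; any A-record answer
	# means the address is listed.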
rev_ip4 = ".".join(reversed(env['PUBLIC_IP'].split('.')))
zen = query_dns(rev_ip4+'.zen.spamhaus.org', 'A', nxdomain=None)
if zen is None:
output.print_ok("IP address is not blacklisted by zen.spamhaus.org.")
else:
output.print_error("""The IP address of this machine %s is listed in the Spamhaus Block List (code %s),
which may prevent recipients from receiving your email. See http://www.spamhaus.org/query/ip/%s."""
% (env['PUBLIC_IP'], zen, env['PUBLIC_IP']))
def run_domain_checks(rounded_time, env, output, pool):
# Get the list of domains we handle mail for.
mail_domains = get_mail_domains(env)
# Get the list of domains we serve DNS zones for (i.e. does not include subdomains).
dns_zonefiles = dict(get_dns_zones(env))
dns_domains = set(dns_zonefiles)
# Get the list of domains we serve HTTPS for.
web_domains = set(get_web_domains(env))
domains_to_check = mail_domains | dns_domains | web_domains
# Get the list of domains that we don't serve web for because of a custom CNAME/A record.
domains_with_a_records = get_domains_with_a_records(env)
# Serial version:
#for domain in sort_domains(domains_to_check, env):
# run_domain_checks_on_domain(domain, rounded_time, env, dns_domains, dns_zonefiles, mail_domains, web_domains)
# Parallelize the checks across a worker pool.
args = ((domain, rounded_time, env, dns_domains, dns_zonefiles, mail_domains, web_domains, domains_with_a_records)
for domain in domains_to_check)
ret = pool.starmap(run_domain_checks_on_domain, args, chunksize=1)
ret = dict(ret) # (domain, output) => { domain: output }
for domain in sort_domains(ret, env):
ret[domain].playback(output)
def run_domain_checks_on_domain(domain, rounded_time, env, dns_domains, dns_zonefiles, mail_domains, web_domains, domains_with_a_records):
output = BufferedOutput()
# we'd move this up, but this returns non-pickleable values
ssl_certificates = get_ssl_certificates(env)
# The domain is IDNA-encoded in the database, but for display use Unicode.
try:
domain_display = idna.decode(domain.encode('ascii'))
output.add_heading(domain_display)
except (ValueError, UnicodeError, idna.IDNAError) as e:
# Looks like we have some invalid data in our database.
output.add_heading(domain)
output.print_error("Domain name is invalid: " + str(e))
if domain == env["PRIMARY_HOSTNAME"]:
check_primary_hostname_dns(domain, env, output, dns_domains, dns_zonefiles)
if domain in dns_domains:
check_dns_zone(domain, env, output, dns_zonefiles)
if domain in mail_domains:
check_mail_domain(domain, env, output)
if domain in web_domains:
check_web_domain(domain, rounded_time, ssl_certificates, env, output)
if domain in dns_domains:
check_dns_zone_suggestions(domain, env, output, dns_zonefiles, domains_with_a_records)
return (domain, output)
def check_primary_hostname_dns(domain, env, output, dns_domains, dns_zonefiles):
# If a DS record is set on the zone containing this domain, check DNSSEC now.
has_dnssec = False
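	# A DS record in the parent zone is what signals that DNSSEC is enabled
	# for a zone, so its presence means DNSSEC was activated at the registrar.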
for zone in dns_domains:
if zone == domain or domain.endswith("." + zone):
if query_dns(zone, "DS", nxdomain=None) is not None:
has_dnssec = True
check_dnssec(zone, env, output, dns_zonefiles, is_checking_primary=True)
ip = query_dns(domain, "A")
ns_ips = query_dns("ns1." + domain, "A") + '/' + query_dns("ns2." + domain, "A")
my_ips = env['PUBLIC_IP'] + ((" / "+env['PUBLIC_IPV6']) if env.get("PUBLIC_IPV6") else "")
# Check that the ns1/ns2 hostnames resolve to A records. This information probably
# comes from the TLD since the information is set at the registrar as glue records.
# We're probably not actually checking that here but instead checking that we, as
# the nameserver, are reporting the right info --- but if the glue is incorrect this
# will probably fail.
if ns_ips == env['PUBLIC_IP'] + '/' + env['PUBLIC_IP']:
output.print_ok("Nameserver glue records are correct at registrar. [ns1/ns2.%s โฆ %s]" % (env['PRIMARY_HOSTNAME'], env['PUBLIC_IP']))
elif ip == env['PUBLIC_IP']:
# The NS records are not what we expect, but the domain resolves correctly, so
# the user may have set up external DNS. List this discrepancy as a warning.
output.print_warning("""Nameserver glue records (ns1.%s and ns2.%s) should be configured at your domain name
registrar as having the IP address of this box (%s). They currently report addresses of %s. If you have set up External DNS, this may be OK."""
% (env['PRIMARY_HOSTNAME'], env['PRIMARY_HOSTNAME'], env['PUBLIC_IP'], ns_ips))
else:
output.print_error("""Nameserver glue records are incorrect. The ns1.%s and ns2.%s nameservers must be configured at your domain name
registrar as having the IP address %s. They currently report addresses of %s. It may take several hours for
public DNS to update after a change."""
% (env['PRIMARY_HOSTNAME'], env['PRIMARY_HOSTNAME'], env['PUBLIC_IP'], ns_ips))
# Check that PRIMARY_HOSTNAME resolves to PUBLIC_IP[V6] in public DNS.
ipv6 = query_dns(domain, "AAAA") if env.get("PUBLIC_IPV6") else None
if ip == env['PUBLIC_IP'] and ipv6 in (None, env['PUBLIC_IPV6']):
output.print_ok("Domain resolves to box's IP address. [%s โฆ %s]" % (env['PRIMARY_HOSTNAME'], my_ips))
else:
output.print_error("""This domain must resolve to your box's IP address (%s) in public DNS but it currently resolves
to %s. It may take several hours for public DNS to update after a change. This problem may result from other
issues listed above."""
% (my_ips, ip + ((" / " + ipv6) if ipv6 is not None else "")))
# Check reverse DNS matches the PRIMARY_HOSTNAME. Note that it might not be
# a DNS zone if it is a subdomain of another domain we have a zone for.
existing_rdns_v4 = query_dns(dns.reversename.from_address(env['PUBLIC_IP']), "PTR")
existing_rdns_v6 = query_dns(dns.reversename.from_address(env['PUBLIC_IPV6']), "PTR") if env.get("PUBLIC_IPV6") else None
if existing_rdns_v4 == domain and existing_rdns_v6 in (None, domain):
output.print_ok("Reverse DNS is set correctly at ISP. [%s โฆ %s]" % (my_ips, env['PRIMARY_HOSTNAME']))
elif existing_rdns_v4 == existing_rdns_v6 or existing_rdns_v6 is None:
output.print_error("""Your box's reverse DNS is currently %s, but it should be %s. Your ISP or cloud provider will have instructions
on setting up reverse DNS for your box.""" % (existing_rdns_v4, domain) )
else:
output.print_error("""Your box's reverse DNS is currently %s (IPv4) and %s (IPv6), but it should be %s. Your ISP or cloud provider will have instructions
on setting up reverse DNS for your box.""" % (existing_rdns_v4, existing_rdns_v6, domain) )
# Check the TLSA record.
tlsa_qname = "_25._tcp." + domain
tlsa25 = query_dns(tlsa_qname, "TLSA", nxdomain=None)
tlsa25_expected = build_tlsa_record(env)
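	# The expected record is likely of the form "3 1 1 <sha256 hex digest>" (DANE-EE,
	# SPKI, SHA-256) --- illustrative only; the exact fields come from build_tlsa_record.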
if tlsa25 == tlsa25_expected:
output.print_ok("""The DANE TLSA record for incoming mail is correct (%s).""" % tlsa_qname,)
elif tlsa25 is None:
if has_dnssec:
# Omit a warning about it not being set if DNSSEC isn't enabled,
# since TLSA shouldn't be used without DNSSEC.
output.print_warning("""The DANE TLSA record for incoming mail is not set. This is optional.""")
else:
output.print_error("""The DANE TLSA record for incoming mail (%s) is not correct. It is '%s' but it should be '%s'.
It may take several hours for public DNS to update after a change."""
% (tlsa_qname, tlsa25, tlsa25_expected))
# Check that the hostmaster@ email address exists.
check_alias_exists("Hostmaster contact address", "hostmaster@" + domain, env, output)
def check_alias_exists(alias_name, alias, env, output):
	mail_aliases = {address: receivers for address, receivers, *_ in get_mail_aliases(env)}
if alias in mail_aliases:
if mail_aliases[alias]:
output.print_ok("%s exists as a mail alias. [%s โฆ %s]" % (alias_name, alias, mail_aliases[alias]))
else:
output.print_error("""You must set the destination of the mail alias for %s to direct email to you or another administrator.""" % alias)
else:
output.print_error("""You must add a mail alias for %s which directs email to you or another administrator.""" % alias)
def check_dns_zone(domain, env, output, dns_zonefiles):
# If a DS record is set at the registrar, check DNSSEC first because it will affect the NS query.
# If it is not set, we suggest it last.
if query_dns(domain, "DS", nxdomain=None) is not None:
check_dnssec(domain, env, output, dns_zonefiles)
# We provide a DNS zone for the domain. It should have NS records set up
# at the domain name's registrar pointing to this box. The secondary DNS
# server may be customized.
# (I'm not sure whether this necessarily tests the TLD's configuration,
# as it should, or if one successful NS line at the TLD will result in
# this query being answered by the box, which would mean the test is only
# half working.)
custom_dns_records = list(get_custom_dns_config(env)) # generator => list so we can reuse it
correct_ip = get_custom_dns_record(custom_dns_records, domain, "A") or env['PUBLIC_IP']
custom_secondary_ns = get_secondary_dns(custom_dns_records, mode="NS")
secondary_ns = custom_secondary_ns or ["ns2." + env['PRIMARY_HOSTNAME']]
existing_ns = query_dns(domain, "NS")
correct_ns = "; ".join(sorted(["ns1." + env['PRIMARY_HOSTNAME']] + secondary_ns))
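	# correct_ns is e.g. "ns1.box.example.com; ns2.box.example.com" (hypothetical hostnames).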
ip = query_dns(domain, "A")
probably_external_dns = False
if existing_ns.lower() == correct_ns.lower():
output.print_ok("Nameservers are set correctly at registrar. [%s]" % correct_ns)
elif ip == correct_ip:
# The domain resolves correctly, so maybe the user is using External DNS.
output.print_warning("""The nameservers set on this domain at your domain name registrar should be %s. They are currently %s.
If you are using External DNS, this may be OK."""
% (correct_ns, existing_ns) )
probably_external_dns = True
else:
output.print_error("""The nameservers set on this domain are incorrect. They are currently %s. Use your domain name registrar's
control panel to set the nameservers to %s."""
% (existing_ns, correct_ns) )
# Check that each custom secondary nameserver resolves the IP address.
if custom_secondary_ns and not probably_external_dns:
for ns in custom_secondary_ns:
# We must first resolve the nameserver to an IP address so we can query it.
ns_ip = query_dns(ns, "A")
if not ns_ip:
output.print_error("Secondary nameserver %s is not valid (it doesn't resolve to an IP address)." % ns)
continue
# Now query it to see what it says about this domain.
ip = query_dns(domain, "A", at=ns_ip, nxdomain=None)
if ip == correct_ip:
output.print_ok("Secondary nameserver %s resolved the domain correctly." % ns)
elif ip is None:
output.print_error("Secondary nameserver %s is not configured to resolve this domain." % ns)
else:
output.print_error("Secondary nameserver %s is not configured correctly. (It resolved this domain as %s. It should be %s.)" % (ns, ip, correct_ip))
def check_dns_zone_suggestions(domain, env, output, dns_zonefiles, domains_with_a_records):
# Warn if a custom DNS record is preventing this or the automatic www redirect from
# being served.
if domain in domains_with_a_records:
output.print_warning("""Web has been disabled for this domain because you have set a custom DNS record.""")
if "www." + domain in domains_with_a_records:
output.print_warning("""A redirect from 'www.%s' has been disabled for this domain because you have set a custom DNS record on the www subdomain.""" % domain)
# Since DNSSEC is optional, if a DS record is NOT set at the registrar suggest it.
# (If it was set, we did the check earlier.)
if query_dns(domain, "DS", nxdomain=None) is None:
check_dnssec(domain, env, output, dns_zonefiles)
def check_dnssec(domain, env, output, dns_zonefiles, is_checking_primary=False):
# See if the domain has a DS record set at the registrar. The DS record may have
# several forms. We have to be prepared to check for any valid record. We've
# pre-generated all of the valid digests --- read them in.
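	# Each line of the .ds file is a zone-file-style DS record, e.g. (hypothetical values):
	#   example.com.	3600	IN	DS	31589 8 2 <hex digest>
	# so the tab-separated field [4] below holds "keytag algorithm digest-type digest".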
ds_file = '/etc/nsd/zones/' + dns_zonefiles[domain] + '.ds'
if not os.path.exists(ds_file): return # Domain is in our database but DNS has not yet been updated.
ds_correct = open(ds_file).read().strip().split("\n")
digests = { }
for rr_ds in ds_correct:
ds_keytag, ds_alg, ds_digalg, ds_digest = rr_ds.split("\t")[4].split(" ")
digests[ds_digalg] = ds_digest
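	# Every line describes the same KSK, so ds_keytag and ds_alg from the final
	# iteration apply to all records; only the digest algorithm and value vary per line.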
# Some registrars may want the public key so they can compute the digest. The DS
# record that we suggest using is for the KSK (and that's how the DS records were generated).
alg_name_map = { '7': 'RSASHA1-NSEC3-SHA1', '8': 'RSASHA256' }
dnssec_keys = load_env_vars_from_file(os.path.join(env['STORAGE_ROOT'], 'dns/dnssec/%s.conf' % alg_name_map[ds_alg]))
	dnssec_pubkey = open(os.path.join(env['STORAGE_ROOT'], 'dns/dnssec/' + dnssec_keys['KSK'] + '.key')).read().split("\t")[3].split(" ")[3]
# Query public DNS for the DS record at the registrar.
ds = query_dns(domain, "DS", nxdomain=None)
ds_looks_valid = ds and len(ds.split(" ")) == 4
if ds_looks_valid: ds = ds.split(" ")
if ds_looks_valid and ds[0] == ds_keytag and ds[1] == ds_alg and ds[3] == digests.get(ds[2]):
if is_checking_primary: return
output.print_ok("DNSSEC 'DS' record is set correctly at registrar.")
else:
		if ds is None:
if is_checking_primary: return
output.print_warning("""This domain's DNSSEC DS record is not set. The DS record is optional. The DS record activates DNSSEC.
To set a DS record, you must follow the instructions provided by your domain name registrar and provide to them this information:""")
else:
if is_checking_primary:
output.print_error("""The DNSSEC 'DS' record for %s is incorrect. See further details below.""" % domain)
return
output.print_error("""This domain's DNSSEC DS record is incorrect. The chain of trust is broken between the public DNS system
and this machine's DNS server. It may take several hours for public DNS to update after a change. If you did not recently
make a change, you must resolve this immediately by following the instructions provided by your domain name registrar and
provide to them this information:""")
output.print_line("")
output.print_line("Key Tag: " + ds_keytag + ("" if not ds_looks_valid or ds[0] == ds_keytag else " (Got '%s')" % ds[0]))
output.print_line("Key Flags: KSK")
output.print_line(
("Algorithm: %s / %s" % (ds_alg, alg_name_map[ds_alg]))
+ ("" if not ds_looks_valid or ds[1] == ds_alg else " (Got '%s')" % ds[1]))
# see http://www.iana.org/assignments/dns-sec-alg-numbers/dns-sec-alg-numbers.xhtml
output.print_line("Digest Type: 2 / SHA-256")
# http://www.ietf.org/assignments/ds-rr-types/ds-rr-types.xml
output.print_line("Digest: " + digests['2'])
if ds_looks_valid and ds[3] != digests.get(ds[2]):
output.print_line("(Got digest type %s and digest %s which do not match.)" % (ds[2], ds[3]))
output.print_line("Public Key: ")
	output.print_line(dnssec_pubkey, monospace=True)
output.print_line("")
output.print_line("Bulk/Record Format:")
output.print_line("" + ds_correct[0])
output.print_line("")
def check_mail_domain(domain, env, output):
# Check the MX record.
recommended_mx = "10 " + env['PRIMARY_HOSTNAME']
mx = query_dns(domain, "MX", nxdomain=None)
if mx is None:
mxhost = None
else:
# query_dns returns a semicolon-delimited list
# of priority-host pairs.
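		# e.g. "10 box.example.com; 20 fallback.example.com" (hypothetical hosts).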
mxhost = mx.split('; ')[0].split(' ')[1]
	if mxhost is None:
# A missing MX record is okay on the primary hostname because
# the primary hostname's A record (the MX fallback) is... itself,
# which is what we want the MX to be.
if domain == env['PRIMARY_HOSTNAME']:
output.print_ok("Domain's email is directed to this domain. [%s has no MX record, which is ok]" % (domain,))
# And a missing MX record is okay on other domains if the A record
# matches the A record of the PRIMARY_HOSTNAME. Actually this will
# probably confuse DANE TLSA, but we'll let that slide for now.
else:
domain_a = query_dns(domain, "A", nxdomain=None)
primary_a = query_dns(env['PRIMARY_HOSTNAME'], "A", nxdomain=None)
		if domain_a is not None and domain_a == primary_a:
output.print_ok("Domain's email is directed to this domain. [%s has no MX record but its A record is OK]" % (domain,))
else:
output.print_error("""This domain's DNS MX record is not set. It should be '%s'. Mail will not
be delivered to this box. It may take several hours for public DNS to update after a
change. This problem may result from other issues listed here.""" % (recommended_mx,))
elif mxhost == env['PRIMARY_HOSTNAME']:
good_news = "Domain's email is directed to this domain. [%s โฆ %s]" % (domain, mx)
if mx != recommended_mx:
good_news += " This configuration is non-standard. The recommended configuration is '%s'." % (recommended_mx,)
output.print_ok(good_news)
else:
output.print_error("""This domain's DNS MX record is incorrect. It is currently set to '%s' but should be '%s'. Mail will not
be delivered to this box. It may take several hours for public DNS to update after a change. This problem may result from
other issues listed here.""" % (mx, recommended_mx))
# Check that the postmaster@ email address exists. Not required if the domain has a
# catch-all address or domain alias.
if "@" + domain not in [address for address, *_ in get_mail_aliases(env)]:
check_alias_exists("Postmaster contact address", "postmaster@" + domain, env, output)
# Stop if the domain is listed in the Spamhaus Domain Block List.
# The user might have chosen a domain that was previously in use by a spammer
# and will not be able to reliably send mail.
dbl = query_dns(domain+'.dbl.spamhaus.org', "A", nxdomain=None)
if dbl is None:
output.print_ok("Domain is not blacklisted by dbl.spamhaus.org.")
else:
output.print_error("""This domain is listed in the Spamhaus Domain Block List (code %s),
which may prevent recipients from receiving your mail.
See http://www.spamhaus.org/dbl/ and http://www.spamhaus.org/query/domain/%s.""" % (dbl, domain))
def check_web_domain(domain, rounded_time, ssl_certificates, env, output):
	# See if the domain's A record resolves to our PUBLIC_IP. This is already checked
	# for PRIMARY_HOSTNAME, where it is required for mail delivery. For that domain and
	# all others, it is also required for serving the domain's website.
if domain != env['PRIMARY_HOSTNAME']:
ok_values = []
for (rtype, expected) in (("A", env['PUBLIC_IP']), ("AAAA", env.get('PUBLIC_IPV6'))):
if not expected: continue # IPv6 is not configured
value = query_dns(domain, rtype)
if value == expected:
ok_values.append(value)
else:
output.print_error("""This domain should resolve to your box's IP address (%s %s) if you would like the box to serve
webmail or a website on this domain. The domain currently resolves to %s in public DNS. It may take several hours for
public DNS to update after a change. This problem may result from other issues listed here.""" % (rtype, expected, value))
return
# If both A and AAAA are correct...
output.print_ok("Domain resolves to this box's IP address. [%s โฆ %s]" % (domain, '; '.join(ok_values)))
# We need a TLS certificate for PRIMARY_HOSTNAME because that's where the
# user will log in with IMAP or webmail. Any other domain we serve a
# website for also needs a signed certificate.
check_ssl_cert(domain, rounded_time, ssl_certificates, env, output)
def query_dns(qname, rtype, nxdomain='[Not Set]', at=None):
	# Make the qname absolute by appending a period. Without this, dns.resolver.query
	# retries a failed lookup with this machine's hostname appended, which has been
	# causing some false-positive Spamhaus reports. The reverse DNS lookup passes a
	# dns.name.Name instance, which is already absolute, so we must not modify that.
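	# e.g. "example.com" becomes "example.com." so the resolver treats it as absolute.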
if isinstance(qname, str):
qname += "."
# Use the default nameservers (as defined by the system, which is our locally
# running bind server), or if the 'at' argument is specified, use that host
# as the nameserver.
resolver = dns.resolver.get_default_resolver()
if at:
resolver = dns.resolver.Resolver()
resolver.nameservers = [at]
# Set a timeout so that a non-responsive server doesn't hold us back.
resolver.timeout = 5
# Do the query.
try:
response = resolver.query(qname, rtype)
except (dns.resolver.NoNameservers, dns.resolver.NXDOMAIN, dns.resolver.NoAnswer):<๏ฝfimโhole๏ฝ> except dns.exception.Timeout:
return "[timeout]"
# There may be multiple answers; concatenate the response. Remove trailing
# periods from responses since that's how qnames are encoded in DNS but is
# confusing for us. The order of the answers doesn't matter, so sort so we
# can compare to a well known order.
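	# e.g. answers of ["1.2.3.4.", "5.6.7.8."] (hypothetical) become "1.2.3.4; 5.6.7.8".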
return "; ".join(sorted(str(r).rstrip('.') for r in response))
def check_ssl_cert(domain, rounded_time, ssl_certificates, env, output):
# Check that TLS certificate is signed.
# Skip the check if the A record is not pointed here.
if query_dns(domain, "A", None) not in (env['PUBLIC_IP'], None): return
# Where is the certificate file stored?
tls_cert = get_domain_ssl_files(domain, ssl_certificates, env, allow_missing_cert=True)
if tls_cert is None:
output.print_warning("""No TLS (SSL) certificate is installed for this domain. Visitors to a website on
this domain will get a security warning. If you are not serving a website on this domain, you do
not need to take any action. Use the TLS Certificates page in the control panel to install a
TLS certificate.""")
return
# Check that the certificate is good.
cert_status, cert_status_details = check_certificate(domain, tls_cert["certificate"], tls_cert["private-key"], rounded_time=rounded_time)
if cert_status == "OK":
# The certificate is ok. The details has expiry info.
output.print_ok("TLS (SSL) certificate is signed & valid. " + cert_status_details)
elif cert_status == "SELF-SIGNED":
# Offer instructions for purchasing a signed certificate.
if domain == env['PRIMARY_HOSTNAME']:
output.print_error("""The TLS (SSL) certificate for this domain is currently self-signed. You will get a security
warning when you check or send email and when visiting this domain in a web browser (for webmail or
static site hosting).""")
else:
output.print_error("""The TLS (SSL) certificate for this domain is self-signed.""")
else:
output.print_error("The TLS (SSL) certificate has a problem: " + cert_status)
if cert_status_details:
output.print_line("")
output.print_line(cert_status_details)
output.print_line("")
_apt_updates = None
def list_apt_updates(apt_update=True):
# See if we have this information cached recently.
# Keep the information for 8 hours.
global _apt_updates
if _apt_updates is not None and _apt_updates[0] > datetime.datetime.now() - datetime.timedelta(hours=8):
return _apt_updates[1]
# Run apt-get update to refresh package list. This should be running daily
# anyway, so on the status checks page don't do this because it is slow.
if apt_update:
shell("check_call", ["/usr/bin/apt-get", "-qq", "update"])
# Run apt-get upgrade in simulate mode to get a list of what
# it would do.
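	# Each relevant output line looks like (hypothetical package and versions):
	#   Inst base-files [10.1ubuntu2] (10.1ubuntu2.1 Ubuntu:18.04/bionic-updates [amd64])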
simulated_install = shell("check_output", ["/usr/bin/apt-get", "-qq", "-s", "upgrade"])
pkgs = []
for line in simulated_install.split('\n'):
if line.strip() == "":
continue
if re.match(r'^Conf .*', line):
# remove these lines, not informative
continue
m = re.match(r'^Inst (.*) \[(.*)\] \((\S*)', line)
if m:
pkgs.append({ "package": m.group(1), "version": m.group(3), "current_version": m.group(2) })
else:
pkgs.append({ "package": "[" + line + "]", "version": "", "current_version": "" })
# Cache for future requests.
_apt_updates = (datetime.datetime.now(), pkgs)
return pkgs
def what_version_is_this(env):
# This function runs `git describe --abbrev=0` on the Mail-in-a-Box installation directory.
# Git may not be installed and Mail-in-a-Box may not have been cloned from github,
# so this function may raise all sorts of exceptions.
miab_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
tag = shell("check_output", ["/usr/bin/git", "describe", "--abbrev=0"], env={"GIT_DIR": os.path.join(miab_dir, '.git')}).strip()
return tag
def get_latest_miab_version():
# This pings https://mailinabox.email/setup.sh and extracts the tag named in
# the script to determine the current product version.
import urllib.request
return re.search(b'TAG=(.*)', urllib.request.urlopen("https://mailinabox.email/setup.sh?ping=1").read()).group(1).decode("utf8")
def check_miab_version(env, output):
config = load_settings(env)
if config.get("privacy", True):
output.print_warning("Mail-in-a-Box version check disabled by privacy setting.")
else:
try:
this_ver = what_version_is_this(env)
		except Exception:
this_ver = "Unknown"
latest_ver = get_latest_miab_version()
if this_ver == latest_ver:
output.print_ok("Mail-in-a-Box is up to date. You are running version %s." % this_ver)
else:
output.print_error("A new version of Mail-in-a-Box is available. You are running version %s. The latest version is %s. For upgrade instructions, see https://mailinabox.email. "
% (this_ver, latest_ver))
def run_and_output_changes(env, pool):
import json
from difflib import SequenceMatcher
out = ConsoleOutput()
# Run status checks.
cur = BufferedOutput()
run_checks(True, env, cur, pool)
# Load previously saved status checks.
cache_fn = "/var/cache/mailinabox/status_checks.json"
if os.path.exists(cache_fn):
prev = json.load(open(cache_fn))
# Group the serial output into categories by the headings.
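	# Each recorded line is a (method, args, kwargs) tuple, e.g. ("print_ok", ("example message",), {}).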
def group_by_heading(lines):
from collections import OrderedDict
ret = OrderedDict()
k = []
ret["No Category"] = k
for line_type, line_args, line_kwargs in lines:
if line_type == "add_heading":
k = []
ret[line_args[0]] = k
else:
k.append((line_type, line_args, line_kwargs))
return ret
prev_status = group_by_heading(prev)
cur_status = group_by_heading(cur.buf)
# Compare the previous to the current status checks
# category by category.
for category, cur_lines in cur_status.items():
if category not in prev_status:
out.add_heading(category + " -- Added")
BufferedOutput(with_lines=cur_lines).playback(out)
else:
# Actual comparison starts here...
prev_lines = prev_status[category]
def stringify(lines):
return [json.dumps(line) for line in lines]
diff = SequenceMatcher(None, stringify(prev_lines), stringify(cur_lines)).get_opcodes()
for op, i1, i2, j1, j2 in diff:
if op == "replace":
out.add_heading(category + " -- Previously:")
elif op == "delete":
out.add_heading(category + " -- Removed")
if op in ("replace", "delete"):
BufferedOutput(with_lines=prev_lines[i1:i2]).playback(out)
if op == "replace":
out.add_heading(category + " -- Currently:")
elif op == "insert":
out.add_heading(category + " -- Added")
if op in ("replace", "insert"):
BufferedOutput(with_lines=cur_lines[j1:j2]).playback(out)
for category, prev_lines in prev_status.items():
if category not in cur_status:
out.add_heading(category)
out.print_warning("This section was removed.")
# Store the current status checks output for next time.
os.makedirs(os.path.dirname(cache_fn), exist_ok=True)
with open(cache_fn, "w") as f:
json.dump(cur.buf, f, indent=True)
class FileOutput:
def __init__(self, buf, width):
self.buf = buf
self.width = width
def add_heading(self, heading):
print(file=self.buf)
print(heading, file=self.buf)
print("=" * len(heading), file=self.buf)
def print_ok(self, message):
self.print_block(message, first_line="โ ")
def print_error(self, message):
self.print_block(message, first_line="โ ")
def print_warning(self, message):
self.print_block(message, first_line="? ")
def print_block(self, message, first_line=" "):
print(first_line, end='', file=self.buf)
		message = re.sub(r"\n\s*", " ", message)
		words = re.split(r"(\s+)", message)
linelen = 0
for w in words:
if self.width and (linelen + len(w) > self.width-1-len(first_line)):
print(file=self.buf)
print(" ", end="", file=self.buf)
linelen = 0
if linelen == 0 and w.strip() == "": continue
print(w, end="", file=self.buf)
linelen += len(w)
print(file=self.buf)
def print_line(self, message, monospace=False):
for line in message.split("\n"):
self.print_block(line)
class ConsoleOutput(FileOutput):
def __init__(self):
self.buf = sys.stdout
# Do nice line-wrapping according to the size of the terminal.
# The 'stty' program queries standard input for terminal information.
if sys.stdin.isatty():
try:
self.width = int(shell('check_output', ['stty', 'size']).split()[1])
			except Exception:
self.width = 76
else:
# However if standard input is not a terminal, we would get
# "stty: standard input: Inappropriate ioctl for device". So
# we test with sys.stdin.isatty first, and if it is not a
# terminal don't do any line wrapping. When this script is
# run from cron, or if stdin has been redirected, this happens.
self.width = None
class BufferedOutput:
# Record all of the instance method calls so we can play them back later.
def __init__(self, with_lines=None):
self.buf = [] if not with_lines else with_lines
def __getattr__(self, attr):
if attr not in ("add_heading", "print_ok", "print_error", "print_warning", "print_block", "print_line"):
raise AttributeError
# Return a function that just records the call & arguments to our buffer.
def w(*args, **kwargs):
self.buf.append((attr, args, kwargs))
return w
def playback(self, output):
for attr, args, kwargs in self.buf:
getattr(output, attr)(*args, **kwargs)
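# Illustrative usage: record calls on a BufferedOutput, then replay them later:
#   buf = BufferedOutput(); buf.print_ok("example check passed"); buf.playback(ConsoleOutput())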
if __name__ == "__main__":
from utils import load_environment
env = load_environment()
pool = multiprocessing.pool.Pool(processes=10)
if len(sys.argv) == 1:
run_checks(False, env, ConsoleOutput(), pool)
elif sys.argv[1] == "--show-changes":
run_and_output_changes(env, pool)
elif sys.argv[1] == "--check-primary-hostname":
# See if the primary hostname appears resolvable and has a signed certificate.
domain = env['PRIMARY_HOSTNAME']
if query_dns(domain, "A") != env['PUBLIC_IP']:
sys.exit(1)
ssl_certificates = get_ssl_certificates(env)
tls_cert = get_domain_ssl_files(domain, ssl_certificates, env)
if not os.path.exists(tls_cert["certificate"]):
sys.exit(1)
cert_status, cert_status_details = check_certificate(domain, tls_cert["certificate"], tls_cert["private-key"], warn_if_expiring_soon=False)
if cert_status != "OK":
sys.exit(1)
sys.exit(0)
elif sys.argv[1] == "--version":
print(what_version_is_this(env))<๏ฝfimโend๏ฝ> | # Host did not have an answer for this query; not sure what the
# difference is between the two exceptions.
return nxdomain |
<|file_name|>testbinding.rs<|end_file_name|><๏ฝfimโbegin๏ฝ>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// check-tidy: no specs after this line
use dom::bindings::codegen::Bindings::EventListenerBinding::EventListener;
use dom::bindings::codegen::Bindings::FunctionBinding::Function;
use dom::bindings::codegen::Bindings::TestBindingBinding;
use dom::bindings::codegen::Bindings::TestBindingBinding::{TestBindingMethods, TestDictionary};
use dom::bindings::codegen::Bindings::TestBindingBinding::{TestDictionaryDefaults, TestEnum};
use dom::bindings::codegen::UnionTypes::{BlobOrBoolean, BlobOrBlobSequence};
use dom::bindings::codegen::UnionTypes::{BlobOrString, BlobOrUnsignedLong, EventOrString};
use dom::bindings::codegen::UnionTypes::{EventOrUSVString, HTMLElementOrLong};
use dom::bindings::codegen::UnionTypes::{HTMLElementOrUnsignedLongOrStringOrBoolean, LongSequenceOrBoolean};
use dom::bindings::codegen::UnionTypes::{StringOrLongSequence, StringOrStringSequence, StringSequenceOrUnsignedLong};
use dom::bindings::codegen::UnionTypes::{StringOrUnsignedLong, StringOrBoolean, UnsignedLongOrBoolean};
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::Root;
use dom::bindings::num::Finite;
use dom::bindings::reflector::{Reflectable, Reflector, reflect_dom_object};
use dom::bindings::str::{ByteString, DOMString, USVString};
use dom::bindings::weakref::MutableWeakRef;
use dom::blob::{Blob, BlobImpl};
use dom::url::URL;
use js::jsapi::{HandleObject, HandleValue, JSContext, JSObject};
use js::jsval::{JSVal, NullValue};
use std::borrow::ToOwned;
use std::ptr;
use std::rc::Rc;
use util::prefs::get_pref;
#[dom_struct]
pub struct TestBinding {
reflector_: Reflector,
url: MutableWeakRef<URL>,
}
impl TestBinding {
fn new_inherited() -> TestBinding {
TestBinding {
reflector_: Reflector::new(),
url: MutableWeakRef::new(None),
}
}
pub fn new(global: GlobalRef) -> Root<TestBinding> {
reflect_dom_object(box TestBinding::new_inherited(),
global, TestBindingBinding::Wrap)
}
pub fn Constructor(global: GlobalRef) -> Fallible<Root<TestBinding>> {
Ok(TestBinding::new(global))
}
#[allow(unused_variables)]
pub fn Constructor_(global: GlobalRef, nums: Vec<f64>) -> Fallible<Root<TestBinding>> {
Ok(TestBinding::new(global))
}
#[allow(unused_variables)]
pub fn Constructor__(global: GlobalRef, num: f64) -> Fallible<Root<TestBinding>> {
Ok(TestBinding::new(global))
}
}
impl TestBindingMethods for TestBinding {
fn BooleanAttribute(&self) -> bool { false }
fn SetBooleanAttribute(&self, _: bool) {}
fn ByteAttribute(&self) -> i8 { 0 }
fn SetByteAttribute(&self, _: i8) {}
fn OctetAttribute(&self) -> u8 { 0 }
fn SetOctetAttribute(&self, _: u8) {}
fn ShortAttribute(&self) -> i16 { 0 }
fn SetShortAttribute(&self, _: i16) {}
fn UnsignedShortAttribute(&self) -> u16 { 0 }
fn SetUnsignedShortAttribute(&self, _: u16) {}
fn LongAttribute(&self) -> i32 { 0 }
fn SetLongAttribute(&self, _: i32) {}
fn UnsignedLongAttribute(&self) -> u32 { 0 }
fn SetUnsignedLongAttribute(&self, _: u32) {}
fn LongLongAttribute(&self) -> i64 { 0 }
fn SetLongLongAttribute(&self, _: i64) {}
fn UnsignedLongLongAttribute(&self) -> u64 { 0 }
fn SetUnsignedLongLongAttribute(&self, _: u64) {}
fn UnrestrictedFloatAttribute(&self) -> f32 { 0. }
fn SetUnrestrictedFloatAttribute(&self, _: f32) {}
fn FloatAttribute(&self) -> Finite<f32> { Finite::wrap(0.) }
fn SetFloatAttribute(&self, _: Finite<f32>) {}
fn UnrestrictedDoubleAttribute(&self) -> f64 { 0. }
fn SetUnrestrictedDoubleAttribute(&self, _: f64) {}
fn DoubleAttribute(&self) -> Finite<f64> { Finite::wrap(0.) }
fn SetDoubleAttribute(&self, _: Finite<f64>) {}
fn StringAttribute(&self) -> DOMString { DOMString::new() }
fn SetStringAttribute(&self, _: DOMString) {}
fn UsvstringAttribute(&self) -> USVString { USVString("".to_owned()) }
fn SetUsvstringAttribute(&self, _: USVString) {}
fn ByteStringAttribute(&self) -> ByteString { ByteString::new(vec!()) }
fn SetByteStringAttribute(&self, _: ByteString) {}
fn EnumAttribute(&self) -> TestEnum { TestEnum::_empty }
fn SetEnumAttribute(&self, _: TestEnum) {}
fn InterfaceAttribute(&self) -> Root<Blob> {
Blob::new(self.global().r(), BlobImpl::new_from_empty_slice(), "".to_owned())
}
fn SetInterfaceAttribute(&self, _: &Blob) {}
fn UnionAttribute(&self) -> HTMLElementOrLong { HTMLElementOrLong::Long(0) }
fn SetUnionAttribute(&self, _: HTMLElementOrLong) {}
fn Union2Attribute(&self) -> EventOrString { EventOrString::String(DOMString::new()) }
fn SetUnion2Attribute(&self, _: EventOrString) {}
fn Union3Attribute(&self) -> EventOrUSVString {
EventOrUSVString::USVString(USVString("".to_owned()))
}
fn SetUnion3Attribute(&self, _: EventOrUSVString) {}
fn Union4Attribute(&self) -> StringOrUnsignedLong {
StringOrUnsignedLong::UnsignedLong(0u32)
}
fn SetUnion4Attribute(&self, _: StringOrUnsignedLong) {}
fn Union5Attribute(&self) -> StringOrBoolean {
StringOrBoolean::Boolean(true)
}
fn SetUnion5Attribute(&self, _: StringOrBoolean) {}
fn Union6Attribute(&self) -> UnsignedLongOrBoolean {
UnsignedLongOrBoolean::Boolean(true)
}
fn SetUnion6Attribute(&self, _: UnsignedLongOrBoolean) {}
fn Union7Attribute(&self) -> BlobOrBoolean {
BlobOrBoolean::Boolean(true)
}
fn SetUnion7Attribute(&self, _: BlobOrBoolean) {}
fn Union8Attribute(&self) -> BlobOrUnsignedLong {
BlobOrUnsignedLong::UnsignedLong(0u32)
}
fn SetUnion8Attribute(&self, _: BlobOrUnsignedLong) {}
fn ArrayAttribute(&self, _: *mut JSContext) -> *mut JSObject { NullValue().to_object_or_null() }
fn AnyAttribute(&self, _: *mut JSContext) -> JSVal { NullValue() }
fn SetAnyAttribute(&self, _: *mut JSContext, _: HandleValue) {}
fn ObjectAttribute(&self, _: *mut JSContext) -> *mut JSObject { panic!() }
fn SetObjectAttribute(&self, _: *mut JSContext, _: *mut JSObject) {}
fn GetBooleanAttributeNullable(&self) -> Option<bool> { Some(false) }
fn SetBooleanAttributeNullable(&self, _: Option<bool>) {}
fn GetByteAttributeNullable(&self) -> Option<i8> { Some(0) }
fn SetByteAttributeNullable(&self, _: Option<i8>) {}
fn GetOctetAttributeNullable(&self) -> Option<u8> { Some(0) }
fn SetOctetAttributeNullable(&self, _: Option<u8>) {}
fn GetShortAttributeNullable(&self) -> Option<i16> { Some(0) }
fn SetShortAttributeNullable(&self, _: Option<i16>) {}
fn GetUnsignedShortAttributeNullable(&self) -> Option<u16> { Some(0) }
fn SetUnsignedShortAttributeNullable(&self, _: Option<u16>) {}
fn GetLongAttributeNullable(&self) -> Option<i32> { Some(0) }
fn SetLongAttributeNullable(&self, _: Option<i32>) {}
fn GetUnsignedLongAttributeNullable(&self) -> Option<u32> { Some(0) }
fn SetUnsignedLongAttributeNullable(&self, _: Option<u32>) {}
fn GetLongLongAttributeNullable(&self) -> Option<i64> { Some(0) }
fn SetLongLongAttributeNullable(&self, _: Option<i64>) {}
fn GetUnsignedLongLongAttributeNullable(&self) -> Option<u64> { Some(0) }
fn SetUnsignedLongLongAttributeNullable(&self, _: Option<u64>) {}
fn GetUnrestrictedFloatAttributeNullable(&self) -> Option<f32> { Some(0.) }
fn SetUnrestrictedFloatAttributeNullable(&self, _: Option<f32>) {}
fn GetFloatAttributeNullable(&self) -> Option<Finite<f32>> { Some(Finite::wrap(0.)) }
fn SetFloatAttributeNullable(&self, _: Option<Finite<f32>>) {}
fn GetUnrestrictedDoubleAttributeNullable(&self) -> Option<f64> { Some(0.) }
fn SetUnrestrictedDoubleAttributeNullable(&self, _: Option<f64>) {}
fn GetDoubleAttributeNullable(&self) -> Option<Finite<f64>> { Some(Finite::wrap(0.)) }
fn SetDoubleAttributeNullable(&self, _: Option<Finite<f64>>) {}
fn GetByteStringAttributeNullable(&self) -> Option<ByteString> { Some(ByteString::new(vec!())) }
fn SetByteStringAttributeNullable(&self, _: Option<ByteString>) {}
fn GetStringAttributeNullable(&self) -> Option<DOMString> { Some(DOMString::new()) }
fn SetStringAttributeNullable(&self, _: Option<DOMString>) {}
fn GetUsvstringAttributeNullable(&self) -> Option<USVString> { Some(USVString("".to_owned())) }
fn SetUsvstringAttributeNullable(&self, _: Option<USVString>) {}
fn SetBinaryRenamedAttribute(&self, _: DOMString) {}
fn ForwardedAttribute(&self) -> Root<TestBinding> { Root::from_ref(self) }
fn BinaryRenamedAttribute(&self) -> DOMString { DOMString::new() }
fn SetBinaryRenamedAttribute2(&self, _: DOMString) {}
fn BinaryRenamedAttribute2(&self) -> DOMString { DOMString::new() }
fn Attr_to_automatically_rename(&self) -> DOMString { DOMString::new() }
fn SetAttr_to_automatically_rename(&self, _: DOMString) {}
fn GetEnumAttributeNullable(&self) -> Option<TestEnum> { Some(TestEnum::_empty) }
fn GetInterfaceAttributeNullable(&self) -> Option<Root<Blob>> {
Some(Blob::new(self.global().r(), BlobImpl::new_from_empty_slice(), "".to_owned()))
}
fn SetInterfaceAttributeNullable(&self, _: Option<&Blob>) {}
fn GetInterfaceAttributeWeak(&self) -> Option<Root<URL>> {
self.url.root()
}
fn SetInterfaceAttributeWeak(&self, url: Option<&URL>) {
self.url.set(url);
}
fn GetObjectAttributeNullable(&self, _: *mut JSContext) -> *mut JSObject { ptr::null_mut() }
fn SetObjectAttributeNullable(&self, _: *mut JSContext, _: *mut JSObject) {}
fn GetUnionAttributeNullable(&self) -> Option<HTMLElementOrLong> {
Some(HTMLElementOrLong::Long(0))
}
fn SetUnionAttributeNullable(&self, _: Option<HTMLElementOrLong>) {}
fn GetUnion2AttributeNullable(&self) -> Option<EventOrString> {
Some(EventOrString::String(DOMString::new()))
}
fn SetUnion2AttributeNullable(&self, _: Option<EventOrString>) {}
fn GetUnion3AttributeNullable(&self) -> Option<BlobOrBoolean> {
Some(BlobOrBoolean::Boolean(true))
}
fn SetUnion3AttributeNullable(&self, _: Option<BlobOrBoolean>) {}
fn GetUnion4AttributeNullable(&self) -> Option<UnsignedLongOrBoolean> {
Some(UnsignedLongOrBoolean::Boolean(true))
}
fn SetUnion4AttributeNullable(&self, _: Option<UnsignedLongOrBoolean>) {}
fn GetUnion5AttributeNullable(&self) -> Option<StringOrBoolean> {
Some(StringOrBoolean::Boolean(true))
}
fn SetUnion5AttributeNullable(&self, _: Option<StringOrBoolean>) {}
fn BinaryRenamedMethod(&self) -> () {}
fn ReceiveVoid(&self) -> () {}
fn ReceiveBoolean(&self) -> bool { false }
fn ReceiveByte(&self) -> i8 { 0 }
fn ReceiveOctet(&self) -> u8 { 0 }
fn ReceiveShort(&self) -> i16 { 0 }
fn ReceiveUnsignedShort(&self) -> u16 { 0 }
fn ReceiveLong(&self) -> i32 { 0 }
fn ReceiveUnsignedLong(&self) -> u32 { 0 }
fn ReceiveLongLong(&self) -> i64 { 0 }
fn ReceiveUnsignedLongLong(&self) -> u64 { 0 }
fn ReceiveUnrestrictedFloat(&self) -> f32 { 0. }
fn ReceiveFloat(&self) -> Finite<f32> { Finite::wrap(0.) }
fn ReceiveUnrestrictedDouble(&self) -> f64 { 0. }
fn ReceiveDouble(&self) -> Finite<f64> { Finite::wrap(0.) }
fn ReceiveString(&self) -> DOMString { DOMString::new() }
fn ReceiveUsvstring(&self) -> USVString { USVString("".to_owned()) }
fn ReceiveByteString(&self) -> ByteString { ByteString::new(vec!()) }
fn ReceiveEnum(&self) -> TestEnum { TestEnum::_empty }
fn ReceiveInterface(&self) -> Root<Blob> {
Blob::new(self.global().r(), BlobImpl::new_from_empty_slice(), "".to_owned())
}
fn ReceiveAny(&self, _: *mut JSContext) -> JSVal { NullValue() }
fn ReceiveObject(&self, _: *mut JSContext) -> *mut JSObject { panic!() }
fn ReceiveUnion(&self) -> HTMLElementOrLong { HTMLElementOrLong::Long(0) }
fn ReceiveUnion2(&self) -> EventOrString { EventOrString::String(DOMString::new()) }
fn ReceiveUnion3(&self) -> StringOrLongSequence { StringOrLongSequence::LongSequence(vec![]) }
fn ReceiveUnion4(&self) -> StringOrStringSequence { StringOrStringSequence::StringSequence(vec![]) }
fn ReceiveUnion5(&self) -> BlobOrBlobSequence { BlobOrBlobSequence::BlobSequence(vec![]) }
fn ReceiveUnion6(&self) -> StringOrUnsignedLong { StringOrUnsignedLong::String(DOMString::new()) }
fn ReceiveUnion7(&self) -> StringOrBoolean { StringOrBoolean::Boolean(true) }
fn ReceiveUnion8(&self) -> UnsignedLongOrBoolean { UnsignedLongOrBoolean::UnsignedLong(0u32) }
fn ReceiveUnion9(&self) -> HTMLElementOrUnsignedLongOrStringOrBoolean {
HTMLElementOrUnsignedLongOrStringOrBoolean::Boolean(true)
}
fn ReceiveSequence(&self) -> Vec<i32> { vec![1] }
fn ReceiveInterfaceSequence(&self) -> Vec<Root<Blob>> {
vec![Blob::new(self.global().r(), BlobImpl::new_from_empty_slice(), "".to_owned())]
}
fn ReceiveNullableBoolean(&self) -> Option<bool> { Some(false) }
fn ReceiveNullableByte(&self) -> Option<i8> { Some(0) }
fn ReceiveNullableOctet(&self) -> Option<u8> { Some(0) }
fn ReceiveNullableShort(&self) -> Option<i16> { Some(0) }
fn ReceiveNullableUnsignedShort(&self) -> Option<u16> { Some(0) }
fn ReceiveNullableLong(&self) -> Option<i32> { Some(0) }
fn ReceiveNullableUnsignedLong(&self) -> Option<u32> { Some(0) }
fn ReceiveNullableLongLong(&self) -> Option<i64> { Some(0) }
fn ReceiveNullableUnsignedLongLong(&self) -> Option<u64> { Some(0) }
fn ReceiveNullableUnrestrictedFloat(&self) -> Option<f32> { Some(0.) }
fn ReceiveNullableFloat(&self) -> Option<Finite<f32>> { Some(Finite::wrap(0.)) }
fn ReceiveNullableUnrestrictedDouble(&self) -> Option<f64> { Some(0.) }
fn ReceiveNullableDouble(&self) -> Option<Finite<f64>> { Some(Finite::wrap(0.)) }
fn ReceiveNullableString(&self) -> Option<DOMString> { Some(DOMString::new()) }
fn ReceiveNullableUsvstring(&self) -> Option<USVString> { Some(USVString("".to_owned())) }
fn ReceiveNullableByteString(&self) -> Option<ByteString> { Some(ByteString::new(vec!())) }
fn ReceiveNullableEnum(&self) -> Option<TestEnum> { Some(TestEnum::_empty) }
fn ReceiveNullableInterface(&self) -> Option<Root<Blob>> {
Some(Blob::new(self.global().r(), BlobImpl::new_from_empty_slice(), "".to_owned()))
}
fn ReceiveNullableObject(&self, _: *mut JSContext) -> *mut JSObject { ptr::null_mut() }
fn ReceiveNullableUnion(&self) -> Option<HTMLElementOrLong> {
Some(HTMLElementOrLong::Long(0))
}
fn ReceiveNullableUnion2(&self) -> Option<EventOrString> {
Some(EventOrString::String(DOMString::new()))
}
fn ReceiveNullableUnion3(&self) -> Option<StringOrLongSequence> {
Some(StringOrLongSequence::String(DOMString::new()))
}
fn ReceiveNullableUnion4(&self) -> Option<LongSequenceOrBoolean> {
Some(LongSequenceOrBoolean::Boolean(true))
}
fn ReceiveNullableUnion5(&self) -> Option<UnsignedLongOrBoolean> {
Some(UnsignedLongOrBoolean::UnsignedLong(0u32))
}
fn ReceiveNullableSequence(&self) -> Option<Vec<i32>> { Some(vec![1]) }
fn ReceiveTestDictionaryWithSuccessOnKeyword(&self) -> TestDictionary {
TestDictionary {
anyValue: NullValue(),
booleanValue: None,
byteValue: None,
dict: TestDictionaryDefaults {
UnrestrictedDoubleValue: 0.0,
anyValue: NullValue(),
booleanValue: false,
byteValue: 0,
doubleValue: Finite::new(1.0).unwrap(),
enumValue: TestEnum::Foo,
floatValue: Finite::new(1.0).unwrap(),
longLongValue: 54,
longValue: 12,
nullableBooleanValue: None,
nullableByteValue: None,
nullableDoubleValue: None,
nullableFloatValue: None,
nullableLongLongValue: None,
nullableLongValue: None,
nullableObjectValue: ptr::null_mut(),
nullableOctetValue: None,
nullableShortValue: None,
nullableStringValue: None,
nullableUnrestrictedDoubleValue: None,
nullableUnrestrictedFloatValue: None,
nullableUnsignedLongLongValue: None,
nullableUnsignedLongValue: None,
nullableUnsignedShortValue: None,
nullableUsvstringValue: None,
octetValue: 0,
shortValue: 0,
stringValue: DOMString::new(),
unrestrictedFloatValue: 0.0,
unsignedLongLongValue: 0,
unsignedLongValue: 0,
unsignedShortValue: 0,
usvstringValue: USVString("".to_owned()),
},
doubleValue: None,
enumValue: None,
floatValue: None,
interfaceValue: None,
longLongValue: None,
longValue: None,
objectValue: None,
octetValue: None,
requiredValue: true,
seqDict: None,
shortValue: None,
stringValue: None,
type_: Some(DOMString::from("success")),
unrestrictedDoubleValue: None,
unrestrictedFloatValue: None,
unsignedLongLongValue: None,
unsignedLongValue: None,
unsignedShortValue: None,
usvstringValue: None,
nonRequiredNullable: None,
nonRequiredNullable2: Some(None), // null
}
}
fn DictMatchesPassedValues(&self, arg: &TestDictionary) -> bool {
arg.type_.as_ref().map(|s| s == "success").unwrap_or(false) &&
arg.nonRequiredNullable.is_none() &&
arg.nonRequiredNullable2 == Some(None)
}
fn PassBoolean(&self, _: bool) {}
fn PassByte(&self, _: i8) {}
fn PassOctet(&self, _: u8) {}
fn PassShort(&self, _: i16) {}
fn PassUnsignedShort(&self, _: u16) {}
fn PassLong(&self, _: i32) {}
fn PassUnsignedLong(&self, _: u32) {}
fn PassLongLong(&self, _: i64) {}<๏ฝfimโhole๏ฝ> fn PassUnrestrictedFloat(&self, _: f32) {}
fn PassFloat(&self, _: Finite<f32>) {}
fn PassUnrestrictedDouble(&self, _: f64) {}
fn PassDouble(&self, _: Finite<f64>) {}
fn PassString(&self, _: DOMString) {}
fn PassUsvstring(&self, _: USVString) {}
fn PassByteString(&self, _: ByteString) {}
fn PassEnum(&self, _: TestEnum) {}
fn PassInterface(&self, _: &Blob) {}
fn PassUnion(&self, _: HTMLElementOrLong) {}
fn PassUnion2(&self, _: EventOrString) {}
fn PassUnion3(&self, _: BlobOrString) {}
fn PassUnion4(&self, _: StringOrStringSequence) {}
fn PassUnion5(&self, _: StringOrBoolean) {}
fn PassUnion6(&self, _: UnsignedLongOrBoolean) {}
fn PassUnion7(&self, _: StringSequenceOrUnsignedLong) {}
fn PassAny(&self, _: *mut JSContext, _: HandleValue) {}
fn PassObject(&self, _: *mut JSContext, _: *mut JSObject) {}
fn PassCallbackFunction(&self, _: Rc<Function>) {}
fn PassCallbackInterface(&self, _: Rc<EventListener>) {}
fn PassSequence(&self, _: Vec<i32>) {}
fn PassStringSequence(&self, _: Vec<DOMString>) {}
fn PassInterfaceSequence(&self, _: Vec<Root<Blob>>) {}
fn PassNullableBoolean(&self, _: Option<bool>) {}
fn PassNullableByte(&self, _: Option<i8>) {}
fn PassNullableOctet(&self, _: Option<u8>) {}
fn PassNullableShort(&self, _: Option<i16>) {}
fn PassNullableUnsignedShort(&self, _: Option<u16>) {}
fn PassNullableLong(&self, _: Option<i32>) {}
fn PassNullableUnsignedLong(&self, _: Option<u32>) {}
fn PassNullableLongLong(&self, _: Option<i64>) {}
fn PassNullableUnsignedLongLong(&self, _: Option<u64>) {}
fn PassNullableUnrestrictedFloat(&self, _: Option<f32>) {}
fn PassNullableFloat(&self, _: Option<Finite<f32>>) {}
fn PassNullableUnrestrictedDouble(&self, _: Option<f64>) {}
fn PassNullableDouble(&self, _: Option<Finite<f64>>) {}
fn PassNullableString(&self, _: Option<DOMString>) {}
fn PassNullableUsvstring(&self, _: Option<USVString>) {}
fn PassNullableByteString(&self, _: Option<ByteString>) {}
// fn PassNullableEnum(self, _: Option<TestEnum>) {}
fn PassNullableInterface(&self, _: Option<&Blob>) {}
fn PassNullableObject(&self, _: *mut JSContext, _: *mut JSObject) {}
fn PassNullableUnion(&self, _: Option<HTMLElementOrLong>) {}
fn PassNullableUnion2(&self, _: Option<EventOrString>) {}
fn PassNullableUnion3(&self, _: Option<StringOrLongSequence>) {}
fn PassNullableUnion4(&self, _: Option<LongSequenceOrBoolean>) {}
fn PassNullableUnion5(&self, _: Option<UnsignedLongOrBoolean>) {}
fn PassNullableCallbackFunction(&self, _: Option<Rc<Function>>) {}
fn PassNullableCallbackInterface(&self, _: Option<Rc<EventListener>>) {}
fn PassNullableSequence(&self, _: Option<Vec<i32>>) {}
fn PassOptionalBoolean(&self, _: Option<bool>) {}
fn PassOptionalByte(&self, _: Option<i8>) {}
fn PassOptionalOctet(&self, _: Option<u8>) {}
fn PassOptionalShort(&self, _: Option<i16>) {}
fn PassOptionalUnsignedShort(&self, _: Option<u16>) {}
fn PassOptionalLong(&self, _: Option<i32>) {}
fn PassOptionalUnsignedLong(&self, _: Option<u32>) {}
fn PassOptionalLongLong(&self, _: Option<i64>) {}
fn PassOptionalUnsignedLongLong(&self, _: Option<u64>) {}
fn PassOptionalUnrestrictedFloat(&self, _: Option<f32>) {}
fn PassOptionalFloat(&self, _: Option<Finite<f32>>) {}
fn PassOptionalUnrestrictedDouble(&self, _: Option<f64>) {}
fn PassOptionalDouble(&self, _: Option<Finite<f64>>) {}
fn PassOptionalString(&self, _: Option<DOMString>) {}
fn PassOptionalUsvstring(&self, _: Option<USVString>) {}
fn PassOptionalByteString(&self, _: Option<ByteString>) {}
fn PassOptionalEnum(&self, _: Option<TestEnum>) {}
fn PassOptionalInterface(&self, _: Option<&Blob>) {}
fn PassOptionalUnion(&self, _: Option<HTMLElementOrLong>) {}
fn PassOptionalUnion2(&self, _: Option<EventOrString>) {}
fn PassOptionalUnion3(&self, _: Option<StringOrLongSequence>) {}
fn PassOptionalUnion4(&self, _: Option<LongSequenceOrBoolean>) {}
fn PassOptionalUnion5(&self, _: Option<UnsignedLongOrBoolean>) {}
fn PassOptionalAny(&self, _: *mut JSContext, _: HandleValue) {}
fn PassOptionalObject(&self, _: *mut JSContext, _: Option<*mut JSObject>) {}
fn PassOptionalCallbackFunction(&self, _: Option<Rc<Function>>) {}
fn PassOptionalCallbackInterface(&self, _: Option<Rc<EventListener>>) {}
fn PassOptionalSequence(&self, _: Option<Vec<i32>>) {}
fn PassOptionalNullableBoolean(&self, _: Option<Option<bool>>) {}
fn PassOptionalNullableByte(&self, _: Option<Option<i8>>) {}
fn PassOptionalNullableOctet(&self, _: Option<Option<u8>>) {}
fn PassOptionalNullableShort(&self, _: Option<Option<i16>>) {}
fn PassOptionalNullableUnsignedShort(&self, _: Option<Option<u16>>) {}
fn PassOptionalNullableLong(&self, _: Option<Option<i32>>) {}
fn PassOptionalNullableUnsignedLong(&self, _: Option<Option<u32>>) {}
fn PassOptionalNullableLongLong(&self, _: Option<Option<i64>>) {}
fn PassOptionalNullableUnsignedLongLong(&self, _: Option<Option<u64>>) {}
fn PassOptionalNullableUnrestrictedFloat(&self, _: Option<Option<f32>>) {}
fn PassOptionalNullableFloat(&self, _: Option<Option<Finite<f32>>>) {}
fn PassOptionalNullableUnrestrictedDouble(&self, _: Option<Option<f64>>) {}
fn PassOptionalNullableDouble(&self, _: Option<Option<Finite<f64>>>) {}
fn PassOptionalNullableString(&self, _: Option<Option<DOMString>>) {}
fn PassOptionalNullableUsvstring(&self, _: Option<Option<USVString>>) {}
fn PassOptionalNullableByteString(&self, _: Option<Option<ByteString>>) {}
// fn PassOptionalNullableEnum(self, _: Option<Option<TestEnum>>) {}
fn PassOptionalNullableInterface(&self, _: Option<Option<&Blob>>) {}
fn PassOptionalNullableObject(&self, _: *mut JSContext, _: Option<*mut JSObject>) {}
fn PassOptionalNullableUnion(&self, _: Option<Option<HTMLElementOrLong>>) {}
fn PassOptionalNullableUnion2(&self, _: Option<Option<EventOrString>>) {}
fn PassOptionalNullableUnion3(&self, _: Option<Option<StringOrLongSequence>>) {}
fn PassOptionalNullableUnion4(&self, _: Option<Option<LongSequenceOrBoolean>>) {}
fn PassOptionalNullableUnion5(&self, _: Option<Option<UnsignedLongOrBoolean>>) {}
fn PassOptionalNullableCallbackFunction(&self, _: Option<Option<Rc<Function>>>) {}
fn PassOptionalNullableCallbackInterface(&self, _: Option<Option<Rc<EventListener>>>) {}
fn PassOptionalNullableSequence(&self, _: Option<Option<Vec<i32>>>) {}
fn PassOptionalBooleanWithDefault(&self, _: bool) {}
fn PassOptionalByteWithDefault(&self, _: i8) {}
fn PassOptionalOctetWithDefault(&self, _: u8) {}
fn PassOptionalShortWithDefault(&self, _: i16) {}
fn PassOptionalUnsignedShortWithDefault(&self, _: u16) {}
fn PassOptionalLongWithDefault(&self, _: i32) {}
fn PassOptionalUnsignedLongWithDefault(&self, _: u32) {}
fn PassOptionalLongLongWithDefault(&self, _: i64) {}
fn PassOptionalUnsignedLongLongWithDefault(&self, _: u64) {}
fn PassOptionalStringWithDefault(&self, _: DOMString) {}
fn PassOptionalUsvstringWithDefault(&self, _: USVString) {}
fn PassOptionalEnumWithDefault(&self, _: TestEnum) {}
fn PassOptionalNullableBooleanWithDefault(&self, _: Option<bool>) {}
fn PassOptionalNullableByteWithDefault(&self, _: Option<i8>) {}
fn PassOptionalNullableOctetWithDefault(&self, _: Option<u8>) {}
fn PassOptionalNullableShortWithDefault(&self, _: Option<i16>) {}
fn PassOptionalNullableUnsignedShortWithDefault(&self, _: Option<u16>) {}
fn PassOptionalNullableLongWithDefault(&self, _: Option<i32>) {}
fn PassOptionalNullableUnsignedLongWithDefault(&self, _: Option<u32>) {}
fn PassOptionalNullableLongLongWithDefault(&self, _: Option<i64>) {}
fn PassOptionalNullableUnsignedLongLongWithDefault(&self, _: Option<u64>) {}
// fn PassOptionalNullableUnrestrictedFloatWithDefault(self, _: Option<f32>) {}
// fn PassOptionalNullableFloatWithDefault(self, _: Option<Finite<f32>>) {}
// fn PassOptionalNullableUnrestrictedDoubleWithDefault(self, _: Option<f64>) {}
// fn PassOptionalNullableDoubleWithDefault(self, _: Option<Finite<f64>>) {}
fn PassOptionalNullableStringWithDefault(&self, _: Option<DOMString>) {}
fn PassOptionalNullableUsvstringWithDefault(&self, _: Option<USVString>) {}
fn PassOptionalNullableByteStringWithDefault(&self, _: Option<ByteString>) {}
// fn PassOptionalNullableEnumWithDefault(self, _: Option<TestEnum>) {}
fn PassOptionalNullableInterfaceWithDefault(&self, _: Option<&Blob>) {}
fn PassOptionalNullableObjectWithDefault(&self, _: *mut JSContext, _: *mut JSObject) {}
fn PassOptionalNullableUnionWithDefault(&self, _: Option<HTMLElementOrLong>) {}
fn PassOptionalNullableUnion2WithDefault(&self, _: Option<EventOrString>) {}
// fn PassOptionalNullableCallbackFunctionWithDefault(self, _: Option<Function>) {}
fn PassOptionalNullableCallbackInterfaceWithDefault(&self, _: Option<Rc<EventListener>>) {}
fn PassOptionalAnyWithDefault(&self, _: *mut JSContext, _: HandleValue) {}
fn PassOptionalNullableBooleanWithNonNullDefault(&self, _: Option<bool>) {}
fn PassOptionalNullableByteWithNonNullDefault(&self, _: Option<i8>) {}
fn PassOptionalNullableOctetWithNonNullDefault(&self, _: Option<u8>) {}
fn PassOptionalNullableShortWithNonNullDefault(&self, _: Option<i16>) {}
fn PassOptionalNullableUnsignedShortWithNonNullDefault(&self, _: Option<u16>) {}
fn PassOptionalNullableLongWithNonNullDefault(&self, _: Option<i32>) {}
fn PassOptionalNullableUnsignedLongWithNonNullDefault(&self, _: Option<u32>) {}
fn PassOptionalNullableLongLongWithNonNullDefault(&self, _: Option<i64>) {}
fn PassOptionalNullableUnsignedLongLongWithNonNullDefault(&self, _: Option<u64>) {}
// fn PassOptionalNullableUnrestrictedFloatWithNonNullDefault(self, _: Option<f32>) {}
// fn PassOptionalNullableFloatWithNonNullDefault(self, _: Option<Finite<f32>>) {}
// fn PassOptionalNullableUnrestrictedDoubleWithNonNullDefault(self, _: Option<f64>) {}
// fn PassOptionalNullableDoubleWithNonNullDefault(self, _: Option<Finite<f64>>) {}
fn PassOptionalNullableStringWithNonNullDefault(&self, _: Option<DOMString>) {}
fn PassOptionalNullableUsvstringWithNonNullDefault(&self, _: Option<USVString>) {}
// fn PassOptionalNullableEnumWithNonNullDefault(self, _: Option<TestEnum>) {}
fn PassVariadicBoolean(&self, _: Vec<bool>) {}
fn PassVariadicBooleanAndDefault(&self, _: bool, _: Vec<bool>) {}
fn PassVariadicByte(&self, _: Vec<i8>) {}
fn PassVariadicOctet(&self, _: Vec<u8>) {}
fn PassVariadicShort(&self, _: Vec<i16>) {}
fn PassVariadicUnsignedShort(&self, _: Vec<u16>) {}
fn PassVariadicLong(&self, _: Vec<i32>) {}
fn PassVariadicUnsignedLong(&self, _: Vec<u32>) {}
fn PassVariadicLongLong(&self, _: Vec<i64>) {}
fn PassVariadicUnsignedLongLong(&self, _: Vec<u64>) {}
fn PassVariadicUnrestrictedFloat(&self, _: Vec<f32>) {}
fn PassVariadicFloat(&self, _: Vec<Finite<f32>>) {}
fn PassVariadicUnrestrictedDouble(&self, _: Vec<f64>) {}
fn PassVariadicDouble(&self, _: Vec<Finite<f64>>) {}
fn PassVariadicString(&self, _: Vec<DOMString>) {}
fn PassVariadicUsvstring(&self, _: Vec<USVString>) {}
fn PassVariadicByteString(&self, _: Vec<ByteString>) {}
fn PassVariadicEnum(&self, _: Vec<TestEnum>) {}
fn PassVariadicInterface(&self, _: &[&Blob]) {}
fn PassVariadicUnion(&self, _: Vec<HTMLElementOrLong>) {}
fn PassVariadicUnion2(&self, _: Vec<EventOrString>) {}
fn PassVariadicUnion3(&self, _: Vec<BlobOrString>) {}
fn PassVariadicUnion4(&self, _: Vec<BlobOrBoolean>) {}
fn PassVariadicUnion5(&self, _: Vec<StringOrUnsignedLong>) {}
fn PassVariadicUnion6(&self, _: Vec<UnsignedLongOrBoolean>) {}
fn PassVariadicAny(&self, _: *mut JSContext, _: Vec<HandleValue>) {}
fn PassVariadicObject(&self, _: *mut JSContext, _: Vec<*mut JSObject>) {}
fn BooleanMozPreference(&self, pref_name: DOMString) -> bool {
get_pref(pref_name.as_ref()).as_boolean().unwrap_or(false)
}
fn StringMozPreference(&self, pref_name: DOMString) -> DOMString {
get_pref(pref_name.as_ref()).as_string().map(|s| DOMString::from(s)).unwrap_or_else(|| DOMString::new())
}
fn PrefControlledAttributeDisabled(&self) -> bool { false }
fn PrefControlledAttributeEnabled(&self) -> bool { false }
fn PrefControlledMethodDisabled(&self) {}
fn PrefControlledMethodEnabled(&self) {}
fn FuncControlledAttributeDisabled(&self) -> bool { false }
fn FuncControlledAttributeEnabled(&self) -> bool { false }
fn FuncControlledMethodDisabled(&self) {}
fn FuncControlledMethodEnabled(&self) {}
#[allow(unsafe_code)]
fn CrashHard(&self) {
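        // Deliberately crash by writing into read-only memory; write_volatile
        // keeps the compiler from optimizing the faulting store away.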
static READ_ONLY_VALUE: i32 = 0;
unsafe {
let p: *mut u32 = &READ_ONLY_VALUE as *const _ as *mut _;
ptr::write_volatile(p, 0xbaadc0de);
}
}
}
impl TestBinding {
pub fn BooleanAttributeStatic(_: GlobalRef) -> bool { false }
pub fn SetBooleanAttributeStatic(_: GlobalRef, _: bool) {}
pub fn ReceiveVoidStatic(_: GlobalRef) {}
pub fn PrefControlledStaticAttributeDisabled(_: GlobalRef) -> bool { false }
pub fn PrefControlledStaticAttributeEnabled(_: GlobalRef) -> bool { false }
pub fn PrefControlledStaticMethodDisabled(_: GlobalRef) {}
pub fn PrefControlledStaticMethodEnabled(_: GlobalRef) {}
pub fn FuncControlledStaticAttributeDisabled(_: GlobalRef) -> bool { false }
pub fn FuncControlledStaticAttributeEnabled(_: GlobalRef) -> bool { false }
pub fn FuncControlledStaticMethodDisabled(_: GlobalRef) {}
pub fn FuncControlledStaticMethodEnabled(_: GlobalRef) {}
}
#[allow(unsafe_code)]
impl TestBinding {
pub unsafe fn condition_satisfied(_: *mut JSContext, _: HandleObject) -> bool { true }
pub unsafe fn condition_unsatisfied(_: *mut JSContext, _: HandleObject) -> bool { false }
}<๏ฝfimโend๏ฝ> | fn PassUnsignedLongLong(&self, _: u64) {} |
<|file_name|>value.py<|end_file_name|><๏ฝfimโbegin๏ฝ>from keras.models import Sequential
from keras.layers import convolutional
from keras.layers.core import Dense, Flatten
from SGD_exponential_decay import SGD_exponential_decay as SGD
### Parameters obtained from paper ###
K = 152 # number of filters in each convolutional layer
LEARNING_RATE = .003 # initial learning rate
DECAY = 8.664339379294006e-08 # rate of exponential learning_rate decay
class value_trainer:
def __init__(self):
self.model = Sequential()
self.model.add(convolutional.Convolution2D(input_shape=(49, 19, 19), nb_filter=K, nb_row=5, nb_col=5,
init='uniform', activation='relu', border_mode='same'))
        for i in range(2, 13): # convolutional layers 2 through 12
self.model.add(convolutional.Convolution2D(nb_filter=K, nb_row=3, nb_col=3,<๏ฝfimโhole๏ฝ> init='uniform', activation='relu', border_mode='same'))
self.model.add(convolutional.Convolution2D(nb_filter=1, nb_row=1, nb_col=1,
init='uniform', activation='linear', border_mode='same'))
self.model.add(Flatten())
self.model.add(Dense(256,init='uniform'))
self.model.add(Dense(1,init='uniform',activation="tanh"))
sgd = SGD(lr=LEARNING_RATE, decay=DECAY)
self.model.compile(loss='mean_squared_error', optimizer=sgd)
def get_samples(self):
# TODO non-terminating loop that draws training samples uniformly at random
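        # A minimal sketch (hypothetical draw_random_position helper):
        #   while True:
        #       board_tensor, game_outcome = draw_random_position()
        #       yield board_tensor, game_outcome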
pass
def train(self):
# TODO use self.model.fit_generator to train from data source
pass
if __name__ == '__main__':
trainer = value_trainer()
# TODO command line instantiation<๏ฝfimโend๏ฝ> | |
<|file_name|>types.go<|end_file_name|><๏ฝfimโbegin๏ฝ>// Copyright (c) Dropbox, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN<๏ฝfimโhole๏ฝ>
import "github.com/dropbox/dropbox-sdk-go-unofficial/dropbox"
// PlatformType : Possible platforms on which a user may view content.
type PlatformType struct {
dropbox.Tagged
}
// Valid tag values for PlatformType
const (
PlatformTypeWeb = "web"
PlatformTypeMobile = "mobile"
PlatformTypeDesktop = "desktop"
PlatformTypeUnknown = "unknown"
PlatformTypeOther = "other"
)<｜fim▁end｜> | // THE SOFTWARE.
// Package seen_state : has no documentation (yet)
package seen_state |
<|file_name|>io_split.rs<|end_file_name|><｜fim▁begin｜>#![warn(rust_2018_idioms)]
#![cfg(feature = "full")]
use tokio::io::{split, AsyncRead, AsyncWrite, ReadBuf, ReadHalf, WriteHalf};
use std::io;
use std::pin::Pin;
use std::task::{Context, Poll};
struct RW;
impl AsyncRead for RW {
fn poll_read(
self: Pin<&mut Self>,
_cx: &mut Context<'_>,
buf: &mut ReadBuf<'_>,
) -> Poll<io::Result<()>> {
buf.put_slice(&[b'z']);
Poll::Ready(Ok(()))
}
}
impl AsyncWrite for RW {
fn poll_write(
self: Pin<&mut Self>,
_cx: &mut Context<'_>,
_buf: &[u8],
) -> Poll<Result<usize, io::Error>> {
Poll::Ready(Ok(1))
}
fn poll_flush(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Result<(), io::Error>> {
Poll::Ready(Ok(()))
}
fn poll_shutdown(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Result<(), io::Error>> {
Poll::Ready(Ok(()))
}
}
#[test]
fn is_send_and_sync() {
fn assert_bound<T: Send + Sync>() {}
assert_bound::<ReadHalf<RW>>();
assert_bound::<WriteHalf<RW>>();
}
#[test]
fn split_stream_id() {
let (r1, w1) = split(RW);
let (r2, w2) = split(RW);
assert!(r1.is_pair_of(&w1));
assert!(!r1.is_pair_of(&w2));
assert!(r2.is_pair_of(&w2));
assert!(!r2.is_pair_of(&w1));
}
#[test]
fn unsplit_ok() {
let (r, w) = split(RW);
r.unsplit(w);
}
#[test]
#[should_panic]
fn unsplit_err1() {
let (r, _) = split(RW);
let (_, w) = split(RW);
r.unsplit(w);
}
#[test]<｜fim▁hole｜>#[should_panic]
fn unsplit_err2() {
let (_, w) = split(RW);
let (r, _) = split(RW);
r.unsplit(w);
}<｜fim▁end｜> | |
<|file_name|>conversion_traits.hpp<|end_file_name|><｜fim▁begin｜>// (c) Copyright Fernando Luis Cacciola Carballal 2000-2004
// Use, modification, and distribution is subject to the Boost Software
// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
// See library home page at http://www.boost.org/libs/numeric/conversion
//
// Contact the author at: [email protected]
//
#ifndef BOOST_NUMERIC_CONVERSION_CONVERSION_TRAITS_FLC_12NOV2002_HPP
#define BOOST_NUMERIC_CONVERSION_CONVERSION_TRAITS_FLC_12NOV2002_HPP
<｜fim▁hole｜>#include "boost/config.hpp"
namespace abt_boost{} namespace boost = abt_boost; namespace abt_boost{ namespace numeric
{
template<class T, class S>
struct conversion_traits
: convdetail::get_conversion_traits<T,S>::type
{
#if BOOST_WORKAROUND(BOOST_MSVC, <= 1300)
typedef typename convdetail::get_conversion_traits<T,S>::type base_;
typedef typename base_::target_type target_type;
typedef typename base_::source_type source_type;
typedef typename base_::result_type result_type;
typedef typename base_::argument_type argument_type;
#endif
} ;
} } // namespace abt_boost::numeric
#endif
//
///////////////////////////////////////////////////////////////////////////////////////////////<｜fim▁end｜> | #include "boost/numeric/conversion/detail/conversion_traits.hpp"
#include "boost/detail/workaround.hpp" |
<|file_name|>my_xmlparser.py<|end_file_name|><｜fim▁begin｜>fn='/home/thomas/Dokumente/freecad_buch/D006_landxml/Survey.xml'
fn='/home/thomas/Dokumente/freecad_buch/D006_landxml/bratton farm-2.0.xml'
# fn='/home/thomas/Dokumente/freecad_buch/D006_landxml/test.xml'
fn='/home/thomas/Dokumente/freecad_buch/D006_landxml/Portsmouth Heights.xml'
fn='/home/thomas/Dokumente/freecad_buch/D006_landxml/FreeCAD_Document.xml'
# demo files
# http://www.landxml.org/webapps/LandXMLSamples.aspx
# http://landxml.org/schema/LandXML-2.0/samples/Carlson Software/corridor from CD3-2.0.xml
import geodat
import re
from geodat.say import say
import PySide
from PySide import QtGui
import FreeCADGui as Gui
import FreeCAD
class node():
def __init__(self,typ):
# print("erzuegen node,type ",typ)
self.typ=typ
self.params={}
self.content=[]
def getParam(self,param):
return self.params[param]
def getNodes(self,typ):
ns=[]<｜fim▁hole｜> ns += [c]
return ns
def addContent(self,c):
self.content += [c]
def __str__(self):
return self.typ
def getiterator(self,typ):
rc=[]
for obj in self.content:
if obj.typ==typ:
rc += [obj]
rc += obj.getiterator(typ)
return rc
def parseParams(string):
params={}
s=string
while s!="":
res = re.search(r"(\S+)=\"([^\"]*)\"\s+(\S.*)", s)
if res != None:
assert len(res.groups())==3
k,v,s=res.group(1),res.group(2),res.group(3)
params[k]=v
continue
res = re.search(r"(\S+)=\"(.*)\"", s)
if res != None:
assert len(res.groups())==2
k,v,s=res.group(1),res.group(2),""
params[k]=v
continue
else:
raise Exception("parse Params Fehler:"+ s)
s=""
return params
def getData(fn,pb=None):
if pb==None:
pb=QtGui.QProgressBar()
pb.show()
stack=[0,0]*4
stackpointer=-1
objs=[]
say("Read data from cache file ...")
say(fn)
f=open(fn,"r")
content=f.readlines()
c2=[]
cl=len(content)
# FreeCAD File hack
if content[2].startswith(" FreeCAD Document, see http://www.freecadweb.org"):
content=content[4:]
cl=len(content)
say(cl)
i=-1
pb.setMaximum(cl)
while i<cl-1:
pb.setValue(i)
i += 1
line=content[i].strip()
j=0
while re.search(r">\s*$", line) == None and j<60:
i += 1
j += 1
line += content[i]
c2 +=[line]
line=''
content=c2
pb.setMaximum(len(content))
for lc,line in enumerate(content):
if "<TextureHexString>" in line:
say ("break A")
continue
pb.setValue(lc)
# if lc%100 == 0:
# say(lc)
# Gui.updateGui()
# if stackpointer != -1:
# print (res.groups())
# print (stackpointer)
# print ("\n-------------NEXT:")
# print(line)
# print ("--- PARSE IT------------------------")
if re.search(r"^\s*$",line):
continue
# one complete record (self-closing tag)
res = re.search(r"^\s*<(\S+)\s+([^<]*)/>\s*$", line)
if res != None:
# print ("complete! ",res.groups())
assert len(res.groups())==2
typ=res.group(1)
obj=node(typ)
paramstring=res.group(2)
obj.params=parseParams(paramstring)
objs += [obj]
if stackpointer != -1:
stack[stackpointer].content += [obj]
# print stack[stackpointer]
# for c in stack[stackpointer].content:
# print c,",",
# print
continue
res = re.search(r"^\s*<(\S+)\s+([^<]*)>\s*$", line)
if res != None:
# print ("!start! ",res.groups())
assert len(res.groups())==2
typ=res.group(1)
obj=node(typ)
paramstring=res.group(2)
obj.params=parseParams(paramstring)
objs += [obj]
if stackpointer != -1:
stack[stackpointer].content += [obj]
# for c in stack[stackpointer].content:
# print c,
stackpointer += 1
stack[stackpointer]=obj
continue
res = re.search(r"^\s*</([^<]*)>\s*$", line)
if res != None:
# print ("!ende---------STACKPOINTER down! ",res.groups())
assert len(res.groups())==1
stackpointer -= 1
continue
res = re.search(r"^\s*<([^<\s]*)>\s*$", line)
if res != None:
# print ("!simple start! ",res.groups())
assert len(res.groups())==1
typ=res.group(1)
obj=node(typ)
if stackpointer != -1:
stack[stackpointer].content += [obj]
stackpointer += 1
stack[stackpointer] = obj
continue
# opens and closes on the same line
res = re.search(r"^\s*<(\S+)\s*([^<]*)>(.*)</([^<]+)>\s*$", line)
if res != None:
# print ("!alles! ",res.groups())
assert len(res.groups())==4
typ=res.group(1)
obj=node(typ)
paramstring=res.group(2)
obj.params=parseParams(paramstring)
obj.text=res.group(3)
objs += [obj]
if stackpointer != -1:
stack[stackpointer].content += [obj]
# for c in stack[stackpointer].content:
# print c,
# stackpointer += 1
# stack[stackpointer]=obj
continue
raise Exception("unerwartet :" +line +":")
# x = re.findall('<([^<]*)>', line)
# for xl in x:
# print(xl)
# say("done getit--------")
FreeCAD.stackpointer=stackpointer
FreeCAD.stack=stack
FreeCAD.objs=objs
return stack[0]
if 0:
#----------------------------
# import landxml
pb=QtGui.QProgressBar()
pb.show()
# progressbar.setValue(0)
#import geodat.my_xmlparser
#reload (geodat.my_xmlparser)
from say import *
# tree=geodat.my_xmlparser.getData(fn)
tree=getData(fn)
# tree=FreeCAD.stack[0]
say("import done")
Gui.updateGui()
Ps={}
pnodes=tree.getiterator('P')
pb.setMaximum(len(pnodes))
for i,element in enumerate(pnodes):
pb.setValue(i)
# say((element.params,element.text))
_coords=element.text.split(' ')
Ps[element.params['id']]=FreeCAD.Vector(float(_coords[0]),float(_coords[1]),float(_coords[2]))
import Points
ptsa=Ps.values()
Points.show(Points.Points(ptsa))
App.activeDocument().recompute()
Gui.SendMsgToActiveView("ViewFit")
Gui.updateGui()
if 0:
for element in tree.getiterator('PntList3D')[:4]:
say((element.params,element.text))
say("Some Faces")
for element in tree.getiterator('F')[:4]:
say((element.params,element.text))
say("BREAKLINES")
for element in tree.getiterator('Breakline')[:3]:
# say((element.typ,element.params))
# say(element.content[0].text)
_coords=element.content[0].text.split(' ')
coords=np.array([float(a) for a in _coords])
coords=coords.reshape(len(_coords)/3,3)
pts=[FreeCAD.Vector(p) for p in coords]
Part.show(Part.makePolygon(pts))
App.ActiveDocument.ActiveObject.Label=element.params['desc']
Gui.updateGui()
for element in tree.getiterator('Boundary')[:10]:
say((element.typ,element.params))
# say("relations")
# for element in tree.getiterator('relation'):
# say(element.params)
1/0
col=[]
for element in tree.getiterator('F'):
say((element.params,element.text))
ixs=element.text.split(' ')
ptsf=[Ps[ix] for ix in ixs]
ptsf += [ptsf[0]]
col +=[Part.makePolygon(ptsf)]
Part.show(Part.Compound(col))
def showFace(rbf,rbf2,x,y,gridsize,shapeColor,bound):
import Draft
makeLoft=False
grids=gridsize
ws=[]
pts2=[]
xi, yi = np.linspace(np.min(x), np.max(x), grids), np.linspace(np.min(y), np.max(y), grids)
for ix in xi:
points=[]
for iy in yi:
iz=float(rbf(ix,iy))
#---------------------- special hacks #+#
if bound>0:
if iz > bound: iz = bound
if iz < -bound: iz = -bound
points.append(FreeCAD.Vector(iy,ix,iz))
if makeLoft:
w=Draft.makeWire(points,closed=False,face=False,support=None)
ws.append(w)
pts2.append(points)
if makeLoft:
ll=FreeCAD.activeDocument().addObject('Part::Loft','elevation')
ll.Sections=ws
ll.Ruled = True
ll.ViewObject.ShapeColor = shapeColor
ll.ViewObject.LineColor = (0.00,0.67,0.00)
for w in ws:
w.ViewObject.Visibility=False
ll.Label="Interpolation Gitter " + str(grids)
bs=Part.BSplineSurface()
bs.interpolate(pts2)
Part.show(bs.toShape())
import scipy.interpolate
def interpolate(x,y,z, gridsize,mode='thin_plate',rbfmode=True,shape=None):
mode=str(mode)
grids=gridsize
dx=np.max(x)-np.min(x)
dy=np.max(y)-np.min(y)
if dx>dy:
gridx=grids
gridy=int(round(dy/dx*grids))
else:
gridy=grids
gridx=int(round(dx/dy*grids))
if shape != None:
(gridy,gridx)=shape
xi, yi = np.linspace(np.min(x), np.max(x), gridx), np.linspace(np.min(y), np.max(y), gridy)
xi, yi = np.meshgrid(xi, yi)
if rbfmode:
rbf = scipy.interpolate.Rbf(x, y, z, function=mode)
rbf2 = scipy.interpolate.Rbf( y,x, z, function=mode)
else:
sayErr("interp2d nicht implementiert")
x=np.array(x)
y=np.array(y)
z=np.array(z)
xi, yi = np.linspace(np.min(x), np.max(x), gridx), np.linspace(np.min(y), np.max(y), gridy)
rbf = scipy.interpolate.interp2d(x, y, z, kind=mode)
rbf2 = scipy.interpolate.interp2d(y, x, z, kind=mode)
zi=rbf2(yi,xi)
return [rbf,xi,yi,zi]
def createsurface(pts,mode='thin_plate',rbfmode=True,gridCount=20,zfactor=1,bound=10**5,matplot=False):
modeColor={
'linear' : ( 1.0, 0.3, 0.0),
'thin_plate' : (0.0, 1.0, 0.0),
'cubic' : (0.0, 1.0, 1.0),
'inverse' : (1.0, 1.0, 0.0),
'multiquadric' : (1.0, .0, 1.0),
'gaussian' : (1.0, 1.0, 1.0),
'quintic' :(0.5,1.0, 0.0)
}
x=[v[1] for v in pts]
y=[v[0] for v in pts]
z=[zfactor*v[2] for v in pts]
x=np.array(x)
y=np.array(y)
z=np.array(z)
gridsize=gridCount
rbf,xi,yi,zi1 = interpolate(x,y,z, gridsize,mode,rbfmode)
# auxiliary plane
xe=[100,-100,100,-100]
ye=[100,100,-100,-100]
ze=[20,10,20,5]
rbf2,xi2,yi2,zi2 = interpolate(xe,ye,ze, gridsize,mode,rbfmode,zi1.shape)
zi=zi1
color=(1.0,0.0,0.0)
showFace(rbf,rbf2,x,y,gridsize,color,bound)
App.ActiveDocument.ActiveObject.Label=mode + " ZFaktor " + str(zfactor) + " #"
rc=App.ActiveDocument.ActiveObject
if 0:
createsurface(ptsa,mode='linear')
if 0:
pn=ptsa[000:2000]
Points.show(Points.Points(pn))
createsurface(pn,mode='linear')<｜fim▁end｜> | for c in self.content:
if c.typ==typ: |
<|file_name|>employmentSkill.ts<|end_file_name|><｜fim▁begin｜>interface IEmploymentSkill {
name: string;
}
<｜fim▁hole｜><｜fim▁end｜> | export default IEmploymentSkill; |
<|file_name|>test_api.py<|end_file_name|><｜fim▁begin｜># coding=utf-8
# pylint: disable-msg=E1101,W0612
from collections import OrderedDict
import pytest
import numpy as np
import pandas as pd
from pandas import Index, Series, DataFrame, date_range
from pandas.core.indexes.datetimes import Timestamp
from pandas.compat import range
from pandas import compat
import pandas.io.formats.printing as printing
from pandas.util.testing import (assert_series_equal,
ensure_clean)
import pandas.util.testing as tm
from .common import TestData
class SharedWithSparse(object):
"""
A collection of tests Series and SparseSeries can share.
In generic tests on this class, use ``self._assert_series_equal()``
which is implemented in sub-classes.
"""
def _assert_series_equal(self, left, right):
"""Dispatch to series class dependent assertion"""
raise NotImplementedError
def test_scalarop_preserve_name(self):
result = self.ts * 2
assert result.name == self.ts.name
def test_copy_name(self):
result = self.ts.copy()
assert result.name == self.ts.name
def test_copy_index_name_checking(self):
# don't want to be able to modify the index stored elsewhere after
# making a copy
self.ts.index.name = None
assert self.ts.index.name is None
assert self.ts is self.ts
cp = self.ts.copy()
cp.index.name = 'foo'
printing.pprint_thing(self.ts.index.name)
assert self.ts.index.name is None
def test_append_preserve_name(self):
result = self.ts[:5].append(self.ts[5:])
assert result.name == self.ts.name
def test_binop_maybe_preserve_name(self):
# names match, preserve
result = self.ts * self.ts
assert result.name == self.ts.name
result = self.ts.mul(self.ts)
assert result.name == self.ts.name
result = self.ts * self.ts[:-2]
assert result.name == self.ts.name
# names don't match, don't preserve
cp = self.ts.copy()
cp.name = 'something else'
result = self.ts + cp
assert result.name is None
result = self.ts.add(cp)
assert result.name is None
ops = ['add', 'sub', 'mul', 'div', 'truediv', 'floordiv', 'mod', 'pow']
ops = ops + ['r' + op for op in ops]
for op in ops:
# names match, preserve
s = self.ts.copy()
result = getattr(s, op)(s)
assert result.name == self.ts.name
# names don't match, don't preserve
cp = self.ts.copy()
cp.name = 'changed'
result = getattr(s, op)(cp)
assert result.name is None
def test_combine_first_name(self):
result = self.ts.combine_first(self.ts[:5])
assert result.name == self.ts.name
def test_getitem_preserve_name(self):
result = self.ts[self.ts > 0]
assert result.name == self.ts.name
result = self.ts[[0, 2, 4]]
assert result.name == self.ts.name
result = self.ts[5:10]
assert result.name == self.ts.name
def test_pickle(self):
unp_series = self._pickle_roundtrip(self.series)
unp_ts = self._pickle_roundtrip(self.ts)
assert_series_equal(unp_series, self.series)
assert_series_equal(unp_ts, self.ts)
def _pickle_roundtrip(self, obj):
with ensure_clean() as path:
obj.to_pickle(path)
unpickled = pd.read_pickle(path)
return unpickled
def test_argsort_preserve_name(self):
result = self.ts.argsort()
assert result.name == self.ts.name
def test_sort_index_name(self):
result = self.ts.sort_index(ascending=False)
assert result.name == self.ts.name
def test_to_sparse_pass_name(self):
result = self.ts.to_sparse()
assert result.name == self.ts.name
def test_constructor_dict(self):
d = {'a': 0., 'b': 1., 'c': 2.}
result = self.series_klass(d)
expected = self.series_klass(d, index=sorted(d.keys()))
self._assert_series_equal(result, expected)
result = self.series_klass(d, index=['b', 'c', 'd', 'a'])
expected = self.series_klass([1, 2, np.nan, 0],
index=['b', 'c', 'd', 'a'])
self._assert_series_equal(result, expected)
def test_constructor_subclass_dict(self):
data = tm.TestSubDict((x, 10.0 * x) for x in range(10))
series = self.series_klass(data)
expected = self.series_klass(dict(compat.iteritems(data)))
self._assert_series_equal(series, expected)
def test_constructor_ordereddict(self):
# GH3283
data = OrderedDict(
('col%s' % i, np.random.random()) for i in range(12))
series = self.series_klass(data)
expected = self.series_klass(list(data.values()), list(data.keys()))
self._assert_series_equal(series, expected)
# Test with subclass
class A(OrderedDict):
pass
series = self.series_klass(A(data))
self._assert_series_equal(series, expected)
def test_constructor_dict_multiindex(self):
d = {('a', 'a'): 0., ('b', 'a'): 1., ('b', 'c'): 2.}
_d = sorted(d.items())
result = self.series_klass(d)
expected = self.series_klass(
[x[1] for x in _d],
index=pd.MultiIndex.from_tuples([x[0] for x in _d]))
self._assert_series_equal(result, expected)
d['z'] = 111.
_d.insert(0, ('z', d['z']))
result = self.series_klass(d)
expected = self.series_klass([x[1] for x in _d],
index=pd.Index([x[0] for x in _d],
tupleize_cols=False))
result = result.reindex(index=expected.index)
self._assert_series_equal(result, expected)
def test_constructor_dict_timedelta_index(self):<｜fim▁hole｜>
# construct Series from dict as data and TimedeltaIndex as index
# will result NaN in result Series data
expected = self.series_klass(
data=['A', 'B', 'C'],
index=pd.to_timedelta([0, 10, 20], unit='s')
)
result = self.series_klass(
data={pd.to_timedelta(0, unit='s'): 'A',
pd.to_timedelta(10, unit='s'): 'B',
pd.to_timedelta(20, unit='s'): 'C'},
index=pd.to_timedelta([0, 10, 20], unit='s')
)
self._assert_series_equal(result, expected)
def test_from_array_deprecated(self):
with tm.assert_produces_warning(FutureWarning):
self.series_klass.from_array([1, 2, 3])
class TestSeriesMisc(TestData, SharedWithSparse):
series_klass = Series
# SharedWithSparse tests use generic, series_klass-agnostic assertion
_assert_series_equal = staticmethod(tm.assert_series_equal)
def test_tab_completion(self):
# GH 9910
s = Series(list('abcd'))
# Series of str values should have .str but not .dt/.cat in __dir__
assert 'str' in dir(s)
assert 'dt' not in dir(s)
assert 'cat' not in dir(s)
# similarly for .dt
s = Series(date_range('1/1/2015', periods=5))
assert 'dt' in dir(s)
assert 'str' not in dir(s)
assert 'cat' not in dir(s)
# Similarly for .cat, but with the twist that str and dt should be
# there if the categories are of that type first cat and str.
s = Series(list('abbcd'), dtype="category")
assert 'cat' in dir(s)
assert 'str' in dir(s) # as it is a string categorical
assert 'dt' not in dir(s)
# similar to cat and str
s = Series(date_range('1/1/2015', periods=5)).astype("category")
assert 'cat' in dir(s)
assert 'str' not in dir(s)
assert 'dt' in dir(s) # as it is a datetime categorical
def test_not_hashable(self):
s_empty = Series()
s = Series([1])
pytest.raises(TypeError, hash, s_empty)
pytest.raises(TypeError, hash, s)
def test_contains(self):
tm.assert_contains_all(self.ts.index, self.ts)
def test_iter(self):
for i, val in enumerate(self.series):
assert val == self.series[i]
for i, val in enumerate(self.ts):
assert val == self.ts[i]
def test_keys(self):
# HACK: By doing this in two stages, we avoid 2to3 wrapping the call
# to .keys() in a list()
getkeys = self.ts.keys
assert getkeys() is self.ts.index
def test_values(self):
tm.assert_almost_equal(self.ts.values, self.ts, check_dtype=False)
def test_iteritems(self):
for idx, val in compat.iteritems(self.series):
assert val == self.series[idx]
for idx, val in compat.iteritems(self.ts):
assert val == self.ts[idx]
# assert is lazy (generators don't define reverse, lists do)
assert not hasattr(self.series.iteritems(), 'reverse')
def test_items(self):
for idx, val in self.series.items():
assert val == self.series[idx]
for idx, val in self.ts.items():
assert val == self.ts[idx]
# assert is lazy (generators don't define reverse, lists do)
assert not hasattr(self.series.items(), 'reverse')
def test_raise_on_info(self):
s = Series(np.random.randn(10))
with pytest.raises(AttributeError):
s.info()
def test_copy(self):
for deep in [None, False, True]:
s = Series(np.arange(10), dtype='float64')
# default deep is True
if deep is None:
s2 = s.copy()
else:
s2 = s.copy(deep=deep)
s2[::2] = np.NaN
if deep is None or deep is True:
# Did not modify original Series
assert np.isnan(s2[0])
assert not np.isnan(s[0])
else:
# we DID modify the original Series
assert np.isnan(s2[0])
assert np.isnan(s[0])
# GH 11794
# copy of tz-aware
expected = Series([Timestamp('2012/01/01', tz='UTC')])
expected2 = Series([Timestamp('1999/01/01', tz='UTC')])
for deep in [None, False, True]:
s = Series([Timestamp('2012/01/01', tz='UTC')])
if deep is None:
s2 = s.copy()
else:
s2 = s.copy(deep=deep)
s2[0] = pd.Timestamp('1999/01/01', tz='UTC')
# default deep is True
if deep is None or deep is True:
# Did not modify original Series
assert_series_equal(s2, expected2)
assert_series_equal(s, expected)
else:
# we DID modify the original Series
assert_series_equal(s2, expected2)
assert_series_equal(s, expected2)
def test_axis_alias(self):
s = Series([1, 2, np.nan])
assert_series_equal(s.dropna(axis='rows'), s.dropna(axis='index'))
assert s.dropna().sum('rows') == 3
assert s._get_axis_number('rows') == 0
assert s._get_axis_name('rows') == 'index'
def test_class_axis(self):
# https://github.com/pandas-dev/pandas/issues/18147
Series.index # no exception!
def test_numpy_unique(self):
# it works!
np.unique(self.ts)
def test_ndarray_compat(self):
# test numpy compat with Series as sub-class of NDFrame
tsdf = DataFrame(np.random.randn(1000, 3), columns=['A', 'B', 'C'],
index=date_range('1/1/2000', periods=1000))
def f(x):
return x[x.idxmax()]
result = tsdf.apply(f)
expected = tsdf.max()
tm.assert_series_equal(result, expected)
# .item()
s = Series([1])
result = s.item()
assert result == 1
assert s.item() == s.iloc[0]
# using an ndarray like function
s = Series(np.random.randn(10))
result = Series(np.ones_like(s))
expected = Series(1, index=range(10), dtype='float64')
tm.assert_series_equal(result, expected)
# ravel
s = Series(np.random.randn(10))
tm.assert_almost_equal(s.ravel(order='F'), s.values.ravel(order='F'))
# compress
# GH 6658
s = Series([0, 1., -1], index=list('abc'))
result = np.compress(s > 0, s)
tm.assert_series_equal(result, Series([1.], index=['b']))
result = np.compress(s < -1, s)
# result empty Index(dtype=object) as the same as original
exp = Series([], dtype='float64', index=Index([], dtype='object'))
tm.assert_series_equal(result, exp)
s = Series([0, 1., -1], index=[.1, .2, .3])
result = np.compress(s > 0, s)
tm.assert_series_equal(result, Series([1.], index=[.2]))
result = np.compress(s < -1, s)
# result empty Float64Index as the same as original
exp = Series([], dtype='float64', index=Index([], dtype='float64'))
tm.assert_series_equal(result, exp)
def test_str_attribute(self):
# GH9068
methods = ['strip', 'rstrip', 'lstrip']
s = Series([' jack', 'jill ', ' jesse ', 'frank'])
for method in methods:
expected = Series([getattr(str, method)(x) for x in s.values])
assert_series_equal(getattr(Series.str, method)(s.str), expected)
# str accessor only valid with string values
s = Series(range(5))
with tm.assert_raises_regex(AttributeError,
'only use .str accessor'):
s.str.repeat(2)
def test_empty_method(self):
s_empty = pd.Series()
assert s_empty.empty
for full_series in [pd.Series([1]), pd.Series(index=[1])]:
assert not full_series.empty
def test_tab_complete_warning(self, ip):
# https://github.com/pandas-dev/pandas/issues/16409
pytest.importorskip('IPython', minversion="6.0.0")
from IPython.core.completer import provisionalcompleter
code = "import pandas as pd; s = pd.Series()"
ip.run_code(code)
with tm.assert_produces_warning(None):
with provisionalcompleter('ignore'):
list(ip.Completer.completions('s.', 1))<｜fim▁end｜> | |
<|file_name|>pull-tests.py<|end_file_name|><｜fim▁begin｜>import binascii
import itertools
import os
import random
import subprocess
from weaver.stack import WeaverNests
from weaver.util import Stash
def nstdir(path):
return os.path.join(CurrentNest().work_dir, path)
# Thoughts:
# - For shared files: fifo-0,push-async-1 is equivalent to fifo-0,pull-inf
TASKS = 25
SHARED = [
{
'count': 128,
'prefix': '1R-shared',
'size': lambda: random.randint(1, 64*2**10),
},
{
'count': 128,
'prefix': '1G-shared',
'size': lambda: 1*2**30,
},
{
'count': 64,
'prefix': '2G-shared',
'size': lambda: 2*2**30,
},
{
'count': 32,
'prefix': '4G-shared',
'size': lambda: 4*2**30,
},
{
'count': 16,
'prefix': '8G-shared',<｜fim▁hole｜>UNIQUE = [
# {
# 'count': 4,
# 'prefix': '2G',
# 'size': lambda: 2*2**30,
# },
# {
# 'count': 2,
# 'prefix': '4G',
# 'size': lambda: 4*2**30,
# },
]
consumer = ShellFunction('''
for f; do
test -e "$f" || exit 1
done
''', cmd_format = "{EXE} {ARG}")
producer = ShellFunction('''
touch "$1"
shift
while [ "$#" -ge 3 ]; do
openssl enc -aes-256-ctr -nosalt -pass pass:"$1" < /dev/zero 2> /dev/null | head -c "$2" > "$3"
shift
shift
shift
done
''', cmd_format = "{EXE} {ARG}")
gen = []
shared = []
for i in range(TASKS):
shared.append(nstdir('sync.%08d' % i))
for f in SHARED:
for i in range(f['count']):
path = nstdir((f['prefix'] + '.%08d') % i)
gen.append({'path': path, 'size': f['size']()})
shared.append(path)
for task in range(TASKS):
print("compiling task %d" % task)
inputs = []
inputs.extend(shared)
taskdir = nstdir('task.%08d' % task)
os.mkdir(taskdir)
for f in UNIQUE:
for i in range(f['count']):
path = os.path.join(taskdir, (f['prefix'] + '.%08d') % i)
inputs.append(path)
gen.append({'path': path, 'size': f['size']()})
consumer(arguments = inputs, inputs = inputs)
random.shuffle(gen)
def makerandoms(i, files):
sync = nstdir('sync.%08d' % i)
args = [sync]
outputs = [sync]
for f in files:
args.extend((binascii.hexlify(os.urandom(64)), f['size'], f['path']))
outputs.append(f['path'])
producer(arguments = args, outputs = outputs)
for i in range(TASKS):
makerandoms(i, gen[i::TASKS])
# vim: set sts=4 sw=4 ts=8 expandtab ft=python:<｜fim▁end｜> | 'size': lambda: 8*2**30,
},
] |
<|file_name|>enqueueActionTest.js<|end_file_name|><｜fim▁begin｜>/*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
({
/**
* Set up the actions by creating a storage, building the hierarchy, and resetting the buffer for the test.
*/
setUp: function(cmp) {
//
// Initialize storage here, in JS to avoid issues with instantiating the component multiple
// times.
//
$A.storageService.initStorage("actions", false, false, 100000, 50, 0, true, true, 1);
cmp.helper.buildHierarchy(cmp);
var ready = false;
$A.test.callServerAction($A.test.getAction(cmp, "c.execute", {
"commands" : "RESET"
}, function() {
ready = true;
}));
$A.test.addWaitFor(true, function() {
return ready;
});
},
/**
* Get a lock name that will not conflict.
*
* FIXME: there should be a $A.test.getUniqueId()
*/
getSafeLock: function(cmp, name) {
if (!cmp._lock_names) {
cmp._lock_names = {};
cmp._lock_base = ""+new Date().getTime();
}
if (!cmp._lock_names[name]) {
cmp._lock_names[name] = $A.test.getTestName()+"."+cmp._lock_base+"."+name;
}
return cmp._lock_names[name];
},
/**
* Convert an array of command arrays into a simple string.
*
* This allows us to ensure locks are unique, and check the command string.
*/
buildCommands: function(cmp, commands) {
var i;<｜fim▁hole｜> for (i = 0; i < commands.length; i++) {
var command_list = commands[i];
var command = command_list[0];
if (command === "WAIT") {
name = this.getSafeLock(cmp, command_list[1]);
commands_out += "WAIT "+name+";";
} else if (command === "RESUME") {
name = this.getSafeLock(cmp, command_list[1]);
commands_out += "RESUME "+name+";";
} else if (command === "APPEND") {
commands_out += "APPEND "+command_list[1]+";";
} else if (command === "COPY") {
commands_out += "COPY;";
} else if (command === "READ") {
commands_out += "READ;";
} else {
throw new Error("Unknown command "+command+" in "+command_list);
}
}
return commands_out;
},
sendAction: function(cmp, path, commands, label, options) {
var i;
var commands_out = "";
var name;
var parent = cmp;
for (i = 0; i < commands.length; i++) {
var command_list = commands[i];
var command = command_list[0];
if (command === "WAIT") {
name = this.getSafeLock(cmp, command_list[1]);
commands_out += "WAIT "+name+";";
} else if (command === "RESUME") {
name = this.getSafeLock(cmp, command_list[1]);
commands_out += "RESUME "+name+";";
} else if (command === "APPEND") {
commands_out += "APPEND "+command_list[1]+";";
} else if (command === "COPY") {
commands_out += "COPY;";
} else if (command === "READ") {
commands_out += "READ;";
}
}
cmp.runAction(path, parent, commands_out, label, options);
},
/**
* Wait for a line to appear at a specific location.
*/
addWaitForLog : function(cmp, index, content, cb, partialMatch) {
var actual;
$A.test.addWaitForWithFailureMessage(false,
function() {
actual = cmp.get("v.log")?cmp.get("v.log")[index]:undefined;
return actual === undefined;
},
"Never received log message '" + content + "' at index " + index,
function() {
if(partialMatch === true) {
$A.test.assertTrue(actual.contains(content), "mismatch on log entry "+index);
} else {
$A.test.assertEquals(content, actual, "mismatch on log entry "+index);
}
if (cb) {
cb();
}
}
);
},
/**
* Wait for a log entry that will fall in a range due to race conditions.
*/
addWaitForLogRace : function(cmp, index1, index2, content, partialMatch) {
var actual;
$A.test.addWaitForWithFailureMessage(true,
function() {
actual = cmp.get("v.log")?cmp.get("v.log")[index2]:undefined;
return actual !== undefined;
},
"Never received log message '" + content + "' between index " + index1 + " and " + index2,
function() {
var i;
var logs = cmp.get("v.log");
var acc = '';
for (i = index1; i <= index2; i++) {
if(partialMatch === true) {
if(logs[i].indexOf(content) >= 0) {
return;
}
} else {
if (logs[i] === content) {
return;
}
}
acc = acc + '\n' + logs[i];
}
$A.test.fail("mismatch in log range "+index1+','+index2+
': did not find '+content+' in:'+acc);
}
);
},
/**
* Wait for a set of log entries that will fall (in order) in a range due to race conditions.
*
* Unlike wait for log race above, this requires a set of log lines to be in order, but allows then to have
* races with other groups of lines. This is useful when you have several sets of actions in paralel, but
* want to ensure that a given set is executed in order.
*/
addWaitForLogRaceOrdered : function(cmp, index1, index2, contentSet, partialMatch) {
var actual;
$A.test.addWaitForWithFailureMessage(true,
function() {
actual = cmp.get("v.log")?cmp.get("v.log")[index2]:undefined;
return actual !== undefined;
},
"Never received log message '" + contentSet + "' between index " + index1 + " and " + index2,
function() {
var i, j;
var logs = cmp.get("v.log");
var acc = '';
for (i = index1, j=0; j < contentSet.length && i <= index2; i++) {
if(partialMatch === true) {
if(logs[i].indexOf(contentSet[j]) >= 0) {
j++;
}
} else {
if (logs[i] === contentSet[j]) {
j++;
}
}
acc = acc + '\n' + logs[i];
}
if (j === contentSet.length) {
return;
}
$A.test.fail("mismatch in log range "+index1+','+index2+
': did not find '+contentSet+' in:'+acc);
}
);
},
/**
* Test that we can enqueue and execute client actions.
*
* Guarantees:
* * client action MUST not run immediately.
* * client action MAY run after a timeout.
* * client action MUST run before render.
*/
testEnqueueClientAction : {
test : [ function(cmp) {
//Action is enqueued but not executed
var action = cmp.get("c.client");
// FIXME: Ensure that the component is not rendered until after the client action runs.
$A.enqueueAction(action);
// logging here should always beat enqueueing.
cmp.helper.log(cmp, cmp, "log1");
this.addWaitForLog(cmp, 0, "log1");
// the only guarantee is that the client actions should
// execute before the render occurs In this case, we should get exactly one rerender.
// FIXME: goliver actions-rewrite
// Don't know how to check for this.
this.addWaitForLog(cmp, 1, "client");
} ]
},
/**
* Test that we can have more than one foreground action running in parallel on the server.
*
* max 4 foreground actions can be run in parallel. here we enqueue 4 foreground actions. ask first 3 to wait on
* server till 4th arrives, then release them all.
*
* if we enqueue 4 foreground actions without releasing any of them, we will run out of available XHRs when we want to
* enqueue another; no error/warning message appears anywhere though, we just put actions in the deferred queue.
*
* This is dangerous, so we have to ensure that we don't create races. To avoid races, we explicitly chain
* our actions using resume and wait. Be careful of deadlocks.
*/
testMultipleForegroundInFlight : {
labels: ["flapper"],
test : [
function(cmp) {
this.sendAction(cmp, [],
[ [ "APPEND", "fore1" ],
[ "RESUME", "fore1.chain" ],
[ "WAIT", "fore1" ],
[ "COPY" ] ],
"fore1");
}, function(cmp) {
this.sendAction(cmp, [],
[ [ "WAIT", "fore1.chain"],
[ "APPEND", "fore2" ],
[ "RESUME", "fore2.chain" ],
[ "WAIT", "fore2" ],
[ "COPY" ] ],
"fore2");
}, function(cmp) {
this.sendAction(cmp, [],
[ [ "WAIT", "fore2.chain"],
[ "APPEND", "fore3" ],
[ "RESUME", "fore3.chain" ],
[ "WAIT", "fore3" ],
[ "COPY" ] ],
"fore3");
}, function(cmp) {
this.sendAction(cmp, [],
[ [ "WAIT", "fore3.chain"],
[ "APPEND", "fore4" ],
[ "READ" ],
[ "APPEND", "fore4.after" ],
[ "RESUME", "fore1" ],
[ "RESUME", "fore2" ],
[ "RESUME", "fore3" ] ],
"fore4");
}, function(cmp) {
this.addWaitForLogRace(cmp, 0, 3, "fore1: SUCCESS fore4.after");
this.addWaitForLogRace(cmp, 0, 3, "fore2: SUCCESS fore4.after");
this.addWaitForLogRace(cmp, 0, 3, "fore3: SUCCESS fore4.after");
this.addWaitForLogRace(cmp, 0, 3, "fore4: SUCCESS fore1,fore2,fore3,fore4");
} ]
},
/**
* Test to ensure that caboose actions are not executed until another foreground action is sent.
*
* Guarantees:
* * Caboose action will not be sent until a server side foreground action is enqueued.
* * allAboardCallback will be called before the action is sent, but after the foreground action is enqueued
*
* This test emulates the log+flush pattern that can be used with a combination of caboose actions and allAboard
* callbacks. This pattern lets the user queue a caboose action and use allAboardCallback to set a param (in this
* case fake log data) to be attached to the action right before the XHR is sent to the server.
*/
testCabooseActionsWithAllAboardCallback : {
test : [
function(cmp) {
var that = this;
this.sendAction(cmp, [],
[ [ "APPEND", "back1" ],
[ "READ" ] ],
"back1",
[ "background" ]);
this.sendAction(cmp, [],
[ [ "APPEND", "caboose1" ],
[ "READ" ] ],
"caboose1",
[ "caboose", "allaboard" ]);
// verify only background action ran
this.addWaitForLog(cmp, 0, "back1: SUCCESS back1");
},
function(cmp) {
this.sendAction(cmp, [],
[ [ "APPEND", "back2" ],
[ "READ" ] ],
"back2",
[ "background" ]);
this.addWaitForLog(cmp, 1, "back2: SUCCESS back2");
},
function(cmp) {
// Client actions also should not trigger the caboose.
$A.enqueueAction(cmp.get("c.client"));
this.addWaitForLog(cmp, 2, "client");
},
function(cmp) {
this.sendAction(cmp, [],
[ [ "APPEND", "fore1" ],
[ "READ" ] ],
"fore1");
// new foreground action should flush out all pending caboose actions
this.addWaitForLog(cmp, 3, "caboose1[AllAboard]: NEW");
this.addWaitForLogRace(cmp, 4, 5, "caboose1: SUCCESS caboose1");
this.addWaitForLogRace(cmp, 4, 5, "fore1: SUCCESS fore1");
}
]
},
/**
* run storable action ('c.execute', param:'WAIT;READ') couple times, make sure we read response from storage
* also check storage is updated when new response come from server (we did it by bgAction1/2/etc).
* NOTE: from storage point of view, only action def and parameter matters, foreground or background are the same
*/
testStorableRefresh : {
test : [ function(cmp) {
//enqueue foreground action(a), ask it to wait on server, till another action (bgAction1) release it.
//a is storable, its return 'initial' is stored
var that = this;
// prime storage
this.sendAction(cmp, [],
[ [ "WAIT", "prime" ],
[ "READ" ] ],
"prime",
[ "storable" ]);
this.sendAction(cmp, [],
[ [ "APPEND", "initial" ],
[ "RESUME", "prime" ] ],
"back",
[ "background" ]);
this.addWaitForLogRace(cmp, 0, 1, "prime: SUCCESS initial");
this.addWaitForLogRace(cmp, 0, 1, "back: SUCCESS ");
}, function(cmp) {
//fire foreground action(a), because we already have its response('initial') stored, it will just get that.
//we also fire background action(bgAction2), it update a's return with a new value,
//it will update stored response for a.
this.sendAction(cmp, [],
[ [ "WAIT", "prime" ],
[ "READ" ] ],
"refresh",
[ "storable" ]);
this.addWaitForLog(cmp, 2, "refresh[stored]: SUCCESS initial");
}, function(cmp) {
this.sendAction(cmp, [],
[ [ "APPEND", "round two" ],
[ "RESUME", "prime" ] ],
"back",
[ "background" ]);
this.addWaitForLogRace(cmp, 3, 4, "back: SUCCESS ");
this.addWaitForLogRace(cmp, 3, 4, "refresh: SUCCESS round two");
}, function(cmp) {
//fire background action(a), it will read response from storage, which is updated by bgAction2 above
//fire foreground action, it update response in storage
}, function(cmp) {
//enqueue foreground action(a) again to double check update from foreAction1 is indeed in storage.
//enqueue background action bgAction3 to release a from server,
//also update the storage with new response 'theEnd'
} ]
},
/**
* Make sure that we send only one of two duplicate actions enqueued.
*
* Test this by putting a single value on the buffer, then reading and clearing in both actions. If they
* both go to the server, they will have different values.
*/
testDeDupeStorable : {
test : [ function(cmp) {
// The storable actions should be 'de-duped', and only one should go to the server.
// This is shown by the fact that they will both get the 'initial' that is saved in a buffer.
this.sendAction(cmp, [],
[ [ "APPEND", "initial" ] ],
"setup",
[ "background" ]);
this.sendAction(cmp, [],
[ [ "WAIT", "prime" ],
[ "READ" ] ],
"prime1",
[ "storable" ]);
this.sendAction(cmp, [],
[ [ "WAIT", "prime" ],
[ "READ" ] ],
"prime2",
[ "storable" ]);
this.addWaitForLog(cmp, 0, "setup: SUCCESS ");
}, function(cmp) {
this.sendAction(cmp, [],
[ [ "RESUME", "prime" ] ],
"release",
[ "background" ]);
this.addWaitForLogRace(cmp, 1, 3, "release: SUCCESS ");
this.addWaitForLogRace(cmp, 1, 3, "prime1: SUCCESS initial");
this.addWaitForLogRace(cmp, 1, 3, "prime2: SUCCESS initial");
} ]
},
/**
* enqueue two actions, a1(foreground), a2(background) with same action def and param, they run in parallel
* make sure they read response from storage first, then update the storage with their responses.
*
* Note a1&a2 are not both foreground/background, a2 won't become a dupe of a1
*/
testParallelStorable : {
test : [ function(cmp) {
this.sendAction(cmp, [],
[ [ "APPEND", "initial" ] ],
"setup");
this.addWaitForLog(cmp, 0, "setup: SUCCESS ");
}, function(cmp) {
this.sendAction(cmp, [],
[ [ "READ" ] ],
"prime",
[ "storable" ]);
this.addWaitForLog(cmp, 1, "prime: SUCCESS initial");
}, function(cmp) {
this.sendAction(cmp, [],
[ [ "APPEND", "second" ] ],
"setup2");
this.addWaitForLog(cmp, 2, "setup2: SUCCESS ");
}, function(cmp) {
this.sendAction(cmp, [],
[ [ "READ" ] ],
"retrieve-fore",
[ "storable" ]);
this.sendAction(cmp, [],
[ [ "READ" ] ],
"retrieve-back",
[ "storable", "background" ]);
// both callbacks with stored value executed. These should be executed _before_ any refreshes go out.
this.addWaitForLogRace(cmp, 3, 4, "retrieve-fore[stored]: SUCCESS initial");
this.addWaitForLogRace(cmp, 3, 4, "retrieve-back[stored]: SUCCESS initial");
//last param=true:we only check partial match
this.addWaitForLogRace(cmp, 5, 6, "retrieve-fore: SUCCESS ", true);
this.addWaitForLogRace(cmp, 5, 6, "retrieve-back: SUCCESS ", true);
} ]
},
/**
* Check that an abortable action is aborted prior to send.
*/
testAbortAbortablePriorToSend : {
test : [ function(cmp) {
$A.test.blockForegroundRequests();
this.sendAction(cmp, [ "child1" ],
[ [ "APPEND", "value" ],
[ "READ" ] ],
"aborted",
[ "abortable" ]);
// return to top so that the action gets queued up.
}, function(cmp) {
cmp.helper.deleteChild(cmp, "child1");
}, function(cmp) {
$A.test.releaseForegroundRequests();
this.addWaitForLog(cmp, 0, "aborted: ABORTED undefined");
} ]
},
/**
* Check that an abortable action is aborted after send.
*/
testAbortAbortableAfterSend : {
test : [ function(cmp) {
this.sendAction(cmp, [ "child1" ],
[ [ "WAIT", "release" ],
[ "APPEND", "value" ],
[ "READ" ] ],
"aborted",
[ "abortable" ]);
// make sure we sent the action.
$A.test.addWaitFor(false, function() { return $A.test.isActionQueued(); })
}, function(cmp) {
var old = cmp.find("child1");
cmp.helper.deleteChild(cmp, "child1");
// Make sure that the component is gone before we release.
$A.test.addWaitFor(false, function() { return old.isValid(); })
}, function(cmp) {
this.sendAction(cmp, [ ],
[ [ "RESUME", "release" ] ],
"release");
this.addWaitForLogRace(cmp, 0, 1, "aborted: ABORTED value");
this.addWaitForLogRace(cmp, 0, 1, "release: SUCCESS ");
} ]
},
/**
* Check that a non-abortable action is not aborted prior to send.
*/
testAbortNonAbortableNotPriorToSend : {
test : [ function(cmp) {
$A.test.blockForegroundRequests();
this.sendAction(cmp, [ "child1" ],
[ [ "APPEND", "value" ],
[ "READ" ] ],
"aborted");
// return to top so that the action gets queued up.
}, function(cmp) {
cmp.helper.deleteChild(cmp, "child1");
}, function(cmp) {
$A.test.releaseForegroundRequests();
this.addWaitForLog(cmp, 0, "aborted: ABORTED value");
} ]
},
/**
* Check that an abortable action is aborted prior to send.
*/
testAbortNonAbortableAfterSend : {
test : [ function(cmp) {
this.sendAction(cmp, [ "child1" ],
[ [ "WAIT", "release" ],
[ "APPEND", "value" ],
[ "READ" ] ],
"aborted",
[ "abortable" ]);
}, function(cmp) {
var old = cmp.find("child1");
cmp.helper.deleteChild(cmp, "child1");
// Make sure that the component is gone before we release.
$A.test.addWaitFor(false, function() { return old.isValid(); });
}, function(cmp) {
this.sendAction(cmp, [ ],
[ [ "RESUME", "release" ] ],
"release");
this.addWaitForLogRace(cmp, 0, 1, "aborted: ABORTED value");
this.addWaitForLogRace(cmp, 0, 1, "release: SUCCESS ");
} ]
},
///////////////////////////////////////////////////////////////////////
// runActions
///////////////////////////////////////////////////////////////////////
testSimpleRunActions : {
test : [ function(cmp) {
var helper = cmp.helper;
$A.clientService.runActions([
helper.getAction(cmp, cmp, this.buildCommands(cmp, [ [ "APPEND", "a" ], ["READ"] ]), "first")
], this, function() {
cmp.helper.log(cmp, cmp, "group1");
});
}, function(cmp) {
var helper = cmp.helper;
$A.clientService.runActions([
helper.getAction(cmp, cmp, this.buildCommands(cmp, [ [ "APPEND", "b1" ], ["READ"] ]), "second"),
helper.getAction(cmp, cmp, this.buildCommands(cmp, [ [ "APPEND", "b2" ], ["READ"] ]), "second")
], this, function() {
cmp.helper.log(cmp, cmp, "group2");
});
}, function(cmp) {
var helper = cmp.helper;
$A.clientService.runActions([
helper.getAction(cmp, cmp, this.buildCommands(cmp, [ [ "APPEND", "c1" ], ["READ"] ]), "third"),
helper.getAction(cmp, cmp, this.buildCommands(cmp, [ [ "APPEND", "c2" ], ["READ"] ]), "third")
], this, function() {
cmp.helper.log(cmp, cmp, "group3");
});
}, function(cmp) {
this.addWaitForLogRaceOrdered(cmp, 0, 7, [ "first: SUCCESS a", "group1" ] );
this.addWaitForLogRaceOrdered(cmp, 0, 7, [ "second: SUCCESS b1", "group2" ]);
this.addWaitForLogRaceOrdered(cmp, 0, 7, [ "second: SUCCESS b2", "group2" ]);
this.addWaitForLogRaceOrdered(cmp, 0, 7, [ "third: SUCCESS c1", "group3" ]);
this.addWaitForLogRaceOrdered(cmp, 0, 7, [ "third: SUCCESS c2", "group3" ]);
}]
}
})<｜fim▁end｜> | var commands_out = "";
var name;
var parent = cmp;
|
<|file_name|>NotPublic.java<|end_file_name|><｜fim▁begin｜>/**
* Copyright 2011-2017 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.compiler.batch;
import com.asakusafw.compiler.batch.batch.JobFlow1;
import com.asakusafw.vocabulary.batch.Batch;
import com.asakusafw.vocabulary.batch.BatchDescription;
/**
* A batch class which is not public.
*/
@Batch(name = "testing")
class NotPublic extends BatchDescription {
@Override<｜fim▁hole｜><｜fim▁end｜> | protected void describe() {
run(JobFlow1.class).soon();
}
} |
<|file_name|>_urllib2.py<|end_file_name|><｜fim▁begin｜># urllib2 work-alike interface
# ...from urllib2...
from urllib2 import \
URLError, \
HTTPError, \
BaseHandler, \
UnknownHandler, \
FTPHandler, \
CacheFTPHandler
# ...and from mechanize
from _auth import \
HTTPPasswordMgr, \
HTTPPasswordMgrWithDefaultRealm, \
AbstractBasicAuthHandler, \
AbstractDigestAuthHandler, \
HTTPProxyPasswordMgr, \
ProxyHandler, \
ProxyBasicAuthHandler, \
ProxyDigestAuthHandler, \
HTTPBasicAuthHandler, \
HTTPDigestAuthHandler, \
HTTPSClientCertMgr
from _debug import \
HTTPResponseDebugProcessor, \
HTTPRedirectDebugProcessor
from _file import \
FileHandler
# crap ATM
## from _gzip import \
## HTTPGzipProcessor
from _http import \
HTTPHandler, \
HTTPDefaultErrorHandler, \
HTTPRedirectHandler, \
HTTPEquivProcessor, \
HTTPCookieProcessor, \
HTTPRefererProcessor, \<｜fim▁hole｜>import httplib
if hasattr(httplib, 'HTTPS'):
from _http import HTTPSHandler
del httplib
from _opener import OpenerDirector, \
SeekableResponseOpener, \
build_opener, install_opener, urlopen
from _request import \
Request
from _seek import \
SeekableProcessor
from _upgrade import \
HTTPRequestUpgradeProcessor, \
ResponseUpgradeProcessor<｜fim▁end｜> | HTTPRefreshProcessor, \
HTTPErrorProcessor, \
HTTPRobotRulesProcessor, \
RobotExclusionError |
<|file_name|>__init__.py<|end_file_name|><｜fim▁begin｜># Xlib.ext.__init__ -- X extension modules
#
# Copyright (C) 2000 Peter Liljenberg <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#<｜fim▁hole｜># __extensions__ is a list of tuples: (extname, extmod)
# extname is the name of the extension according to the X
# protocol. extmod is the name of the module in this package.
__extensions__ = [
('XTEST', 'xtest'),
('SHAPE', 'shape'),
('XINERAMA', 'xinerama'),
('RECORD', 'record'),
('Composite', 'composite'),
('RANDR', 'randr'),
]
__all__ = [x[1] for x in __extensions__]<｜fim▁end｜> | # You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|