Dataset schema (column name, type, and displayed value range):
- repo_name: string, lengths 6 to 61
- path: string, lengths 4 to 230
- copies: string, lengths 1 to 3
- size: string, lengths 4 to 6
- text: string, lengths 1.01k to 850k
- license: string, 15 distinct values
- hash: int64, -9,220,477,234,079,998,000 to 9,219,060,020B
- line_mean: float64, 11.6 to 96.6
- line_max: int64, 32 to 939
- alpha_frac: float64, 0.26 to 0.9
- autogenerated: bool, 1 class
- ratio: float64, 1.62 to 6.1
- config_test: bool, 2 classes
- has_no_keywords: bool, 2 classes
- few_assignments: bool, 1 class
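Each record below lists one value per column in the order given above, with the full source file in the `text` field. As a minimal sketch of how records with this schema might be consumed, the snippet below reads such a dump and filters it on a few of the metadata columns; the JSON Lines format, the hypothetical `data.jsonl` path, and the use of pandas are assumptions for illustration, not part of the original dump.

# Minimal sketch (not the original tooling): load records with the schema
# above and filter them on a few metadata columns.
# Assumptions: the dump is available locally as JSON Lines at the
# hypothetical path "data.jsonl", and pandas is an acceptable reader.
import pandas as pd

df = pd.read_json("data.jsonl", lines=True)

# Keep files that are not flagged as autogenerated or keyword-free, have a
# reasonable share of alphabetic characters, and carry one of the license
# values seen in the records below; the 0.3 threshold is illustrative only.
mask = (
    ~df["autogenerated"]
    & ~df["has_no_keywords"]
    & (df["alpha_frac"] > 0.3)
    & df["license"].isin(["mit", "gpl-3.0"])
)

# The "text" column holds the full source of each file.
for _, row in df[mask].head(3).iterrows():
    print(row["repo_name"], row["path"], len(row["text"]))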
repo_name: acbilson/forbidden-island
path: tests/integrationtests.py
copies: 1
size: 1368
text:
import unittest
import sys
sys.path.append('../src')
from islandgame import *
from service_console import *
from service_player import *
from service_log import *
from service_screen import *
from iofactory import FakeIO
from tiles import *

class TestGame(unittest.TestCase):

    def test_golden_initialization(self):
        """
        When the game is initiated and the user enters valid input,
        should generate a board with all players
        """
        # Arrange
        bus = IslandBus()
        messageFactory = MessageFactory()
        playerFactory = PlayerFactory()
        game = IslandGame(bus, messageFactory)
        island = Island()
        pDeck = PlayerDeck()
        fio = FakeIO()
        tiles = Tiles()

        fio.callStack = [StackItem(1, "What is your name? ", 'Alex\n'),
                         StackItem(2, "How many players will be playing? ", '2\n'),
                         StackItem(3, "Choose a player type: ", 'Diver\n'),
                         StackItem(4, "Choose a player type: ", 'Engineer\n')]

        cs = ConsoleService(bus, fio)
        ss = ScreenService(bus, island, fio, tiles)
        ps = PlayerService(bus, playerFactory, tiles)
        ls = LogService(bus, fio)

        # Act
        game.play()
        board = island.generate_board(tiles.tiles)

        self.assertTrue(len(board) > 0)
        self.assertTrue(Constant.PlayerType["Diver"] in board)
        self.assertTrue(Constant.PlayerType["Engineer"] in board)
license: gpl-3.0
hash: -7,989,361,619,788,491,000
line_mean: 30.090909
line_max: 112
alpha_frac: 0.655702
autogenerated: false
ratio: 3.590551
config_test: false
has_no_keywords: false
few_assignments: false
repo_name: rupenp/lin-train
path: solvers/logisticregression.py
copies: 2
size: 1722
text:
from solver import Solver
import numpy as np


class LogisticRegression(Solver):
    """
    This uses the generalized linear model with gradient descent to calculate
    linear coefficients (parameters).

    This class is abstract and must be extended by the underlying distribution
    to determine the exact gradient and method for applying parameters.

    Inspired by Matlab code: http://www.cs.cmu.edu/~ggordon/IRLS-example/
    """

    epsilon = 1e-10
    max_iterations = 500
    ridge = 1e-5

    def __init__(self):
        Solver.__init__(self)

    def calculate_parameters(self, x, y):
        # dimensions
        n, m = np.shape(x)

        # use standard least squares algorithm from numpy
        i = 0
        params = np.zeros((m, 1))
        old_exp_y = - np.ones(np.shape(y))

        while i < self.max_iterations:
            # count iteration
            i += 1

            # calculate
            adj_y = np.dot(x, params)
            exp_y = 1 / (1 + np.exp(-adj_y))
            deriv = exp_y * (1 - exp_y)
            w_adj_y = (deriv * adj_y + (y - exp_y))  # * w
            weights = np.diag(deriv.flatten(1))  # * w

            try:
                params = np.dot(np.dot(np.linalg.inv(np.dot(np.dot(x.T, weights), x) + self.ridge), x.T), w_adj_y)
            except np.linalg.linalg.LinAlgError as err:
                print "Warning: Singular matrix"
                return params

            if np.sum(np.abs(exp_y - old_exp_y)) < n * self.epsilon:
                return params

            old_exp_y = exp_y

        # todo
        print "Warning: Does not converge"
        return params

    def apply_parameters(self, x, params):
        return 1 / (1 + np.exp(-np.dot(x, params)))
license: mit
hash: -4,388,168,504,590,578,700
line_mean: 30.309091
line_max: 116
alpha_frac: 0.552846
autogenerated: false
ratio: 3.656051
config_test: false
has_no_keywords: false
few_assignments: false
repo_name: vegarwe/sqrl
path: pysqrl/pysqrl/sqrl_client.py
copies: 1
size: 2411
text:
from .sqrl_conv import sqrl_base64_encode, sqrl_base64_decode from .sqrl_url import SqrlUrl from .sqrl_crypto import * def sqrl_query(imk, sks, server): # Get site specific keys idk, ssk = sqrl_get_idk_for_site(imk, sks) client = b'ver=1\r\n' client += b'cmd=query\r\n' client += b'idk=%s\r\n' % sqrl_base64_encode(idk) client += b'opt=cps~suk\r\n' print('client', client) client = sqrl_base64_encode(client) print('server', server) server = sqrl_base64_encode(server) ids = sqrl_sign(ssk, client + server) print('ids', ids) form = {'client': client, 'server': server, 'ids': sqrl_base64_encode(ids)} return form def sqrl_ident(ilk, imk, sks, server, sin, create_suk): # Get site specific keys idk, ssk = sqrl_get_idk_for_site(imk, sks) client = b'ver=1\r\n' client += b'cmd=ident\r\n' client += b'idk=%s\r\n' % sqrl_base64_encode(idk) if sin: ins = sqrl_hmac(EnHash(ssk), sin) client += b'ins=%s\r\n' % sqrl_base64_encode(ins) if create_suk: suk, vuk = sqrl_idlock_keys(ilk) client += b'suk=%s\r\n' % sqrl_base64_encode(suk) client += b'vuk=%s\r\n' % sqrl_base64_encode(vuk) client += b'opt=cps~suk\r\n' # TODO: Not always true? client = sqrl_base64_encode(client) ids = sqrl_sign(ssk, client + server) form = {'client': client, 'server': server, 'ids': sqrl_base64_encode(ids)} return form def sqrl_disable(ilk, imk, sks, server, sin, create_suk): # Get site specific keys idk, ssk = sqrl_get_idk_for_site(imk, sks) client = b'ver=1\r\n' client += b'cmd=disable\r\n' client += b'idk=%s\r\n' % sqrl_base64_encode(idk) if sin: ins = sqrl_hmac(EnHash(ssk), sin) client += b'ins=%s\r\n' % sqrl_base64_encode(ins) if create_suk: # TODO: Does this even make any sense? suk, vuk = sqrl_idlock_keys(ilk) client += b'suk=%s\r\n' % sqrl_base64_encode(suk) client += b'vuk=%s\r\n' % sqrl_base64_encode(vuk) client += b'opt=cps~suk\r\n' # TODO: Not always true? client = sqrl_base64_encode(client) ids = sqrl_sign(ssk, client + server) form = {'client': client, 'server': server, 'ids': sqrl_base64_encode(ids)} return form
license: mit
hash: -1,717,149,960,949,419,500
line_mean: 32.027397
line_max: 61
alpha_frac: 0.571132
autogenerated: false
ratio: 2.614967
config_test: false
has_no_keywords: false
few_assignments: false
repo_name: spacelis/crawler.kka
path: examples/utils/tweepy_patch.py
copies: 1
size: 2726
text:
#!/usr/bin/env python # -*- coding: utf-8 -*- """ A patch to tweepy for storing original json in Status. File: tweepy_patch.py Author: SpaceLis Email: [email protected] GitHub: http://github.com/spacelis Description: """ import types import sys from json import dumps def stack(cls, method, mock): """ Patching a method in class. """ mockee = getattr(cls, method) def wrapper(s, *args, **kwargs): """ wrapper """ return mock(s, mockee, *args, **kwargs) bak = '__patched__' + method setattr(cls, bak, mockee) setattr(cls, method, types.MethodType(wrapper, cls)) def preserve_origin(_, mockee, api, json): """ preserving the the original json object """ s = mockee(api, json) setattr(s, '_raw', dumps(json)) return s def patchStatus(): """ Patching the tweepy Status objects to make it store the raw json. """ if 'tweepy.models' in sys.modules: stack(sys.modules['tweepy.models'].Status, 'parse', preserve_origin) class ResourceKeeper(object): """ Return a resource keeper that will return false when all resource have been consumed. :returns: Whether there are resource left. """ def __init__(self, limit): """ Init resource keeper. :limit: A positive number means finite resource while 0 or negative means infinite. """ self.limit = limit self.left = limit def take(self, num=1, integrity=False): """ take resource. :num: The number to take away. :integrity: :returns: @todo """ assert num >= 0 if not self.limit: return True if integrity: ret = (self.left - num >= 0) else: ret = (self.left > 0) if self.left > 0: self.left -= num return ret def empty(self): """@todo: Docstring for empty. :returns: @todo """ return self.left <= 0 def iter_scoll(api, limit, *args, **kwargs): """ Iterating through pages of Twitter API. :api: The api to use from Tweepy :limit: The max number of tweets to scroll back in time. :*args: Args for the tweepy API. :**kwargs: Key word args for the tweepy API. :yields: A status object from Tweepy. """ _has_more = True res = ResourceKeeper(limit) if 'count' not in kwargs: kwargs['count'] = 100 while _has_more: _has_more = False for s in api(*args, **kwargs): _has_more = True max_id = s.id if not res.take(): break yield s if res.empty(): break kwargs['max_id'] = max_id - 1
license: mit
hash: 7,350,485,128,015,927,000
line_mean: 22.912281
line_max: 76
alpha_frac: 0.567131
autogenerated: false
ratio: 3.734247
config_test: false
has_no_keywords: false
few_assignments: false
repo_name: FAForever/client
path: src/api/ApiBase.py
copies: 1
size: 3965
text:
from PyQt5 import QtCore, QtNetwork import logging import json from config import Settings logger = logging.getLogger(__name__) class ApiBase(QtCore.QObject): def __init__(self, route): QtCore.QObject.__init__(self) self.url = QtCore.QUrl(Settings.get('api') + route) self.manager = QtNetwork.QNetworkAccessManager() self.manager.finished.connect(self.onRequestFinished) self.handlers = {} # query arguments like filter=login==Rhyza def request(self, queryDict, responseHandler): query = QtCore.QUrlQuery() for key, value in queryDict.items(): query.addQueryItem(key, str(value)) url = QtCore.QUrl(self.url) url.setQuery(query) request = QtNetwork.QNetworkRequest(url) request.setRawHeader(b'User-Agent', b"FAF Client") request.setRawHeader(b'Content-Type', b'application/vnd.api+json') reply = self.manager.get(request) self.handlers[reply] = responseHandler def onRequestFinished(self, reply): if reply.error() != QtNetwork.QNetworkReply.NoError: logger.error("API request error: %s", reply.error()) else: message_bytes = reply.readAll().data() message = json.loads(message_bytes.decode('utf-8')) included = self.parseIncluded(message) meta = self.parseMeta(message) result = self.parseData(message, included) if len(meta) > 0: self.handlers[reply](result, meta) else: self.handlers[reply](result) self.handlers.pop(reply) reply.deleteLater() def parseIncluded(self, message): result = {} relationships = [] if "included" in message: for inc_item in message["included"]: if not inc_item["type"] in result: result[inc_item["type"]] = {} if "attributes" in inc_item: result[inc_item["type"]][inc_item["id"]] = inc_item["attributes"] if "relationships" in inc_item: for key, value in inc_item["relationships"].items(): relationships.append((inc_item["type"], inc_item["id"], key, value)) message.pop('included') #resolve relationships for r in relationships: result[r[0]][r[1]][r[2]] = self.parseData(r[3], result) return result def parseData(self, message, included): if "data" in message: if isinstance(message["data"], (list)): result = [] for data in message["data"]: result.append(self.parseSingleData(data, included)) return result elif isinstance(message["data"], (dict)): return self.parseSingleData(message["data"], included) else: logger.error("error in response", message) if "included" in message: logger.error("unexpected 'included' in message", message) return {} def parseSingleData(self, data, included): result = {} try: if data["type"] in included and data["id"] in included[data["type"]]: result = included[data["type"]][data["id"]] result["id"] = data["id"] if "type" not in result: result["type"] = data["type"] if "attributes" in data: for key, value in data["attributes"].items(): result[key] = value if "relationships" in data: for key, value in data["relationships"].items(): result[key] = self.parseData(value, included) except: logger.error("error parsing ", data) return result def parseMeta(self, message): result = {} if "meta" in message: result["meta"] = message["meta"] return result
license: gpl-3.0
hash: -783,237,696,539,453,700
line_mean: 37.495146
line_max: 92
alpha_frac: 0.560151
autogenerated: false
ratio: 4.300434
config_test: false
has_no_keywords: false
few_assignments: false
repo_name: paulmadore/Eric-IDE
path: 6-6.0.9/eric/Preferences/ConfigurationPages/IrcPage.py
copies: 2
size: 7648
text:
# -*- coding: utf-8 -*- # Copyright (c) 2012 - 2015 Detlev Offenbach <[email protected]> # """ Module implementing the IRC configuration page. """ from __future__ import unicode_literals from .ConfigurationPageBase import ConfigurationPageBase from .Ui_IrcPage import Ui_IrcPage import Preferences class IrcPage(ConfigurationPageBase, Ui_IrcPage): """ Class implementing the IRC configuration page. """ TimeFormats = ["hh:mm", "hh:mm:ss", "h:mm ap", "h:mm:ss ap"] DateFormats = ["yyyy-MM-dd", "dd.MM.yyyy", "MM/dd/yyyy", "yyyy MMM. dd", "dd MMM. yyyy", "MMM. dd, yyyy"] def __init__(self): """ Constructor """ super(IrcPage, self).__init__() self.setupUi(self) self.setObjectName("IrcPage") self.timeFormatCombo.addItems(IrcPage.TimeFormats) self.dateFormatCombo.addItems(IrcPage.DateFormats) # set initial values # timestamps self.timestampGroup.setChecked(Preferences.getIrc("ShowTimestamps")) self.showDateCheckBox.setChecked( Preferences.getIrc("TimestampIncludeDate")) self.timeFormatCombo.setCurrentIndex( self.timeFormatCombo.findText(Preferences.getIrc("TimeFormat"))) self.dateFormatCombo.setCurrentIndex( self.dateFormatCombo.findText(Preferences.getIrc("DateFormat"))) # colours self.initColour("NetworkMessageColour", self.networkButton, Preferences.getIrc, byName=True) self.initColour("ServerMessageColour", self.serverButton, Preferences.getIrc, byName=True) self.initColour("ErrorMessageColour", self.errorButton, Preferences.getIrc, byName=True) self.initColour("TimestampColour", self.timestampButton, Preferences.getIrc, byName=True) self.initColour("HyperlinkColour", self.hyperlinkButton, Preferences.getIrc, byName=True) self.initColour("ChannelMessageColour", self.channelButton, Preferences.getIrc, byName=True) self.initColour("OwnNickColour", self.ownNickButton, Preferences.getIrc, byName=True) self.initColour("NickColour", self.nickButton, Preferences.getIrc, byName=True) self.initColour("JoinChannelColour", self.joinButton, Preferences.getIrc, byName=True) self.initColour("LeaveChannelColour", self.leaveButton, Preferences.getIrc, byName=True) self.initColour("ChannelInfoColour", self.infoButton, Preferences.getIrc, byName=True) # notifications self.notificationsGroup.setChecked( Preferences.getIrc("ShowNotifications")) self.joinLeaveCheckBox.setChecked(Preferences.getIrc("NotifyJoinPart")) self.messageCheckBox.setChecked(Preferences.getIrc("NotifyMessage")) self.ownNickCheckBox.setChecked(Preferences.getIrc("NotifyNick")) # IRC text colors self.initColour("IrcColor0", self.ircColor0Button, Preferences.getIrc, byName=True) self.initColour("IrcColor1", self.ircColor1Button, Preferences.getIrc, byName=True) self.initColour("IrcColor2", self.ircColor2Button, Preferences.getIrc, byName=True) self.initColour("IrcColor3", self.ircColor3Button, Preferences.getIrc, byName=True) self.initColour("IrcColor4", self.ircColor4Button, Preferences.getIrc, byName=True) self.initColour("IrcColor5", self.ircColor5Button, Preferences.getIrc, byName=True) self.initColour("IrcColor6", self.ircColor6Button, Preferences.getIrc, byName=True) self.initColour("IrcColor7", self.ircColor7Button, Preferences.getIrc, byName=True) self.initColour("IrcColor8", self.ircColor8Button, Preferences.getIrc, byName=True) self.initColour("IrcColor9", self.ircColor9Button, Preferences.getIrc, byName=True) self.initColour("IrcColor10", self.ircColor10Button, Preferences.getIrc, byName=True) self.initColour("IrcColor11", self.ircColor11Button, Preferences.getIrc, byName=True) self.initColour("IrcColor12", self.ircColor12Button, 
Preferences.getIrc, byName=True) self.initColour("IrcColor13", self.ircColor13Button, Preferences.getIrc, byName=True) self.initColour("IrcColor14", self.ircColor14Button, Preferences.getIrc, byName=True) self.initColour("IrcColor15", self.ircColor15Button, Preferences.getIrc, byName=True) # Automatic User Information Lookup self.whoGroup.setChecked(Preferences.getIrc("AutoUserInfoLookup")) self.whoUsersSpinBox.setValue(Preferences.getIrc("AutoUserInfoMax")) self.whoIntervalSpinBox.setValue( Preferences.getIrc("AutoUserInfoInterval")) # Markers self.markWhenHiddenCheckBox.setChecked( Preferences.getIrc("MarkPositionWhenHidden")) self.initColour("MarkerLineForegroundColour", self.markerForegroundButton, Preferences.getIrc, byName=True) self.initColour("MarkerLineBackgroundColour", self.markerBackgroundButton, Preferences.getIrc, byName=True) # Shutdown self.confirmShutdownCheckBox.setChecked( Preferences.getIrc("AskOnShutdown")) def save(self): """ Public slot to save the IRC configuration. """ # timestamps Preferences.setIrc("ShowTimestamps", self.timestampGroup.isChecked()) Preferences.setIrc( "TimestampIncludeDate", self.showDateCheckBox.isChecked()) Preferences.setIrc("TimeFormat", self.timeFormatCombo.currentText()) Preferences.setIrc("DateFormat", self.dateFormatCombo.currentText()) # notifications Preferences.setIrc( "ShowNotifications", self.notificationsGroup.isChecked()) Preferences.setIrc( "NotifyJoinPart", self.joinLeaveCheckBox.isChecked()) Preferences.setIrc("NotifyMessage", self.messageCheckBox.isChecked()) Preferences.setIrc("NotifyNick", self.ownNickCheckBox.isChecked()) # Automatic User Information Lookup Preferences.setIrc("AutoUserInfoLookup", self.whoGroup.isChecked()) Preferences.setIrc("AutoUserInfoMax", self.whoUsersSpinBox.value()) Preferences.setIrc( "AutoUserInfoInterval", self.whoIntervalSpinBox.value()) # Markers Preferences.setIrc( "MarkPositionWhenHidden", self.markWhenHiddenCheckBox.isChecked()) # Shutdown Preferences.setIrc( "AskOnShutdown", self.confirmShutdownCheckBox.isChecked()) # colours self.saveColours(Preferences.setIrc) def create(dlg): """ Module function to create the configuration page. @param dlg reference to the configuration dialog @return reference to the instantiated page (ConfigurationPageBase) """ page = IrcPage() return page
license: gpl-3.0
hash: 7,421,247,464,999,810,000
line_mean: 41.966292
line_max: 79
alpha_frac: 0.625785
autogenerated: false
ratio: 4.165577
config_test: true
has_no_keywords: false
few_assignments: false
repo_name: mghweb/sublime-miva-ide
path: miva-pos-calculator.py
copies: 1
size: 1617
text:
import sublime, sublime_plugin, re, threading

miva_error_status_key = 'miva_pos_calculator_error'

class MvtPosCalculatorCommand( sublime_plugin.TextCommand ):

    def run( self, edit ):

        selections = self.view.sel()

        for selection in selections:

            # get region from start of file to first point in selection
            search_region = sublime.Region( 0, selection.a )

            # get actual text of region
            search_text = self.view.substr( search_region )

            # check what language you are in
            is_mvt = self.view.match_selector( selection.a, 'text.mvt' )
            is_mv = self.view.match_selector( selection.a, 'text.mv' )

            # find all matches of "open" tags
            if ( is_mvt ):
                open_tags = re.findall( r'(?i)(<)(mvt:)(foreach)\b', search_text )
                close_tags = re.findall( r'(?i)(<\/)(mvt:)(foreach)\b', search_text )
            elif ( is_mv ):
                open_tags = re.findall( r'(?i)(<Mv)(FOR|FOREACH)\b', search_text )
                close_tags = re.findall( r'(?i)(<\/Mv)(FOR|FOREACH)\b', search_text )
            else:
                continue

            # calculate the difference between the number of open and closed tags
            open_close_difference = len( open_tags ) - len( close_tags )

            # check the difference
            if ( open_close_difference >= 1 ):
                # generate output string
                output = 'l.pos' + str( open_close_difference );
                # Replace the Variable selection with the generated l.posX
                self.view.replace( edit, selection, output )
            else:
                # Output error message
                self.view.set_status( miva_error_status_key, 'No valid <mvt:foreach> tags detected' )
                threading.Timer( 3, self.view.erase_status, args=[miva_error_status_key] ).start()
license: mit
hash: 624,135,251,984,196,700
line_mean: 32.6875
line_max: 89
alpha_frac: 0.664193
autogenerated: false
ratio: 3.068311
config_test: false
has_no_keywords: false
few_assignments: false
repo_name: nirvaris/nirvaris-profile
path: setup.py
copies: 1
size: 1256
text:
import os
from setuptools import setup

with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
    README = readme.read()

# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))

setup(
    name='nirvaris-profile',
    version='3.0',
    packages=['n_profile'],
    include_package_data=True,
    license='MIT License',  # example license
    description='A simple Django app using django auth with custom UI',
    long_description=README,
    url='https://github.com/nirvaris/nirvaris-profile',
    author='Nirvaris',
    author_email='[email protected]',
    classifiers=[
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',  # example license
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        # Replace these appropriately if you are stuck on Python 2.
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
)
license: mit
hash: -3,625,902,828,120,398,300
line_mean: 33.888889
line_max: 78
alpha_frac: 0.636146
autogenerated: false
ratio: 3.912773
config_test: false
has_no_keywords: true
few_assignments: false
repo_name: dwoods/gn-maps
path: geonode/documents/migrations/0002_auto__add_field_document_popular_count__add_field_document_share_count.py
copies: 4
size: 16466
text:
# encoding: utf-8 import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'Document.popular_count' db.add_column(u'documents_document', 'popular_count', self.gf('django.db.models.fields.IntegerField')(default=0), keep_default=False) # Adding field 'Document.share_count' db.add_column(u'documents_document', 'share_count', self.gf('django.db.models.fields.IntegerField')(default=0), keep_default=False) def backwards(self, orm): # Deleting field 'Document.popular_count' db.delete_column(u'documents_document', 'popular_count') # Deleting field 'Document.share_count' db.delete_column(u'documents_document', 'share_count') models = { u'actstream.action': { 'Meta': {'ordering': "('-timestamp',)", 'object_name': 'Action'}, 'action_object_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'action_object'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}), 'action_object_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'actor_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'actor'", 'to': u"orm['contenttypes.ContentType']"}), 'actor_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'data': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'target_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'target'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}), 'target_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 6, 21, 8, 48, 27, 489164)'}), 'verb': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, u'auth.group': { 'Meta': {'object_name': 'Group'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, u'auth.permission': { 'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, u'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 6, 21, 8, 48, 27, 495334)'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', 
[], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 6, 21, 8, 48, 27, 494806)'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, u'base.contactrole': { 'Meta': {'unique_together': "(('contact', 'resource', 'role'),)", 'object_name': 'ContactRole'}, 'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['people.Profile']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.ResourceBase']"}), 'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['people.Role']"}) }, u'base.resourcebase': { 'Meta': {'object_name': 'ResourceBase'}, 'abstract': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'bbox_x0': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '10', 'blank': 'True'}), 'bbox_x1': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '10', 'blank': 'True'}), 'bbox_y0': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '10', 'blank': 'True'}), 'bbox_y1': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '10', 'blank': 'True'}), 'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.TopicCategory']", 'null': 'True', 'blank': 'True'}), 'constraints_other': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'constraints_use': ('django.db.models.fields.CharField', [], {'default': "'copyright'", 'max_length': '255'}), 'contacts': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['people.Profile']", 'through': u"orm['base.ContactRole']", 'symmetrical': 'False'}), 'csw_anytext': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'csw_insert_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}), 'csw_mdsource': ('django.db.models.fields.CharField', [], {'default': "'local'", 'max_length': '256'}), 'csw_schema': ('django.db.models.fields.CharField', [], {'default': "'http://www.isotc211.org/2005/gmd'", 'max_length': '64'}), 'csw_type': ('django.db.models.fields.CharField', [], {'default': "'dataset'", 'max_length': '32'}), 'csw_typename': ('django.db.models.fields.CharField', [], {'default': "'gmd:MD_Metadata'", 'max_length': '32'}), 'csw_wkt_geometry': ('django.db.models.fields.TextField', [], {'default': "'POLYGON((-180 -90,-180 90,180 90,180 -90,-180 -90))'"}), 'data_quality_statement': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'date': 
('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'date_type': ('django.db.models.fields.CharField', [], {'default': "'publication'", 'max_length': '255'}), 'distribution_description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'distribution_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'edition': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'keywords_region': ('django.db.models.fields.CharField', [], {'default': "'USA'", 'max_length': '3'}), 'language': ('django.db.models.fields.CharField', [], {'default': "'eng'", 'max_length': '3'}), 'maintenance_frequency': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'metadata_uploaded': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'metadata_xml': ('django.db.models.fields.TextField', [], {'default': '\'<gmd:MD_Metadata xmlns:gmd="http://www.isotc211.org/2005/gmd"/>\'', 'null': 'True', 'blank': 'True'}), 'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}), 'purpose': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'spatial_representation_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'srid': ('django.db.models.fields.CharField', [], {'default': "'EPSG:4326'", 'max_length': '255'}), 'supplemental_information': ('django.db.models.fields.TextField', [], {'default': "u'No information provided'"}), 'temporal_extent_end': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'temporal_extent_start': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'thumbnail': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.Thumbnail']", 'null': 'True', 'blank': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36'}) }, u'base.thumbnail': { 'Meta': {'object_name': 'Thumbnail'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'thumb_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}), 'thumb_spec': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'version': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'null': 'True'}) }, u'base.topiccategory': { 'Meta': {'ordering': "('name',)", 'object_name': 'TopicCategory'}, 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'documents_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'layers_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'maps_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}), 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}) }, u'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), u'id': 
('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, u'documents.document': { 'Meta': {'object_name': 'Document', '_ormbases': [u'base.ResourceBase']}, 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}), 'doc_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}), 'extension': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}), 'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}), 'popular_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), u'resourcebase_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['base.ResourceBase']", 'unique': 'True', 'primary_key': 'True'}), 'share_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}) }, u'people.profile': { 'Meta': {'object_name': 'Profile'}, 'area': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'country': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}), 'delivery': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), 'fax': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'position': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'profile': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'user': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'profile'", 'unique': 'True', 'null': 'True', 'to': u"orm['auth.User']"}), 'voice': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}) }, u'people.role': { 'Meta': {'object_name': 'Role'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'value': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) }, u'taggit.tag': { 'Meta': {'object_name': 'Tag'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'}) }, u'taggit.taggeditem': { 'Meta': {'object_name': 'TaggedItem'}, 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': 
u"orm['contenttypes.ContentType']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}), 'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"}) } } complete_apps = ['documents']
license: gpl-3.0
hash: 2,982,839,074,268,630,000
line_mean: 84.760417
line_max: 205
alpha_frac: 0.55648
autogenerated: false
ratio: 3.547178
config_test: false
has_no_keywords: false
few_assignments: false
repo_name: hsab/UMOG
path: umog_addon/nodes/geometry/dissolve_limited.py
copies: 1
size: 2551
text:
from ...base_types import UMOGOutputNode import bpy import numpy as np import math from mathutils import Vector class DissolveLimitedNode(bpy.types.Node, UMOGOutputNode): bl_idname = "umog_DissolveLimitedNode" bl_label = "Dissolve Limited" assignedType = "Object" delimitOptions = bpy.props.EnumProperty(items= (('NORMAL', 'Normal', 'Delimit by face directions.'), ('MATERIAL ', 'Material', 'Delimit by face material.'), ('SEAM', 'Seam', 'Delimit by edge seams.'), ('SHARP', 'Sharp', 'Delimit by sharp edges.'), ('UV', 'UV', 'Delimit by UV coordinates.') ), name="Delimit Operation", default = {"NORMAL"}, options = {"ENUM_FLAG"}) def draw(self, layout): layout.prop(self, "delimitOptions", "Delimit Operation") def create(self): self.newInput(self.assignedType, "Object") self.newInput("VertexGroup", "Vertex Group") self.newInput("Float", "Angle Limit", value = 0.001, minValue = 0.0, maxValue= 180) self.newInput("Boolean", "All Boundries") socket = self.newOutput(self.assignedType, "Output") socket.display.refreshableIcon = False socket.display.packedIcon = False socket = self.newOutput("VertexGroup", "Vertex Group") socket.display.refreshableIcon = False socket.display.packedIcon = False self.width = 200 def refresh(self): if self.inputs[0].value == '': self.inputs[1].value = '' self.inputs[1].object = '' else: self.inputs[1].object = self.inputs[0].value self.outputs[0].value = self.inputs[0].value self.outputs[0].refresh() self.outputs[1].value = self.inputs[1].value self.outputs[1].refresh() def execute(self, refholder): if self.inputs[1].value == '': self.inputs[0].setSelected() overrideContext = self.inputs[0].setViewEditMode(selectAll = 'SELECT') else: self.inputs[1].setSelected() overrideContext = self.inputs[1].select() angleLimit = math.radians(self.inputs[2].value) boundries = self.inputs[3].value bpy.ops.mesh.dissolve_limited(angle_limit=angleLimit, use_dissolve_boundaries=boundries, delimit=self.delimitOptions) self.inputs[0].setViewObjectMode() def write_keyframe(self, refholder, frame): pass def preExecute(self, refholder): pass def postBake(self, refholder): pass
license: gpl-3.0
hash: -6,074,537,798,682,257,000
line_mean: 32.12987
line_max: 125
alpha_frac: 0.614661
autogenerated: false
ratio: 3.762537
config_test: false
has_no_keywords: false
few_assignments: false
repo_name: 7Robot/cerveau
path: ia/missions/petit/positioning.py
copies: 1
size: 3131
text:
# -*-coding:UTF-8 -*

from events.event import Event
from missions.mission import Mission

class PositioningMission(Mission):

    def __init__(self, robot, can, ui):
        super(self.__class__,self).__init__(robot, can, ui)
        self.state = -1

    def start(self):
        if self.state == -1:
            self.create_timer(500)
            self.missions["threshold"].activate(1, False)
            self.missions["threshold"].activate(2, False)
            self.missions["threshold"].activate(8, False)
            self.state += 1

    def process_event(self, e):
        if self.state == 0:
            if e.name == "timer":
                self.state = 2
                self.missions["speed"].start( -20)
        elif self.state == 2:
            if e.name == "bump" and e.state == "close":
                self.state += 1
                self.create_timer(700)
            elif e.name == "bump" and e.pos == "alim" \
                    and e.state == "close":
                self.missions["speed"].start(-20)
        elif self.state == 3:
            if e.name == "timer":
                self.state += 1
                self.missions["speed"].stop(self)
        elif self.state == 4:
            if e.name == "speed" and e.type == "done":
                self.state += 1
                self.missions["forward"].start(self, 1800)
        elif self.state == 5:
            if e.name == "forward" and e.type == "done":
                self.state += 1
                self.missions["rotate"].start(self, 9000)
        elif self.state == 6:
            if e.name == "rotate" and e.type == "done":
                self.state += 1
                self.missions["speed"].start(-20)
        elif self.state == 7:
            if e.name == "bump" and e.state == "close":
                self.state += 0.5
                self.create_timer(700)
        elif self.state == 7.5:
            if e.name == "timer":
                self.state += 0.5
                self.missions["speed"].stop(self)
        elif self.state == 8:
            if e.name == "speed" and e.type == "done":
                self.state += 2
                self.missions["forward"].start(self, 6000)
        elif self.state == 10:
            if e.name == "forward" and e.type == "done":
                self.state += 1
                self.logger.info("Petit en attente de positionnement de Gros")
        elif self.state == 11:
            if (e.name == "robot" and e.type == "ready") \
                    or (e.name == "bump" and e.state == "close"):
                self.state += 1
                self.missions["forward"].start(self, -3100)
        elif self.state == 12:
            if e.name == "forward" and e.type == "done":
                self.state = 0
                self.missions["threshold"].activate(1, True)
                self.missions["threshold"].activate(2, True)
                self.missions["threshold"].activate(8, True)
                self.logger.info("Petit en position !")
                self.send_event(Event("positioning", "done"))
license: gpl-3.0
hash: 2,794,002,897,405,929,000
line_mean: 35.835294
line_max: 78
alpha_frac: 0.464388
autogenerated: false
ratio: 4.024422
config_test: false
has_no_keywords: false
few_assignments: false
repo_name: google/google-ctf
path: third_party/edk2/BaseTools/Source/Python/Eot/EotMain.py
copies: 1
size: 69215
text:
## @file # This file is used to be the main entrance of EOT tool # # Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR> # This program and the accompanying materials # are licensed and made available under the terms and conditions of the BSD License # which accompanies this distribution. The full text of the license may be found at # http://opensource.org/licenses/bsd-license.php # # THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, # WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. # ## # Import Modules # from __future__ import absolute_import import Common.LongFilePathOs as os, time, glob import Common.EdkLogger as EdkLogger import Eot.EotGlobalData as EotGlobalData from optparse import OptionParser from Common.StringUtils import NormPath from Common import BuildToolError from Common.Misc import GuidStructureStringToGuidString from collections import OrderedDict as sdict from Eot.Parser import * from Eot.InfParserLite import EdkInfParser from Common.StringUtils import GetSplitValueList from Eot import c from Eot import Database from array import array from Eot.Report import Report from Common.BuildVersion import gBUILD_VERSION from Eot.Parser import ConvertGuid from Common.LongFilePathSupport import OpenLongFilePath as open import struct import uuid import copy import codecs from GenFds.AprioriSection import DXE_APRIORI_GUID, PEI_APRIORI_GUID gGuidStringFormat = "%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X" gIndention = -4 class Image(array): _HEADER_ = struct.Struct("") _HEADER_SIZE_ = _HEADER_.size def __new__(cls, *args, **kwargs): return array.__new__(cls, 'B') def __init__(self, ID=None): if ID is None: self._ID_ = str(uuid.uuid1()).upper() else: self._ID_ = ID self._BUF_ = None self._LEN_ = None self._OFF_ = None self._SubImages = sdict() # {offset: Image()} array.__init__(self) def __repr__(self): return self._ID_ def __len__(self): Len = array.__len__(self) for Offset in self._SubImages.keys(): Len += len(self._SubImages[Offset]) return Len def _Unpack(self): self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._LEN_]) return len(self) def _Pack(self, PadByte=0xFF): raise NotImplementedError def frombuffer(self, Buffer, Offset=0, Size=None): self._BUF_ = Buffer self._OFF_ = Offset # we may need the Size information in advance if it's given self._LEN_ = Size self._LEN_ = self._Unpack() def empty(self): del self[0:] def GetField(self, FieldStruct, Offset=0): return FieldStruct.unpack_from(self, Offset) def SetField(self, FieldStruct, Offset, *args): # check if there's enough space Size = FieldStruct.size if Size > len(self): self.extend([0] * (Size - len(self))) FieldStruct.pack_into(self, Offset, *args) def _SetData(self, Data): if len(self) < self._HEADER_SIZE_: self.extend([0] * (self._HEADER_SIZE_ - len(self))) else: del self[self._HEADER_SIZE_:] self.extend(Data) def _GetData(self): if len(self) > self._HEADER_SIZE_: return self[self._HEADER_SIZE_:] return None Data = property(_GetData, _SetData) ## CompressedImage() class # # A class for Compressed Image # class CompressedImage(Image): # UncompressedLength = 4-byte # CompressionType = 1-byte _HEADER_ = struct.Struct("1I 1B") _HEADER_SIZE_ = _HEADER_.size _ORIG_SIZE_ = struct.Struct("1I") _CMPRS_TYPE_ = struct.Struct("4x 1B") def __init__(self, CompressedData=None, CompressionType=None, UncompressedLength=None): Image.__init__(self) if UncompressedLength is not None: self.UncompressedLength = UncompressedLength if CompressionType is not None: 
self.CompressionType = CompressionType if CompressedData is not None: self.Data = CompressedData def __str__(self): global gIndention S = "algorithm=%s uncompressed=%x" % (self.CompressionType, self.UncompressedLength) for Sec in self.Sections: S += '\n' + str(Sec) return S def _SetOriginalSize(self, Size): self.SetField(self._ORIG_SIZE_, 0, Size) def _GetOriginalSize(self): return self.GetField(self._ORIG_SIZE_)[0] def _SetCompressionType(self, Type): self.SetField(self._CMPRS_TYPE_, 0, Type) def _GetCompressionType(self): return self.GetField(self._CMPRS_TYPE_)[0] def _GetSections(self): try: TmpData = DeCompress('Efi', self[self._HEADER_SIZE_:]) DecData = array('B') DecData.fromstring(TmpData) except: TmpData = DeCompress('Framework', self[self._HEADER_SIZE_:]) DecData = array('B') DecData.fromstring(TmpData) SectionList = [] Offset = 0 while Offset < len(DecData): Sec = Section() try: Sec.frombuffer(DecData, Offset) Offset += Sec.Size # the section is aligned to 4-byte boundary except: break SectionList.append(Sec) return SectionList UncompressedLength = property(_GetOriginalSize, _SetOriginalSize) CompressionType = property(_GetCompressionType, _SetCompressionType) Sections = property(_GetSections) ## Ui() class # # A class for Ui # class Ui(Image): _HEADER_ = struct.Struct("") _HEADER_SIZE_ = 0 def __init__(self): Image.__init__(self) def __str__(self): return self.String def _Unpack(self): # keep header in this Image object self.empty() self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._LEN_]) return len(self) def _GetUiString(self): return codecs.utf_16_decode(self[0:-2].tostring())[0] String = property(_GetUiString) ## Depex() class # # A class for Depex # class Depex(Image): _HEADER_ = struct.Struct("") _HEADER_SIZE_ = 0 _GUID_ = struct.Struct("1I2H8B") _OPCODE_ = struct.Struct("1B") _OPCODE_STRING_ = { 0x00 : "BEFORE", 0x01 : "AFTER", 0x02 : "PUSH", 0x03 : "AND", 0x04 : "OR", 0x05 : "NOT", 0x06 : "TRUE", 0x07 : "FALSE", 0x08 : "END", 0x09 : "SOR" } _NEXT_ = { -1 : _OPCODE_, # first one in depex must be an opcdoe 0x00 : _GUID_, #"BEFORE", 0x01 : _GUID_, #"AFTER", 0x02 : _GUID_, #"PUSH", 0x03 : _OPCODE_, #"AND", 0x04 : _OPCODE_, #"OR", 0x05 : _OPCODE_, #"NOT", 0x06 : _OPCODE_, #"TRUE", 0x07 : _OPCODE_, #"FALSE", 0x08 : None, #"END", 0x09 : _OPCODE_, #"SOR" } def __init__(self): Image.__init__(self) self._ExprList = [] def __str__(self): global gIndention gIndention += 4 Indention = ' ' * gIndention S = '\n' for T in self.Expression: if T in self._OPCODE_STRING_: S += Indention + self._OPCODE_STRING_[T] if T not in [0x00, 0x01, 0x02]: S += '\n' else: S += ' ' + gGuidStringFormat % T + '\n' gIndention -= 4 return S def _Unpack(self): # keep header in this Image object self.empty() self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._LEN_]) return len(self) def _GetExpression(self): if self._ExprList == []: Offset = 0 CurrentData = self._OPCODE_ while Offset < len(self): Token = CurrentData.unpack_from(self, Offset) Offset += CurrentData.size if len(Token) == 1: Token = Token[0] if Token in self._NEXT_: CurrentData = self._NEXT_[Token] else: CurrentData = self._GUID_ else: CurrentData = self._OPCODE_ self._ExprList.append(Token) if CurrentData is None: break return self._ExprList Expression = property(_GetExpression) # # FirmwareVolume() class # # A class for Firmware Volume # class FirmwareVolume(Image): # Read FvLength, Attributes, HeaderLength, Checksum _HEADER_ = struct.Struct("16x 1I2H8B 1Q 4x 1I 1H 1H") _HEADER_SIZE_ = _HEADER_.size _FfsGuid = 
"8C8CE578-8A3D-4F1C-9935-896185C32DD3" _GUID_ = struct.Struct("16x 1I2H8B") _LENGTH_ = struct.Struct("16x 16x 1Q") _SIG_ = struct.Struct("16x 16x 8x 1I") _ATTR_ = struct.Struct("16x 16x 8x 4x 1I") _HLEN_ = struct.Struct("16x 16x 8x 4x 4x 1H") _CHECKSUM_ = struct.Struct("16x 16x 8x 4x 4x 2x 1H") def __init__(self, Name=''): Image.__init__(self) self.Name = Name self.FfsDict = sdict() self.OrderedFfsDict = sdict() self.UnDispatchedFfsDict = sdict() self.ProtocolList = sdict() def CheckArchProtocol(self): for Item in EotGlobalData.gArchProtocolGuids: if Item.lower() not in EotGlobalData.gProtocolList: return False return True def ParseDepex(self, Depex, Type): List = None if Type == 'Ppi': List = EotGlobalData.gPpiList if Type == 'Protocol': List = EotGlobalData.gProtocolList DepexStack = [] DepexList = [] DepexString = '' FileDepex = None CouldBeLoaded = True for Index in range(0, len(Depex.Expression)): Item = Depex.Expression[Index] if Item == 0x00: Index = Index + 1 Guid = gGuidStringFormat % Depex.Expression[Index] if Guid in self.OrderedFfsDict and Depex.Expression[Index + 1] == 0x08: return (True, 'BEFORE %s' % Guid, [Guid, 'BEFORE']) elif Item == 0x01: Index = Index + 1 Guid = gGuidStringFormat % Depex.Expression[Index] if Guid in self.OrderedFfsDict and Depex.Expression[Index + 1] == 0x08: return (True, 'AFTER %s' % Guid, [Guid, 'AFTER']) elif Item == 0x02: Index = Index + 1 Guid = gGuidStringFormat % Depex.Expression[Index] if Guid.lower() in List: DepexStack.append(True) DepexList.append(Guid) else: DepexStack.append(False) DepexList.append(Guid) continue elif Item == 0x03 or Item == 0x04: DepexStack.append(eval(str(DepexStack.pop()) + ' ' + Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexStack.pop()))) DepexList.append(str(DepexList.pop()) + ' ' + Depex._OPCODE_STRING_[Item].upper() + ' ' + str(DepexList.pop())) elif Item == 0x05: DepexStack.append(eval(Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexStack.pop()))) DepexList.append(Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexList.pop())) elif Item == 0x06: DepexStack.append(True) DepexList.append('TRUE') DepexString = DepexString + 'TRUE' + ' ' elif Item == 0x07: DepexStack.append(False) DepexList.append('False') DepexString = DepexString + 'FALSE' + ' ' elif Item == 0x08: if Index != len(Depex.Expression) - 1: CouldBeLoaded = False else: CouldBeLoaded = DepexStack.pop() else: CouldBeLoaded = False if DepexList != []: DepexString = DepexList[0].strip() return (CouldBeLoaded, DepexString, FileDepex) def Dispatch(self, Db=None): if Db is None: return False self.UnDispatchedFfsDict = copy.copy(self.FfsDict) # Find PeiCore, DexCore, PeiPriori, DxePriori first FfsSecCoreGuid = None FfsPeiCoreGuid = None FfsDxeCoreGuid = None FfsPeiPrioriGuid = None FfsDxePrioriGuid = None for FfsID in list(self.UnDispatchedFfsDict.keys()): Ffs = self.UnDispatchedFfsDict[FfsID] if Ffs.Type == 0x03: FfsSecCoreGuid = FfsID continue if Ffs.Type == 0x04: FfsPeiCoreGuid = FfsID continue if Ffs.Type == 0x05: FfsDxeCoreGuid = FfsID continue if Ffs.Guid.lower() == PEI_APRIORI_GUID.lower(): FfsPeiPrioriGuid = FfsID continue if Ffs.Guid.lower() == DXE_APRIORI_GUID.lower(): FfsDxePrioriGuid = FfsID continue # Parse SEC_CORE first if FfsSecCoreGuid is not None: self.OrderedFfsDict[FfsSecCoreGuid] = self.UnDispatchedFfsDict.pop(FfsSecCoreGuid) self.LoadPpi(Db, FfsSecCoreGuid) # Parse PEI first if FfsPeiCoreGuid is not None: self.OrderedFfsDict[FfsPeiCoreGuid] = self.UnDispatchedFfsDict.pop(FfsPeiCoreGuid) self.LoadPpi(Db, FfsPeiCoreGuid) if 
FfsPeiPrioriGuid is not None: # Load PEIM described in priori file FfsPeiPriori = self.UnDispatchedFfsDict.pop(FfsPeiPrioriGuid) if len(FfsPeiPriori.Sections) == 1: Section = FfsPeiPriori.Sections.popitem()[1] if Section.Type == 0x19: GuidStruct = struct.Struct('1I2H8B') Start = 4 while len(Section) > Start: Guid = GuidStruct.unpack_from(Section[Start : Start + 16]) GuidString = gGuidStringFormat % Guid Start = Start + 16 if GuidString in self.UnDispatchedFfsDict: self.OrderedFfsDict[GuidString] = self.UnDispatchedFfsDict.pop(GuidString) self.LoadPpi(Db, GuidString) self.DisPatchPei(Db) # Parse DXE then if FfsDxeCoreGuid is not None: self.OrderedFfsDict[FfsDxeCoreGuid] = self.UnDispatchedFfsDict.pop(FfsDxeCoreGuid) self.LoadProtocol(Db, FfsDxeCoreGuid) if FfsDxePrioriGuid is not None: # Load PEIM described in priori file FfsDxePriori = self.UnDispatchedFfsDict.pop(FfsDxePrioriGuid) if len(FfsDxePriori.Sections) == 1: Section = FfsDxePriori.Sections.popitem()[1] if Section.Type == 0x19: GuidStruct = struct.Struct('1I2H8B') Start = 4 while len(Section) > Start: Guid = GuidStruct.unpack_from(Section[Start : Start + 16]) GuidString = gGuidStringFormat % Guid Start = Start + 16 if GuidString in self.UnDispatchedFfsDict: self.OrderedFfsDict[GuidString] = self.UnDispatchedFfsDict.pop(GuidString) self.LoadProtocol(Db, GuidString) self.DisPatchDxe(Db) def LoadProtocol(self, Db, ModuleGuid): SqlCommand = """select GuidValue from Report where SourceFileFullPath in (select Value1 from Inf where BelongsToFile = (select BelongsToFile from Inf where Value1 = 'FILE_GUID' and Value2 like '%s' and Model = %s) and Model = %s) and ItemType = 'Protocol' and ItemMode = 'Produced'""" \ % (ModuleGuid, 5001, 3007) RecordSet = Db.TblReport.Exec(SqlCommand) for Record in RecordSet: SqlCommand = """select Value2 from Inf where BelongsToFile = (select DISTINCT BelongsToFile from Inf where Value1 = (select SourceFileFullPath from Report where GuidValue like '%s' and ItemMode = 'Callback')) and Value1 = 'FILE_GUID'""" % Record[0] CallBackSet = Db.TblReport.Exec(SqlCommand) if CallBackSet != []: EotGlobalData.gProtocolList[Record[0].lower()] = ModuleGuid else: EotGlobalData.gProtocolList[Record[0].lower()] = ModuleGuid def LoadPpi(self, Db, ModuleGuid): SqlCommand = """select GuidValue from Report where SourceFileFullPath in (select Value1 from Inf where BelongsToFile = (select BelongsToFile from Inf where Value1 = 'FILE_GUID' and Value2 like '%s' and Model = %s) and Model = %s) and ItemType = 'Ppi' and ItemMode = 'Produced'""" \ % (ModuleGuid, 5001, 3007) RecordSet = Db.TblReport.Exec(SqlCommand) for Record in RecordSet: EotGlobalData.gPpiList[Record[0].lower()] = ModuleGuid def DisPatchDxe(self, Db): IsInstalled = False ScheduleList = sdict() for FfsID in list(self.UnDispatchedFfsDict.keys()): CouldBeLoaded = False DepexString = '' FileDepex = None Ffs = self.UnDispatchedFfsDict[FfsID] if Ffs.Type == 0x07: # Get Depex IsFoundDepex = False for Section in Ffs.Sections.values(): # Find Depex if Section.Type == 0x13: IsFoundDepex = True CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(Section._SubImages[4], 'Protocol') break if Section.Type == 0x01: CompressSections = Section._SubImages[4] for CompressSection in CompressSections.Sections: if CompressSection.Type == 0x13: IsFoundDepex = True CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(CompressSection._SubImages[4], 'Protocol') break if CompressSection.Type == 0x02: NewSections = CompressSection._SubImages[4] for NewSection in NewSections.Sections: if 
NewSection.Type == 0x13: IsFoundDepex = True CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(NewSection._SubImages[4], 'Protocol') break # Not find Depex if not IsFoundDepex: CouldBeLoaded = self.CheckArchProtocol() DepexString = '' FileDepex = None # Append New Ffs if CouldBeLoaded: IsInstalled = True NewFfs = self.UnDispatchedFfsDict.pop(FfsID) NewFfs.Depex = DepexString if FileDepex is not None: ScheduleList.insert(FileDepex[1], FfsID, NewFfs, FileDepex[0]) else: ScheduleList[FfsID] = NewFfs else: self.UnDispatchedFfsDict[FfsID].Depex = DepexString for FfsID in ScheduleList.keys(): NewFfs = ScheduleList.pop(FfsID) FfsName = 'UnKnown' self.OrderedFfsDict[FfsID] = NewFfs self.LoadProtocol(Db, FfsID) SqlCommand = """select Value2 from Inf where BelongsToFile = (select BelongsToFile from Inf where Value1 = 'FILE_GUID' and lower(Value2) = lower('%s') and Model = %s) and Model = %s and Value1='BASE_NAME'""" % (FfsID, 5001, 5001) RecordSet = Db.TblReport.Exec(SqlCommand) if RecordSet != []: FfsName = RecordSet[0][0] if IsInstalled: self.DisPatchDxe(Db) def DisPatchPei(self, Db): IsInstalled = False for FfsID in list(self.UnDispatchedFfsDict.keys()): CouldBeLoaded = True DepexString = '' FileDepex = None Ffs = self.UnDispatchedFfsDict[FfsID] if Ffs.Type == 0x06 or Ffs.Type == 0x08: # Get Depex for Section in Ffs.Sections.values(): if Section.Type == 0x1B: CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(Section._SubImages[4], 'Ppi') break if Section.Type == 0x01: CompressSections = Section._SubImages[4] for CompressSection in CompressSections.Sections: if CompressSection.Type == 0x1B: CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(CompressSection._SubImages[4], 'Ppi') break if CompressSection.Type == 0x02: NewSections = CompressSection._SubImages[4] for NewSection in NewSections.Sections: if NewSection.Type == 0x1B: CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(NewSection._SubImages[4], 'Ppi') break # Append New Ffs if CouldBeLoaded: IsInstalled = True NewFfs = self.UnDispatchedFfsDict.pop(FfsID) NewFfs.Depex = DepexString self.OrderedFfsDict[FfsID] = NewFfs self.LoadPpi(Db, FfsID) else: self.UnDispatchedFfsDict[FfsID].Depex = DepexString if IsInstalled: self.DisPatchPei(Db) def __str__(self): global gIndention gIndention += 4 FvInfo = '\n' + ' ' * gIndention FvInfo += "[FV:%s] file_system=%s size=%x checksum=%s\n" % (self.Name, self.FileSystemGuid, self.Size, self.Checksum) FfsInfo = "\n".join([str(self.FfsDict[FfsId]) for FfsId in self.FfsDict]) gIndention -= 4 return FvInfo + FfsInfo def _Unpack(self): Size = self._LENGTH_.unpack_from(self._BUF_, self._OFF_)[0] self.empty() self.extend(self._BUF_[self._OFF_:self._OFF_ + Size]) # traverse the FFS EndOfFv = Size FfsStartAddress = self.HeaderSize LastFfsObj = None while FfsStartAddress < EndOfFv: FfsObj = Ffs() FfsObj.frombuffer(self, FfsStartAddress) FfsId = repr(FfsObj) if ((self.Attributes & 0x00000800) != 0 and len(FfsObj) == 0xFFFFFF) \ or ((self.Attributes & 0x00000800) == 0 and len(FfsObj) == 0): if LastFfsObj is not None: LastFfsObj.FreeSpace = EndOfFv - LastFfsObj._OFF_ - len(LastFfsObj) else: if FfsId in self.FfsDict: EdkLogger.error("FV", 0, "Duplicate GUID in FFS", ExtraData="\t%s @ %s\n\t%s @ %s" \ % (FfsObj.Guid, FfsObj.Offset, self.FfsDict[FfsId].Guid, self.FfsDict[FfsId].Offset)) self.FfsDict[FfsId] = FfsObj if LastFfsObj is not None: LastFfsObj.FreeSpace = FfsStartAddress - LastFfsObj._OFF_ - len(LastFfsObj) FfsStartAddress += len(FfsObj) # # align to next 8-byte aligned address: A = (A + 8 - 
1) & (~(8 - 1)) # The next FFS must be at the latest next 8-byte aligned address # FfsStartAddress = (FfsStartAddress + 7) & (~7) LastFfsObj = FfsObj def _GetAttributes(self): return self.GetField(self._ATTR_, 0)[0] def _GetSize(self): return self.GetField(self._LENGTH_, 0)[0] def _GetChecksum(self): return self.GetField(self._CHECKSUM_, 0)[0] def _GetHeaderLength(self): return self.GetField(self._HLEN_, 0)[0] def _GetFileSystemGuid(self): return gGuidStringFormat % self.GetField(self._GUID_, 0) Attributes = property(_GetAttributes) Size = property(_GetSize) Checksum = property(_GetChecksum) HeaderSize = property(_GetHeaderLength) FileSystemGuid = property(_GetFileSystemGuid) ## GuidDefinedImage() class # # A class for GUID Defined Image # class GuidDefinedImage(Image): _HEADER_ = struct.Struct("1I2H8B 1H 1H") _HEADER_SIZE_ = _HEADER_.size _GUID_ = struct.Struct("1I2H8B") _DATA_OFFSET_ = struct.Struct("16x 1H") _ATTR_ = struct.Struct("18x 1H") CRC32_GUID = "FC1BCDB0-7D31-49AA-936A-A4600D9DD083" TIANO_COMPRESS_GUID = 'A31280AD-481E-41B6-95E8-127F4C984779' LZMA_COMPRESS_GUID = 'EE4E5898-3914-4259-9D6E-DC7BD79403CF' def __init__(self, SectionDefinitionGuid=None, DataOffset=None, Attributes=None, Data=None): Image.__init__(self) if SectionDefinitionGuid is not None: self.SectionDefinitionGuid = SectionDefinitionGuid if DataOffset is not None: self.DataOffset = DataOffset if Attributes is not None: self.Attributes = Attributes if Data is not None: self.Data = Data def __str__(self): S = "guid=%s" % (gGuidStringFormat % self.SectionDefinitionGuid) for Sec in self.Sections: S += "\n" + str(Sec) return S def _Unpack(self): # keep header in this Image object self.empty() self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._LEN_]) return len(self) def _SetAttribute(self, Attribute): self.SetField(self._ATTR_, 0, Attribute) def _GetAttribute(self): return self.GetField(self._ATTR_)[0] def _SetGuid(self, Guid): self.SetField(self._GUID_, 0, Guid) def _GetGuid(self): return self.GetField(self._GUID_) def _SetDataOffset(self, Offset): self.SetField(self._DATA_OFFSET_, 0, Offset) def _GetDataOffset(self): return self.GetField(self._DATA_OFFSET_)[0] def _GetSections(self): SectionList = [] Guid = gGuidStringFormat % self.SectionDefinitionGuid if Guid == self.CRC32_GUID: # skip the CRC32 value, we don't do CRC32 verification here Offset = self.DataOffset - 4 while Offset < len(self): Sec = Section() try: Sec.frombuffer(self, Offset) Offset += Sec.Size # the section is aligned to 4-byte boundary Offset = (Offset + 3) & (~3) except: break SectionList.append(Sec) elif Guid == self.TIANO_COMPRESS_GUID: try: # skip the header Offset = self.DataOffset - 4 TmpData = DeCompress('Framework', self[self.Offset:]) DecData = array('B') DecData.fromstring(TmpData) Offset = 0 while Offset < len(DecData): Sec = Section() try: Sec.frombuffer(DecData, Offset) Offset += Sec.Size # the section is aligned to 4-byte boundary Offset = (Offset + 3) & (~3) except: break SectionList.append(Sec) except: pass elif Guid == self.LZMA_COMPRESS_GUID: try: # skip the header Offset = self.DataOffset - 4 TmpData = DeCompress('Lzma', self[self.Offset:]) DecData = array('B') DecData.fromstring(TmpData) Offset = 0 while Offset < len(DecData): Sec = Section() try: Sec.frombuffer(DecData, Offset) Offset += Sec.Size # the section is aligned to 4-byte boundary Offset = (Offset + 3) & (~3) except: break SectionList.append(Sec) except: pass return SectionList Attributes = property(_GetAttribute, _SetAttribute) SectionDefinitionGuid = 
property(_GetGuid, _SetGuid) DataOffset = property(_GetDataOffset, _SetDataOffset) Sections = property(_GetSections) ## Section() class # # A class for Section # class Section(Image): _TypeName = { 0x00 : "<unknown>", 0x01 : "COMPRESSION", 0x02 : "GUID_DEFINED", 0x10 : "PE32", 0x11 : "PIC", 0x12 : "TE", 0x13 : "DXE_DEPEX", 0x14 : "VERSION", 0x15 : "USER_INTERFACE", 0x16 : "COMPATIBILITY16", 0x17 : "FIRMWARE_VOLUME_IMAGE", 0x18 : "FREEFORM_SUBTYPE_GUID", 0x19 : "RAW", 0x1B : "PEI_DEPEX" } _SectionSubImages = { 0x01 : CompressedImage, 0x02 : GuidDefinedImage, 0x17 : FirmwareVolume, 0x13 : Depex, 0x1B : Depex, 0x15 : Ui } # Size = 3-byte # Type = 1-byte _HEADER_ = struct.Struct("3B 1B") _HEADER_SIZE_ = _HEADER_.size # SubTypeGuid # _FREE_FORM_SUBTYPE_GUID_HEADER_ = struct.Struct("1I2H8B") _SIZE_ = struct.Struct("3B") _TYPE_ = struct.Struct("3x 1B") def __init__(self, Type=None, Size=None): Image.__init__(self) self._Alignment = 1 if Type is not None: self.Type = Type if Size is not None: self.Size = Size def __str__(self): global gIndention gIndention += 4 SectionInfo = ' ' * gIndention if self.Type in self._TypeName: SectionInfo += "[SECTION:%s] offset=%x size=%x" % (self._TypeName[self.Type], self._OFF_, self.Size) else: SectionInfo += "[SECTION:%x<unknown>] offset=%x size=%x " % (self.Type, self._OFF_, self.Size) for Offset in self._SubImages.keys(): SectionInfo += ", " + str(self._SubImages[Offset]) gIndention -= 4 return SectionInfo def _Unpack(self): self.empty() Type, = self._TYPE_.unpack_from(self._BUF_, self._OFF_) Size1, Size2, Size3 = self._SIZE_.unpack_from(self._BUF_, self._OFF_) Size = Size1 + (Size2 << 8) + (Size3 << 16) if Type not in self._SectionSubImages: # no need to extract sub-image, keep all in this Image object self.extend(self._BUF_[self._OFF_ : self._OFF_ + Size]) else: # keep header in this Image object self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._HEADER_SIZE_]) # # use new Image object to represent payload, which may be another kind # of image such as PE32 # PayloadOffset = self._HEADER_SIZE_ PayloadLen = self.Size - self._HEADER_SIZE_ Payload = self._SectionSubImages[self.Type]() Payload.frombuffer(self._BUF_, self._OFF_ + self._HEADER_SIZE_, PayloadLen) self._SubImages[PayloadOffset] = Payload return Size def _SetSize(self, Size): Size1 = Size & 0xFF Size2 = (Size & 0xFF00) >> 8 Size3 = (Size & 0xFF0000) >> 16 self.SetField(self._SIZE_, 0, Size1, Size2, Size3) def _GetSize(self): Size1, Size2, Size3 = self.GetField(self._SIZE_) return Size1 + (Size2 << 8) + (Size3 << 16) def _SetType(self, Type): self.SetField(self._TYPE_, 0, Type) def _GetType(self): return self.GetField(self._TYPE_)[0] def _GetAlignment(self): return self._Alignment def _SetAlignment(self, Alignment): self._Alignment = Alignment AlignmentMask = Alignment - 1 # section alignment is actually for payload, so we need to add header size PayloadOffset = self._OFF_ + self._HEADER_SIZE_ if (PayloadOffset & (~AlignmentMask)) == 0: return NewOffset = (PayloadOffset + AlignmentMask) & (~AlignmentMask) while (NewOffset - PayloadOffset) < self._HEADER_SIZE_: NewOffset += self._Alignment def tofile(self, f): self.Size = len(self) Image.tofile(self, f) for Offset in self._SubImages: self._SubImages[Offset].tofile(f) Type = property(_GetType, _SetType) Size = property(_GetSize, _SetSize) Alignment = property(_GetAlignment, _SetAlignment) ## Ffs() class # # A class for Ffs Section # class Ffs(Image): _FfsFormat = "24B%(payload_size)sB" # skip IntegrityCheck _HEADER_ = struct.Struct("1I2H8B 2x 1B 1B 3B 
1B") _HEADER_SIZE_ = _HEADER_.size _NAME_ = struct.Struct("1I2H8B") _INT_CHECK_ = struct.Struct("16x 1H") _TYPE_ = struct.Struct("18x 1B") _ATTR_ = struct.Struct("19x 1B") _SIZE_ = struct.Struct("20x 3B") _STATE_ = struct.Struct("23x 1B") FFS_ATTRIB_FIXED = 0x04 FFS_ATTRIB_DATA_ALIGNMENT = 0x38 FFS_ATTRIB_CHECKSUM = 0x40 _TypeName = { 0x00 : "<unknown>", 0x01 : "RAW", 0x02 : "FREEFORM", 0x03 : "SECURITY_CORE", 0x04 : "PEI_CORE", 0x05 : "DXE_CORE", 0x06 : "PEIM", 0x07 : "DRIVER", 0x08 : "COMBINED_PEIM_DRIVER", 0x09 : "APPLICATION", 0x0A : "SMM", 0x0B : "FIRMWARE_VOLUME_IMAGE", 0x0C : "COMBINED_SMM_DXE", 0x0D : "SMM_CORE", 0x0E : "MM_STANDALONE", 0x0F : "MM_CORE_STANDALONE", 0xc0 : "OEM_MIN", 0xdf : "OEM_MAX", 0xe0 : "DEBUG_MIN", 0xef : "DEBUG_MAX", 0xf0 : "FFS_MIN", 0xff : "FFS_MAX", 0xf0 : "FFS_PAD", } def __init__(self): Image.__init__(self) self.FreeSpace = 0 self.Sections = sdict() self.Depex = '' self.__ID__ = None def __str__(self): global gIndention gIndention += 4 Indention = ' ' * gIndention FfsInfo = Indention FfsInfo += "[FFS:%s] offset=%x size=%x guid=%s free_space=%x alignment=%s\n" % \ (Ffs._TypeName[self.Type], self._OFF_, self.Size, self.Guid, self.FreeSpace, self.Alignment) SectionInfo = '\n'.join([str(self.Sections[Offset]) for Offset in self.Sections.keys()]) gIndention -= 4 return FfsInfo + SectionInfo + "\n" def __len__(self): return self.Size def __repr__(self): return self.__ID__ def _Unpack(self): Size1, Size2, Size3 = self._SIZE_.unpack_from(self._BUF_, self._OFF_) Size = Size1 + (Size2 << 8) + (Size3 << 16) self.empty() self.extend(self._BUF_[self._OFF_ : self._OFF_ + Size]) # Pad FFS may use the same GUID. We need to avoid it. if self.Type == 0xf0: self.__ID__ = str(uuid.uuid1()).upper() else: self.__ID__ = self.Guid # Traverse the SECTION. 
RAW and PAD do not have sections if self.Type not in [0xf0, 0x01] and Size > 0 and Size < 0xFFFFFF: EndOfFfs = Size SectionStartAddress = self._HEADER_SIZE_ while SectionStartAddress < EndOfFfs: SectionObj = Section() SectionObj.frombuffer(self, SectionStartAddress) #f = open(repr(SectionObj), 'wb') #SectionObj.Size = 0 #SectionObj.tofile(f) #f.close() self.Sections[SectionStartAddress] = SectionObj SectionStartAddress += len(SectionObj) SectionStartAddress = (SectionStartAddress + 3) & (~3) def Pack(self): pass def SetFreeSpace(self, Size): self.FreeSpace = Size def _GetGuid(self): return gGuidStringFormat % self.Name def _SetName(self, Value): # Guid1, Guid2, Guid3, Guid4, Guid5, Guid6, Guid7, Guid8, Guid9, Guid10, Guid11 self.SetField(self._NAME_, 0, Value) def _GetName(self): # Guid1, Guid2, Guid3, Guid4, Guid5, Guid6, Guid7, Guid8, Guid9, Guid10, Guid11 return self.GetField(self._NAME_) def _SetSize(self, Size): Size1 = Size & 0xFF Size2 = (Size & 0xFF00) >> 8 Size3 = (Size & 0xFF0000) >> 16 self.SetField(self._SIZE_, 0, Size1, Size2, Size3) def _GetSize(self): Size1, Size2, Size3 = self.GetField(self._SIZE_) return Size1 + (Size2 << 8) + (Size3 << 16) def _SetType(self, Type): self.SetField(self._TYPE_, 0, Type) def _GetType(self): return self.GetField(self._TYPE_)[0] def _SetAttributes(self, Value): self.SetField(self._ATTR_, 0, Value) def _GetAttributes(self): return self.GetField(self._ATTR_)[0] def _GetFixed(self): if (self.Attributes & self.FFS_ATTRIB_FIXED) != 0: return True return False def _GetCheckSum(self): if (self.Attributes & self.FFS_ATTRIB_CHECKSUM) != 0: return True return False def _GetAlignment(self): return (self.Attributes & self.FFS_ATTRIB_DATA_ALIGNMENT) >> 3 def _SetState(self, Value): self.SetField(self._STATE_, 0, Value) def _GetState(self): return self.GetField(self._STATE_)[0] Name = property(_GetName, _SetName) Guid = property(_GetGuid) Type = property(_GetType, _SetType) Size = property(_GetSize, _SetSize) Attributes = property(_GetAttributes, _SetAttributes) Fixed = property(_GetFixed) Checksum = property(_GetCheckSum) Alignment = property(_GetAlignment) State = property(_GetState, _SetState) ## MultipleFv() class # # A class for Multiple FV # class MultipleFv(FirmwareVolume): def __init__(self, FvList): FirmwareVolume.__init__(self) self.BasicInfo = [] for FvPath in FvList: Fd = None FvName = os.path.splitext(os.path.split(FvPath)[1])[0] if FvPath.strip(): Fd = open(FvPath, 'rb') Buf = array('B') try: Buf.fromfile(Fd, os.path.getsize(FvPath)) except EOFError: pass Fv = FirmwareVolume(FvName) Fv.frombuffer(Buf, 0, len(Buf)) self.BasicInfo.append([Fv.Name, Fv.FileSystemGuid, Fv.Size]) self.FfsDict.update(Fv.FfsDict) ## Class Eot # # This class is used to define Eot main entrance # # @param object: Inherited from object class # class Eot(object): ## The constructor # # @param self: The object pointer # def __init__(self, CommandLineOption=True, IsInit=True, SourceFileList=None, \ IncludeDirList=None, DecFileList=None, GuidList=None, LogFile=None, FvFileList="", MapFileList="", Report='Report.html', Dispatch=None): # Version and Copyright self.VersionNumber = ("0.02" + " " + gBUILD_VERSION) self.Version = "%prog Version " + self.VersionNumber self.Copyright = "Copyright (c) 2008 - 2018, Intel Corporation All rights reserved." 
self.Report = Report self.IsInit = IsInit self.SourceFileList = SourceFileList self.IncludeDirList = IncludeDirList self.DecFileList = DecFileList self.GuidList = GuidList self.LogFile = LogFile self.FvFileList = FvFileList self.MapFileList = MapFileList self.Dispatch = Dispatch # Check workspace environment if "EFI_SOURCE" not in os.environ: if "EDK_SOURCE" not in os.environ: pass else: EotGlobalData.gEDK_SOURCE = os.path.normpath(os.getenv("EDK_SOURCE")) else: EotGlobalData.gEFI_SOURCE = os.path.normpath(os.getenv("EFI_SOURCE")) EotGlobalData.gEDK_SOURCE = os.path.join(EotGlobalData.gEFI_SOURCE, 'Edk') if "WORKSPACE" not in os.environ: EdkLogger.error("EOT", BuildToolError.ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found", ExtraData="WORKSPACE") else: EotGlobalData.gWORKSPACE = os.path.normpath(os.getenv("WORKSPACE")) EotGlobalData.gMACRO['WORKSPACE'] = EotGlobalData.gWORKSPACE EotGlobalData.gMACRO['EFI_SOURCE'] = EotGlobalData.gEFI_SOURCE EotGlobalData.gMACRO['EDK_SOURCE'] = EotGlobalData.gEDK_SOURCE # Parse the options and args if CommandLineOption: self.ParseOption() if self.FvFileList: for FvFile in GetSplitValueList(self.FvFileList, ' '): FvFile = os.path.normpath(FvFile) if not os.path.isfile(FvFile): EdkLogger.error("Eot", EdkLogger.EOT_ERROR, "Can not find file %s " % FvFile) EotGlobalData.gFV_FILE.append(FvFile) else: EdkLogger.error("Eot", EdkLogger.EOT_ERROR, "The fv file list of target platform was not specified") if self.MapFileList: for MapFile in GetSplitValueList(self.MapFileList, ' '): MapFile = os.path.normpath(MapFile) if not os.path.isfile(MapFile): EdkLogger.error("Eot", EdkLogger.EOT_ERROR, "Can not find file %s " % MapFile) EotGlobalData.gMAP_FILE.append(MapFile) # Generate source file list self.GenerateSourceFileList(self.SourceFileList, self.IncludeDirList) # Generate guid list of dec file list self.ParseDecFile(self.DecFileList) # Generate guid list from GUID list file self.ParseGuidList(self.GuidList) # Init Eot database EotGlobalData.gDb = Database.Database(Database.DATABASE_PATH) EotGlobalData.gDb.InitDatabase(self.IsInit) # Build ECC database self.BuildDatabase() # Parse Ppi/Protocol self.ParseExecutionOrder() # Merge Identifier tables self.GenerateQueryTable() # Generate report database self.GenerateReportDatabase() # Load Fv Info self.LoadFvInfo() # Load Map Info self.LoadMapInfo() # Generate Report self.GenerateReport() # Convert log file self.ConvertLogFile(self.LogFile) # DONE EdkLogger.quiet("EOT FINISHED!") # Close Database EotGlobalData.gDb.Close() ## ParseDecFile() method # # parse DEC file and get all GUID names with GUID values as {GuidName : GuidValue} # The Dict is stored in EotGlobalData.gGuidDict # # @param self: The object pointer # @param DecFileList: A list of all DEC files # def ParseDecFile(self, DecFileList): if DecFileList: path = os.path.normpath(DecFileList) lfr = open(path, 'rb') for line in lfr: path = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line.strip())) if os.path.exists(path): dfr = open(path, 'rb') for line in dfr: line = CleanString(line) list = line.split('=') if len(list) == 2: EotGlobalData.gGuidDict[list[0].strip()] = GuidStructureStringToGuidString(list[1].strip()) ## ParseGuidList() method # # Parse Guid list and get all GUID names with GUID values as {GuidName : GuidValue} # The Dict is stored in EotGlobalData.gGuidDict # # @param self: The object pointer # @param GuidList: A list of all GUID and its value # def ParseGuidList(self, GuidList): Path = os.path.join(EotGlobalData.gWORKSPACE, 
GuidList) if os.path.isfile(Path): for Line in open(Path): if Line.strip(): (GuidName, GuidValue) = Line.split() EotGlobalData.gGuidDict[GuidName] = GuidValue ## ConvertLogFile() method # # Parse a real running log file to get real dispatch order # The result is saved to old file name + '.new' # # @param self: The object pointer # @param LogFile: A real running log file name # def ConvertLogFile(self, LogFile): newline = [] lfr = None lfw = None if LogFile: lfr = open(LogFile, 'rb') lfw = open(LogFile + '.new', 'wb') for line in lfr: line = line.strip() line = line.replace('.efi', '') index = line.find("Loading PEIM at ") if index > -1: newline.append(line[index + 55 : ]) continue index = line.find("Loading driver at ") if index > -1: newline.append(line[index + 57 : ]) continue for line in newline: lfw.write(line + '\r\n') if lfr: lfr.close() if lfw: lfw.close() ## GenerateSourceFileList() method # # Generate a list of all source files # 1. Search the file list one by one # 2. Store inf file name with source file names under it like # { INF file name: [source file1, source file2, ...]} # 3. Search the include list to find all .h files # 4. Store source file list to EotGlobalData.gSOURCE_FILES # 5. Store INF file list to EotGlobalData.gINF_FILES # # @param self: The object pointer # @param SourceFileList: A list of all source files # @param IncludeFileList: A list of all include files # def GenerateSourceFileList(self, SourceFileList, IncludeFileList): EdkLogger.quiet("Generating source files list ... ") mSourceFileList = [] mInfFileList = [] mDecFileList = [] mFileList = {} mCurrentInfFile = '' mCurrentSourceFileList = [] if SourceFileList: sfl = open(SourceFileList, 'r') for line in sfl: line = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line.strip())) if line[-2:].upper() == '.C' or line[-2:].upper() == '.H': if line not in mCurrentSourceFileList: mCurrentSourceFileList.append(line) mSourceFileList.append(line) EotGlobalData.gOP_SOURCE_FILES.write('%s\n' % line) if line[-4:].upper() == '.INF': if mCurrentInfFile != '': mFileList[mCurrentInfFile] = mCurrentSourceFileList mCurrentSourceFileList = [] mCurrentInfFile = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line)) EotGlobalData.gOP_INF.write('%s\n' % mCurrentInfFile) if mCurrentInfFile not in mFileList: mFileList[mCurrentInfFile] = mCurrentSourceFileList # Get all include files from packages if IncludeFileList: ifl = open(IncludeFileList, 'rb') for line in ifl: if not line.strip(): continue newline = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line.strip())) for Root, Dirs, Files in os.walk(str(newline)): for File in Files: FullPath = os.path.normpath(os.path.join(Root, File)) if FullPath not in mSourceFileList and File[-2:].upper() == '.H': mSourceFileList.append(FullPath) EotGlobalData.gOP_SOURCE_FILES.write('%s\n' % FullPath) if FullPath not in mDecFileList and File.upper().find('.DEC') > -1: mDecFileList.append(FullPath) EotGlobalData.gSOURCE_FILES = mSourceFileList EotGlobalData.gOP_SOURCE_FILES.close() EotGlobalData.gINF_FILES = mFileList EotGlobalData.gOP_INF.close() ## GenerateReport() method # # Generate final HTML report # # @param self: The object pointer # def GenerateReport(self): EdkLogger.quiet("Generating report file ... 
") Rep = Report(self.Report, EotGlobalData.gFV, self.Dispatch) Rep.GenerateReport() ## LoadMapInfo() method # # Load map files and parse them # # @param self: The object pointer # def LoadMapInfo(self): if EotGlobalData.gMAP_FILE != []: EdkLogger.quiet("Parsing Map file ... ") EotGlobalData.gMap = ParseMapFile(EotGlobalData.gMAP_FILE) ## LoadFvInfo() method # # Load FV binary files and parse them # # @param self: The object pointer # def LoadFvInfo(self): EdkLogger.quiet("Parsing FV file ... ") EotGlobalData.gFV = MultipleFv(EotGlobalData.gFV_FILE) EotGlobalData.gFV.Dispatch(EotGlobalData.gDb) for Protocol in EotGlobalData.gProtocolList: EotGlobalData.gOP_UN_MATCHED_IN_LIBRARY_CALLING.write('%s\n' %Protocol) ## GenerateReportDatabase() method # # Generate data for the information needed by report # 1. Update name, macro and value of all found PPI/PROTOCOL GUID # 2. Install hard coded PPI/PROTOCOL # # @param self: The object pointer # def GenerateReportDatabase(self): EdkLogger.quiet("Generating the cross-reference table of GUID for Ppi/Protocol ... ") # Update Protocol/Ppi Guid SqlCommand = """select DISTINCT GuidName from Report""" RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand) for Record in RecordSet: GuidName = Record[0] GuidMacro = '' GuidMacro2 = '' GuidValue = '' # Find guid value defined in Dec file if GuidName in EotGlobalData.gGuidDict: GuidValue = EotGlobalData.gGuidDict[GuidName] SqlCommand = """update Report set GuidMacro = '%s', GuidValue = '%s' where GuidName = '%s'""" %(GuidMacro, GuidValue, GuidName) EotGlobalData.gDb.TblReport.Exec(SqlCommand) continue # Search defined Macros for guid name SqlCommand ="""select DISTINCT Value, Modifier from Query where Name like '%s'""" % GuidName GuidMacroSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand) # Ignore NULL result if not GuidMacroSet: continue GuidMacro = GuidMacroSet[0][0].strip() if not GuidMacro: continue # Find Guid value of Guid Macro SqlCommand ="""select DISTINCT Value from Query2 where Value like '%%%s%%' and Model = %s""" % (GuidMacro, MODEL_IDENTIFIER_MACRO_DEFINE) GuidValueSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand) if GuidValueSet != []: GuidValue = GuidValueSet[0][0] GuidValue = GuidValue[GuidValue.find(GuidMacro) + len(GuidMacro) :] GuidValue = GuidValue.lower().replace('\\', '').replace('\r', '').replace('\n', '').replace('l', '').strip() GuidValue = GuidStructureStringToGuidString(GuidValue) SqlCommand = """update Report set GuidMacro = '%s', GuidValue = '%s' where GuidName = '%s'""" %(GuidMacro, GuidValue, GuidName) EotGlobalData.gDb.TblReport.Exec(SqlCommand) continue # Update Hard Coded Ppi/Protocol SqlCommand = """select DISTINCT GuidValue, ItemType from Report where ModuleID = -2 and ItemMode = 'Produced'""" RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand) for Record in RecordSet: if Record[1] == 'Ppi': EotGlobalData.gPpiList[Record[0].lower()] = -2 if Record[1] == 'Protocol': EotGlobalData.gProtocolList[Record[0].lower()] = -2 ## GenerateQueryTable() method # # Generate two tables improve query performance # # @param self: The object pointer # def GenerateQueryTable(self): EdkLogger.quiet("Generating temp query table for analysis ... 
") for Identifier in EotGlobalData.gIdentifierTableList: SqlCommand = """insert into Query (Name, Modifier, Value, Model) select Name, Modifier, Value, Model from %s where (Model = %s or Model = %s)""" \ % (Identifier[0], MODEL_IDENTIFIER_VARIABLE, MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION) EotGlobalData.gDb.TblReport.Exec(SqlCommand) SqlCommand = """insert into Query2 (Name, Modifier, Value, Model) select Name, Modifier, Value, Model from %s where Model = %s""" \ % (Identifier[0], MODEL_IDENTIFIER_MACRO_DEFINE) EotGlobalData.gDb.TblReport.Exec(SqlCommand) ## ParseExecutionOrder() method # # Get final execution order # 1. Search all PPI # 2. Search all PROTOCOL # # @param self: The object pointer # def ParseExecutionOrder(self): EdkLogger.quiet("Searching Ppi/Protocol ... ") for Identifier in EotGlobalData.gIdentifierTableList: ModuleID, ModuleName, ModuleGuid, SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, Enabled = \ -1, '', '', -1, '', '', '', '', '', '', '', '', 0 SourceFileID = Identifier[0].replace('Identifier', '') SourceFileFullPath = Identifier[1] Identifier = Identifier[0] # Find Ppis ItemMode = 'Produced' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.InstallPpi', '->InstallPpi', 'PeiInstallPpi', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode) ItemMode = 'Produced' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.ReInstallPpi', '->ReInstallPpi', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 2) SearchPpiCallFunction(Identifier, SourceFileID, SourceFileFullPath, ItemMode) ItemMode = 'Consumed' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.LocatePpi', '->LocatePpi', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode) SearchFunctionCalling(Identifier, SourceFileID, SourceFileFullPath, 'Ppi', ItemMode) ItemMode = 'Callback' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.NotifyPpi', '->NotifyPpi', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode) # Find Protocols ItemMode = 'Produced' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%' or Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.InstallProtocolInterface', '.ReInstallProtocolInterface', '->InstallProtocolInterface', '->ReInstallProtocolInterface', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 1) SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.InstallMultipleProtocolInterfaces', '->InstallMultipleProtocolInterfaces', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 2) 
SearchFunctionCalling(Identifier, SourceFileID, SourceFileFullPath, 'Protocol', ItemMode) ItemMode = 'Consumed' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.LocateProtocol', '->LocateProtocol', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 0) SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.HandleProtocol', '->HandleProtocol', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 1) SearchFunctionCalling(Identifier, SourceFileID, SourceFileFullPath, 'Protocol', ItemMode) ItemMode = 'Callback' SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \ % (Identifier, '.RegisterProtocolNotify', '->RegisterProtocolNotify', MODEL_IDENTIFIER_FUNCTION_CALLING) SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 0) SearchFunctionCalling(Identifier, SourceFileID, SourceFileFullPath, 'Protocol', ItemMode) # Hard Code EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gEfiSecPlatformInformationPpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gEfiNtLoadAsDllPpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gNtPeiLoadFileGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiNtAutoScanPpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gNtFwhPpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiNtThunkPpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiPlatformTypePpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiFrequencySelectionCpuPpiGuid', '', '', '', 0) EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiCachePpiGuid', '', '', '', 0) EotGlobalData.gDb.Conn.commit() ## BuildDatabase() methoc # # Build the database for target # # @param self: The object pointer # def BuildDatabase(self): # Clean report table EotGlobalData.gDb.TblReport.Drop() EotGlobalData.gDb.TblReport.Create() # Build database if self.IsInit: self.BuildMetaDataFileDatabase(EotGlobalData.gINF_FILES) EdkLogger.quiet("Building database for source code ...") c.CreateCCodeDB(EotGlobalData.gSOURCE_FILES) EdkLogger.quiet("Building database for source code done!") EotGlobalData.gIdentifierTableList = GetTableList((MODEL_FILE_C, MODEL_FILE_H), 'Identifier', EotGlobalData.gDb) ## BuildMetaDataFileDatabase() method # # Build the database for meta data files # # @param self: The object pointer # @param Inf_Files: A list for all INF files # def BuildMetaDataFileDatabase(self, Inf_Files): EdkLogger.quiet("Building database for meta data files ...") for InfFile in Inf_Files: if not InfFile: continue EdkLogger.quiet("Parsing %s ..." 
% str(InfFile)) EdkInfParser(InfFile, EotGlobalData.gDb, Inf_Files[InfFile], '') EotGlobalData.gDb.Conn.commit() EdkLogger.quiet("Building database for meta data files done!") ## ParseOption() method # # Parse command line options # # @param self: The object pointer # def ParseOption(self): (Options, Target) = self.EotOptionParser() # Set log level self.SetLogLevel(Options) if Options.FvFileList: self.FvFileList = Options.FvFileList if Options.MapFileList: self.MapFileList = Options.FvMapFileList if Options.SourceFileList: self.SourceFileList = Options.SourceFileList if Options.IncludeDirList: self.IncludeDirList = Options.IncludeDirList if Options.DecFileList: self.DecFileList = Options.DecFileList if Options.GuidList: self.GuidList = Options.GuidList if Options.LogFile: self.LogFile = Options.LogFile if Options.keepdatabase: self.IsInit = False ## SetLogLevel() method # # Set current log level of the tool based on args # # @param self: The object pointer # @param Option: The option list including log level setting # def SetLogLevel(self, Option): if Option.verbose is not None: EdkLogger.SetLevel(EdkLogger.VERBOSE) elif Option.quiet is not None: EdkLogger.SetLevel(EdkLogger.QUIET) elif Option.debug is not None: EdkLogger.SetLevel(Option.debug + 1) else: EdkLogger.SetLevel(EdkLogger.INFO) ## EotOptionParser() method # # Using standard Python module optparse to parse command line option of this tool. # # @param self: The object pointer # # @retval Opt A optparse.Values object containing the parsed options # @retval Args Target of build command # def EotOptionParser(self): Parser = OptionParser(description = self.Copyright, version = self.Version, prog = "Eot.exe", usage = "%prog [options]") Parser.add_option("-m", "--makefile filename", action="store", type="string", dest='MakeFile', help="Specify a makefile for the platform.") Parser.add_option("-c", "--dsc filename", action="store", type="string", dest="DscFile", help="Specify a dsc file for the platform.") Parser.add_option("-f", "--fv filename", action="store", type="string", dest="FvFileList", help="Specify fv file list, quoted by \"\".") Parser.add_option("-a", "--map filename", action="store", type="string", dest="MapFileList", help="Specify map file list, quoted by \"\".") Parser.add_option("-s", "--source files", action="store", type="string", dest="SourceFileList", help="Specify source file list by a file") Parser.add_option("-i", "--include dirs", action="store", type="string", dest="IncludeDirList", help="Specify include dir list by a file") Parser.add_option("-e", "--dec files", action="store", type="string", dest="DecFileList", help="Specify dec file list by a file") Parser.add_option("-g", "--guid list", action="store", type="string", dest="GuidList", help="Specify guid file list by a file") Parser.add_option("-l", "--log filename", action="store", type="string", dest="LogFile", help="Specify real execution log file") Parser.add_option("-k", "--keepdatabase", action="store_true", type=None, help="The existing Eot database will not be cleaned except report information if this option is specified.") Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.") Parser.add_option("-v", "--verbose", action="store_true", type=None, help="Turn on verbose output with informational messages printed, "\ "including library instances selected, final dependency expression, "\ "and warning messages, etc.") Parser.add_option("-d", "--debug", action="store", type="int", help="Enable 
debug messages at specified level.") (Opt, Args)=Parser.parse_args() return (Opt, Args) ## # # This acts like the main() function for the script, unless it is 'import'ed into another # script. # if __name__ == '__main__': # Initialize log system EdkLogger.Initialize() EdkLogger.IsRaiseError = False EdkLogger.quiet(time.strftime("%H:%M:%S, %b.%d %Y ", time.localtime()) + "[00:00]" + "\n") StartTime = time.clock() Eot = Eot(CommandLineOption=False, SourceFileList=r'C:\TestEot\Source.txt', GuidList=r'C:\TestEot\Guid.txt', FvFileList=r'C:\TestEot\FVRECOVERY.Fv') FinishTime = time.clock() BuildDuration = time.strftime("%M:%S", time.gmtime(int(round(FinishTime - StartTime)))) EdkLogger.quiet("\n%s [%s]" % (time.strftime("%H:%M:%S, %b.%d %Y", time.localtime()), BuildDuration))
apache-2.0
-5,141,420,957,244,299,000
38.264689
199
0.541761
false
3.868489
false
false
false
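The firmware-volume parsing code in the record above repeatedly decodes a 3-byte little-endian length field and rounds offsets up to 4- and 8-byte boundaries. The standalone sketch below re-derives just that arithmetic; it is an illustrative reconstruction, not part of the sources in the record, and the values in the assertions are made-up samples.

import struct

def ffs_size(size_bytes):
    # FFS stores its length as three bytes, little-endian:
    # Size = Size1 + (Size2 << 8) + (Size3 << 16), as in Ffs._GetSize above.
    s1, s2, s3 = struct.unpack("3B", size_bytes)
    return s1 + (s2 << 8) + (s3 << 16)

def next_ffs_offset(offset):
    # The next FFS file starts at the next 8-byte aligned address:
    # A = (A + 8 - 1) & ~(8 - 1), as in FirmwareVolume._Unpack above.
    return (offset + 7) & ~7

def next_section_offset(offset):
    # Sections inside an FFS file are aligned to 4-byte boundaries.
    return (offset + 3) & ~3

if __name__ == '__main__':
    assert ffs_size(bytes([0x34, 0x12, 0x00])) == 0x1234   # made-up sample length
    assert next_ffs_offset(0x1234) == 0x1238
    assert next_section_offset(0x1235) == 0x1238
    print('ok')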
openhumanoids/pronto
motion_estimate/scripts/send_a_robot_state.py
2
2079
#!/usr/bin/python
# A very simple process to combine the floating base estimate
# with the kinematics and output the combined message
# input: POSE_BODY and ATLAS_STATE, output: EST_ROBOT_STATE
#
# currently this only works/used for Thor Mang
import os,sys
import lcm
import time
from lcm import LCM
from math import *
import numpy as np
import numpy.random as random

home_dir =os.getenv("HOME")
#print home_dir
sys.path.append(home_dir + "/drc/software/build/lib/python2.7/site-packages")
sys.path.append(home_dir + "/drc/software/build/lib/python2.7/dist-packages")
#sys.path.append(home_dir + "/otherprojects/pronto-distro/build/lib/python2.7/site-packages")
#sys.path.append(home_dir + "/otherprojects/pronto-distro/build/lib/python2.7/dist-packages")

from pronto.robot_state_t import robot_state_t

########################################################################################
def timestamp_now (): return int (time.time () * 1000000)

# hyq:
joint_name_list = ['lf_haa_joint', 'lf_hfe_joint', 'lf_kfe_joint', 'rf_haa_joint', 'rf_hfe_joint', 'rf_kfe_joint', 'lh_haa_joint', 'lh_hfe_joint', 'lh_kfe_joint', 'rh_haa_joint', 'rh_hfe_joint', 'rh_kfe_joint', 'ptu_pan', 'ptu_tilt']

def send_state():
    o = robot_state_t()
    o.utime = timestamp_now ()
    o.num_joints = len(joint_name_list)
    #o.joint_name = ["" for x in range(o.num_joints)]
    o.joint_name = joint_name_list
    o.joint_position = random.rand(o.num_joints) -0.5
    #o.joint_position = [0.0, 0.0, -1.3962634015945001, 0.0, 0.0, -1.3962634015945001, 0.0, -2.220446049250313e-16, 1.3962634015945001, 0.0, -2.220446049250313e-16, 1.3962634015945001, 0.0, 1.1102230246251565e-16, 0,0]
    o.joint_velocity = [0]*o.num_joints
    o.joint_effort = [0]*o.num_joints
    o.pose.translation.x =0;
    o.pose.translation.y =0;
    o.pose.translation.z =1;
    o.pose.rotation.w = 1;
    o.pose.rotation.x = 0
    o.pose.rotation.y = 0
    o.pose.rotation.z = 0
    lc.publish("EST_ROBOT_STATE",o.encode())

####################################################################
lc = lcm.LCM()
print "started"
send_state()
lgpl-2.1
-3,301,472,500,719,083,500
32.532258
233
0.645984
false
2.658568
false
false
false
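send_a_robot_state.py above publishes a single EST_ROBOT_STATE message over LCM. A minimal receiving counterpart could look like the sketch below; it assumes the same generated pronto.robot_state_t type and the standard lcm-python bindings (lcm.LCM, subscribe, handle) and is not part of the pronto repository.

import lcm
from pronto.robot_state_t import robot_state_t

def on_robot_state(channel, data):
    # Decode the message published by send_state() in the record above.
    msg = robot_state_t.decode(data)
    print("utime=%d num_joints=%d z=%.2f" % (msg.utime, msg.num_joints, msg.pose.translation.z))

lc = lcm.LCM()
subscription = lc.subscribe("EST_ROBOT_STATE", on_robot_state)
try:
    while True:
        lc.handle()   # blocks until a message arrives on a subscribed channel
except KeyboardInterrupt:
    pass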
mgp25/LocalBitcoins-API
src/LocalBitcoin.py
1
12088
import hmac import json import urllib import hashlib import requests from urllib import parse from datetime import datetime class LocalBitcoin: baseurl = 'https://localbitcoins.com' def __init__(self, hmac_auth_key, hmac_auth_secret, debug = False): self.hmac_auth_key = hmac_auth_key self.hmac_auth_secret = hmac_auth_secret self.debug = debug """ Returns public user profile information """ def getAccountInfo(self, username): return self.sendRequest('/api/account_info/' + username + '/', '', 'get') """ Returns recent notifications. """ def getNotifications(self): return self.sendRequest('/api/notifications/', '', 'get') """ Return the information of the currently logged in user (the owner of authentication token). """ def getMyself(self): return self.sendRequest('/api/myself/', '', 'get') """ Checks the given PIN code against the user's currently active PIN code. You can use this method to ensure the person using the session is the legitimate user. """ def checkPinCode(self, code): return self.sendRequest('/api/pincode/', {'code': code}, 'post') """ Return open and active contacts """ def getDashboard(self): return self.sendRequest('/api/dashboard/', '', 'get') """ Return released (successful) contacts """ def getDashboardReleased(self): return self.sendRequest('/api/dashboard/released/', '', 'get') """ Return canceled contacts """ def getDashboardCanceled(self): return self.sendRequest('/api/dashboard/canceled/', '', 'get') """ Return closed contacts, both released and canceled """ def getDashboardClosed(self): return self.sendRequest('/api/dashboard/closed/', '', 'get') """ Releases the escrow of contact specified by ID {contact_id}. On success there's a complimentary message on the data key. """ def contactRelease(self, contact_id): return self.sendRequest('/api/contact_release/' + contact_id + '/', '', 'post') """ Releases the escrow of contact specified by ID {contact_id}. On success there's a complimentary message on the data key. """ def contactReleasePin(self, contact_id, pincode): return self.sendRequest('/api/contact_release_pin/' + contact_id + '/', {'pincode': pincode}, 'post') """ Reads all messaging from the contact. Messages are on the message_list key. On success there's a complimentary message on the data key. attachment_* fields exist only if there is an attachment. """ def getContactMessages(self, contact_id): return self.sendRequest('/api/contact_messages/' + contact_id + '/', '', 'get') """ Marks a contact as paid. It is recommended to access this API through /api/online_buy_contacts/ entries' action key. """ def markContactAsPaid(self, contact_id): return self.sendRequest('/api/contact_mark_as_paid/' + contact_id + '/', '', 'get') """ Post a message to contact """ def postMessageToContact(self, contact_id, message, document=None): return self.sendRequest('/api/contact_message_post/' + contact_id + '/', {'msg': message}, 'post') """ Starts a dispute with the contact, if possible. You can provide a short description using topic. This helps support to deal with the problem. """ def startDispute(self, contact_id, topic = None): topic = '' if topic != None: topic = {'topic': topic} return self.sendRequest('/api/contact_dispute/' + contact_id + '/', topic, 'post') """ Cancels the contact, if possible """ def cancelContact(self, contact_id): return self.sendRequest('/api/contact_cancel/' + contact_id + '/', '', 'post') """ Attempts to fund an unfunded local contact from the seller's wallet. 
""" def fundContact(self, contact_id): return self.sendRequest('/api/contact_fund/' + contact_id + '/', '', 'post') """ Attempts to create a contact to trade bitcoins. Amount is a number in the advertisement's fiat currency. Returns the API URL to the newly created contact at actions.contact_url. Whether the contact was able to be funded automatically is indicated at data.funded. Only non-floating LOCAL_SELL may return unfunded, all other trade types either fund or fail. """ def createContact(self, contact_id, ammount, message = None): post = '' if message == None: post = {'ammount': ammount} else: post = {'ammount': ammount, 'message': message} return self.sendRequest('/api/contact_create/' + contact_id + '/', post, 'post') """ Gets information about a single contact you are involved in. Same fields as in /api/contacts/. """ def getContactInfo(self, contact_id): return self.sendRequest('/api/contact_info/' + contact_id + '/', '', 'get') """ contacts is a comma-separated list of contact IDs that you want to access in bulk. The token owner needs to be either a buyer or seller in the contacts, contacts that do not pass this check are simply not returned. A maximum of 50 contacts can be requested at a time. The contacts are not returned in any particular order. """ def getContactsInfo(self, contacts): return self.sendRequest('/api/contact_info/', {'contacts': contacts}, 'get') """ Returns maximum of 50 newest trade messages. Messages are ordered by sending time, and the newest one is first. The list has same format as /api/contact_messages/, but each message has also contact_id field. """ def getRecentMessages(self): return self.sendRequest('/api/recent_messages/', '', 'get') """ Gives feedback to user. Possible feedback values are: trust, positive, neutral, block, block_without_feedback as strings. You may also set feedback message field with few exceptions. Feedback block_without_feedback clears the message and with block the message is mandatory. """ def postFeedbackToUser(self, username, feedback, message = None): post = {'feedback': feedback} if message != None: post = {'feedback': feedback, 'msg': message} return self.sendRequest('/api/feedback/' + username + '/', post, 'post') """ Gets information about the token owner's wallet balance. """ def getWallet(self): return self.sendRequest('/api/wallet/', '', 'get') """ Same as /api/wallet/ above, but only returns the message, receiving_address_list and total fields. (There's also a receiving_address_count but it is always 1: only the latest receiving address is ever returned by this call.) Use this instead if you don't care about transactions at the moment. """ def getWalletBallance(self): return self.sendRequest('/api/wallet-balance/', '', 'get') """ Sends amount bitcoins from the token owner's wallet to address. Note that this API requires its own API permission called Money. On success, this API returns just a message indicating success. It is highly recommended to minimize the lifetime of access tokens with the money permission. Call /api/logout/ to make the current token expire instantly. """ def walletSend(self, ammount, address): return self.sendRequest('/api/wallet-send/', {'ammount': ammount, 'address': address}, 'post') """ As above, but needs the token owner's active PIN code to succeed. Look before you leap. You can check if a PIN code is valid without attempting a send with /api/pincode/. 
Security concern: To get any security beyond the above API, do not retain the PIN code beyond a reasonable user session, a few minutes at most. If you are planning to save the PIN code anyway, please save some headache and get the real no-pin-required money permission instead. """ def walletSendWithPin(self, ammount, address, pincode): return self.sendRequest('/api/wallet-send-pin/', {'ammount': ammount, 'address': address, 'pincode': pincode}, 'post') """ Gets an unused receiving address for the token owner's wallet, its address given in the address key of the response. Note that this API may keep returning the same (unused) address if called repeatedly. """ def getWalletAddress(self): return self.sendRequest('/api/wallet-addr/', '', 'post') """ Expires the current access token immediately. To get a new token afterwards, public apps will need to reauthenticate, confidential apps can turn in a refresh token. """ def logout(self): return self.sendRequest('/api/logout/', '', 'post') """ Lists the token owner's all ads on the data key ad_list, optionally filtered. If there's a lot of ads, the listing will be paginated. Refer to the ad editing pages for the field meanings. List item structure is like so: """ def getOwnAds(self): return self.sendRequest('/api/ads/', '', 'post') """ This endpoint lets you edit an ad given the ad id and all the required fiends as designated by the API. If you just want to update the equation there is a better endpoint for that, this one takes a lot of LBC resources. """ def editAd(self, ad_id, lat, bank_name, price_equation, lon, countrycode, opening_hours, msg, max_amount, track_max_amount, visible): return self.sendRequest('/api/ad/' + ad_id + '/', {'lat': lat,'bank_name': bank_name,'price_equation': price_equation,'lon': lon,'countrycode': countrycode, 'opening_hours': opening_hours, 'msg': msg, 'max_amount': max_amount, 'track_max_amount': track_max_amount, 'visible': visible}, 'post') """ Creates a new invoice under the LBC merchant services page. """ def newInvoice(self, currency, amount, description): return self.sendRequest('/api/merchant/new_invoice/', {'currency': currency, 'amount': amount, 'description': description,}, 'post') """ Marks a users id as verified based on an open contact id. """ def markIdentityVerified(self, contact_id): return self.sendRequest('/api/contact_mark_identified/' + contact_id + '/', '', 'post') """ Get all the details of an ad based on its ID, can be any ad. """ def getAd(self, ad_id): return self.sendRequest('/api/ad-get/' + ad_id + '/', '', 'get') """ Change an ad's pricing equation to something else. """ def changeEquation(self, ad_id, equation): return self.sendRequest('/api/ad-equation/{ad_id}/'.format(ad_id=ad_id), {'price_equation': equation}, 'post') """ Main driver. """ def sendRequest(self, endpoint, params, method): params_encoded = '' if params != '': params_encoded = parse.urlencode(params) if method == 'get': params_encoded = '?' 
+ params_encoded now = datetime.utcnow() epoch = datetime.utcfromtimestamp(0) delta = now - epoch nonce = int(delta.total_seconds() * 1000) message = str(nonce) + self.hmac_auth_key + endpoint + params_encoded signature = hmac.new(bytes(self.hmac_auth_secret, 'latin-1'), msg = bytes(message , 'latin-1'), digestmod = hashlib.sha256).hexdigest().upper() headers = {} headers['Apiauth-key'] = self.hmac_auth_key headers['Apiauth-Nonce'] = str(nonce) headers['Apiauth-Signature'] = signature if method == 'get': response = requests.get(self.baseurl + endpoint, headers = headers, params = params) else: response = requests.post(self.baseurl + endpoint, headers = headers, data = params) if self.debug == True: print('REQUEST: ' + self.baseurl + endpoint) print('PARAMS: ' + str(params)) print('METHOD: ' + method) print('RESPONSE: ' + response.text) return json.loads(response.text)['data']
mit
-391,811,989,045,716,030
39.837838
301
0.649073
false
4.076897
false
false
false
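The least obvious part of LocalBitcoin.py above is the request signing in sendRequest. The sketch below isolates that scheme: HMAC-SHA256 over nonce + API key + endpoint + urlencoded parameters (with the leading '?' for GET), upper-cased hex digest, sent in the Apiauth-* headers. 'my_key' and 'my_secret' are placeholder credentials, and the function only builds headers; it does not perform the HTTP call.

import hmac
import hashlib
import time
from urllib.parse import urlencode

def localbitcoins_headers(api_key, api_secret, endpoint, params=None, method='get'):
    # Mirror sendRequest: for GET requests the signed string includes the leading '?'.
    params_encoded = urlencode(params) if params else ''
    if params_encoded and method == 'get':
        params_encoded = '?' + params_encoded
    # Nonce is milliseconds since the epoch, equivalent to the datetime delta in the class above.
    nonce = str(int(time.time() * 1000))
    message = nonce + api_key + endpoint + params_encoded
    signature = hmac.new(api_secret.encode('latin-1'),
                         msg=message.encode('latin-1'),
                         digestmod=hashlib.sha256).hexdigest().upper()
    return {'Apiauth-key': api_key,
            'Apiauth-Nonce': nonce,
            'Apiauth-Signature': signature}

if __name__ == '__main__':
    headers = localbitcoins_headers('my_key', 'my_secret', '/api/myself/')
    print(headers)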
zozo123/buildbot
master/buildbot/test/unit/test_steps_source_svn.py
3
87137
# This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members from buildbot import config from buildbot.process import buildstep from buildbot.status.results import FAILURE from buildbot.status.results import RETRY from buildbot.status.results import SUCCESS from buildbot.steps.source import svn from buildbot.steps.transfer import _FileReader from buildbot.test.fake.remotecommand import Expect from buildbot.test.fake.remotecommand import ExpectRemoteRef from buildbot.test.fake.remotecommand import ExpectShell from buildbot.test.util import sourcesteps from buildbot.test.util.properties import ConstantRenderable from twisted.internet import error from twisted.python.reflect import namedModule from twisted.trial import unittest class TestSVN(sourcesteps.SourceStepMixin, unittest.TestCase): svn_st_xml = """<?xml version="1.0"?> <status> <target path="."> <entry path="svn_external_path"> <wc-status props="none" item="external"> </wc-status> </entry> <entry path="svn_external_path/unversioned_file1"> <wc-status props="none" item="unversioned"> </wc-status> </entry> <entry path="svn_external_path/unversioned_file2"> <wc-status props="none" item="unversioned"> </wc-status> </entry> </target> </status> """ svn_st_xml_corrupt = """<?xml version="1.0"?> <target path="."> <entry path="svn_external_path"> <wc-status props="none" item="external"> </wc-status> </entry> <entry path="svn_external_path/unversioned_file"> <wc-status props="none" item="unversioned"> </wc-status> </entry> </target> </status> """ svn_st_xml_empty = """<?xml version="1.0"?> <status> <target path="."> </target> </status>""" svn_info_stdout_xml = """<?xml version="1.0"?> <info> <entry kind="dir" path="." revision="100"> <url>http://svn.red-bean.com/repos/test</url> <repository> <root>http://svn.red-bean.com/repos/test</root> <uuid>5e7d134a-54fb-0310-bd04-b611643e5c25</uuid> </repository> <wc-info> <schedule>normal</schedule> <depth>infinity</depth> </wc-info> <commit revision="90"> <author>sally</author> <date>2003-01-15T23:35:12.847647Z</date> </commit> </entry> </info>""" svn_info_stdout_xml_nonintegerrevision = """<?xml version="1.0"?> <info> <entry kind="dir" path="." 
revision="a10"> <url>http://svn.red-bean.com/repos/test</url> <repository> <root>http://svn.red-bean.com/repos/test</root> <uuid>5e7d134a-54fb-0310-bd04-b611643e5c25</uuid> </repository> <wc-info> <schedule>normal</schedule> <depth>infinity</depth> </wc-info> <commit revision="a10"> <author>sally</author> <date>2003-01-15T23:35:12.847647Z</date> </commit> </entry> </info>""" def setUp(self): return self.setUpSourceStep() def tearDown(self): return self.tearDownSourceStep() def patch_slaveVersionIsOlderThan(self, result): self.patch(svn.SVN, 'slaveVersionIsOlderThan', lambda x, y, z: result) def test_no_repourl(self): self.assertRaises(config.ConfigErrors, lambda: svn.SVN()) def test_incorrect_mode(self): self.assertRaises(config.ConfigErrors, lambda: svn.SVN(repourl='http://svn.local/app/trunk', mode='invalid')) def test_incorrect_method(self): self.assertRaises(config.ConfigErrors, lambda: svn.SVN(repourl='http://svn.local/app/trunk', method='invalid')) def test_corrupt_xml(self): self.setupStep(svn.SVN(repourl='http://svn.local/app/trunk')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_st_xml_corrupt) + 0, ) self.expectOutcome(result=FAILURE, status_text=["updating"]) return self.runStep() def test_revision_noninteger(self): svnTestStep = svn.SVN(repourl='http://svn.local/app/trunk') self.setupStep(svnTestStep) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml_nonintegerrevision) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', 'a10', 'SVN') d = self.runStep() def _checkType(): revision = self.step.getProperty('got_revision') self.assertRaises(ValueError, lambda: int(revision)) d.addCallback(lambda _: _checkType()) return d def test_revision_missing(self): """Fail if 'revision' tag isnt there""" svn_info_stdout = self.svn_info_stdout_xml.replace('entry', 'Blah') svnTestStep = svn.SVN(repourl='http://svn.local/app/trunk') self.setupStep(svnTestStep) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") 
+ 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=svn_info_stdout) + 0, ) self.expectOutcome(result=FAILURE, status_text=["updating"]) return self.runStep() def test_mode_incremental(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_timeout(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', timeout=1, password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', timeout=1, command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', timeout=1, command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', timeout=1, command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_repourl_renderable(self): self.setupStep( svn.SVN(repourl=ConstantRenderable('http://svn.local/trunk'), mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') 
return self.runStep() def test_mode_incremental_repourl_canonical(self): self.setupStep( svn.SVN(repourl='http://svn.local/trunk/test app', mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/trunk/test%20app</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_repourl_not_updatable(self): self.setupStep( svn.SVN(repourl=ConstantRenderable('http://svn.local/trunk/app'), mode='incremental',)) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/trunk/app', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_retry(self): self.setupStep( svn.SVN(repourl=ConstantRenderable('http://svn.local/trunk/app'), mode='incremental', retry=(0, 1))) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/trunk/app', '.', '--non-interactive', '--no-auth-cache']) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/trunk/app', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_repourl_not_updatable_svninfo_mismatch(self): self.setupStep( svn.SVN(repourl=ConstantRenderable('http://svn.local/trunk/app'), mode='incremental')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', # expecting ../trunk/app stdout="""<?xml version="1.0"?><url>http://svn.local/branch/foo/app</url>""") + 0, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', 
command=['svn', 'checkout', 'http://svn.local/trunk/app', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_win32path(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'])) self.build.path_module = namedModule("ntpath") self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file=r'wkdir\.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file=r'wkdir\.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) return self.runStep() def test_mode_incremental_preferLastChangedRev(self): """Give the last-changed rev if 'preferLastChangedRev' is set""" self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', preferLastChangedRev=True, password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', 
stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '90', 'SVN') return self.runStep() def test_mode_incremental_preferLastChangedRev_butMissing(self): """If 'preferLastChangedRev' is set, but missing, fall back to the regular revision value.""" svn_info_stdout = self.svn_info_stdout_xml.replace('commit', 'Blah') self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', preferLastChangedRev=True, password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=svn_info_stdout) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clobber(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clobber')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clobber_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clobber'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_fresh(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='fresh', depth='infinite')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--depth', 
'infinite']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--no-ignore', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + ExpectShell.log('stdio', stdout=self.svn_st_xml_empty) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + ExpectShell.log('stdio', stdout='\n') + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_fresh_retry(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='fresh', retry=(0, 2))) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--non-interactive', '--no-auth-cache']) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--non-interactive', '--no-auth-cache']) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + ExpectShell.log('stdio', stdout='\n') + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_fresh_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='fresh', depth='infinite'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--no-ignore', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + ExpectShell.log('stdio', stdout=self.svn_st_xml_empty) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--revision', '100', '--non-interactive', '--no-auth-cache', '--depth', 'infinite']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + ExpectShell.log('stdio', stdout='\n') + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_fresh_keep_on_purge(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', keep_on_purge=['svn_external_path/unversioned_file1'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 
0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--no-ignore', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', {'dir': ['wkdir/svn_external_path/unversioned_file2'], 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clean(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml_empty) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clean_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml_empty) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_not_updatable(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 
'logEnviron': True}) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_not_updatable_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'checkout', 'http://svn.local/app/trunk', '.', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clean_old_rmdir(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean')) self.patch_slaveVersionIsOlderThan(True) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', {'dir': 'wkdir/svn_external_path/unversioned_file1', 'logEnviron': True}) + 0, Expect('rmdir', {'dir': 'wkdir/svn_external_path/unversioned_file2', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_clean_new_rmdir(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clean')) self.patch_slaveVersionIsOlderThan(False) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', {'dir': ['wkdir/svn_external_path/unversioned_file1', 
'wkdir/svn_external_path/unversioned_file2'], 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_copy(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='copy')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='source', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_copy_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='copy'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='source', command=['svn', 'update', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'wkdir', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='export')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='source', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='', command=['svn', 'export', 'source', 'wkdir']) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) 
self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export_patch(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='export'), patch=(1, 'patch')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', dict(dir=['wkdir/svn_external_path/unversioned_file1', 'wkdir/svn_external_path/unversioned_file2'], logEnviron=True)) + 0, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='source', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='', command=['svn', 'export', 'source', 'wkdir']) + 0, Expect('downloadFile', dict(blocksize=16384, maxsize=None, reader=ExpectRemoteRef(_FileReader), slavedest='.buildbot-diff', workdir='wkdir', mode=None)) + 0, Expect('downloadFile', dict(blocksize=16384, maxsize=None, reader=ExpectRemoteRef(_FileReader), slavedest='.buildbot-patched', workdir='wkdir', mode=None)) + 0, ExpectShell(workdir='wkdir', command=['patch', '-p1', '--remove-empty-files', '--force', '--forward', '-i', '.buildbot-diff']) + 0, Expect('rmdir', dict(dir='wkdir/.buildbot-diff', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export_timeout(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', timeout=1, mode='full', method='export')) self.expectCommands( ExpectShell(workdir='wkdir', timeout=1, command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', timeout=1, command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='source', timeout=1, command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='', timeout=1, command=['svn', 'export', 'source', 'wkdir']) + 0, ExpectShell(workdir='source', timeout=1, command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export_given_revision(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='export'), dict( revision='100', )) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, 
Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='source', command=['svn', 'update', '--revision', '100', '--non-interactive', '--no-auth-cache']) + 0, ExpectShell(workdir='', command=['svn', 'export', '--revision', '100', 'source', 'wkdir']) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_full_export_auth(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='export', username='svn_username', password='svn_password')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'svn_username', '--password', ('obfuscated', 'svn_password', 'XXXXXX')]) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='source', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'svn_username', '--password', ('obfuscated', 'svn_password', 'XXXXXX')]) + 0, ExpectShell(workdir='', command=['svn', 'export', '--username', 'svn_username', '--password', ('obfuscated', 'svn_password', 'XXXXXX'), 'source', 'wkdir']) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_with_env(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'], env={'abc': '123'})) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version'], env={'abc': '123'}) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random'], env={'abc': '123'}) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random'], env={'abc': '123'}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml'], env={'abc': '123'}) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_mode_incremental_logEnviron(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'], 
logEnviron=False)) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version'], logEnviron=False) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=False)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=False)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random'], logEnviron=False) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random'], logEnviron=False) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml'], logEnviron=False) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) self.expectProperty('got_revision', '100', 'SVN') return self.runStep() def test_command_fails(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 1, ) self.expectOutcome(result=FAILURE, status_text=["updating"]) return self.runStep() def test_bogus_svnversion(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><entry kind="dir" path="/a/b/c" revision="1"><url>http://svn.local/app/trunk</url></entry>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', 'pass', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout='1x0y0') + 0, ) self.expectOutcome(result=FAILURE, status_text=["updating"]) return self.runStep() def test_rmdir_fails_clobber(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='clobber')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', {'dir': 'wkdir', 'logEnviron': True}) + 1, ) self.expectOutcome(result=FAILURE, status_text=["updating"]) return self.runStep() 
def test_rmdir_fails_copy(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='copy')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 1, ) self.expectOutcome(result=FAILURE, status_text=["updating"]) return self.runStep() def test_cpdir_fails_copy(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', method='copy')) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('rmdir', dict(dir='wkdir', logEnviron=True)) + 0, Expect('stat', dict(file='source/.svn', logEnviron=True)) + 0, ExpectShell(workdir='source', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='source', command=['svn', 'update', '--non-interactive', '--no-auth-cache']) + 0, Expect('cpdir', {'fromdir': 'source', 'todir': 'wkdir', 'logEnviron': True}) + 1, ) self.expectOutcome(result=FAILURE, status_text=["updating"]) return self.runStep() def test_rmdir_fails_purge(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='full', keep_on_purge=['svn_external_path/unversioned_file1'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', {'file': 'wkdir/.svn', 'logEnviron': True}) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'status', '--xml', '--no-ignore', '--non-interactive', '--no-auth-cache']) + ExpectShell.log('stdio', stdout=self.svn_st_xml) + 0, Expect('rmdir', {'dir': ['wkdir/svn_external_path/unversioned_file2'], 'logEnviron': True}) + 1, ) self.expectOutcome(result=FAILURE, status_text=["updating"]) return self.runStep() def test_slave_connection_lost(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='pass', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + ('err', error.ConnectionLost()), ) self.expectOutcome(result=RETRY, status_text=["update", "exception", "slave", "lost"]) return self.runStep() def test_empty_password(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', password='', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', '', 'XXXXXX'), '--random']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--password', ('obfuscated', '', 'XXXXXX'), '--random']) + 0, ExpectShell(workdir='wkdir', 
command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) return self.runStep() def test_omit_password(self): self.setupStep( svn.SVN(repourl='http://svn.local/app/trunk', mode='incremental', username='user', extra_args=['--random'])) self.expectCommands( ExpectShell(workdir='wkdir', command=['svn', '--version']) + 0, Expect('stat', dict(file='wkdir/.buildbot-patched', logEnviron=True)) + 1, Expect('stat', dict(file='wkdir/.svn', logEnviron=True)) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml', '--non-interactive', '--no-auth-cache', '--username', 'user', '--random']) + ExpectShell.log('stdio', stdout="""<?xml version="1.0"?><url>http://svn.local/app/trunk</url>""") + 0, ExpectShell(workdir='wkdir', command=['svn', 'update', '--non-interactive', '--no-auth-cache', '--username', 'user', '--random']) + 0, ExpectShell(workdir='wkdir', command=['svn', 'info', '--xml']) + ExpectShell.log('stdio', stdout=self.svn_info_stdout_xml) + 0, ) self.expectOutcome(result=SUCCESS, status_text=["update"]) return self.runStep() # # svn.SVN.svnUriCanonicalize() test method factory # # given input string and expected result create a test method that # will call svn.SVN.svnUriCanonicalize() with the input and check # that expected result is returned # # @param input: test input # @param exp: expected result # def _makeSUCTest(input, exp): return lambda self: self.assertEqual( svn.SVN.svnUriCanonicalize(input), exp) class TestGetUnversionedFiles(unittest.TestCase): def test_getUnversionedFiles_does_not_list_externals(self): svn_st_xml = """<?xml version="1.0"?> <status> <target path="."> <entry path="svn_external_path"> <wc-status props="none" item="external"> </wc-status> </entry> <entry path="svn_external_path/unversioned_file"> <wc-status props="none" item="unversioned"> </wc-status> </entry> </target> </status> """ unversioned_files = list(svn.SVN.getUnversionedFiles(svn_st_xml, [])) self.assertEquals(["svn_external_path/unversioned_file"], unversioned_files) def test_getUnversionedFiles_does_not_list_missing(self): svn_st_xml = """<?xml version="1.0"?> <status> <target path="."> <entry path="missing_file"> <wc-status props="none" item="missing"></wc-status> </entry> </target> </status> """ unversioned_files = list(svn.SVN.getUnversionedFiles(svn_st_xml, [])) self.assertEquals([], unversioned_files) def test_getUnversionedFiles_corrupted_xml(self): svn_st_xml_corrupt = """<?xml version="1.0"?> <target path="."> <entry path="svn_external_path"> <wc-status props="none" item="external"> </wc-status> </entry> <entry path="svn_external_path/unversioned_file"> <wc-status props="none" item="unversioned"> </wc-status> </entry> </target> </status> """ self.assertRaises(buildstep.BuildStepFailed, lambda: list(svn.SVN.getUnversionedFiles(svn_st_xml_corrupt, []))) def test_getUnversionedFiles_no_path(self): svn_st_xml = """<?xml version="1.0"?> <status> <target path="."> <entry path="svn_external_path"> <wc-status props="none" item="external"> </wc-status> </entry> <entry> <wc-status props="none" item="unversioned"> </wc-status> </entry> </target> </status> """ unversioned_files = list(svn.SVN.getUnversionedFiles(svn_st_xml, [])) self.assertEquals([], unversioned_files) def test_getUnversionedFiles_no_item(self): svn_st_xml = """<?xml version="1.0"?> <status> <target path="."> <entry path="svn_external_path"> <wc-status props="none" item="external"> </wc-status> </entry> <entry 
path="svn_external_path/unversioned_file"> <wc-status props="none"> </wc-status> </entry> </target> </status> """ unversioned_files = list(svn.SVN.getUnversionedFiles(svn_st_xml, [])) self.assertEquals(["svn_external_path/unversioned_file"], unversioned_files) test_svnUriCanonicalize_empty = _makeSUCTest( "", "") test_svnUriCanonicalize_canonical = _makeSUCTest( "http://foo.com/bar", "http://foo.com/bar") test_svnUriCanonicalize_lc_scheme = _makeSUCTest( "hTtP://foo.com/bar", "http://foo.com/bar") test_svnUriCanonicalize_trailing_dot = _makeSUCTest( "http://foo.com./bar", "http://foo.com/bar") test_svnUriCanonicalize_lc_hostname = _makeSUCTest( "http://foO.COm/bar", "http://foo.com/bar") test_svnUriCanonicalize_lc_hostname_with_user = _makeSUCTest( "http://[email protected]/bar", "http://[email protected]/bar") test_svnUriCanonicalize_lc_hostname_with_user_pass = _makeSUCTest( "http://Jimmy:[email protected]/bar", "http://Jimmy:[email protected]/bar") test_svnUriCanonicalize_trailing_slash = _makeSUCTest( "http://foo.com/bar/", "http://foo.com/bar") test_svnUriCanonicalize_trailing_slash_scheme = _makeSUCTest( "http://", "http://") test_svnUriCanonicalize_trailing_slash_hostname = _makeSUCTest( "http://foo.com/", "http://foo.com") test_svnUriCanonicalize_trailing_double_slash = _makeSUCTest( "http://foo.com/x//", "http://foo.com/x") test_svnUriCanonicalize_double_slash = _makeSUCTest( "http://foo.com/x//y", "http://foo.com/x/y") test_svnUriCanonicalize_slash = _makeSUCTest( "/", "/") test_svnUriCanonicalize_dot = _makeSUCTest( "http://foo.com/x/./y", "http://foo.com/x/y") test_svnUriCanonicalize_dot_dot = _makeSUCTest( "http://foo.com/x/../y", "http://foo.com/y") test_svnUriCanonicalize_double_dot_dot = _makeSUCTest( "http://foo.com/x/y/../../z", "http://foo.com/z") test_svnUriCanonicalize_dot_dot_root = _makeSUCTest( "http://foo.com/../x/y", "http://foo.com/x/y") test_svnUriCanonicalize_quote_spaces = _makeSUCTest( "svn+ssh://user@host:123/My Stuff/file.doc", "svn+ssh://user@host:123/My%20Stuff/file.doc") test_svnUriCanonicalize_remove_port_80 = _makeSUCTest( "http://foo.com:80/bar", "http://foo.com/bar") test_svnUriCanonicalize_dont_remove_port_80 = _makeSUCTest( "https://foo.com:80/bar", "https://foo.com:80/bar") # not http test_svnUriCanonicalize_remove_port_443 = _makeSUCTest( "https://foo.com:443/bar", "https://foo.com/bar") test_svnUriCanonicalize_dont_remove_port_443 = _makeSUCTest( "svn://foo.com:443/bar", "svn://foo.com:443/bar") # not https test_svnUriCanonicalize_remove_port_3690 = _makeSUCTest( "svn://foo.com:3690/bar", "svn://foo.com/bar") test_svnUriCanonicalize_dont_remove_port_3690 = _makeSUCTest( "http://foo.com:3690/bar", "http://foo.com:3690/bar") # not svn test_svnUriCanonicalize_dont_remove_port_other = _makeSUCTest( "https://foo.com:2093/bar", "https://foo.com:2093/bar") test_svnUriCanonicalize_quote_funny_chars = _makeSUCTest( "http://foo.com/\x10\xe6%", "http://foo.com/%10%E6%25") test_svnUriCanonicalize_overquoted = _makeSUCTest( "http://foo.com/%68%65%6c%6c%6f%20%77%6f%72%6c%64", "http://foo.com/hello%20world")
gpl-3.0
-5,627,715,937,354,881,000
42.897733
164
0.435234
false
4.564536
true
false
false
Debith/py2traits
src/pytraits/core/singleton.py
1
2028
#!/usr/bin/python -tt
# -*- coding: utf-8 -*-
'''
Copyright 2014-2015 Teppo Perä

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from pytraits.core.errors import SingletonError


class Singleton(type):
    """
    Turn the class to immutable singleton.

    >>> class Example(object, metaclass=Singleton):
    ...     pass
    ...
    >>> a = Example()
    >>> b = Example()
    >>> id(a) == id(b)
    True

    Having your instance as a singleton is faster than creating from scratch

    >>> import timeit
    >>> class MySingleton(object, metaclass=Singleton):
    ...     def __init__(self):
    ...         self._store = dict(one=1, two=2, three=3, four=4)
    ...
    >>> class NonSingleton(object):
    ...     def __init__(self):
    ...         self._store = dict(one=1, two=2, three=3, four=4)
    ...
    >>> #timeit.timeit(NonSingleton) > timeit.timeit(MySingleton)
    True

    >>> MySingleton().new_item = False
    Traceback (most recent call last):
        ...
    errors.SingletonError: Singletons are immutable
    """
    def __call__(self, *args, **kwargs):
        try:
            return self.__instance
        except AttributeError:
            def immutable_object(*args):
                raise SingletonError()

            self.__instance = super(Singleton, self).__call__(*args, **kwargs)
            self.__setitem__ = immutable_object
            self.__setattr__ = immutable_object
            return self.__instance


if __name__ == "__main__":
    import doctest
    doctest.testmod()
apache-2.0
270,962,698,969,717,020
29.727273
78
0.613222
false
4.179381
false
false
false
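
A short usage sketch for the Singleton metaclass in the record above. It assumes the package is importable as pytraits.core; the Settings class and its debug field are invented for illustration.

from pytraits.core.errors import SingletonError
from pytraits.core.singleton import Singleton


class Settings(object, metaclass=Singleton):
    """Invented application-wide configuration object."""
    def __init__(self):
        # Attributes may still be set here, before the instance is frozen.
        self.debug = False


first = Settings()
second = Settings()
assert first is second  # every call returns the same cached instance

try:
    first.debug = True  # any later assignment is rejected
except SingletonError:
    print("Singletons are immutable")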
Polytechnique-org/xorgauth
scripts/alumnforce/convert_json_to_csv.py
1
3347
#!/usr/bin/env python3
# -*- coding:UTF-8 -*-
"""Convert a JSON file to a CSV file to be imported on AlumnForce website

This is the reciprocal of convert_csv_to_json.py.
"""
import argparse
import csv
import json
import sys

from csv_format import ALUMNFORCE_FIELDS


JSON_TO_CSV_FIELDS = dict((x[1], (x[0], x[2], i)) for i, x in enumerate(ALUMNFORCE_FIELDS))
assert len(ALUMNFORCE_FIELDS) == len(JSON_TO_CSV_FIELDS)


class AlumnForceDataJ2C(object):
    """Data extracted from a JSON to produce data imported on AlumnForce website"""
    def __init__(self):
        self.fields = set()
        # content is a list of dicts "json field"->value
        self.content = []

    @classmethod
    def import_json_file(cls, json_file_path):
        """Create AlumnForce data from a JSON file"""
        with open(json_file_path, 'r') as json_stream:
            return cls.import_json_stream(json_stream)

    @classmethod
    def import_json_stream(cls, json_file):
        """Create AlumnForce data from a JSON stream"""
        data = cls()
        for record in json.load(json_file):
            flat_record = data.flatten_json_fields(record)
            for record_val in flat_record:
                if record_val[0] not in data.fields:
                    data.fields.add(record_val[0])
            data.content.append(dict(flat_record))
        return data

    @classmethod
    def flatten_json_fields(cls, json_record, prefixkey=None):
        result = []
        for key, value in json_record.items():
            fullkey = (prefixkey + key) if prefixkey else key
            field_properties = JSON_TO_CSV_FIELDS.get(fullkey)
            if field_properties is not None:
                if field_properties[1] is not None:
                    # Encode the JSON value to CSV
                    value = field_properties[1].encode(value)
                result.append((fullkey, value))
            elif isinstance(value, dict):
                # sub-dict
                result += cls.flatten_json_fields(value, fullkey + '.')
            else:
                raise ValueError("Unknown json field %r" % fullkey)
        return result

    def csv_dump(self, csv_file, **kwargs):
        """Dump all the CSV data"""
        # Sort the fields by their rank in ALUMNFORCE_FIELDS
        columns = sorted(self.fields, key=lambda f: JSON_TO_CSV_FIELDS[f][2])
        writer = csv.writer(csv_file, delimiter=',', quotechar='"', escapechar='\\',
                            quoting=csv.QUOTE_MINIMAL)
        writer.writerow((JSON_TO_CSV_FIELDS[f][0] for f in columns))
        for row in self.content:
            writer.writerow(row.get(f) for f in columns)


def main():
    parser = argparse.ArgumentParser(description="Convert AF JSON to CSV")
    parser.add_argument('file', nargs='?',
                        help="JSON file to read (or standard input)")
    parser.add_argument('-o', '--output', type=str,
                        help="CSV file to write (or standard output)")
    args = parser.parse_args()

    if args.file:
        data = AlumnForceDataJ2C.import_json_file(args.file)
    else:
        data = AlumnForceDataJ2C.import_json_stream(sys.stdin)

    if args.output and args.output != '-':
        with open(args.output, 'w') as fcsv:
            data.csv_dump(fcsv)
    else:
        data.csv_dump(sys.stdout)


if __name__ == '__main__':
    main()
agpl-3.0
-6,566,489,218,920,297,000
34.989247
111
0.599641
false
3.739665
false
false
false
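
The converter above imports csv_format.ALUMNFORCE_FIELDS, which is not part of this record. From the way JSON_TO_CSV_FIELDS is built and used, each entry appears to be a (csv_column, json_field, codec) triple, where codec is either None or an object exposing encode(). The stand-in below only illustrates that expected shape; the column names and DateCodec are invented, not taken from the real csv_format module.

class DateCodec(object):
    """Invented codec: ISO date on the JSON side to DD/MM/YYYY on the CSV side."""
    @staticmethod
    def encode(value):
        year, month, day = value.split('-')
        return '/'.join((day, month, year))


ALUMNFORCE_FIELDS = [
    # (CSV column header, JSON field path, codec or None)
    ('Last name', 'name.last', None),
    ('First name', 'name.first', None),
    ('Birth date', 'birth_date', DateCodec),
]

With a real field table in place, the script can be run for example as ./convert_json_to_csv.py users.json -o users.csv (file names are arbitrary).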
vitorio/ocropodium
ocradmin/presets/models.py
1
4715
""" Model to store script data. """ import json import datetime from django.db import models from django.contrib.auth.models import User from tagging.fields import TagField import tagging import autoslug from nodetree import script class JsonTextField(models.TextField): def to_python(self, value): return value def validate(self, value, *args, **kwargs): super(models.TextField, self).validate(value, *args, **kwargs) try: json.loads(value) except ValueError: raise models.exceptions.ValidationError("Data must be valid JSON") class Preset(models.Model): user = models.ForeignKey(User, related_name="presets") tags = tagging.fields.TagField() name = models.CharField(max_length=100, unique=True) slug = autoslug.AutoSlugField(populate_from="name", unique=True) description = models.TextField(blank=True) public = models.BooleanField(default=True) created_on = models.DateField(editable=False) updated_on = models.DateField(editable=False, null=True, blank=True) data = JsonTextField() profile = models.ForeignKey("Profile", related_name="presets", null=True, blank=True) def __unicode__(self): """ String representation. """ return self.name def save(self): if not self.id: self.created_on = datetime.datetime.now() else: self.updated_on = datetime.datetime.now() super(Preset, self).save() def get_absolute_url(self): """URL to view an object detail""" return "/presets/show/%s/" % self.slug def get_update_url(self): """url to update an object detail""" return "/presets/edit/%s/" % self.slug def get_delete_url(self): """url to update an object detail""" return "/presets/delete/%s/" % self.slug @classmethod def get_list_url(cls): """URL to view the object list""" return "/presets/list/" @classmethod def get_create_url(cls): """URL to create a new object""" return "/presets/create/" TEST_PROFILE = { "must_exist" : [ { "attr": "stage", "value": "recognize", "unique": False, }, { "attr": "stage", "value": "input", "unique": True, }, ], } class Profile(models.Model): """Preset profile. This defines a class of presets to which the information in the preset must conform.""" name = models.CharField(max_length=255) slug = autoslug.AutoSlugField(populate_from="name", unique=True) tags = tagging.fields.TagField() description = models.TextField(blank=True) created_on = models.DateField(editable=False) updated_on = models.DateField(editable=False, null=True, blank=True) data = JsonTextField() def __unicode__(self): """ String representation. 
""" return self.name def save(self): if not self.id: self.created_on = datetime.datetime.now() else: self.updated_on = datetime.datetime.now() super(Profile, self).save() def validate_preset(self, data): this = json.loads(self.data) tree = script.Script(data) errors = [] for name, preds in this.iteritems(): for pred in preds: perrors = self.validate_predicate(name, pred, tree) if perrors: errors.extend(perrors) return errors def validate_predicate(self, name, pred, tree): errors = [] if name == "must_exist": attr = pred.get("attr") value = pred.get("value") unique = pred.get("unique") nodes = tree.get_nodes_by_attr(attr, value) if not nodes: errors.append("A node with attr '%s'='%s' must exist" % (attr, value)) elif len(nodes) > 1: errors.append("Node with attr '%s'='%s' must be unique" % (attr, value)) return errors def get_absolute_url(self): """URL to view an object detail""" return "/profiles/show/%s/" % self.slug def get_update_url(self): """url to update an object detail""" return "/profiles/edit/%s/" % self.slug def get_delete_url(self): """url to update an object detail""" return "/profiles/delete/%s/" % self.slug @classmethod def get_list_url(cls): """URL to view the object list""" return "/profiles/list/" @classmethod def get_create_url(cls): """URL to create a new object""" return "/profiles/create/"
apache-2.0
-4,253,747,964,622,213,000
27.75
88
0.584093
false
4.005947
false
false
false
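
A sketch of how a stored Profile might be used to validate preset data, in the spirit of TEST_PROFILE above. The slug, the node dictionaries and the exact form nodetree's Script expects are assumptions for illustration, not taken from this file.

profile = Profile.objects.get(slug="ocr-batch")

# Hypothetical preset script data: one input node and one recognize node,
# which satisfies both "must_exist" predicates of a TEST_PROFILE-style profile.
preset_data = [
    {"name": "in1", "stage": "input"},
    {"name": "rec1", "stage": "recognize"},
]

errors = profile.validate_preset(preset_data)
if errors:
    raise ValueError("Preset does not conform to profile: %s" % "; ".join(errors))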
Erotemic/local
init/_freshstart-win/install_scripts.py
1
2047
""" cd %HOME% mkdir tmp cd tmp wget http://www.woogerworks.com/files/cockatrice.weeklybuilds/Cockatrice-WindowsClient.exe Cockatrice-WindowsClient.exe """ def install_ansicon(): """ lets windows console display ansii References: http://www.liferay.com/web/igor.spasic/blog/-/blogs/enable-ansi-colors-in-windows-command-prompt """ #http://adoxa.altervista.org/ansicon/dl.php?f=ansicon pass def install_cockatrice(): cockatrice_url = 'http://www.woogerworks.com/files/cockatrice.weeklybuilds/Cockatrice-WindowsClient.exe' import utool as ut fpath = ut.grab_file_url(cockatrice_url) # run setup script ut.cmd(fpath) # press enter a few times import win32com.client as w32 shell = w32.Dispatch("WScript.Shell") shell.AppActivate('Cockatrice Setup') shell.SendKeys("{ENTER}") shell.SendKeys("{ENTER}") shell.SendKeys("{ENTER}") shell.SendKeys("{ENTER}") # need msvcp120.dll #https://www.microsoft.com/en-us/download/details.aspx?id=40784 #import win32gui #import win32api #import win32con #def window_handle(Title): # hwnd = win32gui.FindWindowEx(0, 0, 0, Title) # return hwnd #def click_btn(hwnd, Button): # hbutton = win32gui.FindWindowEx(hwnd, 0, "Button", Button) # if hbutton != 0: # win32api.PostMessage(hbutton, win32con.WM_LBUTTONDOWN, 0, 0) # win32api.PostMessage(hbutton, win32con.WM_LBUTTONUP, 0, 0) # return True # return None #click_btn(hwnd, "&Install") #window_title = 'Cockatrice Setup' #hwnd = win32gui.FindWindowEx(0, 0, 0, window_title) #assert hwnd != 0 #btnHnd= win32gui.FindWindowEx(hwnd, 0 , "Button", "Cancel") #print(btnHnd) #btnHnd= win32gui.FindWindowEx(hwnd, 0 , "Button", "Next") #print(btnHnd) #btnHnd= win32gui.FindWindowEx(hwnd, 0 , "Button", "") #button_name = 'Next' #hbutton = win32gui.FindWindowEx(hwnd, 0, "Button", button_name) #assert hbutton != 0, 'could not find button'
gpl-3.0
4,970,867,670,716,593,000
27.830986
108
0.657548
false
2.883099
false
false
false
JmPotato/Pomash
Pomash/libs/models.py
1
3675
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os

from .tools import *
from .utils import *


class DatabaseError(Exception):
    def __init__(self, content):
        Exception.__init__(self)

if os.path.exists("blog.db"):
    db = Connection("blog.db")
else:
    raise DatabaseError("Database file not found !")


def gat_page(id):
    page = db.get("SELECT * FROM pages WHERE id = ?", id)
    return page


def get_all_pages():
    pages = db.query("SELECT * FROM pages")
    return pages


def get_article(id):
    article = db.get("SELECT * FROM articles WHERE id = ?;", id)
    return article


def get_articles(page, post_per_page):
    articles = db.query("SELECT * FROM articles ORDER BY id DESC LIMIT ?, ?;",
                        (page - 1) * post_per_page, post_per_page)
    return articles


def get_all_articles():
    articles = db.query("SELECT * FROM articles ORDER BY id DESC;")
    return articles


def get_article_count():
    count = db.query('''SELECT COUNT(*) AS count FROM articles''')
    return count[0].count


def get_tag_articles(tag_name):
    sql = """SELECT * FROM articles AS a INNER JOIN tags AS t ON a.id = t.article_id WHERE t.name = ? ORDER BY id DESC;"""
    articles = db.query(sql, tag_name)
    return articles


def get_all_tags():
    tags = db.query("SELECT name, COUNT(name) AS num FROM tags GROUP BY name ORDER BY num DESC;")
    return tags


def creat_page(**kwargs):
    count = db.query('''SELECT COUNT(*) AS count FROM pages''')
    if count[0].count < 5:
        sql = '''INSERT INTO pages (title, content) VALUES (?,?);'''
        id = db.execute(sql, kwargs["title"], kwargs["content"])
        return id
    else:
        return False


def update_page(id, **kwargs):
    sql = '''UPDATE pages SET title=?, content=? WHERE id=?;'''
    db.execute(sql, kwargs["title"], kwargs["content"], id)
    return True


def delete_page(id):
    db.execute("DELETE FROM pages WHERE id=?;", id)
    return True


def creat_article(**kwargs):
    sql = '''INSERT INTO articles (title, content, tag, datetime) VALUES (?,?,?,?);'''
    id = db.execute(sql, kwargs["title"], kwargs["content"], kwargs["tags"], get_datetime())
    tags = [tag.strip() for tag in kwargs["tags"].split(",")]
    for tag in tags:
        db.execute("INSERT INTO tags (name, article_id) VALUES (?,?);", tag, id)
    return id


def update_article(id, **kwargs):
    db.execute("DELETE FROM tags WHERE article_id=?;", id)
    sql = '''UPDATE articles SET title=?, content=?, tag=? WHERE id=?;'''
    db.execute(sql, kwargs["title"], kwargs["content"], kwargs["tags"], id)
    tags = [tag.strip() for tag in kwargs["tags"].split(",")]
    for tag in tags:
        db.execute("INSERT INTO tags (name, article_id) VALUES (?,?);", tag, id)
    return True


def delete_article(id):
    db.execute("DELETE FROM articles WHERE id=?;", id)
    db.execute("DELETE FROM tags WHERE article_id=?;", id)
    return True


def update_token(username, token):
    sql = '''UPDATE admin_config SET token=? WHERE username=?;'''
    db.execute(sql, token, username)
    return True


def verify_user(username, password_md5):
    information = db.get("SELECT * FROM admin_config WHERE username = ?;", username)
    if information and information.password == password_md5:
        return True
    else:
        return False


def verify_token(username, token):
    information = db.get("SELECT * FROM admin_config WHERE username = ?;", username)
    if information.token == token:
        return True
    else:
        return False


def change_password(username, n_password_md5):
    sql = '''UPDATE admin_config SET password=? WHERE username=?;'''
    db.execute(sql, n_password_md5, username)
    return True
mit
368,509,658,430,314,900
31.530973
122
0.633741
false
3.61002
false
false
false
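
A brief usage sketch for the helpers in the record above, assuming the package is importable as Pomash.libs.models and that blog.db already contains the pages/articles/tags/admin_config tables these queries expect. Titles and tags are invented.

from Pomash.libs.models import *

# create a post (using the module's own spelling, creat_article) and read it back
new_id = creat_article(title="Hello", content="First post", tags="meta, news")
article = get_article(new_id)

# first index page, five posts per page
latest = get_articles(1, 5)

# every article tagged "news"
news = get_tag_articles("news")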
CrashenX/python-rabbitmq
src/receive.py
1
2219
#!/usr/bin/env python

# Hello queue example using Python and RabbitMQ
# Copyright (c) 2014, Jesse J. Cook
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
#   list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
#
# * Neither the name of the {organization} nor the names of its
#   contributors may be used to endorse or promote products derived from
#   this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import pika
import logging
import time


def cb(ch, method, properties, body):
    print " [x] Received %r" % body
    time.sleep(body.count('.'))
    print " [x] Done"
    ch.basic_ack(delivery_tag = method.delivery_tag)

logging.basicConfig()
conn = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
chan = conn.channel()
chan.queue_declare(queue='task_queue', durable=True)
chan.basic_qos(prefetch_count=1)
chan.basic_consume(cb, queue='task_queue')
print ' [*] Waiting for messages. To exit press CTRL+C'
try:
    chan.start_consuming()
except:
    print 'Consumption terminated'
conn.close()
bsd-3-clause
8,839,191,121,534,269,000
41.673077
80
0.764308
false
4.101664
false
false
false
RasaHQ/rasa_nlu
rasa/core/policies/tf_utils.py
1
36915
from collections import namedtuple import tensorflow as tf class TimedNTM(object): """Timed Neural Turing Machine Inspired by paper: https://arxiv.org/pdf/1410.5401.pdf Implementation inspired by: https://github.com/carpedm20/NTM-tensorflow/blob/master/ntm_cell.py See our paper for details: https://arxiv.org/abs/1811.11707 """ def __init__(self, attn_shift_range, sparse_attention, name): """Construct the `TimedNTM`. Args: attn_shift_range: Python int. A time range within which to attend to the memory by location sparse_attention: Python bool. If `True` use sparsemax instead of softmax for probs name: Name to use when creating ops. """ # interpolation gate self.name = "timed_ntm_" + name self._inter_gate = tf.layers.Dense( units=1, activation=tf.sigmoid, name=self.name + "/inter_gate" ) # if use sparsemax instead of softmax for probs self._sparse_attention = sparse_attention if sparse_attention: # sparsemax doesn't support inf self._inf = float(5000) else: self._inf = float("inf") # shift weighting if range is provided if attn_shift_range: self._shift_weight = tf.layers.Dense( units=2 * attn_shift_range + 1, activation=tf.nn.softmax, name=self.name + "/shift_weight", ) else: self._shift_weight = None # sharpening parameter self._gamma_sharp = tf.layers.Dense( units=1, activation=lambda a: tf.nn.softplus(a) + 1, bias_initializer=tf.constant_initializer(1), name=self.name + "/gamma_sharp", ) def __call__(self, attn_inputs, scores, scores_state, mask): # apply exponential moving average with interpolation gate weight # to scores from previous time which are equal to probs at this point # different from original NTM where it is applied after softmax i_g = self._inter_gate(attn_inputs) # scores limited by time scores = tf.concat( [i_g * scores[:, :-1] + (1 - i_g) * scores_state, scores[:, -1:]], 1 ) next_scores_state = scores if mask is not None: # apply mask to scores if self._shift_weight is not None: # rearrange scores to make them continuous for convolution scores = tf.map_fn( self._rearrange_fn, [scores, mask], dtype=scores.dtype ) else: scores = tf.where(mask > 0, scores, -self._inf * tf.ones_like(scores)) # create probabilities for attention if self._sparse_attention: probs = tf.contrib.sparsemax.sparsemax(scores) else: probs = tf.nn.softmax(scores) if self._shift_weight is not None: s_w = self._shift_weight(attn_inputs) # we want to go back in time during convolution conv_probs = tf.reverse(probs, axis=[1]) # preare probs for tf.nn.depthwise_conv2d # [in_width, in_channels=batch] conv_probs = tf.transpose(conv_probs, [1, 0]) # [batch=1, in_height=1, in_width=time+1, in_channels=batch] conv_probs = conv_probs[tf.newaxis, tf.newaxis, :, :] # [filter_height=1, filter_width=2*attn_shift_range+1, # in_channels=batch, channel_multiplier=1] conv_s_w = tf.transpose(s_w, [1, 0]) conv_s_w = conv_s_w[tf.newaxis, :, :, tf.newaxis] # perform 1d convolution # [batch=1, out_height=1, out_width=time+1, out_channels=batch] conv_probs = tf.nn.depthwise_conv2d_native( conv_probs, conv_s_w, [1, 1, 1, 1], "SAME" ) conv_probs = conv_probs[0, 0, :, :] conv_probs = tf.transpose(conv_probs, [1, 0]) probs = tf.reverse(conv_probs, axis=[1]) if mask is not None: # arrange probs back to their original time order probs = tf.map_fn( self._arrange_back_fn, [probs, mask], dtype=probs.dtype ) # sharpening g_sh = self._gamma_sharp(attn_inputs) powed_probs = tf.pow(probs, g_sh) probs = powed_probs / (tf.reduce_sum(powed_probs, 1, keepdims=True) + 1e-32) return probs, next_scores_state def _rearrange_fn(self, 
list_tensor_1d_mask_1d): """Rearranges tensor_1d to put all the values where mask_1d=1 to the right and where mask_1d=0 to the left and sets them to -infinity""" tensor_1d, mask_1d = list_tensor_1d_mask_1d partitioned_tensor = tf.dynamic_partition(tensor_1d, mask_1d, 2) partitioned_tensor[0] = -self._inf * tf.ones_like(partitioned_tensor[0]) return tf.concat(partitioned_tensor, 0) @staticmethod def _arrange_back_fn(list_tensor_1d_mask_1d): """Arranges back tensor_1d to restore original order modified by `_rearrange_fn` according to mask_1d: - number of 0s in mask_1d values on the left are set to their corresponding places where mask_1d=0, - number of 1s in mask_1d values on the right are set to their corresponding places where mask_1d=1""" tensor_1d, mask_1d = list_tensor_1d_mask_1d mask_indices = tf.dynamic_partition( tf.range(tf.shape(tensor_1d)[0]), mask_1d, 2 ) mask_sum = tf.reduce_sum(mask_1d, axis=0) partitioned_tensor = [ tf.zeros_like(tensor_1d[:-mask_sum]), tensor_1d[-mask_sum:], ] return tf.dynamic_stitch(mask_indices, partitioned_tensor) def _compute_time_attention( attention_mechanism, attn_inputs, attention_state, # time is added to calculate time attention time, timed_ntm, time_mask, ignore_mask, attention_layer, ): """Computes the attention and alignments limited by time for a given attention_mechanism. Modified helper method from tensorflow.""" scores, _ = attention_mechanism(attn_inputs, state=attention_state) # take only scores from current and past times timed_scores = scores[:, : time + 1] timed_scores_state = attention_state[:, :time] # get mask for past times timed_time_mask = time_mask[:, :time] if ignore_mask is not None: timed_time_mask *= 1 - ignore_mask[:, :time] # set mask for current time to 1 timed_time_mask = tf.concat([timed_time_mask, tf.ones_like(time_mask[:, :1])], 1) # pass these scores to NTM probs, next_scores_state = timed_ntm( attn_inputs, timed_scores, timed_scores_state, timed_time_mask ) # concatenate probs with zeros to get new alignments zeros = tf.zeros_like(scores) # remove current time from attention alignments = tf.concat([probs[:, :-1], zeros[:, time:]], 1) # Reshape from [batch_size, memory_time] to [batch_size, 1, memory_time] expanded_alignments = tf.expand_dims(alignments, 1) # Context is the inner product of alignments and values along the # memory time dimension. # alignments shape is # [batch_size, 1, memory_time] # attention_mechanism.values shape is # [batch_size, memory_time, memory_size] # the batched matmul is over memory_time, so the output shape is # [batch_size, 1, memory_size]. # we then squeeze out the singleton dim. 
context = tf.matmul(expanded_alignments, attention_mechanism.values) context = tf.squeeze(context, [1]) if attention_layer is not None: attention = attention_layer(tf.concat([attn_inputs, context], 1)) else: attention = context # return current time to attention alignments = tf.concat([probs, zeros[:, time + 1 :]], 1) next_attention_state = tf.concat([next_scores_state, zeros[:, time + 1 :]], 1) return attention, alignments, next_attention_state # noinspection PyProtectedMember class TimeAttentionWrapperState( namedtuple( "TimeAttentionWrapperState", tf.contrib.seq2seq.AttentionWrapperState._fields + ("all_time_masks", "all_cell_states"), ) ): # added """Modified from tensorflow's tf.contrib.seq2seq.AttentionWrapperState see there for description of the parameters Additional fields: - `all_time_masks`: A mask applied to a memory that filters certain time steps - `all_cell_states`: All states of the wrapped `RNNCell` at all the previous time steps. """ def clone(self, **kwargs): """Copied from tensorflow's tf.contrib.seq2seq.AttentionWrapperState see there for description of the parameters""" def with_same_shape(old, new): """Check and set new tensor's shape.""" if isinstance(old, tf.Tensor) and isinstance(new, tf.Tensor): return tf.contrib.framework.with_same_shape(old, new) return new return tf.contrib.framework.nest.map_structure( with_same_shape, self, super(TimeAttentionWrapperState, self)._replace(**kwargs), ) class TimeAttentionWrapper(tf.contrib.seq2seq.AttentionWrapper): """Custom AttentionWrapper that takes into account time when calculating attention. Attention is calculated before calling rnn cell. Modified from tensorflow's tf.contrib.seq2seq.AttentionWrapper. See our paper for details: https://arxiv.org/abs/1811.11707 """ def __init__( self, cell, attention_mechanism, sequence_len, attn_shift_range=0, sparse_attention=False, attention_layer_size=None, alignment_history=False, rnn_and_attn_inputs_fn=None, ignore_mask=None, cell_input_fn=None, index_of_attn_to_copy=None, likelihood_fn=None, tensor_not_to_copy=None, output_attention=False, initial_cell_state=None, name=None, attention_layer=None, ): """Construct the `TimeAttentionWrapper`. See the super class for the original arguments description. Additional args: sequence_len: Python integer. Maximum length of the sequence, used to create appropriate TensorArray for all cell states in TimeAttentionWrapperState attn_shift_range: Python integer (`0` by default). A time range within which to attend to the memory by location in Neural Turing Machine. sparse_attention: Python bool. A flag to use sparsemax (if `True`) instead of softmax (if `False`, default) for probabilities inputs_and_attn_inputs_fn: (optional) A `callable`. A function that creates inputs and attention inputs tensors. ignore_mask: (optional) Boolean Tensor. Determines which time steps to ignore in attention index_of_attn_to_copy: (optional) Python integer. An index of attention mechanism that picks which part of attention tensor to use for copying to output, the default is `None`, which turns off copying mechanism. Copy inspired by: https://arxiv.org/pdf/1603.06393.pdf likelihood_fn: (optional) A `callable`. A method to perform likelihood calculation to filter time step in copy mechanism. Returns a tuple of binary likelihood and likelihood tensor_not_to_copy: (optional) A Tensor. A tensor, which shouldn't be copied from previous time steps Modified args: output_attention: Python bool. 
If `True`, the output at each time step is the concatenated cell outputs, attention values and additional values described in `additional_output_size()`, used in copy mechanism. """ super(TimeAttentionWrapper, self).__init__( cell, attention_mechanism, attention_layer_size, alignment_history, cell_input_fn, output_attention, initial_cell_state, name, attention_layer, ) self._sequence_len = sequence_len if not isinstance(attn_shift_range, list): # attn_shift_range might not be a list attn_shift_range = [attn_shift_range] self._timed_ntms = [TimedNTM(attn_shift_range[0], sparse_attention, name="0")] if self._is_multi: # if there are several attention mechanisms, # create additional TimedNTMs for them if len(attn_shift_range) == 1: # original attn_shift_range might not be a list attn_shift_range *= len(attention_mechanism) elif len(attn_shift_range) != len(attention_mechanism): raise ValueError( "If provided, `attn_shift_range` must contain exactly one " "integer per attention_mechanism, saw: {} vs {}" "".format(len(attn_shift_range), len(attention_mechanism)) ) for i in range(1, len(attention_mechanism)): self._timed_ntms.append( TimedNTM(attn_shift_range[i], sparse_attention, name=str(i)) ) if rnn_and_attn_inputs_fn is None: rnn_and_attn_inputs_fn = self._default_rnn_and_attn_inputs_fn else: if not callable(rnn_and_attn_inputs_fn): raise TypeError( "`rnn_and_attn_inputs_fn` must be callable, saw type: {}" "".format(type(rnn_and_attn_inputs_fn).__name__) ) self._rnn_and_attn_inputs_fn = rnn_and_attn_inputs_fn if not isinstance(ignore_mask, list): self._ignore_mask = [tf.cast(ignore_mask, tf.int32)] else: self._ignore_mask = [tf.cast(i_m, tf.int32) for i_m in ignore_mask] self._index_of_attn_to_copy = index_of_attn_to_copy self._likelihood_fn = likelihood_fn self._tensor_not_to_copy = tensor_not_to_copy @staticmethod def _default_rnn_and_attn_inputs_fn(inputs, cell_state): if isinstance(cell_state, tf.contrib.rnn.LSTMStateTuple): return inputs, tf.concat([inputs, cell_state.h], -1) else: return inputs, tf.concat([inputs, cell_state], -1) @staticmethod def additional_output_size(): """Number of additional outputs: likelihoods: attn_likelihood, state_likelihood debugging info: current_time_prob, bin_likelihood_not_to_copy, bin_likelihood_to_copy **Method should be static** """ return 2 + 3 @property def output_size(self): if self._output_attention: if self._index_of_attn_to_copy is not None: # output both raw rnn cell_output and # cell_output with copied attention # together with attention vector itself # and additional output return ( 2 * self._cell.output_size + self._attention_layer_size + self.additional_output_size() ) else: return self._cell.output_size + self._attention_layer_size else: return self._cell.output_size @property def state_size(self): """The `state_size` property of `TimeAttentionWrapper`. Returns: A `TimeAttentionWrapperState` tuple containing shapes used by this object. 
""" # use AttentionWrapperState from superclass state_size = super(TimeAttentionWrapper, self).state_size all_cell_states = self._cell.state_size return TimeAttentionWrapperState( cell_state=state_size.cell_state, time=state_size.time, attention=state_size.attention, alignments=state_size.alignments, attention_state=state_size.attention_state, alignment_history=state_size.alignment_history, all_time_masks=self._sequence_len, all_cell_states=all_cell_states, ) def zero_state(self, batch_size, dtype): """Modified from tensorflow's zero_state see there for description of the parameters""" # use AttentionWrapperState from superclass zero_state = super(TimeAttentionWrapper, self).zero_state(batch_size, dtype) with tf.name_scope(type(self).__name__ + "ZeroState", values=[batch_size]): # store time masks all_time_masks = tf.TensorArray( tf.int32, size=self._sequence_len + 1, dynamic_size=False, clear_after_read=False, ).write(0, tf.zeros([batch_size, self.state_size.all_time_masks], tf.int32)) # store all cell states into a tensor array to allow # copy mechanism to go back in time if isinstance(self._cell.state_size, tf.contrib.rnn.LSTMStateTuple): all_cell_states = tf.contrib.rnn.LSTMStateTuple( tf.TensorArray( dtype, size=self._sequence_len + 1, dynamic_size=False, clear_after_read=False, ).write(0, zero_state.cell_state.c), tf.TensorArray( dtype, size=self._sequence_len + 1, dynamic_size=False, clear_after_read=False, ).write(0, zero_state.cell_state.h), ) else: all_cell_states = tf.TensorArray( dtype, size=0, dynamic_size=False, clear_after_read=False ).write(0, zero_state.cell_state) return TimeAttentionWrapperState( cell_state=zero_state.cell_state, time=zero_state.time, attention=zero_state.attention, alignments=zero_state.alignments, attention_state=zero_state.attention_state, alignment_history=zero_state.alignment_history, all_time_masks=all_time_masks, all_cell_states=all_cell_states, ) def call(self, inputs, state): """Perform a step of attention-wrapped RNN. The order has changed: - Step 1: Calculate attention inputs based on the previous cell state and current inputs - Step 2: Score the output with `attention_mechanism`. - Step 3: Calculate the alignments by passing the score through the `normalizer` and limit them by time. - Step 4: Calculate the context vector as the inner product between the alignments and the attention_mechanism's values (memory). - Step 5: Calculate the attention output by concatenating the cell output and context through the attention layer (a linear layer with `attention_layer_size` outputs). - Step 6: Mix the `inputs` and `attention` output via `cell_input_fn` to get cell inputs. - Step 7: Call the wrapped `cell` with these cell inputs and its previous state. - Step 8: (optional) Maybe copy output and cell state from history Args: inputs: (Possibly nested tuple of) Tensor, the input at this time step. state: An instance of `TimeAttentionWrapperState` containing tensors from the previous time step. Returns: A tuple `(attention_or_cell_output, next_state)`, where: - `attention_or_cell_output` depending on `output_attention`. - `next_state` is an instance of `TimeAttentionWrapperState` containing the state calculated at this time step. Raises: TypeError: If `state` is not an instance of `TimeAttentionWrapperState`. """ if not isinstance(state, TimeAttentionWrapperState): raise TypeError( "Expected state to be instance of " "TimeAttentionWrapperState. 
" "Received type {} instead.".format(type(state)) ) # Step 1: Calculate attention based on # the previous output and current input cell_state = state.cell_state rnn_inputs, attn_inputs = self._rnn_and_attn_inputs_fn(inputs, cell_state) cell_batch_size = attn_inputs.shape[0].value or tf.shape(attn_inputs)[0] error_message = ( "When applying AttentionWrapper %s: " % self.name + "Non-matching batch sizes between the memory " "(encoder output) and the query (decoder output). " "Are you using " "the BeamSearchDecoder? " "You may need to tile your memory input via " "the tf.contrib.seq2seq.tile_batch function with argument " "multiple=beam_width." ) with tf.control_dependencies( self._batch_size_checks(cell_batch_size, error_message) ): attn_inputs = tf.identity(attn_inputs, name="checked_attn_inputs") if self._is_multi: previous_attention_state = state.attention_state previous_alignment_history = state.alignment_history else: previous_attention_state = [state.attention_state] previous_alignment_history = [state.alignment_history] all_alignments = [] all_attentions = [] all_attention_states = [] maybe_all_histories = [] prev_time_masks = self._read_from_tensor_array(state.all_time_masks, state.time) prev_time_mask = prev_time_masks[:, -1, :] for i, attention_mechanism in enumerate(self._attention_mechanisms): # Steps 2 - 5 are performed inside `_compute_time_attention` (attention, alignments, next_attention_state) = _compute_time_attention( attention_mechanism, attn_inputs, previous_attention_state[i], # time is added to calculate time attention state.time, self._timed_ntms[i], # provide boolean masks, to ignore some time steps prev_time_mask, self._ignore_mask[i], self._attention_layers[i] if self._attention_layers else None, ) alignment_history = ( previous_alignment_history[i].write(state.time, alignments) if self._alignment_history else () ) all_attention_states.append(next_attention_state) all_alignments.append(alignments) all_attentions.append(attention) maybe_all_histories.append(alignment_history) attention = tf.concat(all_attentions, 1) # Step 6: Mix the `inputs` and `attention` output via # `cell_input_fn` to get cell inputs. cell_inputs = self._cell_input_fn(rnn_inputs, attention) # Step 7: Call the wrapped `cell` with these cell inputs and # its previous state. 
cell_output, next_cell_state = self._cell(cell_inputs, cell_state) prev_all_cell_states = state.all_cell_states time_mask = tf.concat( [ prev_time_mask[:, : state.time], tf.ones_like(prev_time_mask[:, :1]), prev_time_mask[:, state.time + 1 :], ], 1, ) if self._index_of_attn_to_copy is not None: # Step 8: Maybe copy output and cell state from history # get relevant previous outputs from history attn_to_copy = all_attentions[self._index_of_attn_to_copy] # copy them to current output cell_output_with_attn = cell_output + attn_to_copy memory_probs = self._get_memory_probs(all_alignments, state.time) # check that we do not pay attention to `tensor_not_to_copy` bin_likelihood_not_to_copy, _ = self._likelihood_fn( cell_output_with_attn, self._tensor_not_to_copy ) # recalculate probs memory_probs *= 1 - bin_likelihood_not_to_copy history_alignments = self._history_alignments(memory_probs) # get previous output from the history prev_output = self._prev_output( cell_output_with_attn, history_alignments, state.time ) # check that current output is close to # the one in the history to which we pay attention to bin_likelihood_to_copy, _ = self._likelihood_fn( cell_output_with_attn, prev_output ) # recalculate probs memory_probs *= bin_likelihood_to_copy history_alignments = self._history_alignments(memory_probs) current_time_prob = history_alignments[:, -1:] # create additional likelihoods to maximize attn_likelihood = self._additional_likelihood( attn_to_copy, prev_output, current_time_prob ) state_likelihood = self._additional_likelihood( cell_output + tf.stop_gradient(attn_to_copy), prev_output, current_time_prob, ) # recalculate time_mask time_mask = self._apply_alignments_to_history( tf.cast(history_alignments, time_mask.dtype), prev_time_masks[:, :-1, :], time_mask, ) # recalculate new next_cell_state based on history_alignments next_cell_state = self._new_next_cell_state( prev_all_cell_states, next_cell_state, cell_output_with_attn, history_alignments, state.time, ) all_cell_states = self._all_cell_states( prev_all_cell_states, next_cell_state, state.time ) if self._output_attention: # concatenate cell outputs, attention, additional likelihoods # and copy_attn_debug output = tf.concat( [ cell_output_with_attn, cell_output, attention, # additional likelihoods attn_likelihood, state_likelihood, # copy_attn_debug bin_likelihood_not_to_copy, bin_likelihood_to_copy, current_time_prob, ], 1, ) else: output = cell_output_with_attn else: # do not waste resources on storing history all_cell_states = prev_all_cell_states if self._output_attention: output = tf.concat([cell_output, attention], 1) else: output = cell_output all_time_masks = state.all_time_masks.write(state.time + 1, time_mask) next_state = TimeAttentionWrapperState( time=state.time + 1, cell_state=next_cell_state, attention=attention, attention_state=self._item_or_tuple(all_attention_states), alignments=self._item_or_tuple(all_alignments), alignment_history=self._item_or_tuple(maybe_all_histories), all_time_masks=all_time_masks, all_cell_states=all_cell_states, ) return output, next_state # helper for TensorArray @staticmethod def _read_from_tensor_array(tensor_array, time): """TensorArray time reader""" return tf.transpose(tensor_array.gather(tf.range(0, time + 1)), [1, 0, 2]) # helper methods for copy mechanism def _get_memory_probs(self, all_alignments, time): """Helper method to get memory_probs from all_alignments""" memory_probs = tf.stop_gradient( all_alignments[self._index_of_attn_to_copy][:, :time] ) # binarize memory_probs only if 
max value is larger than margin=0.1 memory_probs_max = tf.reduce_max(memory_probs, axis=1, keepdims=True) memory_probs_max = tf.where( memory_probs_max > 0.1, memory_probs_max, -memory_probs_max ) return tf.where( tf.equal(memory_probs, memory_probs_max), tf.ones_like(memory_probs), tf.zeros_like(memory_probs), ) @staticmethod def _history_alignments(memory_probs): """Helper method to apply binary mask to memory_probs""" current_time_prob = 1 - tf.reduce_sum(memory_probs, 1, keepdims=True) return tf.concat([memory_probs, current_time_prob], 1) @staticmethod def _apply_alignments_to_history(alignments, history_states, state): """Helper method to apply attention probabilities to rnn history copied from tf's `_compute_attention(...)`""" expanded_alignments = tf.stop_gradient(tf.expand_dims(alignments, 1)) history_states = tf.concat([history_states, tf.expand_dims(state, 1)], 1) # Context is the inner product of alignments and values along the # memory time dimension. # expanded_alignments shape is # [batch_size, 1, memory_time] # history_states shape is # [batch_size, memory_time, memory_size] # the batched matmul is over memory_time, so the output shape is # [batch_size, 1, memory_size]. # we then squeeze out the singleton dim. return tf.squeeze(tf.matmul(expanded_alignments, history_states), [1]) def _prev_output(self, state, alignments, time): """Helper method to get previous output from memory""" # get all previous outputs from appropriate # attention mechanism's memory limited by current time prev_outputs = tf.stop_gradient( self._attention_mechanisms[self._index_of_attn_to_copy].values[:, :time, :] ) # multiply by alignments to get one vector from one time step return self._apply_alignments_to_history(alignments, prev_outputs, state) def _additional_likelihood(self, output, prev_output, current_time_prob): """Helper method to create additional likelihood to maximize""" _, likelihood = self._likelihood_fn(output, tf.stop_gradient(prev_output)) return tf.where(current_time_prob < 0.5, likelihood, tf.ones_like(likelihood)) def _new_hidden_state(self, prev_all_cell_states, new_state, alignments, time): """Helper method to look into rnn history""" # reshape to (batch, time, memory_time) and # do not include current time because # we do not want to pay attention to it, # but we need to read it instead of # adding conditional flow if time == 0 prev_cell_states = self._read_from_tensor_array(prev_all_cell_states, time)[ :, :-1, : ] return self._apply_alignments_to_history( alignments, prev_cell_states, new_state ) def _new_next_cell_state( self, prev_all_cell_states, next_cell_state, new_cell_output, alignments, time ): """Helper method to recalculate new next_cell_state""" if isinstance(next_cell_state, tf.contrib.rnn.LSTMStateTuple): next_cell_state_c = self._new_hidden_state( prev_all_cell_states.c, next_cell_state.c, alignments, time ) next_cell_state_h = self._new_hidden_state( prev_all_cell_states.h, new_cell_output, alignments, time ) return tf.contrib.rnn.LSTMStateTuple(next_cell_state_c, next_cell_state_h) else: return self._new_hidden_state( prev_all_cell_states, alignments, new_cell_output, time ) @staticmethod def _all_cell_states(prev_all_cell_states, next_cell_state, time): """Helper method to recalculate all_cell_states tensor array""" if isinstance(next_cell_state, tf.contrib.rnn.LSTMStateTuple): return tf.contrib.rnn.LSTMStateTuple( prev_all_cell_states.c.write(time + 1, next_cell_state.c), prev_all_cell_states.h.write(time + 1, next_cell_state.h), ) else: return 
prev_all_cell_states.write(time + 1, next_cell_state) class ChronoBiasLayerNormBasicLSTMCell(tf.contrib.rnn.LayerNormBasicLSTMCell): """Custom LayerNormBasicLSTMCell that allows chrono initialization of gate biases. See super class for description. See https://arxiv.org/abs/1804.11188 for details about chrono initialization """ def __init__( self, num_units, forget_bias=1.0, input_bias=0.0, activation=tf.tanh, layer_norm=True, norm_gain=1.0, norm_shift=0.0, dropout_keep_prob=1.0, dropout_prob_seed=None, out_layer_size=None, reuse=None, ): """Initializes the basic LSTM cell Additional args: input_bias: float, The bias added to input gates. out_layer_size: (optional) integer, The number of units in the optional additional output layer. """ super(ChronoBiasLayerNormBasicLSTMCell, self).__init__( num_units, forget_bias=forget_bias, activation=activation, layer_norm=layer_norm, norm_gain=norm_gain, norm_shift=norm_shift, dropout_keep_prob=dropout_keep_prob, dropout_prob_seed=dropout_prob_seed, reuse=reuse, ) self._input_bias = input_bias self._out_layer_size = out_layer_size @property def output_size(self): return self._out_layer_size or self._num_units @property def state_size(self): return tf.contrib.rnn.LSTMStateTuple(self._num_units, self.output_size) @staticmethod def _dense_layer(args, layer_size): """Optional out projection layer""" proj_size = args.get_shape()[-1] dtype = args.dtype weights = tf.get_variable("kernel", [proj_size, layer_size], dtype=dtype) bias = tf.get_variable("bias", [layer_size], dtype=dtype) out = tf.nn.bias_add(tf.matmul(args, weights), bias) return out def call(self, inputs, state): """LSTM cell with layer normalization and recurrent dropout.""" c, h = state args = tf.concat([inputs, h], 1) concat = self._linear(args) dtype = args.dtype i, j, f, o = tf.split(value=concat, num_or_size_splits=4, axis=1) if self._layer_norm: i = self._norm(i, "input", dtype=dtype) j = self._norm(j, "transform", dtype=dtype) f = self._norm(f, "forget", dtype=dtype) o = self._norm(o, "output", dtype=dtype) g = self._activation(j) if (not isinstance(self._keep_prob, float)) or self._keep_prob < 1: g = tf.nn.dropout(g, self._keep_prob, seed=self._seed) new_c = c * tf.sigmoid(f + self._forget_bias) + g * tf.sigmoid( i + self._input_bias ) # added input_bias # do not do layer normalization on the new c, # because there are no trainable weights # if self._layer_norm: # new_c = self._norm(new_c, "state", dtype=dtype) new_h = self._activation(new_c) * tf.sigmoid(o) # added dropout to the hidden state h if (not isinstance(self._keep_prob, float)) or self._keep_prob < 1: new_h = tf.nn.dropout(new_h, self._keep_prob, seed=self._seed) # add postprocessing of the output if self._out_layer_size is not None: with tf.variable_scope("out_layer"): new_h = self._dense_layer(new_h, self._out_layer_size) new_state = tf.contrib.rnn.LSTMStateTuple(new_c, new_h) return new_h, new_state
apache-2.0
4,987,237,206,709,575,000
37.65445
88
0.579033
false
4.057039
false
false
false
schleichdi2/OpenNfr_E2_Gui-6.0
lib/python/Plugins/Extensions/MediaPortal/additions/porn/naughtyamerica.py
1
7967
# -*- coding: utf-8 -*- ############################################################################################### # # MediaPortal for Dreambox OS # # Coded by MediaPortal Team (c) 2013-2017 # # This plugin is open source but it is NOT free software. # # This plugin may only be distributed to and executed on hardware which # is licensed by Dream Property GmbH. This includes commercial distribution. # In other words: # It's NOT allowed to distribute any parts of this plugin or its source code in ANY way # to hardware which is NOT licensed by Dream Property GmbH. # It's NOT allowed to execute this plugin and its source code or even parts of it in ANY way # on hardware which is NOT licensed by Dream Property GmbH. # # This applies to the source code as a whole as well as to parts of it, unless # explicitely stated otherwise. # # If you want to use or modify the code or parts of it, # you have to keep OUR license and inform us about the modifications, but it may NOT be # commercially distributed other than under the conditions noted above. # # As an exception regarding execution on hardware, you are permitted to execute this plugin on VU+ hardware # which is licensed by satco europe GmbH, if the VTi image is used on that hardware. # # As an exception regarding modifcations, you are NOT permitted to remove # any copy protections implemented in this plugin or change them for means of disabling # or working around the copy protections, unless the change has been explicitly permitted # by the original authors. Also decompiling and modification of the closed source # parts is NOT permitted. # # Advertising with this plugin is NOT allowed. # For other uses, permission from the authors is necessary. # ############################################################################################### from Plugins.Extensions.MediaPortal.plugin import _ from Plugins.Extensions.MediaPortal.resources.imports import * from Plugins.Extensions.MediaPortal.resources.choiceboxext import ChoiceBoxExt myagent = 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:40.0) Gecko/20100101 Firefox/40.0' BASE_NAME = "NaughtyAmerica.com" class naughtyamericaGenreScreen(MPScreen): def __init__(self, session): MPScreen.__init__(self, session, skin='MP_Plugin') self["actions"] = ActionMap(["MP_Actions"], { "ok" : self.keyOK, "0" : self.closeAll, "cancel" : self.keyCancel, "up" : self.keyUp, "down" : self.keyDown, "right" : self.keyRight, "left" : self.keyLeft, "yellow" : self.keyChangeCats }, -1) self['title'] = Label(BASE_NAME) self['ContentTitle'] = Label("Genre:") self['F3'] = Label(_("Mode")) self.keyLocked = True self.suchString = '' self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent) self['liste'] = self.ml self.catmode = 'category' self.onLayoutFinish.append(self.layoutFinished) def layoutFinished(self): self.genreliste = [] self.keyLocked = True url = "https://tour.naughtyamerica.com/fantasy/%s" % self.catmode getPage(url, agent=myagent).addCallback(self.genreData).addErrback(self.dataError) def genreData(self, data): parse = re.search('class="categories">(.*?)class="clear"', data, re.S) Cats = re.findall('href="(.*?fantasy\/(.*?))(?:\?nats.*?)".*?>(.*?)</a', parse.group(1), re.S) if Cats: for (Url, Title, TitleText) in Cats: if not "..." 
in TitleText: Title = TitleText else: Title = Title.replace('-porn-videos','').replace('-',' ').title() if not Title in ("VR Porn", "Virtual Reality"): Url = Url + "?page=" self.genreliste.append((decodeHtml(Title), Url, None)) self.genreliste.sort() self.genreliste.insert(0, ("Newest", 'https://tour.naughtyamerica.com/new-porn-videos?page=', None)) self.genreliste.insert(0, ("--- Search ---", "callSuchen", None)) self.ml.setList(map(self._defaultlistcenter, self.genreliste)) self.ml.moveToIndex(0) self.keyLocked = False self.showInfos() def keyOK(self): if self.keyLocked: return if not config.mediaportal.premiumize_use.value: message = self.session.open(MessageBoxExt, _("%s only works with enabled MP premiumize.me option (MP Setup)!" % BASE_NAME), MessageBoxExt.TYPE_INFO, timeout=10) return Name = self['liste'].getCurrent()[0][0] if Name == "--- Search ---": self.suchen() else: Link = self['liste'].getCurrent()[0][1] self.session.open(naughtyamericaFilmScreen, Link, Name) def SuchenCallback(self, callback = None, entry = None): if callback is not None and len(callback): self.suchString = callback Name = "--- Search ---" Link = self.suchString.replace(' ', '+') self.session.open(naughtyamericaFilmScreen, Link, Name) def keyChangeCats(self): rangelist = [['Category', 'category'], ['Who', 'who'], ['Where', 'where']] self.session.openWithCallback(self.ChangeCats, ChoiceBoxExt, title=_('Select Action'), list = rangelist) def ChangeCats(self, result): if result: self.catmode = result[1] self.layoutFinished() class naughtyamericaFilmScreen(MPScreen, ThumbsHelper): def __init__(self, session, Link, Name): self.Link = Link self.Name = Name MPScreen.__init__(self, session, skin='MP_PluginDescr') ThumbsHelper.__init__(self) self["actions"] = ActionMap(["MP_Actions"], { "ok" : self.keyOK, "0" : self.closeAll, "cancel" : self.keyCancel, "5" : self.keyShowThumb, "up" : self.keyUp, "down" : self.keyDown, "right" : self.keyRight, "left" : self.keyLeft, "nextBouquet" : self.keyPageUp, "prevBouquet" : self.keyPageDown, "green" : self.keyPageNumber }, -1) self['title'] = Label(BASE_NAME) self['ContentTitle'] = Label("Genre: %s" % self.Name) self['F2'] = Label(_("Page")) self['Page'] = Label(_("Page:")) self.keyLocked = True self.page = 1 self.lastpage = 1 self.filmliste = [] self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent) self['liste'] = self.ml self.onLayoutFinish.append(self.loadPage) def loadPage(self): self.keyLocked = True self['name'].setText(_('Please wait...')) self.filmliste = [] if re.match(".*?Search", self.Name): url = "https://tour.naughtyamerica.com/search?term=%s&page=%s" % (self.Link, str(self.page)) else: url = "%s%s" % (self.Link, str(self.page)) getPage(url, agent=myagent).addCallback(self.loadData).addErrback(self.dataError) def loadData(self, data): self.getLastPage(data, 'class="pagination(.*?)</div>', '.*(?:\/|>)(\d+)') parse = re.search('content-main(.*?)(id="fantasySideBar"|id="wrapper-footer)', data, re.S) Movies = re.findall('a\shref="(https?://tour.naughtyamerica.com.*?)"\stitle="(.*?)".*?img.*?src="(.*?)"(.*?)class="entry-date">(.*?)</p', parse.group(1), re.S) if Movies: for (Url, Title, Image, VrCheck, Date) in Movies: if not "vr-icon" in VrCheck: if Image.startswith('//'): Image = 'http:' + Image self.filmliste.append((decodeHtml(Title), Url, Image, Date)) if len(self.filmliste) == 0: self.filmliste.append((_('No videos found!'), '', None, '')) self.ml.setList(map(self._defaultlistleft, self.filmliste)) self.ml.moveToIndex(0) self.keyLocked = 
False self.th_ThumbsQuery(self.filmliste, 0, 1, 2, None, None, self.page, int(self.lastpage), mode=1) self.showInfos() def showInfos(self): title = self['liste'].getCurrent()[0][0] pic = self['liste'].getCurrent()[0][2] date = self['liste'].getCurrent()[0][3] self['handlung'].setText(date) self['name'].setText(title) CoverHelper(self['coverArt']).getCover(pic) def keyOK(self): if self.keyLocked: return Link = self['liste'].getCurrent()[0][1] get_stream_link(self.session).check_link(Link, self.play) def play(self, url): title = self['liste'].getCurrent()[0][0] self.session.open(SimplePlayer, [(title, url.replace('%2F','%252F').replace('%3D','%253D').replace('%2B','%252B'))], showPlaylist=False, ltype='naughtyamerica')
gpl-2.0
8,299,333,533,479,308,000
36.933333
163
0.663779
false
3.06582
false
false
false
snood1205/Computational-Science
eq-solve.py
1
1492
import numpy as np
import numpy.linalg as la
import scipy.linalg as sla
import timeit

# This file provides many different methods for solving Ax=b

# Economy QR Decomposition
def qrsolve(a,b):
    q, r = la.qr(a,mode='reduced')
    return np.dot(np.dot(la.inv(r),la.inv(q)),b)

# LU (non-pivot) Decomposition
def lusolve(a,b):
    l, u = sla.lu(a,permute_l=True)
    return np.dot(np.dot(la.inv(u),la.inv(l)),b)

# Regular inverse solve
def solve(a,b):
    return np.dot(la.inv(a),b)

# Cholesky Factorization Solution (Only works for Symmetric Positive Definite Matrices)
def chsolve(a,b):
    # scipy returns an upper-triangular u with a = u.T * u,
    # so x = inv(u) * inv(u.T) * b
    u = sla.cholesky(a)
    return np.dot(np.dot(la.inv(u),la.inv(u.T)),b)

# Polar Factorization Solve
def psolve(a,b):
    u,p = sla.polar(a);
    return np.dot(np.dot(la.inv(p),la.inv(u)),b)

# Built in solver
def pro(a,b):
    return la.solve(a,b)

a = np.random.random((10000,10000))
b = np.random.random((10000,1))

x = pro(a,b)
ax = np.dot(a,x)
sub = ax - b

qr = timeit.timeit("qrsolve(a,b)", setup="from __main__ import qrsolve, a, b",number=1)
lu = timeit.timeit("lusolve(a,b)", setup="from __main__ import lusolve, a, b",number=1)
reg = timeit.timeit("solve(a,b)", setup="from __main__ import solve, a, b",number=1)
pro = timeit.timeit("pro(a,b)",setup="from __main__ import pro, a, b",number=1)
pol = timeit.timeit("psolve(a,b)",setup="from __main__ import psolve, a, b",number=1)

print("QR:",qr,"\nLU:",lu,"\nReg:",reg,"\nProgrammed:",pro,"\nPolar:",pol)

count = 0
for i in sub:
    if i > 1e-4:
        print(i)
        count += 1
print(count)
mit
-8,539,782,576,811,710,000
25.642857
87
0.664879
false
2.353312
false
false
false
elastic/elasticsearch-dsl-py
tests/test_integration/test_examples/test_completion.py
1
1307
# -*- coding: utf-8 -*- # Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import unicode_literals from .completion import Person def test_person_suggests_on_all_variants_of_name(write_client): Person.init(using=write_client) Person(name="Honza Král", popularity=42).save(refresh=True) s = Person.search().suggest("t", "kra", completion={"field": "suggest"}) response = s.execute() opts = response.suggest.t[0].options assert 1 == len(opts) assert opts[0]._score == 42 assert opts[0]._source.name == "Honza Král"
apache-2.0
6,124,546,491,409,554,000
35.25
76
0.722605
false
3.655462
false
false
false
thagomizer/examples
Collision15/scripts/rails_log_to_csv.py
1
1983
# Copyright 2015 Google # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import re import csv import sys script, logfile, outfile = sys.argv IP_RE = re.compile('(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})') TIMESTAMP_RE = re.compile('\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}') STATUS_RE = re.compile('Completed (\d{3})') RESOURCE_RE = re.compile('Started (GET|POST|PUT|DELETE) "(.*?)"') PROCESSING_TIME_RE = re.compile(' (\d+)ms') records = [] with open(logfile, 'r') as f: for line in f: line = line.strip() if not line: continue d = {} d['body'] = line match = IP_RE.search(line) d['ip_address'] = match.group() if match else None match = TIMESTAMP_RE.search(line) d['timestamp'] = match.group() if match else None match = STATUS_RE.search(line) d['status'] = match.group() if match else None match = PROCESSING_TIME_RE.search(line) d['processing_time'] = match.group() if match else None match = RESOURCE_RE.search(line) d['verb'] = match.groups()[0] if match else None d['resource'] = match.groups()[1] if match else None records.append(d) with open(outfile, 'wb') as csvfile: logwriter = csv.writer(csvfile) logwriter.writerow(records[0].keys()) for record in records: logwriter.writerow(record.values())
apache-2.0
-4,016,073,517,099,934,700
32.05
74
0.602622
false
3.553763
false
false
false
dirkjankrijnders/aubio
interfaces/python/test_fft.py
4
3442
from numpy.testing import TestCase, run_module_suite from numpy.testing import assert_equal, assert_almost_equal # WARNING: numpy also has an fft object from aubio import fvec, fft, cvec from numpy import array, shape from math import pi class aubio_fft_test_case(TestCase): def test_members(self): f = fft() assert_equal (f.win_s, 1024) def test_output_dimensions(self): """ check the dimensions of output """ win_s = 1024 timegrain = fvec(win_s) f = fft(win_s) fftgrain = f (timegrain) assert_equal (fftgrain.norm, 0) assert_equal (shape(fftgrain.norm), (win_s/2+1,)) assert_equal (fftgrain.phas, 0) assert_equal (shape(fftgrain.phas), (win_s/2+1,)) def test_zeros(self): """ check the transform of zeros """ win_s = 512 timegrain = fvec(win_s) f = fft(win_s) fftgrain = f(timegrain) assert_equal ( fftgrain.norm == 0, True ) assert_equal ( fftgrain.phas == 0, True ) def test_impulse(self): """ check the transform of one impulse at a random place """ from random import random from math import floor win_s = 256 i = floor(random()*win_s) impulse = pi * random() f = fft(win_s) timegrain = fvec(win_s) timegrain[i] = impulse fftgrain = f ( timegrain ) #self.plot_this ( fftgrain.phas ) assert_almost_equal ( fftgrain.norm, impulse, decimal = 6 ) assert_equal ( fftgrain.phas <= pi, True) assert_equal ( fftgrain.phas >= -pi, True) def test_impulse_negative(self): """ check the transform of one impulse at a random place """ from random import random from math import floor win_s = 256 i = 0 impulse = -10. f = fft(win_s) timegrain = fvec(win_s) timegrain[i] = impulse fftgrain = f ( timegrain ) #self.plot_this ( fftgrain.phas ) assert_almost_equal ( fftgrain.norm, abs(impulse), decimal = 6 ) if impulse < 0: # phase can be pi or -pi, as it is not unwrapped assert_almost_equal ( abs(fftgrain.phas[1:-1]) , pi, decimal = 6 ) assert_almost_equal ( fftgrain.phas[0], pi, decimal = 6) assert_almost_equal ( fftgrain.phas[-1], pi, decimal = 6) else: assert_equal ( fftgrain.phas[1:-1] == 0, True) assert_equal ( fftgrain.phas[0] == 0, True) assert_equal ( fftgrain.phas[-1] == 0, True) # now check the resynthesis synthgrain = f.rdo ( fftgrain ) #self.plot_this ( fftgrain.phas.T ) assert_equal ( fftgrain.phas <= pi, True) assert_equal ( fftgrain.phas >= -pi, True) #self.plot_this ( synthgrain - timegrain ) assert_almost_equal ( synthgrain, timegrain, decimal = 6 ) def test_impulse_at_zero(self): """ check the transform of one impulse at a index 0 """ win_s = 1024 impulse = pi f = fft(win_s) timegrain = fvec(win_s) timegrain[0] = impulse fftgrain = f ( timegrain ) #self.plot_this ( fftgrain.phas ) assert_equal ( fftgrain.phas[0], 0) assert_equal ( fftgrain.phas[1], 0) assert_almost_equal (fftgrain.norm[0], impulse, decimal = 6 ) def test_rdo_before_do(self): """ check running fft.rdo before fft.do works """ win_s = 1024 impulse = pi f = fft(win_s) fftgrain = cvec(win_s) t = f.rdo( fftgrain ) assert_equal ( t, 0 ) def plot_this(self, this): from pylab import plot, show plot ( this ) show () if __name__ == '__main__': from unittest import main main()
gpl-3.0
2,928,332,273,026,703,400
30.290909
72
0.623475
false
2.982669
true
false
false
tsuberi/kid-watch-app
Server/twilio/rest/resources/messages.py
3
5123
from . import InstanceResource, ListResource from .media import MediaList from .util import normalize_dates, parse_date class Message(InstanceResource): """ A Message instance. .. attribute:: sid A 34 character string that uniquely identifies this resource. .. attribute:: account_sid The unique id of the Account that sent or received this message. .. attribute:: from The phone number that initiated this message in E.164 format. For incoming messages, this will be the remote phone. For outgoing messages, this will be one of your Twilio phone numbers. .. attribute:: to The phone number that received the message in E.164 format. For incoming messages, this will be one of your Twilio phone numbers. For outgoing messages, this will be the remote phone. .. attribute:: date_created The date that this resource was created, given in RFC 2822 format. .. attribute:: date_updated The date that this resource was last updated, given in RFC 2822 format. .. attribute:: date_sent The date that the SMS was sent, given in RFC 2822 format. .. attribute:: body The text body of the message, as a unicode string. .. attribute:: num_segments The number of SMS messages used to deliver the body specified. .. attribute:: num_media The number of media that are associated with the message. If num_media is 0, then the media and image subresource will not contain any images. .. attribute:: status The status of this message. Either queued, sending, sent,failed, or received. .. attribute:: direction The direction of this message. inbound for incoming messages, outbound-api for messages initiated via the REST API, outbound-call for messages initiated during a call or outbound-reply for messages initiated in response to an incoming message. .. attribute:: price The amount billed for the message, in the currency associated with the account. .. attribute:: price_unit The currency in which price is measured, in ISO 4127 format (e.g. USD,EUR, JPY). .. attribute:: api_version The version of the Twilio API used to process the message. .. attribute:: uri The URI for this resource, relative to https://api.twilio.com """ subresources = [MediaList] def delete(self): """Delete this Message record from Twilio.""" return self.parent.delete(self.sid) def redact(self): """Redact this Message's `body` field from Twilio while preserving the record itself and related metadata. """ return self.parent.redact(self.sid) class Messages(ListResource): name = "Messages" key = "messages" instance = Message def create(self, from_=None, **kwargs): """ Create and send a Message. :param str to: The destination phone number. :param str `from_`: The phone number sending this message (must be a verified Twilio number) :param str body: The message you want to send, limited to 1600 characters. :param list media_url: A list of URLs of images to include in the message. :param status_callback: A URL that Twilio will POST to when your message is processed. :param str application_sid: The 34 character sid of the application Twilio should use to handle this message. """ kwargs["from"] = from_ return self.create_instance(kwargs) @normalize_dates def list(self, from_=None, before=None, after=None, date_sent=None, **kw): """ Returns a page of :class:`Message` resources as a list. For paging information see :class:`ListResource`. :param to: Only show messages to this phone number. :param from_: Only show messages from this phone number. :param date after: Only list messages sent after this date. :param date before: Only list message sent before this date. 
:param date date_sent: Only list message sent on this date. :param `from_`: Only show messages from this phone number. :param date after: Only list messages logged after this datetime :param date before: Only list messages logged before this datetime """ kw["From"] = from_ kw["DateSent<"] = before kw["DateSent>"] = after kw["DateSent"] = parse_date(date_sent) return self.get_instances(kw) def update(self, sid, **kwargs): """ Updates the message for the given sid :param sid: The sid of the message to update. """ return self.update_instance(sid, kwargs) def delete(self, sid): """Delete the specified Message record from Twilio.""" return self.delete_instance(sid) def redact(self, sid): """Redact the specified Message record's Body field.""" return self.update_instance(sid, {'Body': ''})
mit
-2,986,672,169,921,871,000
31.630573
79
0.64552
false
4.525618
false
false
false
dmlc/mxnet
benchmark/python/sparse/util.py
19
1451
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import os import random def estimate_density(DATA_PATH, feature_size): """sample 10 times of a size of 1000 for estimating the density of the sparse dataset""" if not os.path.exists(DATA_PATH): raise Exception("Data is not there!") density = [] P = 0.01 for _ in range(10): num_non_zero = 0 num_sample = 0 with open(DATA_PATH) as f: for line in f: if (random.random() < P): num_non_zero += len(line.split(" ")) - 1 num_sample += 1 density.append(num_non_zero * 1.0 / (feature_size * num_sample)) return sum(density) / len(density)
apache-2.0
-8,475,303,383,507,789,000
38.216216
92
0.67264
false
4.008287
false
false
false
fkie-cad/FACT_core
src/plugins/analysis/cwe_checker/code/cwe_checker.py
1
5402
''' This plugin implements a wrapper around the BAP plugin cwe_checker, which checks ELF executables for several CWEs (Common Weakness Enumeration). Please refer to cwe_checkers implementation for further information. Please note that these checks are heuristics and the checks are static. This means that there are definitely false positives and false negatives. The objective of this plugin is to find potentially interesting binaries that deserve a deep manual analysis or intensive fuzzing. As the plugin depends on BAP, it depends on BAP's lifting capabilities. Currently, BAP lifts to the following architectures: - Intel x86 (32 and 64 bits) - ARM - PowerPC - Mips ''' import json import logging from collections import defaultdict from common_helper_process import execute_shell_command_get_return_code from helperFunctions.docker import run_docker_container from analysis.PluginBase import AnalysisBasePlugin TIMEOUT_IN_SECONDS = 600 # 10 minutes DOCKER_IMAGE = 'fkiecad/cwe_checker:latest' class AnalysisPlugin(AnalysisBasePlugin): ''' This class implements the FACT Python wrapper for the BAP plugin cwe_checker. ''' NAME = 'cwe_checker' DESCRIPTION = 'This plugin checks ELF binaries for several CWEs (Common Weakness Enumeration) like'\ 'CWE-243 (Creation of chroot Jail Without Changing Working Directory) and'\ 'CWE-676 (Use of Potentially Dangerous Function).'\ 'Due to the nature of static analysis, this plugin may run for a long time.' DEPENDENCIES = ['cpu_architecture', 'file_type'] VERSION = '0.5.0' MIME_WHITELIST = ['application/x-executable', 'application/x-object', 'application/x-sharedlib'] SUPPORTED_ARCHS = ['arm', 'x86', 'x64', 'mips', 'ppc'] def __init__(self, plugin_administrator, config=None, recursive=True, timeout=TIMEOUT_IN_SECONDS + 30): self.config = config if not self._check_docker_installed(): raise RuntimeError('Docker is not installed.') self._log_version_string() super().__init__(plugin_administrator, config=config, plugin_path=__file__, recursive=recursive, timeout=timeout) @staticmethod def _check_docker_installed(): _, return_code = execute_shell_command_get_return_code('docker -v') return return_code == 0 def _log_version_string(self): output = self._run_cwe_checker_to_get_version_string() if output is None: logging.error('Could not get version string from cwe_checker.') else: logging.info('Version is {}'.format(str(output))) return output @staticmethod def _run_cwe_checker_to_get_version_string(): return run_docker_container(DOCKER_IMAGE, timeout=60, command='--version') @staticmethod def _run_cwe_checker_in_docker(file_object): return run_docker_container(DOCKER_IMAGE, timeout=TIMEOUT_IN_SECONDS, command='/input --json --quiet', mount=('/input', file_object.file_path)) @staticmethod def _parse_cwe_checker_output(output): tmp = defaultdict(list) j_doc = json.loads(output) for warning in j_doc: tmp[warning['name']] = tmp[warning['name']] + [warning, ] res = {} for key, values in tmp.items(): tmp_list = [] plugin_version = None for hit in values: tmp_list.append(hit['description']) if not plugin_version: plugin_version = hit['version'] res[key] = {'plugin_version': plugin_version, 'warnings': tmp_list} return res def _is_supported_arch(self, file_object): arch_type = file_object.processed_analysis['file_type']['full'].lower() return any(supported_arch in arch_type for supported_arch in self.SUPPORTED_ARCHS) def _do_full_analysis(self, file_object): output = self._run_cwe_checker_in_docker(file_object) if output is not None: try: cwe_messages = 
self._parse_cwe_checker_output(output) file_object.processed_analysis[self.NAME] = {'full': cwe_messages, 'summary': list(cwe_messages.keys())} except json.JSONDecodeError: logging.error('cwe_checker execution failed: {}\nUID: {}'.format(output, file_object.uid)) file_object.processed_analysis[self.NAME] = {'summary': []} else: logging.error('Timeout or error during cwe_checker execution.\nUID: {}'.format(file_object.uid)) file_object.processed_analysis[self.NAME] = {'summary': []} return file_object def process_object(self, file_object): ''' This function handles only ELF executables. Otherwise it returns an empty dictionary. It calls the cwe_checker docker container. ''' if not self._is_supported_arch(file_object): logging.debug('{}\'s arch is not supported ({})'.format( file_object.file_path, file_object.processed_analysis['cpu_architecture']['summary'])) file_object.processed_analysis[self.NAME] = {'summary': []} else: file_object = self._do_full_analysis(file_object) return file_object
gpl-3.0
-5,476,560,485,305,494,000
42.564516
121
0.637727
false
4.098634
false
false
false
un33k/problems
paths/iterative_max_product.py
1
1549
import sys
import cProfile


def maxProductPaths(nXm_matrix):
    """ Returns the `maximum` path `product` from the top left to the
    bottom right by moving right and down in a 2D array of size n x m.
    """
    height = len(nXm_matrix)
    width = len(nXm_matrix[0])
    noop = 1
    grid = {}

    # create a grid system dictionary using key format of (h,w)
    for h in range(height):
        for w in range(width):
            grid[(h, w)] = nXm_matrix[h][w]

    # start from bottom-right and move your way up
    for h in reversed(range(height)):
        for w in reversed(range(width)):
            bottom = grid.get((h + 1, w), noop)
            right = grid.get((h, w + 1), noop)
            if not all(x == noop for x in (bottom, right)):
                grid[(h, w)] *= max(bottom, right)

    # print a matrix where (0,0) has the total max `product` number
    for h in range(height):
        row = [str(grid[(h,w)]).rjust(5) for w in range(width)]
        print '[{}]'.format(','.join(row))

    # to traverse the maximal product path, start from (0,0) and
    # work your way down. (take-home exercise)
    count = grid[(0,0)]
    return count


def run_test():
    """ Test function. """
    d2_matrix = [
        [1,6,1],
        [3,7,2],
        [5,0,8],
        [1,6,1],
    ]

    count = maxProductPaths(d2_matrix)
    print "---------------------"
    print "Maximum Path Product = {}\n".format(count)


if __name__ == "__main__":
    """ Run the code and profile it. """
    cProfile.run('run_test()')
bsd-2-clause
-7,724,922,176,417,361,000
24.393443
72
0.537766
false
3.338362
false
false
false
Hybrid-Cloud/cinder
cinder/volume/drivers/nexenta/jsonrpc.py
9
2851
# Copyright 2016 Nexenta Systems, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_log import log as logging from oslo_serialization import jsonutils import requests from cinder import exception from cinder.utils import retry LOG = logging.getLogger(__name__) TIMEOUT = 60 class NexentaJSONProxy(object): retry_exc_tuple = (requests.exceptions.ConnectionError,) def __init__(self, scheme, host, port, path, user, password, auto=False, obj=None, method=None, session=None): if session: self.session = session else: self.session = requests.Session() self.session.auth = (user, password) self.session.headers.update({'Content-Type': 'application/json'}) self.scheme = scheme.lower() self.host = host self.port = port self.path = path self.user = user self.password = password self.auto = auto self.obj = obj self.method = method def __getattr__(self, name): if not self.obj: obj, method = name, None elif not self.method: obj, method = self.obj, name else: obj, method = '%s.%s' % (self.obj, self.method), name return NexentaJSONProxy(self.scheme, self.host, self.port, self.path, self.user, self.password, self.auto, obj, method, self.session) @property def url(self): return '%s://%s:%s%s' % (self.scheme, self.host, self.port, self.path) def __hash__(self): return self.url.__hash__() def __repr__(self): return 'NMS proxy: %s' % self.url @retry(retry_exc_tuple, retries=6) def __call__(self, *args): data = jsonutils.dumps({ 'object': self.obj, 'method': self.method, 'params': args }) LOG.debug('Sending JSON data: %s', data) r = self.session.post(self.url, data=data, timeout=TIMEOUT) response = r.json() LOG.debug('Got response: %s', response) if response.get('error') is not None: message = response['error'].get('message', '') raise exception.NexentaException(message) return response.get('result')
apache-2.0
6,879,426,092,511,571,000
32.151163
78
0.600842
false
4.015493
false
false
false
pfont/www.pfont.ir
app/utilis/sms.py
1
1807
from suds.client import Client as SoapClient
from lxml import objectify

#import logging
#logging.basicConfig(level=logging.INFO)
#logging.getLogger('suds.client').setLevel(logging.DEBUG)
#print(client)


class SMS(object):
    def __init__(self, app=None):
        self.sender = None
        self.soapservice = None
        self.user = None
        self.password = None
        self.template = '<xmsrequest>' + \
                        '<userid>{userid}</userid>' + \
                        '<password>{password}</password>' + \
                        '<action>smssend</action>' + '<body>' + \
                        '<type>otm</type>' + \
                        '<message originator="{sender}">{message}</message>' + \
                        '<recipient>{number}</recipient>' + \
                        '</xmsrequest>'

        if app is not None:
            self.init_app(app)

    def init_app(self, app):
        """Set up this instance for use with *app*, if no app was passed
        to the constructor.
        """
        self.sender = app.config['SMS_SENDER']
        self.soapservice = app.config['SMS_SOAP_SERVICE']
        self.user = app.config['SMS_USERNAME']
        self.password = app.config['SMS_PASSWORD']

    def send(self, number, message):
        client = SoapClient(self.soapservice)
        response = client.service.XmsRequest(
            self.template.format(userid=self.user,
                                 password=self.password,
                                 sender=self.sender,
                                 message=message,
                                 number=number)
        )
        try:
            # objectify is a module, not a callable: parse the returned XML
            response = objectify.fromstring(response)
            return str(response), response.get('mobile', None)
        except Exception:
            return 'error', response
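# Illustrative usage sketch (assumes a Flask-style app object with a config
# mapping; the config values below are made up, the class only reads the
# SMS_* keys referenced in init_app above):
#
#   from flask import Flask
#   app = Flask(__name__)
#   app.config.update(
#       SMS_SENDER='3000',
#       SMS_SOAP_SERVICE='http://example.com/sms?wsdl',
#       SMS_USERNAME='user',
#       SMS_PASSWORD='secret',
#   )
#   sms = SMS(app)
#   status, mobile = sms.send('09120000000', 'hello')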
gpl-2.0
-3,489,350,214,241,698,300
33.75
80
0.515219
false
4.428922
false
false
false
AntoineLee/spider163
spider163/spider/music.py
1
3493
#!/usr/bin/env python # -*- coding: utf-8 -*- import requests import json from bs4 import BeautifulSoup import settings as uapi from spider163 import settings from spider163.utils import pysql from spider163.utils import pylog from terminaltables import AsciiTable class Music: def __init__(self): self.__headers = uapi.header self.__url = uapi.music_url self.session = settings.Session() def views_capture(self): urls = self.session.query(pysql.Playlist163).filter(pysql.Playlist163.over == 'N').limit(10) for url in urls: print("正在抓取歌单《{}》的歌曲……".format(url.title.encode("utf-8"))) self.view_capture(url.link) for url in urls: self.session.query(pysql.Playlist163).filter(pysql.Playlist163.link == url.link).update({'over': 'Y'}) self.session.commit() return urls.count() def view_capture(self, link): self.session.query(pysql.Playlist163).filter(pysql.Playlist163.link == link).update({'over': 'Y'}) url = self.__url + str(link) s = requests.session() try: s = BeautifulSoup(s.get(url, headers=self.__headers).content, "html.parser") musics = json.loads(s.text)['result']['tracks'] exist = 0 for music in musics: name = music['name'].encode('utf-8') author = music['artists'][0]['name'].encode('utf-8') if pysql.single("music163", "song_id", (music['id'])) == True: self.session.add(pysql.Music163(song_id=music['id'],song_name=name,author=author)) self.session.commit() exist = exist + 1 else: pylog.log.info('{} : {} {}'.format("重复抓取歌曲", name, "取消持久化")) print("歌单包含歌曲 {} 首,数据库 merge 歌曲 {} 首 \r\n".format(len(musics), exist)) except Exception: pylog.log.error('{} : {}'.format("抓取歌单页面存在问题", url)) def get_playlist(self, playlist_id): self.view_capture(int(playlist_id)) url = uapi.playlist_api.format(playlist_id) s = requests.session() s = BeautifulSoup(s.get(url, headers=self.__headers).content, "html.parser") playlist = json.loads(s.text)['result'] print("《" + playlist['name'].encode('utf-8') + "》") author = playlist['creator']['nickname'].encode('utf-8') pc = str(playlist['playCount']) sc = str(playlist['subscribedCount']) rc = str(playlist['shareCount']) cc = str(playlist['commentCount']) print("维护者:{} 播放:{} 关注:{} 分享:{} 评论:{}".format(author, pc, sc, rc, cc)) print("描述:{}".format(playlist['description'].encode('utf-8'))) print("标签:{}".format(",".join(playlist['tags']).encode("utf-8"))) tb = [["ID", "歌曲名字", "艺术家", "唱片"]] for music in playlist['tracks']: artists = [] for s in music['artists']: artists.append(s['name']) ms = music['name'].encode("utf-8") ar = ",".join(artists).encode("utf-8") ab = music['album']['name'].encode("utf-8") id = music['id'] tb.append([id, ms, ar, ab]) print(AsciiTable(tb).table) if __name__ == "__main__": tmp = Music() print tmp.views_capture()
mit
-6,229,999,756,993,117,000
38.211765
114
0.553255
false
3.296736
false
false
false
aplicatii-romanesti/allinclusive-kodi-pi
.kodi/addons/plugin.video.1channel/waldo/indexes/1Channel_index.py
3
8581
import os import re import sys import urllib2 import HTMLParser import xbmcgui import xbmcplugin from t0mm0.common.addon import Addon from t0mm0.common.addon import Addon as Addon2 addon = Addon('plugin.video.waldo', sys.argv) _1CH = Addon2('plugin.video.1channel', sys.argv) #BASE_Address = 'www.primewire.ag' BASE_Address = _1CH.get_setting('domain').replace('http://','') if (_1CH.get_setting("enableDomain")=='true') and (len(_1CH.get_setting("customDomain")) > 10): BASE_Address=_1CH.get_setting("customDomain").replace('http://','') if not BASE_Address.startswith('http'): BASE_URL = 'http://'+BASE_Address display_name = 'PrimeWire'#'1Channel' #Label that will be displayed to the user representing this index tag = 'PrimeWire'#'1Channel' #MUST be implemented. Unique 3 or 4 character string that will be used to #identify this index required_addons = [] #MUST be implemented. A list of strings indicating which addons are required to #be installed for this index to be used. #For example: required_addons = ['script.module.beautifulsoup', 'plugin.video.youtube'] #Currently, xbmc does not provide a way to require a specific version of an addon def get_settings_xml(): """ Must be defined. This method should return XML which describes any Waldo specific settings you would like for your plugin. You should make sure that the ``id`` starts with your tag followed by an underscore. For example: xml = '<setting id="ExI_priority" ' xml += 'type="number" label="Priority" default="100"/>\\n' return xml The settings category will be your plugin's :attr:`display_name`. Returns: A string containing XML which would be valid in ``resources/settings.xml`` or boolean False if none are required """ return False def get_browsing_options():#MUST be defined """ Returns a list of dicts. Each dict represents a different method of browsing this index. The following keys MUST be provided: 'name': Label to display to the user to represent this browsing method 'function': A function (defined in this index) which will be executed when the user selects this browsing method. This function should describe and add the list items to the directory, and assume flow control from this point on. Once the user indicates the content they would like to search the providers for (usually via selecting a list item), plugin.video.waldo should be called with the following parameters (again usually via listitem): mode = 'GetAllResults' type = either 'movie', 'tvshow', 'season', or 'episode' title = The title string to look for year = The release year of the desired movie, or premiere date of the desired tv show. imdb = The imdb id of the movie or tvshow to find sources for tvdb = The tvdb id of the movie or tvshow to find sources for season = The season number for which to return results. If season is supplied, but not episode, all results for that season should be returned episode: The episode number for which to return results """ option_1 = {'name': 'Tv Shows', 'function': 'BrowseListMenu', 'kwargs': {'section': 'tv'}} option_2 = {'name': 'Movies', 'function': 'BrowseListMenu', 'kwargs': {'section': 'movies'}} return [option_1, option_2] def callback(params): """ MUST be implemented. This method will be called when the user selects a listitem you created. It will be passed a dict of parameters you passed to the listitem's url. 
For example, the following listitem url: plugin://plugin.video.waldo/?mode=main&section=tv&api_key=1234 Will call this function with: {'mode':'main', 'section':'tv', 'api_key':'1234'} """ try: addon.log('%s was called with the following parameters: %s' % (params.get('receiver', ''), params)) except: pass sort_by = params.get('sort', None) section = params.get('section') if sort_by: GetFilteredResults(section, sort=sort_by) def BrowseListMenu(section): #This must match the 'function' key of an option from get_browsing_options addon.add_directory({'section': section, 'sort': 'featured'}, {'title': 'Featured'}, img=art('featured.png'), fanart=art('fanart.png')) addon.add_directory({'section': section, 'sort': 'views'}, {'title': 'Most Popular'}, img=art('most_popular.png'), fanart=art('fanart.png')) addon.add_directory({'section': section, 'sort': 'ratings'}, {'title': 'Highly rated'}, img=art('highly_rated.png'), fanart=art('fanart.png')) addon.add_directory({'section': section, 'sort': 'release'}, {'title': 'Date released'}, img=art('date_released.png'), fanart=art('fanart.png')) addon.add_directory({'section': section, 'sort': 'date'}, {'title': 'Date added'}, img=art('date_added.png'), fanart=art('fanart.png')) addon.end_of_directory() def art(filename): adn = Addon('plugin.video.1channel', sys.argv) THEME_LIST = ['mikey1234', 'Glossy_Black', 'PrimeWire'] THEME = THEME_LIST[int(adn.get_setting('theme'))] THEME_PATH = os.path.join(adn.get_path(), 'art', 'themes', THEME) img = os.path.join(THEME_PATH, filename) return img def GetFilteredResults(section=None, genre=None, letter=None, sort='alphabet', page=None): #3000 try: addon.log('Filtered results for Section: %s Genre: %s Letter: %s Sort: %s Page: %s' % (section, genre, letter, sort, page)) except: pass pageurl = BASE_URL + '/?' if section == 'tv': pageurl += 'tv' if genre: pageurl += '&genre=' + genre if letter: pageurl += '&letter=' + letter if sort: pageurl += '&sort=' + sort if page: pageurl += '&page=%s' % page if page: page = int(page) + 1 else: page = 2 html = GetURL(pageurl) r = re.search('number_movies_result">([0-9,]+)', html) if r: total = int(r.group(1).replace(',', '')) else: total = 0 total_pages = total / 24 total = min(total, 24) r = 'class="index_item.+?href="(.+?)" title="Watch (.+?)"?\(?([0-9]{4})?\)?"?>.+?src="(.+?)"' regex = re.finditer(r, html, re.DOTALL) resurls = [] for s in regex: resurl, title, year, thumb = s.groups() if resurl not in resurls: resurls.append(resurl) li_title = '%s (%s)' % (title, year) li = xbmcgui.ListItem(li_title, iconImage=thumb, thumbnailImage=thumb) if section == 'tv': section = 'tvshow' else: section = 'movie' queries = {'waldo_mode': 'GetAllResults', 'title': title, 'vid_type': section} li_url = addon.build_plugin_url(queries) xbmcplugin.addDirectoryItem(int(sys.argv[1]), li_url, li, isFolder=True, totalItems=total) if html.find('> >> <') > -1: label = 'Skip to Page...' 
command = addon.build_plugin_url( {'mode': 'PageSelect', 'pages': total_pages, 'section': section, 'genre': genre, 'letter': letter, 'sort': sort}) command = 'RunPlugin(%s)' % command cm = [(label, command)] meta = {'title': 'Next Page >>'} addon.add_directory( {'mode': 'CallModule', 'receiver': 'PrimeWire', 'ind_path': os.path.dirname(__file__), 'section': section, 'genre': genre, 'letter': letter, 'sort': sort, 'page': page}, meta, cm, True, art('nextpage.png'), art('fanart.png'), is_folder=True) addon.end_of_directory() def GetURL(url, params=None, referrer=BASE_URL): try: addon.log('Fetching URL: %s' % url) except: pass USER_AGENT = 'User-Agent:Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1312.56' if params: req = urllib2.Request(url, params) else: req = urllib2.Request(url) req.add_header('User-Agent', USER_AGENT) req.add_header('Host', BASE_Address) #'www.primewire.ag' req.add_header('Referer', referrer) try: response = urllib2.urlopen(req, timeout=10) body = response.read() body = unicode(body, 'iso-8859-1') h = HTMLParser.HTMLParser() body = h.unescape(body) except Exception, e: try: addon.log('Failed to connect to %s: %s' % (url, e)) except: pass return '' return body.encode('utf-8')
apache-2.0
-3,277,716,678,351,474,000
39.866667
132
0.623121
false
3.668662
false
false
false
0x0mar/phpsploit
plugins/file_system/ls/plugin.py
1
3708
"""List directory contents SYNOPSIS: ls [<REMOTE PATH>] ... DESCRIPTION: List the files in given remote directory path - If the given element is not an accessible directory, the payload automatically considers the path's basename as a regex pattern, it allows to list files which match a specific pattern only, for example: "ls /tmp/*.txt", will list only .txt files. - Ending the argument string with a path separator (for example, '/tmp/' instead of '/tmp') explicitly indicates that the given path is the exact directory location you want to list, so it disables the pattern feature mentionned above. NOTE: If the plugin receives multiple arguments, each one will be listed in the given order. WARNING: The 'ls' plugin gives permission informations about each listed file, in unix drwxrwxrwx mode. If the permission informations are not available, then the payload tries to provide basic permission informations in drwx mode, which indicates the file rights relative to the current user. EXAMPLES: > ls - List any element in the current directory > ls ~ - List any element in the user's home directory > ls .. /home - List the path above the current working directory - After that, list the '/home' directory. > ls D:\\*.ini - List any element in D:\\ whose names end with '.ini' MAINTAINERS: nil0x42 <http://goo.gl/kb2wf> Wannes Rombouts <https://github.com/wapiflapi> """ import sys from ui.color import colorize, decolorize from api import plugin from api import server from api import environ for path in plugin.argv[1:] or [environ['PWD']]: absolute_path = server.path.abspath(path) lister = server.payload.Payload("payload.php") lister['TARGET'] = absolute_path lister['SEPARATOR'] = "/" lister['PARSE'] = 1 if absolute_path == environ['HOME'] or path.endswith(environ['PATH_SEP']): lister['PARSE'] = 0 try: response = lister.send() except server.payload.PayloadError as e: if e.args[0] == 'nodir': sys.exit("cannot access %s: No such file or directory." % (path)) if e.args[0] == 'noright': sys.exit("cannot open %s: Permission denied." % (path)) if e.args[0] == 'nomatch': sys.exit("cannot find %s: No matching elements." % (path)) target, regex, lines = response[0], response[1], response[2] # if at least one owner/group is not '?', use unix-like formatter if any((x[2] + x[3]) != '??' for x in lines): rows_hdr = ["Mode", "Owner", "Group", "Size", "Last Modified", "Name"] rows = ([l[0], l[2], l[3], l[4], l[5], l[6]] for l in lines) # otherwise, use windows-like formatter else: rows_hdr = ["Mode", "Size", "Last Modified", "Name"] rows = ([x[1], x[4], x[5], x[6]] for x in lines) # format rows the right way rows = sorted(rows, key=(lambda elem: elem[-1])) rows.insert(0, rows_hdr) rows.insert(1, [("-" * len(elem)) for elem in rows_hdr]) # format and display output title header = "Listing: %s" % target if regex: header += " (matching r'%s')" % colorize("%White", regex) print("\n" + header + "\n" + ("=" * len(decolorize(header))) + "\n") widths = [max(map(len, col)) for col in zip(*rows)] for i, row in enumerate(rows): if i > 0: if row[0].startswith('d'): row[-1] = colorize("%BoldBlue", row[-1]) elif not row[0].startswith('-'): row[-1] = colorize("%BoldPink", row[-1]) print(" ".join((val.ljust(width) for val, width in zip(row, widths)))) print()
gpl-3.0
4,447,813,382,959,210,000
33.654206
79
0.617044
false
3.667656
false
false
false
tjcsl/director
web3/apps/docs/urls.py
1
1025
from django.conf.urls import url

from . import views

urlpatterns = [
    url(r"^$", views.index_view, name="docs_home"),
    url(r"^new/$", views.new_article_view, name="new_article"),
    url(r"^list/$", views.list_articles_view, name="list_articles"),
    url(r"^(?P<article_slug>[\w-]+)/$", views.read_article_view, name="read_article"),
    url(r"^(?P<article_slug>[\w-]+)/save$", views.save_view, name="save_article"),
    url(r"^(?P<article_slug>[\w-]+)/save_revision$", views.save_history_view, name="save_history_article"),
    url(r"^(?P<article_slug>[\w-]+)/publish$", views.publish_view, name="publish_article"),
    url(r"^(?P<article_slug>[\w-]+)/unpublish$", views.unpublish_view, name="unpublish_article"),
    url(r"^(?P<article_slug>[\w-]+)/history$", views.article_history_view, name="article_history"),
    url(r"^(?P<article_slug>[\w-]+)/edit$", views.edit_article_view, name="edit_article"),
    url(r"^(?P<article_slug>[\w-]+)/r/(?P<revision_id>\d+)$", views.read_article_view, name="read_article_revision")
]
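# Illustrative sketch of resolving these named routes (the "/docs/" prefix is
# an assumption about where this URLConf is included; slug and revision id
# values are made up):
#
#   from django.urls import reverse   # django.core.urlresolvers on older Django
#   reverse("read_article", kwargs={"article_slug": "getting-started"})
#   #   -> "/docs/getting-started/"
#   reverse("read_article_revision",
#           kwargs={"article_slug": "getting-started", "revision_id": 3})
#   #   -> "/docs/getting-started/r/3"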
mit
1,922,998,732,607,338,200
59.294118
115
0.64
false
3.059701
false
true
false
NiJeLorg/paratransit_api
paratransit/api/migrations/0011_dropoff_locations_pickup_locations.py
1
1759
# -*- coding: utf-8 -*- # Generated by Django 1.11 on 2017-05-11 19:49 from __future__ import unicode_literals import django.contrib.gis.db.models.fields from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('api', '0010_auto_20170502_1511'), ] operations = [ migrations.CreateModel( name='dropoff_locations', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('tripid', models.BigIntegerField(blank=True, default=0, null=True)), ('p_lat', models.FloatField(blank=True, default=0, null=True)), ('p_lng', models.FloatField(blank=True, default=0, null=True)), ('point', django.contrib.gis.db.models.fields.PointField(blank=True, null=True, srid=4326)), ('trip', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='api.trips')), ], ), migrations.CreateModel( name='pickup_locations', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('tripid', models.BigIntegerField(blank=True, default=0, null=True)), ('p_lat', models.FloatField(blank=True, default=0, null=True)), ('p_lng', models.FloatField(blank=True, default=0, null=True)), ('point', django.contrib.gis.db.models.fields.PointField(blank=True, null=True, srid=4326)), ('trip', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='api.trips')), ], ), ]
mit
-2,889,880,485,835,931,000
44.102564
114
0.599204
false
3.865934
false
false
false
aricaldeira/PySPED
pysped/cte/leiaute/modais_300.py
1
26661
# -*- coding: utf-8 -*- # # PySPED - Python libraries to deal with Brazil's SPED Project # # Copyright (C) 2010-2012 # Copyright (C) Aristides Caldeira <aristides.caldeira at tauga.com.br> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # PySPED - Bibliotecas Python para o # SPED - Sistema Público de Escrituração Digital # # Copyright (C) 2010-2012 # Copyright (C) Aristides Caldeira <aristides.caldeira arroba tauga.com.br> # # Este programa é um software livre: você pode redistribuir e/ou modificar # este programa sob os termos da licença GNU Affero General Public License, # publicada pela Free Software Foundation, em sua versão 3 ou, de acordo # com sua opção, qualquer versão posterior. # # Este programa é distribuido na esperança de que venha a ser útil, # porém SEM QUAISQUER GARANTIAS, nem mesmo a garantia implícita de # COMERCIABILIDADE ou ADEQUAÇÃO A UMA FINALIDADE ESPECÍFICA. Veja a # GNU Affero General Public License para mais detalhes. # # Você deve ter recebido uma cópia da GNU Affero General Public License # juntamente com este programa. Caso esse não seja o caso, acesse: # <http://www.gnu.org/licenses/> # from __future__ import (division, print_function, unicode_literals, absolute_import) from builtins import str from pysped.xml_sped import * from pysped.cte.leiaute import ESQUEMA_ATUAL_VERSAO_300 as ESQUEMA_ATUAL import os DIRNAME = os.path.dirname(__file__) class EmiOcc(XMLNFe): def __init__(self): super(EmiOcc, self).__init__() self.CNPJ = TagCaracter(nome='CNPJ', tamanho=[ 0, 14], raiz='//emiOcc', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.cInt = TagCaracter(nome='cInt', tamanho=[ 1, 10], raiz='//emiOcc', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.IE = TagCaracter(nome='IE', tamanho=[ 2, 14], raiz='//emiOcc', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.UF = TagCaracter(nome='UF', tamanho=[ 2, 2] , raiz='//emiOcc', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.fone = TagInteiro(nome='fone', tamanho=[ 6, 14], raiz='//emiOcc', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) def get_xml(self): xml = XMLNFe.get_xml(self) xml += '<emiOcc>' xml += self.CNPJ.xml xml += self.cInt.xml xml += self.IE.xml xml += self.UF.xml xml += self.fone.xml xml += '</emiOcc>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.CNPJ.xml = arquivo self.cInt.xml = arquivo self.IE.xml = arquivo self.UF.xml = arquivo self.fone.xml = arquivo xml = property(get_xml, set_xml) class Occ(XMLNFe): def __init__(self): super(Occ, self).__init__() self.serie = TagCaracter(nome='serie' , tamanho=[ 8, 8, 8], raiz='//occ', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.nOcc = TagInteiro(nome='nOcc' , tamanho=[ 1, 6], raiz='//occ', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.dEmi = TagData(nome='dEmi', raiz='//occ', 
namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.emiOcc = EmiOcc() def get_xml(self): if not (self.nOcc.valor or self.dEmi.valor or self.emiOcc is not None): return '' xml = XMLNFe.get_xml(self) xml += '<occ>' xml += self.serie.xml xml += self.nOcc.xml xml += self.dEmi.xml xml += self.emiOcc.xml xml += '</occ>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.serie.xml = arquivo self.nOcc.xml = arquivo self.dEmi.xml = arquivo self.emiOcc.xml = arquivo xml = property(get_xml, set_xml) class Rodo(XMLNFe): def __init__(self): super(Rodo, self).__init__() self.RNTRC = TagCaracter(nome='RNTRC' , tamanho=[ 8, 8, 8], raiz='//rodo', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.occ = [] def get_xml(self): xml = XMLNFe.get_xml(self) xml += '<rodo>' xml += self.RNTRC.xml for o in self.occ: xml += o.xml xml += '</rodo>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.RNTRC.xml = arquivo self.occ = self.le_grupo('//CTe/infCte/infCTeNorm/infModal/rodo/occ', Occ, sigla_ns='cte') xml = property(get_xml, set_xml) class InfTotAP(XMLNFe): def __init__(self): super(InfTotAP, self).__init__() self.qTotProd = TagCaracter(nome='qTotProd' , tamanho=[ 1, 1, 1], raiz='//infTotAP', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.uniAP = TagCaracter(nome='uniAP' , tamanho=[ 1, 4], raiz='//infTotAP', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) def get_xml(self): xml = XMLNFe.get_xml(self) xml += '<infTotAP>' xml += self.qTotProd.xml xml += self.uniAP.xml xml += '</infTotAP>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.qTotProd.xml = arquivo self.uniAP.xml = arquivo xml = property(get_xml, set_xml) class Peri(XMLNFe): def __init__(self): super(Peri, self).__init__() self.nONU = TagCaracter(nome='nONU' , tamanho=[ 4, 4, 4], raiz='//peri', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.qTotEmb = TagCaracter(nome='qTotEmb' , tamanho=[ 1, 20], raiz='//peri', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.infTotAP = InfTotAP() def get_xml(self): if not (self.nONU.valor or self.qTotEmb.valor or self.infTotAP is not None): return '' xml = XMLNFe.get_xml(self) xml += '<peri>' xml += self.nONU.xml xml += self.qTotEmb.xml xml += self.infTotAP.xml xml += '</peri>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.nONU.xml = arquivo self.qTotEmb.xml = arquivo self.infTotAP.xml = arquivo xml = property(get_xml, set_xml) class Tarifa(XMLNFe): def __init__(self): super(Tarifa, self).__init__() self.CL = TagCaracter(nome='CL' , tamanho=[ 1, 1, 1], raiz='//tarifa', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.cTar = TagCaracter(nome='cTar' , tamanho=[ 1, 4], raiz='//tarifa', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.vTar = TagDecimal(nome='vTar', tamanho=[1, 13, 1], decimais=[0, 2, 2], raiz='//tarifa', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) def get_xml(self): xml = XMLNFe.get_xml(self) xml += '<tarifa>' xml += self.CL.xml xml += self.cTar.xml xml += self.vTar.xml xml += '</tarifa>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.CL.xml = arquivo self.cTar.xml = arquivo self.vTar.xml = arquivo xml = property(get_xml, set_xml) class TagCInfManu(TagCaracter): def __init__(self, *args, **kwargs): super(TagCInfManu, self).__init__(*args, **kwargs) self.nome = 'cInfMan' self.tamanho = [2, 2] self.raiz = '//natCarga' class NatCarga(XMLNFe): def __init__(self): super(NatCarga, 
self).__init__() self.xDime = TagCaracter(nome='xDime' , tamanho=[ 5, 14], raiz='//natCarga', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.cInfManu = [] def get_xml(self): xml = XMLNFe.get_xml(self) xml += '<natCarga>' xml += self.xDime.xml for c in self.cInfManu: xml += c.xml xml += '</natCarga>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.xDime.xml = arquivo self.cInfManu = self.le_grupo('//CTe/infCte/infCTeNorm/infModal/aereo/natCarga/cInfMan', TagCInfManu, sigla_ns='cte') xml = property(get_xml, set_xml) class Aereo(XMLNFe): def __init__(self): super(Aereo, self).__init__() self.nMinu = TagInteiro(nome='nMin' , tamanho=[ 9, 9, 9], raiz='//aereo', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.nOCA = TagInteiro(nome='nOCA' , tamanho=[ 11, 11, 11], raiz='//aereo', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.dPrevAereo = TagData(nome='dPrevAereo' , raiz='//aereo', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.natCarga = NatCarga() self.tarifa = Tarifa() self.peri = [] def get_xml(self): xml = XMLNFe.get_xml(self) xml += '<aereo>' xml += self.nMinu.xml xml += self.nOCA.xml xml += self.dPrevAereo.xml xml += self.natCarga.xml xml += self.tarifa.xml for p in self.peri: xml += p.xml xml += '</aereo>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.nMinu.xml = arquivo self.nOCA.xml = arquivo self.dPrevAereo.xml = arquivo self.natCarga.xml = arquivo self.tarifa.xml = arquivo self.peri = self.le_grupo('//CTe/infCte/infCTeNorm/infModal/aereo/peri', Peri, sigla_ns='cte') xml = property(get_xml, set_xml) class InfNFeAquav(XMLNFe): def __init__(self): super(InfNFeAquav, self).__init__() self.chave = TagCaracter(nome='chave', tamanho=[ 44, 44], raiz='//CTe/infCte/infCTeNorm/infModal/aquav/detCont/infDoc/infNFe', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.unidRat = TagDecimal(nome='unidRat', tamanho=[1, 3, 1], decimais=[0, 2, 2], raiz='//CTe/infCte/infCTeNorm/infModal/aquav/detCont/infDoc/infNFe', obrigatorio=False) def get_xml(self): if not (self.chave.valor): return '' xml = XMLNFe.get_xml(self) xml += '<infNF>' xml += self.chave.xml xml += self.unidRat.xml xml += '</infNF>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.chave.xml = arquivo self.unidRat.xml = arquivo xml = property(get_xml, set_xml) class InfNFAquav(XMLNFe): def __init__(self): super(InfNFAquav, self).__init__() self.serie = TagCaracter(nome='serie', tamanho=[ 1, 3], raiz='//CTe/infCte/infCTeNorm/infModal/aquav/detCont/infDoc/infNF', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.nDoc = TagCaracter(nome='nDoc', tamanho=[ 1, 20], raiz='//CTe/infCte/infCTeNorm/infModal/aquav/detCont/infDoc/infNF', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.unidRat = TagDecimal(nome='unidRat', tamanho=[1, 3, 1], decimais=[0, 2, 2], raiz='//CTe/infCte/infCTeNorm/infModal/aquav/detCont/infDoc/infNF', obrigatorio=False) def get_xml(self): if not (self.serie.valor or self.nDoc.valor): return '' xml = XMLNFe.get_xml(self) xml += '<infNF>' xml += self.serie.xml xml += self.nDoc.xml xml += self.unidRat.xml xml += '</infNF>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.serie.xml = arquivo self.nDoc.xml = arquivo self.unidRat.xml = arquivo xml = property(get_xml, set_xml) class InfDocAquav(XMLNFe): def __init__(self): super(InfDocAquav, self).__init__() self.infNF = [] self.infNFe = [] def get_xml(self): if 
(len(self.infNF)==0 and len(self.infNFe)==0): return '' xml = XMLNFe.get_xml(self) xml += '<infDoc>' for inf in self.infNF: xml += inf.xml for infe in self.infNFe: xml += infe.xml xml += '</infDoc>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.infNF = self.le_grupo('//CTe/infCte/infCTeNorm/infModal/aquav/detCont/infDoc/infNF', InfNFAquav, sigla_ns='cte') self.infNFe = self.le_grupo('//CTe/infCte/infCTeNorm/infModal/aquav/detCont/infDoc/infNFe', InfNFeAquav, sigla_ns='cte') xml = property(get_xml, set_xml) class Lacre(XMLNFe): def __init__(self): super(Lacre, self).__init__() self.nLacre = TagCaracter(nome='nLacre' , tamanho=[ 1, 20], raiz='//lacre', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) def get_xml(self): if not (self.nLacre.valor): return '' xml = XMLNFe.get_xml(self) xml += '<lacre>' xml += self.nLacre.xml xml += '</lacre>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.nLacre.xml = arquivo xml = property(get_xml, set_xml) class DetCont(XMLNFe): def __init__(self): super(DetCont, self).__init__() self.nCont = TagCaracter(nome='xBalsa' , tamanho=[ 1, 20], raiz='//detCont', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.lacre = [] ##Evitar conflito de nome com InfDoc self.infDoc = InfDocAquav() def get_xml(self): if not (self.nCont.valor): return '' xml = XMLNFe.get_xml(self) xml += '<detCont>' xml += self.nCont.xml for l in self.lacre: xml += l.xml xml += self.infDoc.xml xml += '</detCont>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.nCont.xml = arquivo self.infDoc.xml = arquivo self.lacre = self.le_grupo('//CTe/infCte/infCTeNorm/infModal/aquav/detCont/lacre', Lacre, sigla_ns='cte') xml = property(get_xml, set_xml) class Balsa(XMLNFe): def __init__(self): super(Balsa, self).__init__() self.xBalsa = TagCaracter(nome='xBalsa' , tamanho=[ 1, 60], raiz='//balsa', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) def get_xml(self): if not (self.xBalsa.valor): return '' xml = XMLNFe.get_xml(self) xml += '<balsa>' xml += self.xBalsa.xml xml += '</balsa>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.xBalsa.xml = arquivo xml = property(get_xml, set_xml) class Aquav(XMLNFe): def __init__(self): super(Aquav, self).__init__() self.vPrest = TagDecimal(nome='vPrest', tamanho=[1, 13, 1], decimais=[0, 2, 2], raiz='//aquav', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.vAFRMM = TagDecimal(nome='vAFRMM', tamanho=[1, 13, 1], decimais=[0, 2, 2], raiz='//aquav', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.xNavio = TagCaracter(nome='xNavio' , tamanho=[1, 60], raiz='//aquav', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.balsa = [] self.nViag = TagInteiro(nome='nViag', tamanho=[1, 10], raiz='//aquav', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.direc = TagCaracter(nome='direc', tamanho=[1, 1, 1], raiz='//aquav', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.irin = TagCaracter(nome='irin', tamanho=[1, 10], raiz='//aquav', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.detCont = [] def get_xml(self): xml = XMLNFe.get_xml(self) xml += '<aquav>' xml += self.vPrest.xml xml += self.vAFRMM.xml xml += self.xNavio.xml for b in self.balsa: xml += b.xml for d in self.detCont: xml += d.xml xml += self.nViag.xml xml += self.direc.xml xml += self.irin.xml xml += '</aquav>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.vPrest.xml = arquivo self.vAFRMM.xml = arquivo 
self.xNavio.xml = arquivo self.nViag.xml = arquivo self.direc.xml = arquivo self.irin.xml = arquivo self.balsa = self.le_grupo('//CTe/infCte/infCTeNorm/infModal/aquav/balsa', Balsa, sigla_ns='cte') self.detCont = self.le_grupo('//CTe/infCte/infCTeNorm/infModal/aquav/detCont', DetCont, sigla_ns='cte') xml = property(get_xml, set_xml) class EnderFerro(XMLNFe): def __init__(self): super(EnderFerro, self).__init__() self.xLgr = TagCaracter(nome='xLgr' , tamanho=[ 2, 255] , raiz='//enderFerro', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.nro = TagCaracter(nome='nro' , tamanho=[ 1, 60] , raiz='//enderFerro', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.xCpl = TagCaracter(nome='xCpl' , tamanho=[ 1, 60] , raiz='//enderFerro', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.xBairro = TagCaracter(nome='xBairro', tamanho=[ 2, 60] , raiz='//enderFerro', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.cMun = TagInteiro(nome='cMun' , tamanho=[ 7, 7, 7], raiz='//enderFerro', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.xMun = TagCaracter(nome='xMun' , tamanho=[ 2, 60] , raiz='//enderFerro', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.CEP = TagCaracter(nome='CEP' , tamanho=[ 8, 8, 8], raiz='//enderFerro', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.UF = TagCaracter(nome='UF' , tamanho=[ 2, 2] , raiz='//enderFerro', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) def get_xml(self): xml = XMLNFe.get_xml(self) xml += '<enderFerro>' xml += self.xLgr.xml xml += self.nro.xml xml += self.xCpl.xml xml += self.xBairro.xml xml += self.cMun.xml xml += self.xMun.xml xml += self.CEP.xml xml += self.UF.xml xml += '</enderFerro>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.xLgr.xml = arquivo self.nro.xml = arquivo self.xCpl.xml = arquivo self.xBairro.xml = arquivo self.cMun.xml = arquivo self.xMun.xml = arquivo self.CEP.xml = arquivo self.UF.xml = arquivo xml = property(get_xml, set_xml) class FerroEnv(XMLNFe): def __init__(self): super(FerroEnv, self).__init__() self.CNPJ = TagCaracter(nome='CNPJ', tamanho=[ 0, 14], raiz='//ferroEnv', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.cInt = TagCaracter(nome='cInt', tamanho=[ 1, 10], raiz='//ferroEnv', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.IE = TagCaracter(nome='IE', tamanho=[ 2, 14], raiz='//ferroEnv', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.xNome = TagCaracter(nome='xNome', tamanho=[ 2, 60] , raiz='//ferroEnv', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.enderFerro = EnderFerro() def get_xml(self): if not (self.CNPJ.valor): return '' xml = XMLNFe.get_xml(self) xml += '<ferroEnv>' xml += self.CNPJ.xml xml += self.cInt.xml xml += self.IE.xml xml += self.xNome.xml xml += self.enderFerro.xml xml += '</ferroEnv>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.CNPJ.xml = arquivo self.cInt.xml = arquivo self.IE.xml = arquivo self.xNome.xml = arquivo self.enderFerro.xml = arquivo xml = property(get_xml, set_xml) class TrafMut(XMLNFe): def __init__(self): super(TrafMut, self).__init__() self.respFat = TagInteiro(nome='respFat' , tamanho=[ 1, 1, 1], raiz='//trafMut', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.ferrEmi = TagInteiro(nome='ferrEmi' , tamanho=[ 1, 1, 1], raiz='//trafMut', namespace=NAMESPACE_CTE, 
namespace_obrigatorio=False) self.vFrete = TagDecimal(nome='vFrete', tamanho=[1, 13, 1], decimais=[0, 2, 2], raiz='//trafMut', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.chCTeFerroOrigem = TagCaracter(nome='chCTeFerroOrigem' , tamanho=[ 44, 44], raiz='//trafMut', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.ferroEnv = [] def get_xml(self): if not (self.respFat.valor or self.ferrEmi.valor): return '' xml = XMLNFe.get_xml(self) xml += '<trafMut>' xml += self.respFat.xml xml += self.ferrEmi.xml xml += self.vFrete.xml xml += self.chCTeFerroOrigem.xml for f in self.ferroEnv: xml += f.xml xml += '</trafMut>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.respFat.xml = arquivo self.ferrEmi.xml = arquivo self.vFrete.xml = arquivo self.chCTeFerroOrigem.xml = arquivo self.ferroEnv = self.le_grupo('//CTe/infCte/infCTeNorm/infModal/ferrov/trafMut/ferroEnv', FerroEnv, sigla_ns='cte') xml = property(get_xml, set_xml) class Ferrov(XMLNFe): def __init__(self): super(Ferrov, self).__init__() self.tpTraf = TagInteiro(nome='tpTraf', tamanho=[1, 1, 1], raiz='//ferrov', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.trafMut = TrafMut() self.fluxo = TagCaracter(nome='fluxo', tamanho=[ 1, 10], raiz='//ferrov', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) def get_xml(self): xml = XMLNFe.get_xml(self) xml += '<ferrov>' xml += self.tpTraf.xml xml += self.trafMut.xml xml += self.fluxo.xml xml += '</ferrov>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.tpTraf.xml = arquivo self.trafMut.xml = arquivo self.fluxo.xml = arquivo xml = property(get_xml, set_xml) class Duto(XMLNFe): def __init__(self): super(Duto, self).__init__() self.vTar = TagDecimal(nome='vTar', tamanho=[1, 9, 1], decimais=[0, 6, 6], raiz='//duto', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) self.dIni = TagData(nome='dIni', raiz='//duto', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.dFim = TagData(nome='dFim', raiz='//duto', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) def get_xml(self): xml = XMLNFe.get_xml(self) xml += '<duto>' xml += self.vTar.xml xml += self.dIni.xml xml += self.dFim.xml xml += '</duto>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.vTar.xml = arquivo self.dIni.xml = arquivo self.dFim.xml = arquivo xml = property(get_xml, set_xml) class InfSeg(XMLNFe): def __init__(self): super(Seg, self).__init__() self.xSeg = TagCaracter(nome='xSeg', tamanho=[1, 30], raiz='//infSeg', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.CNPJ = TagCaracter(nome='CNPJ', tamanho=[ 0, 14], raiz='//infSeg', namespace=NAMESPACE_CTE, namespace_obrigatorio=False, obrigatorio=False) def get_xml(self): xml = XMLNFe.get_xml(self) xml += '<infSeg>' xml += self.xSeg.xml xml += self.CNPJ.xml xml += '</infSeg>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.xSeg.xml = arquivo self.CNPJ.xml = arquivo xml = property(get_xml, set_xml) class Seg(XMLNFe): def __init__(self): super(Seg, self).__init__() self.infSeg = InfSeg() self.nApol = TagCaracter(nome='nApol', tamanho=[1, 20], raiz='//seg', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.nAver = TagCaracter(nome='nAver', tamanho=[1, 20], raiz='//seg', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) def get_xml(self): if not (self.nApol.valor or self.infSeg is not None): return '' xml = XMLNFe.get_xml(self) xml += '<seg>' xml += self.infSeg.xml xml += self.nApol.xml xml += 
self.nAver.xml xml += '</seg>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.infSeg.xml = arquivo self.nApol.xml = arquivo self.nAver.xml = arquivo xml = property(get_xml, set_xml) class Multimodal(XMLNFe): def __init__(self): super(Multimodal, self).__init__() self.COTM = TagCaracter(nome='COTM', tamanho=[1, 20], raiz='//multimodal', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.indNegociavel = TagInteiro(nome='indNegociavel', tamanho=[1, 1, 1], raiz='//multimodal', namespace=NAMESPACE_CTE, namespace_obrigatorio=False) self.seg = Seg() def get_xml(self): xml = XMLNFe.get_xml(self) xml += '<multimodal>' xml += self.COTM.xml xml += self.indNegociavel.xml xml += self.seg.xml xml += '</multimodal>' return xml def set_xml(self, arquivo): if self._le_xml(arquivo): self.COTM.xml = arquivo self.indNegociavel.xml = arquivo self.seg.xml = arquivo xml = property(get_xml, set_xml)
lgpl-2.1
-6,469,052,802,770,192,000
36.894737
188
0.597147
false
2.894079
false
false
false
seanxwzhang/LeetCode
031 Next permutation/solution.py
1
1427
#!/usr/bin/env python

# Implement next permutation, which rearranges numbers into the lexicographically next greater permutation of numbers.
# If such arrangement is not possible, it must rearrange it as the lowest possible order (ie, sorted in ascending order).
# The replacement must be in-place, do not allocate extra memory.
# Here are some examples. Inputs are in the left-hand column and its corresponding outputs are in the right-hand column.
# 1,2,3 → 1,3,2
# 3,2,1 → 1,2,3
# 1,1,5 → 1,5,1

# Brute force: find all permutations, then find the next greater permutation, complexity O(n!)
# Essentially we need to find:
# 1. An index i indicating the first difference between original permutation and current permutation
#    original[i] < current[i]
# 2. current[i:] should be the smallest permutation possible
# 3. index i should be the largest possible


class Solution(object):
    def nextPermutation(self, nums):
        """
        :type nums: List[int]
        :rtype: void Do not return anything, modify nums in-place instead.
        """
        for i in range(len(nums) - 2, -1, -1):
            # indices to the right of i whose values are greater than nums[i]
            swapCandidates = [j for j in range(i + 1, len(nums)) if nums[j] > nums[i]]
            if swapCandidates:
                # swap nums[i] with the smallest value greater than it ...
                bestSwap = min(swapCandidates, key=lambda j: nums[j])
                nums[i], nums[bestSwap] = nums[bestSwap], nums[i]
                # ... and make the suffix the lowest possible order
                nums[i + 1:] = sorted(nums[i + 1:])
                return
        # nums is already the highest permutation: wrap around to the lowest
        nums.reverse()
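# Illustrative usage (not part of the original submission):
#
#   s = Solution()
#   nums = [1, 2, 3]
#   s.nextPermutation(nums)   # nums is now [1, 3, 2]
#   nums = [3, 2, 1]
#   s.nextPermutation(nums)   # no greater permutation, wraps to [1, 2, 3]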
mit
7,357,985,183,012,399,000
48
121
0.680507
false
3.543641
false
false
false
TheTimmy/spack
var/spack/repos/builtin/packages/nalu/package.py
1
2287
############################################################################## # Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by Todd Gamblin, [email protected], All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/llnl/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * class Nalu(CMakePackage): """Nalu: a generalized unstructured massively parallel low Mach flow code designed to support a variety of energy applications of interest (most notably Wind ECP) built on the Sierra Toolkit and Trilinos solver Tpetra/Epetra stack """ homepage = "https://github.com/NaluCFD/Nalu" url = "https://github.com/NaluCFD/Nalu.git" version('master', git='https://github.com/NaluCFD/Nalu.git', branch='master') # Currently Nalu only builds static libraries; To be fixed soon depends_on('yaml-cpp+pic~shared') depends_on('trilinos~shared+exodus+tpetra+muelu+belos+ifpack2+amesos2+zoltan+stk+boost~superlu-dist+superlu+hdf5+zlib+pnetcdf@master') def cmake_args(self): spec = self.spec options = [] options.extend([ '-DTrilinos_DIR:PATH=%s' % spec['trilinos'].prefix, '-DYAML_DIR:PATH=%s' % spec['yaml-cpp'].prefix, '-DENABLE_INSTALL:BOOL=ON' ]) return options
lgpl-2.1
-967,329,023,991,329,200
40.581818
138
0.659816
false
3.780165
false
false
false
gngrwzrd/gity
python/getremotebranches.py
2
1653
# Copyright Aaron Smith 2009 # # This file is part of Gity. # # Gity is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Gity is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Gity. If not, see <http://www.gnu.org/licenses/>. from _util import * try: import re,os,subprocess,simplejson as json except Exception,e: sys.stderr.write(str(e)) exit(84) command="" try: from _argv import * if not options.misc: raise Exception("Gitty Error: getting remote branches requires a remote.") remote=sanitize_str(options.misc[0]) command="%s %s %s"%(options.git,"ls-remote --heads",remote) rcode,stout,sterr=run_command(command) if server_hung_up(sterr):exit(85) if server_unreachable(sterr):exit(86) rcode_for_git_exit(rcode,sterr) lines=re.split("\n",stout) if len(lines)>0:lines.pop() finals=[] for line in lines: if line=="":continue a=re.search("\t[a-zA-Z0-9\/].*",line) l=a.group(0) b=l.split("/")[-1] finals.append(b) sys.stdout.write(json.dumps(finals)) exit(0) except Exception, e: sys.stderr.write("The get remote branches command threw this error: " + str(e)) sys.stderr.write("\ncommand: %s\n" % command) log_gity_version(options.gityversion) log_gitv(options.git) exit(84)
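# Illustrative sketch of what the parsing above does (the sample hashes and
# branch names are made up):
#
#   `git ls-remote --heads <remote>` prints lines such as
#       a94a8fe5ccb19ba61c4c0873d391e987982fbbd3\trefs/heads/master
#       d6f2cbb9a46276a8f1ccf41f1d4cc0d1f1a3ab89\trefs/heads/feature/login
#   The regex keeps the tab-separated ref path, and split("/")[-1] keeps only
#   its last component, so stdout becomes ["master", "login"]; note that a
#   nested branch name like feature/login is reduced to "login" this way.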
gpl-3.0
7,285,313,256,843,885,000
32.755102
96
0.723533
false
2.983755
false
false
false
HaroldMills/Vesper
vesper/util/tests/test_time_frequency_analysis_utils.py
1
11794
import numpy as np from vesper.tests.test_case import TestCase import vesper.util.time_frequency_analysis_utils as tfa_utils ''' TODO: Add test cases for which window size differs from DFT size, and for which window is not rectangular. ''' ''' TODO: Given that we need to test FFTs and spectrograms implemented in various programming languages, it might make sense to prepare a set of test cases in a language-portable format like JSON that can be used by test code in the different languages. ''' class TimeFrequencyAnalysisUtilsTests(TestCase): def test_get_dft_analysis_data(self): cases = [ (1000, 4, None, 4, [0, 250, 500]) ] for sample_rate, window_size, dft_size, expected_dft_size, \ expected_freqs in cases: expected_freqs = np.array(expected_freqs) actual_dft_size, actual_freqs = tfa_utils.get_dft_analysis_data( sample_rate, window_size, dft_size) self.assertEqual(actual_dft_size, expected_dft_size) self.assertTrue(np.array_equal(actual_freqs, expected_freqs)) def test_get_dft_size(self): cases = [ (1, 1), (2, 2), (3, 4), (4, 4), (5, 8), (6, 8), (7, 8), (8, 8), (9, 16) ] for window_size, expected in cases: actual = tfa_utils.get_dft_size(window_size) self.assertEqual(actual, expected) def test_get_dft_freqs(self): cases = [ (1000, 1, [0]), (1000, 2, [0, 500]), (1000, 4, [0, 250, 500]), (2000, 8, [0, 250, 500, 750, 1000]) ] for sample_rate, dft_size, expected in cases: expected = np.array(expected) actual = tfa_utils.get_dft_freqs(sample_rate, dft_size) self.assertTrue(np.array_equal(actual, expected)) def test_get_dft_bin_num(self): cases = [ ((0, 8000, 8), 0), ((4000, 8000, 8), 4), ((1000, 8000, 8), 1), ((499, 8000, 8), 0), ((501, 8000, 8), 1), ((11024.5, 22050., 8), 4) ] for args, expected in cases: actual = tfa_utils.get_dft_bin_num(*args) self.assertEqual(actual, expected) def test_get_num_analysis_records(self): cases = [ (0, 8, 4, 0), (8, 8, 4, 1), (16, 8, 4, 3), (17, 8, 4, 3), (18, 8, 4, 3), (19, 8, 4, 3), (20, 8, 4, 4), (20, 8, 3, 5), (21, 8, 3, 5), (22, 8, 3, 5), (23, 8, 3, 6) ] for num_samples, window_size, hop_size, expected in cases: actual = tfa_utils.get_num_analysis_records( num_samples, window_size, hop_size) self.assertEqual(actual, expected) def test_get_num_analysis_records_errors(self): cases = [ # record size zero (0, 0, 1), # hop size zero (0, 1, 0), # hop size exceeds record size (0, 1, 2) ] for args in cases: self._assert_raises( ValueError, tfa_utils.get_num_analysis_records, *args) def test_get_analysis_records_1d(self): """Tests `get_analysis_records` with 1-dimensional input.""" samples = np.arange(8) cases = [ # record size and hop size equal (1, 1, [[0], [1], [2], [3], [4], [5], [6], [7]]), (2, 2, [[0, 1], [2, 3], [4, 5], [6, 7]]), (3, 3, [[0, 1, 2], [3, 4, 5]]), (4, 4, [[0, 1, 2, 3], [4, 5, 6, 7]]), (5, 5, [[0, 1, 2, 3, 4]]), (8, 8, [[0, 1, 2, 3, 4, 5, 6, 7]]), # record size and hop size not equal (2, 1, [[0, 1], [1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [6, 7]]), (3, 2, [[0, 1, 2], [2, 3, 4], [4, 5, 6]]), (4, 2, [[0, 1, 2, 3], [2, 3, 4, 5], [4, 5, 6, 7]]), (4, 3, [[0, 1, 2, 3], [3, 4, 5, 6]]), ] self._test_get_analysis_records(samples, cases) def _test_get_analysis_records(self, samples, cases): for record_size, hop_size, expected in cases: expected = np.array(expected) actual = tfa_utils._get_analysis_records( samples, record_size, hop_size) self._assert_arrays_equal(actual, expected) def test_get_analysis_records_2d(self): """Tests `get_analysis_records` with 2-dimensional input.""" samples = np.arange(8).reshape((2, 4)) cases = [ # record size and hop size 
equal (1, 1, [[[0], [1], [2], [3]], [[4], [5], [6], [7]]]), (2, 2, [[[0, 1], [2, 3]], [[4, 5], [6, 7]]]), (3, 3, [[[0, 1, 2]], [[4, 5, 6]]]), (4, 4, [[[0, 1, 2, 3]], [[4, 5, 6, 7]]]), # record size and hop size not equal (2, 1, [[[0, 1], [1, 2], [2, 3]], [[4, 5], [5, 6], [6, 7]]]), (3, 1, [[[0, 1, 2], [1, 2, 3]], [[4, 5, 6], [5, 6, 7]]]), (3, 2, [[[0, 1, 2]], [[4, 5, 6]]]) ] self._test_get_analysis_records(samples, cases) def test_compute_spectrogram(self): # This tests that our spectrogram function produces the # expected output for an input comprising a single channel # with a single window's worth of cosine samples. We use # a rectangular window so the expected output spectrum has # a particularly simple form. for num_channels in [1, 2]: for dft_size in [1, 2, 4, 8, 16]: if dft_size == 1: hop_sizes = [1] else: hop_sizes = [dft_size // 2, dft_size] for hop_size in hop_sizes: for bin_num in range(dft_size // 2 + 1): self._test_compute_spectrogram( num_channels, dft_size, hop_size, bin_num) def _test_compute_spectrogram( self, num_channels, dft_size, hop_size, bin_num): num_samples = dft_size * 2 samples = self._create_test_signal( num_channels, num_samples, dft_size, bin_num) window = np.ones(dft_size) spectra = tfa_utils.compute_spectrogram( samples, window, hop_size, dft_size) expected = self._get_expected_spectra( num_channels, num_samples, hop_size, dft_size, bin_num) self.assertTrue(np.allclose(spectra, expected)) def _create_test_signal( self, num_channels, num_samples, dft_size, bin_num): phase_factor = 2 * np.pi * bin_num / dft_size samples = np.cos(phase_factor * np.arange(num_samples)) if num_channels == 2: samples = np.stack((samples, np.ones(num_samples))) return samples def _get_expected_spectra( self, num_channels, num_samples, hop_size, dft_size, bin_num): num_spectra = tfa_utils.get_num_analysis_records( num_samples, dft_size, hop_size) spectrum = self._get_expected_spectrum(dft_size, bin_num) spectra = np.ones((num_spectra, 1)) * spectrum if num_channels == 2: spectrum = self._get_expected_spectrum(dft_size, 0) spectra_1 = np.ones((num_spectra, 1)) * spectrum spectra = np.stack((spectra, spectra_1)) return spectra def _get_expected_spectrum(self, dft_size, bin_num): num_bins = dft_size // 2 + 1 spectrum = np.zeros(num_bins) spectrum[bin_num] = dft_size ** 2 if bin_num != 0 and bin_num != num_bins - 1: spectrum[bin_num] /= 4 return spectrum.reshape((1, len(spectrum))) def test_scale_spectrogram(self): cases = [ # empty (np.zeros((0, 1)), np.zeros((0, 1))), (np.zeros((0, 3)), np.zeros((0, 3))), # mono ([[1], [2]], [[1], [2]]), ([[1, 2], [3, 4]], [[.5, 1], [1.5, 2]]), ([[1, 2, 3]], [[.25, 1, .75]]), # stereo ([[[1], [2]], [[3], [4]]], [[[1], [2]], [[3], [4]]]), ([[[1, 2], [3, 4]], [[5, 6], [7, 8]]], [[[.5, 1], [1.5, 2]], [[2.5, 3], [3.5, 4]]]), ([[[1, 2, 3]], [[4, 5, 6]]], [[[.25, 1, .75]], [[1, 2.5, 1.5]]]) ] for spectra, expected in cases: spectra = np.array(spectra, dtype='float64') expected = np.array(expected, dtype='float64') self._test_op(expected, tfa_utils.scale_spectrogram, spectra) def _test_op(self, expected, op, input, *args, **kwargs): # out of place, result allocated by op actual = op(input, *args, **kwargs) self.assertFalse(actual is input) self._assert_arrays_equal(actual, expected) # out of place, result preallocated actual = np.zeros_like(expected) kwargs_ = dict(kwargs, out=actual) actual = op(input, *args, **kwargs_) self.assertFalse(actual is input) self._assert_arrays_equal(actual, expected) # in place kwargs_ = dict(kwargs, out=input) actual = 
op(input, *args, **kwargs_) self.assertTrue(actual is input) self._assert_arrays_equal(actual, expected) def test_linear_to_log(self): minus_infinity = tfa_utils.SMALL_POWER_DB cases = [ # empty (np.zeros((0, 1)), np.zeros((0, 1))), (np.zeros((0, 3)), np.zeros((0, 3))), # mono ([[0], [1], [10]], [[minus_infinity], [0], [10]]), ([[0, 1], [1, 10]], [[minus_infinity, 0], [0, 10]]), # stereo ([[[0, 1], [1, 10]], [[1, 10], [10, 100]]], [[[minus_infinity, 0], [0, 10]], [[0, 10], [10, 20]]]) ] # default reference power for spectra, expected in cases: spectra = np.array(spectra, dtype='float64') expected = np.array(expected, dtype='float64') self._test_op(expected, tfa_utils.linear_to_log, spectra) # explicit reference power reference_power = 10 reference_power_db = 10 * np.log10(reference_power) for spectra, expected in cases: spectra = np.array(spectra, dtype='float64') expected = np.array(expected, dtype='float64') expected[expected != minus_infinity] -= reference_power_db self._test_op( expected, tfa_utils.linear_to_log, spectra, reference_power) def test_log_to_linear(self): cases = [ # empty (np.zeros((0, 1)), np.zeros((0, 1))), (np.zeros((0, 3)), np.zeros((0, 3))), # mono ([[-10], [0], [10]], [[.1], [1], [10]]), ([[-10, 0], [0, 10]], [[.1, 1], [1, 10]]), # stereo ([[[-10, 0], [0, 10]], [[0, 10], [10, 20]]], [[[.1, 1], [1, 10]], [[1, 10], [10, 100]]]) ] # default reference power for spectra, expected in cases: spectra = np.array(spectra, dtype='float64') expected = np.array(expected, dtype='float64') self._test_op(expected, tfa_utils.log_to_linear, spectra) # explicit reference power reference_power = 10 for spectra, expected in cases: spectra = np.array(spectra, dtype='float64') expected = np.array(expected, dtype='float64') expected *= reference_power self._test_op( expected, tfa_utils.log_to_linear, spectra, reference_power)
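# Worked example of _get_expected_spectrum above (illustrative): with
# dft_size=4 and bin_num=1 there are dft_size // 2 + 1 = 3 bins and the
# expected spectrum is [0, 4 ** 2 / 4, 0] = [0, 4, 0].  A rectangular-window
# cosine centred on an interior bin carries only a quarter of dft_size ** 2
# because its energy is split between the positive- and negative-frequency
# bins; the DC (bin 0) and Nyquist (last) bins keep the full dft_size ** 2.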
mit
602,153,895,218,348,300
28.858228
77
0.483805
false
3.364907
true
false
false
drewswinney/myo
Lumyo Desktop/Lumyo Python/lumyo/MyoREST.py
1
10667
# Coded by Walker Argendeli, Lumyo Capstone Group from urlparse import urlparse, urljoin from datetime import datetime, timedelta import requests, json from getpass import getpass from enum import Enum, IntEnum from collections import namedtuple # class API: # # def __init__(self, low_myo, timestamp, firmware_version): # super(DataCaptureListener.MyoProxy, self).__init__() # self._connect_time = None # # @property # def connected(self): # with self.synchronized: # return ( # self._connect_time is not None and # self._disconnect_time is None # ) _apiURL = "http://drewswinney.com:8080/api/" _requestSuccessful = 200 def _apiParams(**params): return params # It turns out that classes cannot have static function pointers; they'll be converted to unbound methods >< dumb # class RequestMethod(Enum): # GET=requests.get # POST=requests.post _RequestMethods = namedtuple('RequestMethod', ['GET', 'POST', 'PUT']) _RequestMethod = { _RequestMethods.GET : requests.get, _RequestMethods.POST : requests.post, _RequestMethods.PUT : requests.put } def _apiRequest(reqURL, reqParams, reqMethod): response = reqMethod(reqURL, reqParams) # auth=('user', 'pw') requestStatus = response.status_code if requestStatus != _requestSuccessful: raise RequestError("Request failed with error code " + str(requestStatus)) return response def _request(path, params, methodName): reqURL = urljoin(_apiURL, path) reqParams = params reqMethod = _RequestMethod[methodName] return _apiRequest(reqURL, reqParams, reqMethod) # _dateTimeFormatString = '%Y-%m-%d %H:%M:%S:%f' # TODO Suppport microseconds here _dateTimeFormatString = '%Y-%m-%d %H:%M:%S' def timeToSQL(pyTime): return pyTime.strftime(_dateTimeFormatString) def timeFromSQL(sqlTime): return datetime.strptime(sqlTime, _dateTimeFormatString) class RequestError(Exception): pass class Connection: __loginAuthPath = "loginauth" def __init__(self, username=None, password=None): if not username: username = raw_input("Username: ") if not password: password = getpass() self.username = username self.__password = password # TODO We should be using salted hashes for this >< self.loginID = None self.__connect() self.session = None # def __del__(self): # if self.session: # self.session.updateSession() class AuthError(RequestError): pass def __connect(self): loginAuthParams = _apiParams(username=self.username, password=self.__password) response = _request(Connection.__loginAuthPath, loginAuthParams, _RequestMethods.POST) # if responseText == "NOTFOUND": try: self.loginID = int(response.text) except: raise Connection.AuthError("Could not establish authenticated connection with API") @property def connected(self): return self.loginID is not None def openNewSession(self, sessionType, sessionStartTime=None, initialTimestamp=None): if not self.connected: if not self.__connect(): raise Connection.AuthError("Can't open new session -- no valid authenticated connection to API") self.session = Session.openNewSession(self.loginID, sessionType, sessionStartTime, initialTimestamp) return self.session def openSession(self, sessionID): if not self.connected: if not self.__connect(): raise Connection.AuthError("Can't open a session -- no valid authenticated connection to API") self.session = Session.openSession(sessionID, self.loginID) return self.session class Session: __sessionPath = "session" class SessionType(IntEnum): SLEEP = 0 PEDOMETRY = 1 REP_COUNT = 2 def __init__(self, loginID, sessionID, sessionType, sessionStartTime, initialTimestamp=None, sessionEndTime=None, sessionQuality=None): self.loginID = 
loginID self.sessionID = sessionID self.sessionType = sessionType self.sessionStartTime = sessionStartTime self.sessionEndTime = sessionEndTime self.initialTimestamp = initialTimestamp self.sessionQuality = sessionQuality @classmethod def openNewSession(cls, loginID, sessionType, sessionStartTime=None, initialTimestamp=None): if not sessionStartTime: sessionStartTime = datetime.now() sessionStartParam = timeToSQL(sessionStartTime) sessionTypeID = int(sessionType) sessionIDParams = _apiParams(loginID=loginID, sessionTypeID=sessionTypeID, sessionStartTime=sessionStartParam) response = _request(Session.__sessionPath, sessionIDParams, _RequestMethods.POST) responseJSON = response.json() sessionID = responseJSON['id'] return cls(loginID, sessionID, sessionType, sessionStartTime, initialTimestamp) @classmethod def openSession(cls, sessionID, loginID=None): path = Session.__sessionPath + "/" + str(sessionID) params = None response = _request(path, params, _RequestMethods.GET) responseJSON = response.json() sessionLoginID = responseJSON['loginID'] if loginID is not None: if loginID != sessionLoginID: raise RequestError("Current loginID doesn't match the loginID of the requested session") sessionType = SessionType(int(responseJSON['sessionTypeID'])) sessionStartTime = timeFromSQL(responseJSON['sessionStartTime']) sessionEndTime = timeFromSQL(responseJSON['sessionEndTime']) sessionQuality = int(responseJSON['sessionQuality']) initialTimestamp = None # TODO return cls(sessionLoginID, sessionID, sessionType, sessionStartTime, initialTimestamp, sessionEndTime, sessionQuality) def updateSession(self): updateSessionPath = Session.__sessionPath + "/" + self.sessionID sessionTypeID = int(self.sessionType) sessionStartTime = timeToSQL(self.sessionStartTime) sessionEndTime = timeToSQL(self.sessionEndTime) params = _apiParams(loginID=self.loginID, sessionTypeID=sessionTypeID, sessionStartTime=sessionStartTime, sessionEndTime=sessionEndTime, sessionQuality=self.sessionQuality) response = _request(updateSessionPath, params, _RequestMethods.PUT) responseJSON = response.json() sessionLoginID = responseJSON['loginID'] # TODO Encapsulate the below __allDPSubPath = "sbysessionid/" def getDatapoints(self, dpSubPath): path = dpSubPath + self.__allDPSubPath + str(self.sessionID) params = None response = _request(path, params, _RequestMethods.GET) return response.json() def getFirstTimestamp(self, dpSubPath, currTimestamp, timestampKey): datapoints = self.getDatapoints(dpSubPath) if isinstance(datapoints, list): if not datapoints: # If no entries firstTimestamp = currTimestamp else: # If multiple entries firstTimestamp = min(timeFromSQL(dp[timestampKey]) for dp in datapoints) else: # If 1 entry firstTimestamp = datapoints[timestampKey] return firstTimestamp def __genericDataPointParams(self, dpSubPath, timestamp, timestampKey): paramsDict = {'sessionID' : self.sessionID} if self.initialTimestamp is None: # self.initialTimestamp = self.getFirstTimestamp(dpSubPath, timestamp, timestampKey) self.initialTimestamp = timestamp microsecondsDiff = timestamp - self.initialTimestamp timeDiff = timedelta(microseconds=microsecondsDiff) pyTimestamp = self.sessionStartTime + timeDiff sqlTime = timeToSQL(pyTimestamp) paramsDict[timestampKey] = sqlTime return paramsDict __emgPath = "emgdatapoint" def addEMGDataPoint(self, timestamp, emgData): paramsDict = self.__genericDataPointParams(Session.__emgPath, timestamp, 'emgpDateTime') for podNum, podData in enumerate(emgData, 1): paramsDict['emgpPod' + str(podNum)] = podData params = 
_apiParams(**paramsDict) response = _request(Session.__emgPath, params, _RequestMethods.POST) __gyroPath = "rotationdatapoint" def addGyroDataPoint(self, timestamp, gyroData): paramsDict = self.__genericDataPointParams(Session.__gyroPath, timestamp, 'rdpDateTime') paramsDict['rdpXRotation'] = gyroData[0] paramsDict['rdpYRotation'] = gyroData[1] paramsDict['rdpZRotation'] = gyroData[2] params = _apiParams(**paramsDict) response = _request(Session.__gyroPath, params, _RequestMethods.POST) __accelPath = "accelerationdatapoint" def addAccelDataPoint(self, timestamp, accelData): paramsDict = self.__genericDataPointParams(Session.__accelPath, timestamp, 'adpDateTime') paramsDict['adpXAcceleration'] = accelData[0] paramsDict['adpYAcceleration'] = accelData[1] paramsDict['adpZAcceleration'] = accelData[2] params = _apiParams(**paramsDict) response = _request(Session.__accelPath, params, _RequestMethods.POST) __orientPath = "orientationdatapoint" def addOrientDataPoint(self, timestamp, orientData): # TODO Decide what to do about quaternions vs Euler angles paramsDict = self.__genericDataPointParams(Session.__orientPath, timestamp, 'odpDateTime') paramsDict['odpXRotation'] = orientData[0] paramsDict['odpYRotation'] = orientData[1] paramsDict['odpZRotation'] = orientData[2] params = _apiParams(**paramsDict) response = _request(Session.__orientPath, params, _RequestMethods.POST) def main(): testUsername = "walker" testPassword = "walkersux" connection = Connection(testUsername, testPassword) session = connection.openNewSession(Session.SessionType.SLEEP) session.initialTimestamp = 179484700928 dpTimestamp = 179488247655 dpData = [-1.062011719, 0.30859375, 0.79296875] session.addAccelDataPoint(dpTimestamp, dpData) # TODO datapoint datatypes of Double(8,2) # accell, orient: signed double(10,9) # gyro: signed double(7, 4) # emg: signed smallint if __name__ == "__main__": main()
mit
-1,072,669,365,186,132,100
39.256604
180
0.664104
false
4.136099
false
false
false
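The datapoint methods in the record above convert device microsecond counters into SQL wall-clock times by offsetting from the first timestamp seen; a small sketch of that arithmetic using the example values from the record's main():

from datetime import datetime, timedelta

session_start = datetime(2016, 3, 1, 12, 0, 0)   # illustrative session start
initial_ts = 179484700928                        # first device timestamp seen (microseconds)
current_ts = 179488247655                        # timestamp of the datapoint being posted

# offset the datapoint from the session start by the elapsed device time
offset = timedelta(microseconds=current_ts - initial_ts)
sql_time = (session_start + offset).strftime('%Y-%m-%d %H:%M:%S')
print(sql_time)                                  # 2016-03-01 12:00:03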
krak3n/Facio
tests/test_config.py
1
9744
# -*- coding: utf-8 -*- """ .. module:: tests.test_config :synopsis: Tests for the Facio config module. """ import os import six from mock import MagicMock, patch, PropertyMock from facio.config import ConfigurationFile, CommandLineInterface, Settings from facio.exceptions import FacioException from six import StringIO from six.moves import configparser as ConfigParser from textwrap import dedent from . import BaseTestCase class TestCommandLintInterface(BaseTestCase): def setUp(self): self._patch_clint([ 'facio.exceptions.puts', ]) patcher = patch('facio.config.state.state', new_callable=PropertyMock, create=True) self.mock_state = patcher.start() self.mock_state.context_variables = {} self.addCleanup(patcher.stop) @patch('facio.config.docopt') @patch('facio.config.CommandLineInterface.validate_project_name') def test_project_name_should_be_validated( self, mock_validate, mock_docopt): mock_docopt.return_value = { '<project_name>': 'foo' } i = CommandLineInterface() i.start() mock_validate.assert_called_with('foo') def test_valid_project_name(self): valid_names = [ 'this_is_valid', 'this1is_valid', 'Thisisvalid'] i = CommandLineInterface() for name in valid_names: i.validate_project_name(name) self.assertEqual(name, self.mock_state.project_name) self.assertEqual({'PROJECT_NAME': name}, self.mock_state.context_variables) @patch('sys.exit') def test_invalid_project_name(self, mock_exit): invalid_names = [ 'this_is_not-valid', 'this_is not_valid', '*this_is_not_valid'] i = CommandLineInterface() for name in invalid_names: with self.assertRaises(FacioException): i.validate_project_name(name) self.mocked_facio_exceptions_puts.assert_any_call( 'Error: Project names can only contain numbers letters and ' 'underscores') class TestConfigurationFile(BaseTestCase): """ Tests for facio.config.ConfigurationFile. 
""" config_path = os.path.expanduser('~/.facio.cfg') def setUp(self): self._patch_clint([ 'facio.exceptions.puts', 'facio.config.ConfigurationFile.out', 'facio.config.ConfigurationFile.warning', ]) def _patch_open(self, data): if six.PY3: func = 'builtins.open' else: func = '__builtin__.open' patcher = patch(func, return_value=StringIO( data)) self.addCleanup(patcher.stop) return patcher @patch('facio.config.ConfigParser.ConfigParser.readfp') def test_warning_no_config_file(self, mock_readfp): mock_readfp.side_effect = IOError c = ConfigurationFile() c.read() self.mocked_facio_config_ConfigurationFile_warning.assert_any_call( "{0} Not found".format(self.config_path)) @patch('sys.exit') def test_config_read_parse_error(self, exit_mock): config = dedent("""\ [this_is not = formatted correctly """) patch_open = self._patch_open(config) patch_open.start() with self.assertRaises(FacioException): c = ConfigurationFile() c.read() self.mocked_facio_exceptions_puts.assert_any_call( "Error: Unable to parse {0}".format(self.config_path)) self.assertTrue(exit_mock.called) def test_config_read_success(self): config = dedent("""\ [template] template1 = /foo/bar/baz template2 = /baz/bar/foo """) patch_open = self._patch_open(config) patch_open.start() c = ConfigurationFile() c.read() self.mocked_facio_config_ConfigurationFile_out.assert_any_call( "Loaded {0}".format(self.config_path)) class TestSettings(BaseTestCase): def setUp(self): self._patch_clint([ 'facio.exceptions.puts', 'facio.config.Settings.out', 'facio.config.Settings.warning', 'facio.config.Settings.error', ]) # Mocks for ConfigFile and CommandLineInterface classes self.mock_interface() self.config = MagicMock() def mock_interface(self): self.interface = MagicMock() arguments = PropertyMock(return_value={ '<project_name>': 'foo' }) type(self.interface).arguments = arguments def test_attrs_set_on_init(self): s = Settings(self.interface, self.config) self.assertIsInstance(s.config, MagicMock) self.assertIsInstance(s.interface, MagicMock) @patch('sys.exit') def test_exception_raised_select_template_no_config(self, mock_exit): arguments = PropertyMock(return_value={ '--select': True}) type(self.interface).arguments = arguments self.config.items.side_effect = ConfigParser.NoSectionError('template') s = Settings(self.interface, self.config) with self.assertRaises(FacioException): with self.assertRaises(ConfigParser.NoSectionError): s.get_template_path() self.mocked_facio_exceptions_puts.assert_any_call( 'Error: Missing [template] section in Facio configuration file.') self.assertTrue(mock_exit.called) @patch('sys.exit') def test_default_template_returned_none_defined(self, mock_exit): arguments = PropertyMock(return_value={ '--select': False}) type(self.interface).arguments = arguments s = Settings(self.interface, self.config) path = s.get_template_path() self.assertEqual(Settings.default_template_path, path) def test_path_returned_if_not_alias(self): arguments = PropertyMock(return_value={ '--template': '/foo/bar/baz'}) type(self.interface).arguments = arguments s = Settings(self.interface, self.config) path = s.get_template_path() self.assertEqual(path, '/foo/bar/baz') def test_path_retuend_from_alias(self): arguments = PropertyMock(return_value={ '--template': 'foobar'}) type(self.interface).arguments = arguments self.config.items.return_value = [('foobar', '/foo/bar/baz')] s = Settings(self.interface, self.config) path = s.get_template_path() self.assertEqual(path, '/foo/bar/baz') @patch('facio.base.input') def 
test_template_selection_input_success(self, mock_input): arguments = PropertyMock(return_value={ '--select': True}) type(self.interface).arguments = arguments self.config.items.return_value = [ ('foo', '/foo'), ('bar', '/bar'), ('baz', '/baz'), ] mock_input.return_value = 1 s = Settings(self.interface, self.config) path = s.get_template_path() self.assertEqual(path, '/foo') @patch('sys.exit') @patch('facio.base.input') def test_template_selection_input_error(self, mock_input, mock_exit): arguments = PropertyMock(return_value={ '--select': True}) type(self.interface).arguments = arguments self.config.items.return_value = [ ('foo', '/foo'), ] mock_input.return_value = 0 s = Settings(self.interface, self.config) with self.assertRaises(FacioException): with self.assertRaises(ValueError): s.get_template_path() self.mocked_facio_exceptions_puts.assert_any_call( 'Error: A template was not selected') self.assertTrue(mock_exit.called) def test_get_variables_from_cli(self): arguments = PropertyMock(return_value={ '--vars': 'foo=bar'}) type(self.interface).arguments = arguments s = Settings(self.interface, self.config) self.assertEqual(s.get_variables(), {'foo': 'bar'}) def test_empty_copy_ignore_no_files_section(self): self.config.get.side_effect = ConfigParser.NoSectionError('files') s = Settings(self.interface, self.config) self.assertEqual(s.copy_ignore_globs(), []) def test_empty_copy_ignore_no_option(self): self.config.get.side_effect = ConfigParser.NoOptionError( 'files', 'copy_ignore') s = Settings(self.interface, self.config) self.assertEqual(s.copy_ignore_globs(), []) def test_copy_ignore_returned_as_list(self): self.config.get.return_value = 'foo=bar,baz=foo' s = Settings(self.interface, self.config) self.assertEqual(s.copy_ignore_globs(), ['foo=bar', 'baz=foo']) def test_empty_render_ignore_no_section(self): self.config.get.side_effect = ConfigParser.NoSectionError('files') s = Settings(self.interface, self.config) self.assertEqual(s.render_ignore_globs(), []) def test_empty_render_ignore_no_option(self): self.config.get.side_effect = ConfigParser.NoOptionError( 'files', 'render_ignore') s = Settings(self.interface, self.config) self.assertEqual(s.render_ignore_globs(), []) def test_render_ignore_returned_as_list(self): self.config.get.return_value = 'foo=bar,baz=foo' s = Settings(self.interface, self.config) self.assertEqual(s.render_ignore_globs(), ['foo=bar', 'baz=foo'])
bsd-3-clause
-4,704,710,260,870,213,000
30.230769
79
0.603448
false
3.965812
true
false
false
xuet0ng/github-enterprise-wechat
rest/endpoint.py
1
1834
# -*- coding:utf-8 -*-
import json
import os

from flask import Flask, request

from message import pr_msg, ci_failed_msg
from wechat_sdk import WeChat

app = Flask(__name__)


@app.route('/')
def hello_world():
    return 'Hello World!'


if 'GITHUB_WECHAT_CORP_SECRET' in os.environ.keys():
    we_github = WeChat(
        url=os.environ.get('WECHAT_BASE_URL'),
        corp_id=os.environ.get('WECHAT_CORP_ID'),
        corp_secret=os.environ.get('GITHUB_WECHAT_CORP_SECRET'),
        agent_id=os.environ.get('GITHUB_WECHAT_AGENT_ID')
    )

    @app.route('/wechat', methods=['POST'])
    def github():
        payload = json.loads(request.data)
        repo = {
            'full_name': payload['repository']['full_name'],
            'owner': payload['repository']['owner']['login']
        }
        if 'pull_request' in payload:
            if payload['action'] not in ['closed', 'opened', 'reopened']:
                return 'ignore ' + payload['action']
            we_github.auto_send_text_card_message(
                pr_msg(repo, payload)
            )
            return 'yoyoyo'
        elif 'issue' in payload:
            return
        else:
            return 'not support'


if 'CI_WECHAT_CORP_SECRET' in os.environ.keys():
    we_ci = WeChat(
        url=os.environ.get('WECHAT_BASE_URL'),
        corp_id=os.environ.get('WECHAT_CORP_ID'),
        corp_secret=os.environ.get('CI_WECHAT_CORP_SECRET'),
        agent_id=os.environ.get('CI_WECHAT_AGENT_ID')
    )

    @app.route('/jenkins', methods=['POST'])
    def jenkins():
        payload = request.json
        if 'FINALIZED' in payload['build']['phase']:
            we_ci.auto_send_text_card_message(
                ci_failed_msg(payload)
            )
            return 'yoyoyo'
        return 'ignore'


if __name__ == '__main__':
    app.run()
apache-2.0
9,202,398,030,092,775,000
23.783784
73
0.556707
false
3.383764
false
false
false
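The /wechat handler in the record above only reads action, pull_request and a couple of repository fields from the GitHub payload; a sketch of that minimal event shape, with a commented hint at exercising it through Flask's test client (a real webhook payload carries far more fields):

import json

minimal_pr_event = {
    "action": "opened",
    "pull_request": {},
    "repository": {
        "full_name": "xuet0ng/github-enterprise-wechat",
        "owner": {"login": "xuet0ng"},
    },
}

# with the app from the record imported as `app`:
# client = app.test_client()
# resp = client.post('/wechat', data=json.dumps(minimal_pr_event))
# print(resp.data)   # b'yoyoyo' once the WeChat call succeeds
print(json.dumps(minimal_pr_event, indent=2))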
j91321/rext
modules/misc/cobham/admin_reset_code.py
1
1835
# Name:Cobham Aviator/Explorer/Sailor admin reset code generator
# File:admin_reset_code.py
# Author:Ján Trenčanský
# License: GNU GPL v3
# Created: 4.12.2015
# Last modified: 4.12.2015
# Shodan Dork:
# Description: generates predictable admin reset code for Cobham Aviator/Explorer/Sailor - CVE-2014-2943
# Based on work by Sinnet3000 and
# https://www.blackhat.com/docs/us-14/materials/us-14-Santamarta-SATCOM-Terminals-Hacking-By-Air-Sea-And-Land.pdf

import hashlib

import core.Misc
import core.io
from interface.messages import print_help, print_info


class Misc(core.Misc.RextMisc):
    """
    Name:Cobham Aviator/Explorer/Sailor admin reset code generator
    File:admin_reset_code.py
    Author:Ján Trenčanský
    License: GNU GPL v3
    Created: 4.12.2015
    Description: generates predictable admin reset code for Cobham Aviator/Explorer/Sailor - CVE-2014-2943
    Based on: Work by Sinnet3000 and
    https://www.blackhat.com/docs/us-14/materials/us-14-Santamarta-SATCOM-Terminals-Hacking-By-Air-Sea-And-Land.pdf

    Options:
    Name        Description
    serial      Serial number of the device
    """
    serial = "12345678"

    def __init__(self):
        core.Misc.RextMisc.__init__(self)

    def do_set(self, e):
        args = e.split(' ')
        if args[0] == "serial":
            self.serial = args[1]
            print_info("Serial number set to: " + self.serial)

    def do_run(self, e):
        m = hashlib.md5()
        m.update(bytearray.fromhex(self.serial) + b'\x00'*12 + "kdf04rasdfKKduzA".encode('utf-8'))
        code = m.hexdigest()
        print("Reset code: " + code)

    def do_serial(self, e):
        print_info(self.serial)

    def help_set(self):
        print_help("Set value of variable: \"set serial 12345678\"")

    def help_serial(self):
        print_help("Prints value of variable serial")

Misc()
gpl-3.0
-2,954,850,911,019,823,000
28.031746
115
0.680153
false
3.033167
false
false
false
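The admin_reset_code.py record above derives the CVE-2014-2943 reset code as md5(serial bytes + 12 zero bytes + a fixed salt); a minimal standalone sketch of the same derivation, using the record's default serial 12345678 purely as an example:

import hashlib

def cobham_reset_code(serial="12345678"):
    # md5(serial-as-hex-bytes + 12 zero bytes + fixed salt); the hex digest is the reset code
    m = hashlib.md5()
    m.update(bytearray.fromhex(serial) + b'\x00' * 12 + "kdf04rasdfKKduzA".encode('utf-8'))
    return m.hexdigest()

print("Reset code: " + cobham_reset_code())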
kmuthupa/dash-listener
dash-listener-alt.py
1
1226
import socket
import struct
import binascii
import time
import json
import urllib2
import requests

macs = {
    '74c246c3e349' : 'dash_tide'
}

def post_data():
    data = {
        "date": time.strftime("%Y-%m-%d"),
        "tally": '1'
    }
    requests.post("https://sheetsu.com/apis/390c6259", data)

def record_tally():
    print 'triggering tally... '
    post_data();

rawSocket = socket.socket(socket.AF_PACKET, socket.SOCK_RAW, socket.htons(0x0003))

while True:
    packet = rawSocket.recvfrom(2048)

    ethernet_header = packet[0][0:14]
    ethernet_detailed = struct.unpack("!6s6s2s", ethernet_header)

    arp_header = packet[0][14:42]
    arp_detailed = struct.unpack("2s2s1s1s2s6s4s6s4s", arp_header)

    # skip non-ARP packets
    ethertype = ethernet_detailed[2]
    if ethertype != '\x08\x06':
        continue

    source_mac = binascii.hexlify(arp_detailed[5])
    source_ip = socket.inet_ntoa(arp_detailed[6])
    dest_ip = socket.inet_ntoa(arp_detailed[8])

    if source_mac in macs:
        #print "ARP from " + macs[source_mac] + " with IP " + source_ip
        if macs[source_mac] == 'dash_tide':
            record_tally()
    else:
        print "Unknown MAC " + source_mac + " from IP " + source_ip
mit
4,926,280,512,311,301,000
26.863636
82
0.6354
false
3.004902
false
false
false
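The dash-listener record above assumes a fixed 28-byte ARP layout after the 14-byte Ethernet header; a self-contained sketch of that unpacking on a fabricated packet (the MAC matches the record's dash_tide entry, the IP addresses are made up):

import binascii
import socket
import struct

# hardware type, protocol type, hw size, proto size, opcode,
# sender MAC, sender IP, target MAC, target IP (28 bytes total)
arp_header = (b'\x00\x01' + b'\x08\x00' + b'\x06' + b'\x04' + b'\x00\x01' +
              b'\x74\xc2\x46\xc3\xe3\x49' + socket.inet_aton('192.168.1.23') +
              b'\x00' * 6 + socket.inet_aton('192.168.1.1'))

fields = struct.unpack("2s2s1s1s2s6s4s6s4s", arp_header)
source_mac = binascii.hexlify(fields[5]).decode()
source_ip = socket.inet_ntoa(fields[6])
print(source_mac, source_ip)   # 74c246c3e349 192.168.1.23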
sellberg/SACLA2016B8055
scripts/02_process_batch_runs.py
2
2603
#!/home/software/SACLA_tool/bin/python2.7
import os
import sys
from argparse import ArgumentParser

parser = ArgumentParser()
parser = ArgumentParser(description="Wrapper to run 01_process_runs.py that can be run as batch job")
parser.add_argument("-start", "--start-run", type=int, dest="start", required=True,
                    help="first run to process")
parser.add_argument("-stop", "--stop-run", type=int, dest="stop", default=0,
                    help="last run to process (default: only process 1 run)")
parser.add_argument("-exp", "--exp-year", type=int, dest="exp", default=2016,
                    help="experimental year to compress")
parser.add_argument("-multi", "--multi-run", action="store_true", dest="multi", required=False, default=False,
                    help="process multi-file run converted using DataConvert4")
parser.add_argument("-force", "--force-run", action="store_true", dest="force", required=False, default=False,
                    help="force run to process (overwrite existing folder)")
parser.add_argument("-tag", "--output-tag", type=str, dest="tag", default="run",
                    help="tag for output folder (default: run)")
parser.add_argument("-o", "--output-flag", type=str, dest="outputFlag",
                    help="where to process runs. 'W' refers to /work/perakis/ and 'UD' refers to '/UserData/fperakis'",
                    choices=['W','UD'], default='UD')
args = parser.parse_args()

log_dump = '/home/fperakis/qsub_dump/processed/'

if (args.stop < args.start):
    args.stop = args.start
assert( args.stop >= args.start )

for run in xrange( args.start, args.stop+1 ):
    RUN = str(run)  # run number as string

    # create a batchfile as a string (first line)
    #batchFile = '#PBS -u fperakis\n'
    batchFile = '#PBS -N %s_process\n' % RUN
    batchFile += '#PBS -d %s\n' % log_dump

    # second line is the command
    cmd = ['%s/01_process_runs.py -start %s -exp %d -tag %s -o %s' % (os.getcwd(), RUN, args.exp, args.tag, args.outputFlag)]
    if args.multi:
        cmd.append('-multi')
    if args.force:
        cmd.append('-force')
    batchFile = batchFile + ' '.join(cmd)

    # save a temp batchfile and direct it to qsub
    outfile = open('tmpc.batch', 'w')
    outfile.write(batchFile)
    outfile.close()
    os.system('qsub < tmpc.batch')
    os.remove('tmpc.batch')

# check the status of the submitted job using:
# 'qstat'
# check output of the submitted job using:
# 'tail /home/fperakis/qsub_dump/processed/%s_process.o*'
# check error of the submitted job using:
# 'tail /home/fperakis/qsub_dump/processed/%S_process.e*'
bsd-2-clause
514,893,911,962,557,500
40.31746
125
0.64272
false
3.307497
false
false
false
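For reference, with the defaults above (exp=2016, tag=run, o=UD) the temporary tmpc.batch written for run 100 would contain the lines below; <cwd> stands for the directory the wrapper is launched from:

#PBS -N 100_process
#PBS -d /home/fperakis/qsub_dump/processed/
<cwd>/01_process_runs.py -start 100 -exp 2016 -tag run -o UD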
IZSVenezie/VetEpiGIS-Stat
plugin/globalt_dialog.py
1
5863
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'globalt_dialog_base.ui' # # Created: Sat Jan 7 15:11:01 2017 # by: PyQt4 UI code generator 4.10.4 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_Dialog(object): def setupUi(self, Dialog): Dialog.setObjectName(_fromUtf8("Dialog")) Dialog.resize(561, 471) self.gridLayout_3 = QtGui.QGridLayout(Dialog) self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3")) self.splitter = QtGui.QSplitter(Dialog) self.splitter.setOrientation(QtCore.Qt.Horizontal) self.splitter.setObjectName(_fromUtf8("splitter")) self.label = QtGui.QLabel(self.splitter) self.label.setObjectName(_fromUtf8("label")) self.comboBox = QtGui.QComboBox(self.splitter) self.comboBox.setMinimumSize(QtCore.QSize(251, 0)) self.comboBox.setObjectName(_fromUtf8("comboBox")) self.gridLayout_3.addWidget(self.splitter, 0, 0, 1, 7) self.label_5 = QtGui.QLabel(Dialog) self.label_5.setObjectName(_fromUtf8("label_5")) self.gridLayout_3.addWidget(self.label_5, 1, 0, 1, 1) self.comboBox_5 = QtGui.QComboBox(Dialog) self.comboBox_5.setObjectName(_fromUtf8("comboBox_5")) self.gridLayout_3.addWidget(self.comboBox_5, 1, 1, 1, 3) self.lineEdit = QtGui.QLineEdit(Dialog) self.lineEdit.setInputMethodHints(QtCore.Qt.ImhNone) self.lineEdit.setInputMask(_fromUtf8("")) self.lineEdit.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter) self.lineEdit.setObjectName(_fromUtf8("lineEdit")) self.gridLayout_3.addWidget(self.lineEdit, 1, 4, 1, 1) self.comboBox_6 = QtGui.QComboBox(Dialog) self.comboBox_6.setObjectName(_fromUtf8("comboBox_6")) self.gridLayout_3.addWidget(self.comboBox_6, 1, 5, 1, 1) spacerItem = QtGui.QSpacerItem(25, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum) self.gridLayout_3.addItem(spacerItem, 1, 6, 1, 1) self.gridLayout = QtGui.QGridLayout() self.gridLayout.setObjectName(_fromUtf8("gridLayout")) self.label_2 = QtGui.QLabel(Dialog) self.label_2.setObjectName(_fromUtf8("label_2")) self.gridLayout.addWidget(self.label_2, 0, 0, 1, 1) self.comboBox_2 = QtGui.QComboBox(Dialog) self.comboBox_2.setObjectName(_fromUtf8("comboBox_2")) self.gridLayout.addWidget(self.comboBox_2, 0, 1, 1, 1) self.gridLayout_3.addLayout(self.gridLayout, 2, 0, 1, 2) spacerItem1 = QtGui.QSpacerItem(64, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum) self.gridLayout_3.addItem(spacerItem1, 2, 2, 1, 1) self.label_4 = QtGui.QLabel(Dialog) self.label_4.setObjectName(_fromUtf8("label_4")) self.gridLayout_3.addWidget(self.label_4, 2, 3, 1, 2) self.comboBox_4 = QtGui.QComboBox(Dialog) self.comboBox_4.setObjectName(_fromUtf8("comboBox_4")) self.gridLayout_3.addWidget(self.comboBox_4, 2, 5, 1, 1) self.gridLayout_2 = QtGui.QGridLayout() self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2")) self.label_3 = QtGui.QLabel(Dialog) self.label_3.setObjectName(_fromUtf8("label_3")) self.gridLayout_2.addWidget(self.label_3, 0, 0, 1, 1) self.comboBox_3 = QtGui.QComboBox(Dialog) self.comboBox_3.setObjectName(_fromUtf8("comboBox_3")) self.gridLayout_2.addWidget(self.comboBox_3, 0, 1, 1, 1) self.gridLayout_3.addLayout(self.gridLayout_2, 3, 0, 1, 
3) spacerItem2 = QtGui.QSpacerItem(254, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum) self.gridLayout_3.addItem(spacerItem2, 3, 4, 1, 2) self.toolButton = QtGui.QToolButton(Dialog) self.toolButton.setIconSize(QtCore.QSize(30, 30)) self.toolButton.setObjectName(_fromUtf8("toolButton")) self.gridLayout_3.addWidget(self.toolButton, 3, 6, 1, 1) self.plainTextEdit = QtGui.QPlainTextEdit(Dialog) self.plainTextEdit.setObjectName(_fromUtf8("plainTextEdit")) self.gridLayout_3.addWidget(self.plainTextEdit, 4, 0, 1, 7) self.buttonBox = QtGui.QDialogButtonBox(Dialog) self.buttonBox.setOrientation(QtCore.Qt.Horizontal) self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Close|QtGui.QDialogButtonBox.Save) self.buttonBox.setObjectName(_fromUtf8("buttonBox")) self.gridLayout_3.addWidget(self.buttonBox, 5, 4, 1, 3) self.retranslateUi(Dialog) QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), Dialog.accept) QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), Dialog.reject) QtCore.QMetaObject.connectSlotsByName(Dialog) def retranslateUi(self, Dialog): Dialog.setWindowTitle(_translate("Dialog", "Dialog", None)) self.label.setText(_translate("Dialog", "Data field:", None)) self.label_5.setText(_translate("Dialog", "Neighbouring method:", None)) self.label_2.setText(_translate("Dialog", "Weighting scheme:", None)) self.label_4.setText(_translate("Dialog", "Variance assumption:", None)) self.label_3.setText(_translate("Dialog", "Alternative hypothesis:", None)) self.toolButton.setText(_translate("Dialog", "...", None))
gpl-3.0
6,605,957,696,505,159,000
51.348214
104
0.681051
false
3.527677
false
false
false
lampjian/effectivelang
Python/lug/67.209.186.120:8888.py
1
2648
#!/usr/bin/python3 import base64 import pickle from flask import Flask, request from handies import file_contents, safe_unpickle import flag app = Flask(__name__) class Credential: "If the user wants the flag, he or she must have a credential." def __init__(self, username: str, password: str): self.username = username self.password = password def __hash__(self): return hash(self.username) ^ hash(self.password) def __str__(self): raise NotImplemented() class CredentialProxy: "A credential proxy is an authorized credential, with its own flag." def __init__(self, username: str, password: str, flag: str): self.username = username self.password = password self.flag = flag def flag(self): return self.flag def __str__(self): return "wtf, the proxy is not supported??" @app.route('/') def index(): apple = request.args.get('credential') if apple: try: banana = base64.b64decode(apple) # Good safe_unpickle can prevent 99% attacks! orange = safe_unpickle(banana) # if the orange is a credential, try it if isinstance(orange, Credential): flag.try_login(orange) return flag.flag # TODO: no proxy support # Time is limited, so this feature is delayed # return the orange to confuse the user! :-) else: return str(orange) except pickle.UnpicklingError as e: return str(e) except: return 'Wrong user or password' else: return " " + file_contents('app.py') + " " app.run(host="0.0.0.0", port=8888, threaded=True) # _ooOoo_ # o8888888o # 88" . "88 # (| -_- |) # O\ = /O # ____/`---'\____ # .' \\| |// `. # / \\||| : |||// \ # / _||||| -:- |||||- \ # | | \\\ - /// | | # | \_| ''\---/'' | | # \ .-\__ `-` ___/-. / # ___`. .' /--.--\ `. . __ # ."" '< `.___\_<|>_/___.' >'"". # | | : `- \`.;`\ _ /`;.`/ - ` : | | # \ \ `-. \_ __\ /__ _/ .-` / / # ======`-.____`-.___\_____/___.-`____.-'====== # `=---=' # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ # 佛祖保佑 永无BUG
gpl-3.0
-8,001,954,234,055,832,000
28.617978
72
0.404021
false
3.59618
false
false
false
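The index() route in the record above expects a base64-encoded pickle of a Credential in the credential query parameter; a hedged client-side sketch of producing that format (whether safe_unpickle accepts a class pickled from another module is left open here, and the username/password values are placeholders):

import base64
import pickle

class Credential:
    # mirrors the server-side Credential fields from the record
    def __init__(self, username, password):
        self.username = username
        self.password = password

payload = base64.b64encode(pickle.dumps(Credential("user", "password")))
print("/?credential=" + payload.decode())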
maveron58/indiana
positioning/strategy/nn_with_linreg.py
1
1610
from positioning.chains import LocalLinRegDataChain
from positioning.computations.rssi_linear_regression import RssiLinearRegression
from positioning.computations.trilateration import Trilateration
from positioning.strategy import NearestNeighbourStrategy
from positioning.strategy.abstract_location_strategy import AbstractLocationStrategy


class NnWithLinearRegressionStrategy(AbstractLocationStrategy):

    def __init__(self, **kwargs):
        self.access_point_dao = kwargs["access_point_dao"]
        self.locations = {ap.mac.mac: ap.location for ap in self.access_point_dao.active()}
        self.linregs_per_location = None
        self.lin_reg_chain = LocalLinRegDataChain(**kwargs)
        self.trilateration = Trilateration()
        self.nn_strategy = NearestNeighbourStrategy(**kwargs)

    def initialise(self, **kwargs):
        self.nn_strategy.initialise(**kwargs)
        lin_reg_datas_per_location = self.lin_reg_chain.calculate(**kwargs)["lin_reg_data_per_location"]
        self.linregs_per_location = {}
        for loc_str, lin_reg_datas in lin_reg_datas_per_location.items():
            reg = RssiLinearRegression()
            reg.fit(lin_reg_datas)
            self.linregs_per_location[loc_str] = reg

    def locate(self, measures):
        loc = self.nn_strategy.locate(measures)
        dists = self.linregs_per_location[str(loc)].predict(measures)
        sorted_dists = list(sorted(dists.items(), key=lambda t: t[1]))
        closest = dict(sorted_dists[:min(100, len(sorted_dists))])
        return self.trilateration.locate(closest, self.locations, start_pos=(loc.x, loc.y))
mit
4,677,599,620,941,124,000
50.967742
104
0.710559
false
3.659091
false
false
false
waynegm/OpendTect-Plugins
bin/python/wmpy/Filtering/ex_spatial_filter_circular.py
2
4264
# Spatial Filter # # Applies a Lowpass, Highpass, Band Reject or Bandpass circular symmetric filter # by convolution # import sys,os import numpy as np import scipy.special as ss # # Import the module with the I/O scaffolding of the External Attribute # sys.path.insert(0, os.path.join(sys.path[0], '..')) import extattrib as xa # # The attribute parameters # xa.params = { 'Inputs': ['Input'], 'StepOut' : {'Value': [9,9], 'Minimum': [9,9], 'Hidden': False, 'Same': True}, 'Par_0' : {'Name': 'Norm. Spatial Frequency', 'Value': 0.5}, 'Select' : {'Name': 'Type', 'Values': ['Low Pass', 'High Pass', 'Band Pass', 'Band Reject'], 'Selection': 0}, 'Help' : 'http://waynegm.github.io/OpendTect-Plugin-Docs/external_attributes/Spatial_Filter_Circular.html' } # # Define the compute function # def doCompute(): # # Compute the filter kernel # nil = xa.SI['nrinl'] nxl = xa.SI['nrcrl'] centre_trace_x = nil//2 centre_trace_y = nxl//2 freq = xa.params['Par_0']['Value'] type = xa.params['Select']['Selection'] kernelFunc = lpKernel if type==0 else hpKernel if type==1 else bpKernel if type==2 else brKernel kernel = np.zeros((nil,nil,1)) N = nil//2 if (N%2 == 0): N=N-1 kernel[1:2*N+2,1:2*N+2,1] = kernelFunc(N, freq) else: kernel = kernelFunc(N, freq) # # This is the trace processing loop # while True: xa.doInput() # # Get the input # indata = xa.Input['Input'] # # Apply the kernel # outdata = np.sum(kernel * indata, axis=(0,1)) #------------------------------------------------------------------------------------ # xa.Output = outdata xa.doOutput() def lpKernel(N, freq): # # Lowpass filter kernel generator # # N is the filter half-size, must be odd # freq is the normalised cutoff frequency # # Returns the filter kernel of size (2N+1,2N+1) # num = 2*N + 1 result = np.zeros((num,num,1)) for m in range(N+1): i = m+N im = -m+N for n in range(N+1): j = n+N jm = -n+N if (m==0 and n==0): result[i,j] = np.pi*freq**2/4 else: pandq = m%2 + n%2 mandn = np.sqrt(m*m+n*n) val = (ss.factorial2(N)**4 * np.pi**pandq * freq * ss.jn(1,np.pi * freq * mandn)) / ( 2**(pandq+1) * ss.factorial2(N+m) * ss.factorial2(N-m) * ss.factorial2(N+n) *ss.factorial2(N-n) *mandn ) result[i,j] = val result[j,i] = val result[im,j] = val result[i,jm] = val result[im,jm] = val return result/np.sum(result) def hpKernel(N, freq): # # Highpass circular symmetric filter kernel generator # # N is the filter half-size, must be odd # freq is the normalised cutoff frequency # # Returns the filter kernel of size (2N+1,2N+1) # result = lpKernel(N, freq) for m in range(-N,N+1): i = m+N for n in range(-N,N+1): j = n+N if (m==0 and n==0): result[i,j] = 1-result[i,j] else: result[i,j] = -result[i,j] return result def brKernel(N, freq, halfwidth=0.1): # # Band Reject circular symmetric filter kernel generator # # N is the filter half-size, must be odd # freq is the normalised centre frequency of the reject band # halfwidth controls the aperture of the reject band to freq +/- halfwidth # # Returns the filter kernel of size (2N+1,2N+1) # kernel_lp = lpKernel(N, freq-halfwidth) kernel_hp = hpKernel(N, freq+halfwidth) result = kernel_lp + kernel_hp return result def bpKernel(N, freq , halfwidth=0.1): # # Bandpass circular symmetric filter kernel generator # # N is the filter half-size, must be odd # freq is the normalised centre frequency of the pass band # halfwidth controls the aperture of the pass band to freq +/- halfwidth # # Returns the filter kernel of size (2N+1,2N+1) # result = brKernel(N, freq, halfwidth) for m in range(-N,N+1): i = m+N for n in range(-N,N+1): j = 
n+N if (m==0 and n==0): result[i,j] = 1-result[i,j] else: result[i,j] = -result[i,j] return result # # Assign the compute function to the attribute # xa.doCompute = doCompute # # Do it # xa.run(sys.argv[1:])
gpl-3.0
6,843,834,675,507,858,000
25.81761
206
0.583021
false
2.920548
false
false
false
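The trace-processing loop in the record above collapses the stepout window onto one output trace with np.sum(kernel * indata, axis=(0, 1)); a minimal sketch of that inner step with a made-up 3x3 averaging kernel and random data:

import numpy as np

nil, nxl, nsamples = 3, 3, 5
indata = np.random.rand(nil, nxl, nsamples)      # traces in the stepout window
kernel = np.ones((nil, nxl, 1)) / (nil * nxl)    # simple averaging kernel, illustration only

outdata = np.sum(kernel * indata, axis=(0, 1))   # one filtered output trace
print(outdata.shape)                             # (5,)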
sanguinariojoe/FreeCAD
src/Mod/Arch/ArchEquipment.py
11
14419
# -*- coding: utf8 -*- #*************************************************************************** #* Copyright (c) 2014 Yorik van Havre <[email protected]> * #* * #* This program is free software; you can redistribute it and/or modify * #* it under the terms of the GNU Lesser General Public License (LGPL) * #* as published by the Free Software Foundation; either version 2 of * #* the License, or (at your option) any later version. * #* for detail see the LICENCE text file. * #* * #* This program is distributed in the hope that it will be useful, * #* but WITHOUT ANY WARRANTY; without even the implied warranty of * #* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * #* GNU Library General Public License for more details. * #* * #* You should have received a copy of the GNU Library General Public * #* License along with this program; if not, write to the Free Software * #* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * #* USA * #* * #*************************************************************************** __title__ = "FreeCAD Equipment" __author__ = "Yorik van Havre" __url__ = "https://www.freecadweb.org" import FreeCAD,ArchComponent,DraftVecUtils if FreeCAD.GuiUp: import FreeCADGui from PySide import QtGui from DraftTools import translate from PySide.QtCore import QT_TRANSLATE_NOOP else: # \cond def translate(ctxt,txt): return txt def QT_TRANSLATE_NOOP(ctxt,txt): return txt # \endcond ## @package ArchEquipment # \ingroup ARCH # \brief The Equipment object and tools # # This module provides tools to build equipment objects. # Equipment is used to represent furniture and all kinds of electrical # or hydraulic appliances in a building def makeEquipment(baseobj=None,placement=None,name="Equipment"): "makeEquipment([baseobj,placement,name]): creates an equipment object from the given base object." if not FreeCAD.ActiveDocument: FreeCAD.Console.PrintError("No active document. Aborting\n") return obj = FreeCAD.ActiveDocument.addObject("Part::FeaturePython","Equipment") _Equipment(obj) if baseobj: if baseobj.isDerivedFrom("Mesh::Feature"): obj.Mesh = baseobj else: obj.Base = baseobj obj.Label = translate("Arch",name) if placement: obj.Placement = placement if FreeCAD.GuiUp: _ViewProviderEquipment(obj.ViewObject) if baseobj: baseobj.ViewObject.hide() return obj def createMeshView(obj,direction=FreeCAD.Vector(0,0,-1),outeronly=False,largestonly=False): """createMeshView(obj,[direction,outeronly,largestonly]): creates a flat shape that is the projection of the given mesh object in the given direction (default = on the XY plane). If outeronly is True, only the outer contour is taken into consideration, discarding the inner holes. If largestonly is True, only the largest segment of the given mesh will be used.""" import Mesh, math, Part, DraftGeomUtils if not obj.isDerivedFrom("Mesh::Feature"): return mesh = obj.Mesh # 1. Flattening the mesh proj = [] for f in mesh.Facets: nf = [] for v in f.Points: v = FreeCAD.Vector(v) a = v.negative().getAngle(direction) l = math.cos(a)*v.Length p = v.add(FreeCAD.Vector(direction).multiply(l)) p = DraftVecUtils.rounded(p) nf.append(p) proj.append(nf) flatmesh = Mesh.Mesh(proj) # 2. Removing wrong faces facets = [] for f in flatmesh.Facets: if f.Normal.getAngle(direction) < math.pi: facets.append(f) cleanmesh = Mesh.Mesh(facets) #Mesh.show(cleanmesh) # 3. 
Getting the bigger mesh from the planar segments if largestonly: c = cleanmesh.getSeparateComponents() #print(c) cleanmesh = c[0] segs = cleanmesh.getPlanarSegments(1) meshes = [] for s in segs: f = [cleanmesh.Facets[i] for i in s] meshes.append(Mesh.Mesh(f)) a = 0 for m in meshes: if m.Area > a: boundarymesh = m a = m.Area #Mesh.show(boundarymesh) cleanmesh = boundarymesh # 4. Creating a Part and getting the contour shape = None for f in cleanmesh.Facets: p = Part.makePolygon(f.Points+[f.Points[0]]) #print(p,len(p.Vertexes),p.isClosed()) try: p = Part.Face(p) if shape: shape = shape.fuse(p) else: shape = p except Part.OCCError: pass shape = shape.removeSplitter() # 5. Extracting the largest wire if outeronly: count = 0 largest = None for w in shape.Wires: if len(w.Vertexes) > count: count = len(w.Vertexes) largest = w if largest: try: f = Part.Face(w) except Part.OCCError: print("Unable to produce a face from the outer wire.") else: shape = f return shape class _CommandEquipment: "the Arch Equipment command definition" def GetResources(self): return {'Pixmap' : 'Arch_Equipment', 'MenuText': QT_TRANSLATE_NOOP("Arch_Equipment","Equipment"), 'Accel': "E, Q", 'ToolTip': QT_TRANSLATE_NOOP("Arch_Equipment","Creates an equipment object from a selected object (Part or Mesh)")} def IsActive(self): return not FreeCAD.ActiveDocument is None def Activated(self): s = FreeCADGui.Selection.getSelection() if not s: FreeCAD.Console.PrintError(translate("Arch","You must select a base shape object and optionally a mesh object")) else: base = "" mesh = "" if len(s) == 2: if hasattr(s[0],'Shape'): base = s[0].Name elif s[0].isDerivedFrom("Mesh::Feature"): mesh = s[0].Name if hasattr(s[1],'Shape'): if mesh: base = s[1].Name elif s[1].isDerivedFrom("Mesh::Feature"): if base: mesh = s[1].Name else: if hasattr(s[0],'Shape'): base = s[0].Name elif s[0].isDerivedFrom("Mesh::Feature"): mesh = s[0].Name FreeCAD.ActiveDocument.openTransaction(str(translate("Arch","Create Equipment"))) FreeCADGui.addModule("Arch") if base: base = "FreeCAD.ActiveDocument." + base FreeCADGui.doCommand("obj = Arch.makeEquipment(" + base + ")") if mesh: FreeCADGui.doCommand("obj.Mesh = FreeCAD.ActiveDocument." + mesh) FreeCADGui.addModule("Draft") FreeCADGui.doCommand("Draft.autogroup(obj)") FreeCAD.ActiveDocument.commitTransaction() FreeCAD.ActiveDocument.recompute() # get diffuse color info from base object if base and hasattr(s[0].ViewObject,"DiffuseColor"): FreeCADGui.doCommand("FreeCAD.ActiveDocument.Objects[-1].ViewObject.DiffuseColor = " + base + ".ViewObject.DiffuseColor") return class _Command3Views: "the Arch 3Views command definition" def GetResources(self): return {'Pixmap' : 'Arch_3Views', 'MenuText': QT_TRANSLATE_NOOP("Arch_3Views","3 views from mesh"), 'ToolTip': QT_TRANSLATE_NOOP("Arch_3Views","Creates 3 views (top, front, side) from a mesh-based object")} def IsActive(self): return not FreeCAD.ActiveDocument is None def Activated(self): s = FreeCADGui.Selection.getSelection() if len(s) != 1: FreeCAD.Console.PrintError(translate("Arch","You must select exactly one base object")) else: obj = s[0] if not obj.isDerivedFrom("Mesh::Feature"): FreeCAD.Console.PrintError(translate("Arch","The selected object must be a mesh")) else: if obj.Mesh.CountFacets > 1000: msgBox = QtGui.QMessageBox() msgBox.setText(translate("Arch","This mesh has more than 1000 facets.")) msgBox.setInformativeText(translate("Arch","This operation can take a long time. 
Proceed?")) msgBox.setStandardButtons(QtGui.QMessageBox.Ok | QtGui.QMessageBox.Cancel) msgBox.setDefaultButton(QtGui.QMessageBox.Cancel) ret = msgBox.exec_() if ret == QtGui.QMessageBox.Cancel: return elif obj.Mesh.CountFacets >= 500: FreeCAD.Console.PrintWarning(translate("Arch","The mesh has more than 500 facets. This will take a couple of minutes...")) FreeCAD.ActiveDocument.openTransaction(str(translate("Arch","Create 3 views"))) FreeCADGui.addModule("Arch") FreeCADGui.addModule("Part") FreeCADGui.doCommand("s1 = Arch.createMeshView(FreeCAD.ActiveDocument." + obj.Name + ",FreeCAD.Vector(0,0,-1),outeronly=False,largestonly=False)") FreeCADGui.doCommand("Part.show(s1)") FreeCADGui.doCommand("s2 = Arch.createMeshView(FreeCAD.ActiveDocument." + obj.Name + ",FreeCAD.Vector(1,0,0),outeronly=False,largestonly=False)") FreeCADGui.doCommand("Part.show(s2)") FreeCADGui.doCommand("s3 = Arch.createMeshView(FreeCAD.ActiveDocument." + obj.Name + ",FreeCAD.Vector(0,1,0),outeronly=False,largestonly=False)") FreeCADGui.doCommand("Part.show(s3)") FreeCAD.ActiveDocument.commitTransaction() FreeCAD.ActiveDocument.recompute() return class _Equipment(ArchComponent.Component): "The Equipment object" def __init__(self,obj): ArchComponent.Component.__init__(self,obj) obj.Proxy = self self.setProperties(obj) from ArchIFC import IfcTypes if "Furniture" in IfcTypes: # IfcFurniture is new in IFC4 obj.IfcType = "Furniture" elif "Furnishing Element" in IfcTypes: # IFC2x3 does know a IfcFurnishingElement obj.IfcType = "Furnishing Element" else: obj.IfcType = "Undefined" def setProperties(self,obj): pl = obj.PropertiesList if not "Model" in pl: obj.addProperty("App::PropertyString","Model","Equipment",QT_TRANSLATE_NOOP("App::Property","The model description of this equipment")) if not "ProductURL" in pl: obj.addProperty("App::PropertyString","ProductURL","Equipment",QT_TRANSLATE_NOOP("App::Property","The URL of the product page of this equipment")) if not "StandardCode" in pl: obj.addProperty("App::PropertyString","StandardCode","Equipment",QT_TRANSLATE_NOOP("App::Property","A standard code (MasterFormat, OmniClass,...)")) if not "SnapPoints" in pl: obj.addProperty("App::PropertyVectorList","SnapPoints","Equipment",QT_TRANSLATE_NOOP("App::Property","Additional snap points for this equipment")) if not "EquipmentPower" in pl: obj.addProperty("App::PropertyFloat","EquipmentPower","Equipment",QT_TRANSLATE_NOOP("App::Property","The electric power needed by this equipment in Watts")) obj.setEditorMode("VerticalArea",2) obj.setEditorMode("HorizontalArea",2) obj.setEditorMode("PerimeterLength",2) self.Type = "Equipment" def onDocumentRestored(self,obj): ArchComponent.Component.onDocumentRestored(self,obj) self.setProperties(obj) def onChanged(self,obj,prop): self.hideSubobjects(obj,prop) ArchComponent.Component.onChanged(self,obj,prop) def execute(self,obj): if self.clone(obj): return pl = obj.Placement if obj.Base: base = None if hasattr(obj.Base,'Shape'): base = obj.Base.Shape.copy() base = self.processSubShapes(obj,base,pl) self.applyShape(obj,base,pl,allowinvalid=False,allownosolid=True) def computeAreas(self,obj): return class _ViewProviderEquipment(ArchComponent.ViewProviderComponent): "A View Provider for the Equipment object" def __init__(self,vobj): ArchComponent.ViewProviderComponent.__init__(self,vobj) def getIcon(self): import Arch_rc if hasattr(self,"Object"): if hasattr(self.Object,"CloneOf"): if self.Object.CloneOf: return ":/icons/Arch_Equipment_Clone.svg" return ":/icons/Arch_Equipment_Tree.svg" def 
attach(self, vobj): self.Object = vobj.Object from pivy import coin sep = coin.SoSeparator() self.coords = coin.SoCoordinate3() sep.addChild(self.coords) self.coords.point.deleteValues(0) symbol = coin.SoMarkerSet() symbol.markerIndex = FreeCADGui.getMarkerIndex("", 5) sep.addChild(symbol) rn = vobj.RootNode rn.addChild(sep) ArchComponent.ViewProviderComponent.attach(self,vobj) def updateData(self, obj, prop): if prop == "SnapPoints": if obj.SnapPoints: self.coords.point.setNum(len(obj.SnapPoints)) self.coords.point.setValues([[p.x,p.y,p.z] for p in obj.SnapPoints]) else: self.coords.point.deleteValues(0) if FreeCAD.GuiUp: FreeCADGui.addCommand('Arch_Equipment',_CommandEquipment()) FreeCADGui.addCommand('Arch_3Views', _Command3Views())
lgpl-2.1
-4,741,429,345,536,449,000
37.553476
168
0.578126
false
4.065125
false
false
false
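createMeshView() in the FreeCAD record above flattens each mesh vertex onto the plane orthogonal to the view direction; the same projection written with plain numpy for one point and the default direction (0, 0, -1), values chosen only for illustration:

import numpy as np

direction = np.array([0.0, 0.0, -1.0])          # unit view direction
v = np.array([2.0, 3.0, 5.0])                   # a mesh vertex

# length of v along the view direction, then walk the point down to the plane
l = np.dot(-v, direction) / np.linalg.norm(direction)   # cos(angle(-v, dir)) * |v|
p = v + direction * l
print(np.round(p, 6))                           # [2. 3. 0.]  -> flattened onto z = 0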
google-research/google-research
ravens/ravens/tasks/task.py
1
13097
# coding=utf-8 # Copyright 2021 The Google Research Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #!/usr/bin/env python """Base Task class.""" import collections import os import random import string import cv2 import numpy as np import pybullet as p from ravens import cameras from ravens import primitives from ravens import utils from ravens.grippers import Suction class Task(): """Base Task class.""" def __init__(self): self.ee = Suction self.mode = 'train' self.sixdof = False self.primitive = primitives.PickPlace() self.oracle_cams = cameras.Oracle.CONFIG # Evaluation epsilons (for pose evaluation metric). self.pos_eps = 0.01 self.rot_eps = np.deg2rad(15) # Workspace bounds. self.pix_size = 0.003125 self.bounds = np.array([[0.25, 0.75], [-0.5, 0.5], [0, 0.3]]) def reset(self, env): # pylint: disable=unused-argument self.goals = [] self.progress = 0 # Task progression metric in range [0, 1]. self._rewards = 0 # Cumulative returned rewards. #------------------------------------------------------------------------- # Oracle Agent #------------------------------------------------------------------------- def oracle(self, env): """Oracle agent.""" OracleAgent = collections.namedtuple('OracleAgent', ['act']) def act(obs, info): # pylint: disable=unused-argument """Calculate action.""" # Oracle uses perfect RGB-D orthographic images and segmentation masks. _, hmap, obj_mask = self.get_true_image(env) # Unpack next goal step. objs, matches, targs, replace, rotations, _, _, _ = self.goals[0] # Match objects to targets without replacement. if not replace: # Modify a copy of the match matrix. matches = matches.copy() # Ignore already matched objects. for i in range(len(objs)): object_id, (symmetry, _) = objs[i] pose = p.getBasePositionAndOrientation(object_id) targets_i = np.argwhere(matches[i, :]).reshape(-1) for j in targets_i: if self.is_match(pose, targs[j], symmetry): matches[i, :] = 0 matches[:, j] = 0 # Get objects to be picked (prioritize farthest from nearest neighbor). nn_dists = [] nn_targets = [] for i in range(len(objs)): object_id, (symmetry, _) = objs[i] xyz, _ = p.getBasePositionAndOrientation(object_id) targets_i = np.argwhere(matches[i, :]).reshape(-1) if len(targets_i) > 0: # pylint: disable=g-explicit-length-test targets_xyz = np.float32([targs[j][0] for j in targets_i]) dists = np.linalg.norm( targets_xyz - np.float32(xyz).reshape(1, 3), axis=1) nn = np.argmin(dists) nn_dists.append(dists[nn]) nn_targets.append(targets_i[nn]) # Handle ignored objects. else: nn_dists.append(0) nn_targets.append(-1) order = np.argsort(nn_dists)[::-1] # Filter out matched objects. order = [i for i in order if nn_dists[i] > 0] pick_mask = None for pick_i in order: pick_mask = np.uint8(obj_mask == objs[pick_i][0]) # Erode to avoid picking on edges. # pick_mask = cv2.erode(pick_mask, np.ones((3, 3), np.uint8)) if np.sum(pick_mask) > 0: break # Trigger task reset if no object is visible. if pick_mask is None or np.sum(pick_mask) == 0: self.goals = [] print('Object for pick is not visible. 
Skipping demonstration.') return # Get picking pose. pick_prob = np.float32(pick_mask) pick_pix = utils.sample_distribution(pick_prob) # For "deterministic" demonstrations on insertion-easy, use this: # pick_pix = (160,80) pick_pos = utils.pix_to_xyz(pick_pix, hmap, self.bounds, self.pix_size) pick_pose = (pick_pos, (0, 0, 0, 1)) # Get placing pose. targ_pose = targs[nn_targets[pick_i]] # pylint: disable=undefined-loop-variable obj_pose = p.getBasePositionAndOrientation(objs[pick_i][0]) # pylint: disable=undefined-loop-variable if not self.sixdof: obj_euler = utils.quatXYZW_to_eulerXYZ(obj_pose[1]) obj_quat = utils.eulerXYZ_to_quatXYZW((0, 0, obj_euler[2])) obj_pose = (obj_pose[0], obj_quat) world_to_pick = utils.invert(pick_pose) obj_to_pick = utils.multiply(world_to_pick, obj_pose) pick_to_obj = utils.invert(obj_to_pick) place_pose = utils.multiply(targ_pose, pick_to_obj) # Rotate end effector? if not rotations: place_pose = (place_pose[0], (0, 0, 0, 1)) return {'pose0': pick_pose, 'pose1': place_pose} return OracleAgent(act) #------------------------------------------------------------------------- # Reward Function and Task Completion Metrics #------------------------------------------------------------------------- def reward(self): """Get delta rewards for current timestep. Returns: A tuple consisting of the scalar (delta) reward, plus `extras` dict which has extra task-dependent info from the process of computing rewards that gives us finer-grained details. Use `extras` for further data analysis. """ reward, info = 0, {} # Unpack next goal step. objs, matches, targs, _, _, metric, params, max_reward = self.goals[0] # Evaluate by matching object poses. if metric == 'pose': step_reward = 0 for i in range(len(objs)): object_id, (symmetry, _) = objs[i] pose = p.getBasePositionAndOrientation(object_id) targets_i = np.argwhere(matches[i, :]).reshape(-1) for j in targets_i: target_pose = targs[j] if self.is_match(pose, target_pose, symmetry): step_reward += max_reward / len(objs) break # Evaluate by measuring object intersection with zone. elif metric == 'zone': zone_pts, total_pts = 0, 0 obj_pts, zones = params for zone_pose, zone_size in zones: # Count valid points in zone. for obj_id in obj_pts: pts = obj_pts[obj_id] obj_pose = p.getBasePositionAndOrientation(obj_id) world_to_zone = utils.invert(zone_pose) obj_to_zone = utils.multiply(world_to_zone, obj_pose) pts = np.float32(utils.apply(obj_to_zone, pts)) if len(zone_size) > 1: valid_pts = np.logical_and.reduce([ pts[0, :] > -zone_size[0] / 2, pts[0, :] < zone_size[0] / 2, pts[1, :] > -zone_size[1] / 2, pts[1, :] < zone_size[1] / 2, pts[2, :] < self.bounds[2, 1]]) zone_pts += np.sum(np.float32(valid_pts)) total_pts += pts.shape[1] step_reward = max_reward * (zone_pts / total_pts) # Get cumulative rewards and return delta. reward = self.progress + step_reward - self._rewards self._rewards = self.progress + step_reward # Move to next goal step if current goal step is complete. if np.abs(max_reward - step_reward) < 0.01: self.progress += max_reward # Update task progress. self.goals.pop(0) return reward, info def done(self): """Check if the task is done or has failed. Returns: True if the episode should be considered a success, which we use for measuring successes, which is particularly helpful for tasks where one may get successes on the very last time step, e.g., getting the cloth coverage threshold on the last alllowed action. 
However, for bag-items-easy and bag-items-hard (which use the 'bag-items' metric), it may be necessary to filter out demos that did not attain sufficiently high reward in external code. Currently, this is done in `main.py` and its ignore_this_demo() method. """ # # For tasks with self.metric == 'pose'. # if hasattr(self, 'goal'): # goal_done = len(self.goal['steps']) == 0 # pylint: # disable=g-explicit-length-test return (len(self.goals) == 0) or (self._rewards > 0.99) # pylint: disable=g-explicit-length-test # return zone_done or defs_done or goal_done #------------------------------------------------------------------------- # Environment Helper Functions #------------------------------------------------------------------------- def is_match(self, pose0, pose1, symmetry): """Check if pose0 and pose1 match within a threshold.""" # Get translational error. diff_pos = np.float32(pose0[0][:2]) - np.float32(pose1[0][:2]) dist_pos = np.linalg.norm(diff_pos) # Get rotational error around z-axis (account for symmetries). diff_rot = 0 if symmetry > 0: rot0 = np.array(utils.quatXYZW_to_eulerXYZ(pose0[1]))[2] rot1 = np.array(utils.quatXYZW_to_eulerXYZ(pose1[1]))[2] diff_rot = np.abs(rot0 - rot1) % symmetry if diff_rot > (symmetry / 2): diff_rot = symmetry - diff_rot return (dist_pos < self.pos_eps) and (diff_rot < self.rot_eps) def get_true_image(self, env): """Get RGB-D orthographic heightmaps and segmentation masks.""" # Capture near-orthographic RGB-D images and segmentation masks. color, depth, segm = env.render(self.oracle_cams[0]) # Combine color with masks for faster processing. color = np.concatenate((color, segm[Ellipsis, None]), axis=2) # Reconstruct real orthographic projection from point clouds. hmaps, cmaps = utils.reconstruct_heightmaps( [color], [depth], self.oracle_cams, self.bounds, self.pix_size) # Split color back into color and masks. cmap = np.uint8(cmaps)[0, Ellipsis, :3] hmap = np.float32(hmaps)[0, Ellipsis] mask = np.int32(cmaps)[0, Ellipsis, 3:].squeeze() return cmap, hmap, mask def get_random_pose(self, env, obj_size): """Get random collision-free object pose within workspace bounds.""" # Get erosion size of object in pixels. max_size = np.sqrt(obj_size[0]**2 + obj_size[1]**2) erode_size = int(np.round(max_size / self.pix_size)) _, hmap, obj_mask = self.get_true_image(env) # Randomly sample an object pose within free-space pixels. 
free = np.ones(obj_mask.shape, dtype=np.uint8) for obj_ids in env.obj_ids.values(): for obj_id in obj_ids: free[obj_mask == obj_id] = 0 free[0, :], free[:, 0], free[-1, :], free[:, -1] = 0, 0, 0, 0 free = cv2.erode(free, np.ones((erode_size, erode_size), np.uint8)) if np.sum(free) == 0: return pix = utils.sample_distribution(np.float32(free)) pos = utils.pix_to_xyz(pix, hmap, self.bounds, self.pix_size) pos = (pos[0], pos[1], obj_size[2] / 2) theta = np.random.rand() * 2 * np.pi rot = utils.eulerXYZ_to_quatXYZW((0, 0, theta)) return pos, rot #------------------------------------------------------------------------- # Helper Functions #------------------------------------------------------------------------- def fill_template(self, template, replace): """Read a file and replace key strings.""" filepath = os.path.dirname(os.path.abspath(__file__)) template = os.path.join(filepath, '..', template) with open(template, 'r') as file: fdata = file.read() for field in replace: for i in range(len(replace[field])): fdata = fdata.replace(f'{field}{i}', str(replace[field][i])) alphabet = string.ascii_lowercase + string.digits rname = ''.join(random.choices(alphabet, k=16)) fname = f'{template}.{rname}' with open(fname, 'w') as file: file.write(fdata) return fname def get_random_size(self, min_x, max_x, min_y, max_y, min_z, max_z): """Get random box size.""" size = np.random.rand(3) size[0] = size[0] * (max_x - min_x) + min_x size[1] = size[1] * (max_y - min_y) + min_y size[2] = size[2] * (max_z - min_z) + min_z return tuple(size) def get_object_points(self, obj): obj_shape = p.getVisualShapeData(obj) obj_dim = obj_shape[0][3] xv, yv, zv = np.meshgrid( np.arange(-obj_dim[0] / 2, obj_dim[0] / 2, 0.02), np.arange(-obj_dim[1] / 2, obj_dim[1] / 2, 0.02), np.arange(-obj_dim[2] / 2, obj_dim[2] / 2, 0.02), sparse=False, indexing='xy') return np.vstack((xv.reshape(1, -1), yv.reshape(1, -1), zv.reshape(1, -1))) def color_random_brown(self, obj): shade = np.random.rand() + 0.5 color = np.float32([shade * 156, shade * 117, shade * 95, 255]) / 255 p.changeVisualShape(obj, -1, rgbaColor=color)
apache-2.0
3,601,930,096,458,876,400
36.31339
108
0.588913
false
3.43033
false
false
false
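Task.is_match() in the record above accepts a pose when the planar distance is under pos_eps and the z-rotation error, folded into the symmetry interval, is under rot_eps; a small self-contained numeric sketch of that rule (quaternion helpers replaced by plain z-angles to keep it short):

import numpy as np

POS_EPS = 0.01               # metres, from the record
ROT_EPS = np.deg2rad(15)     # radians, from the record

def is_match(pos0, yaw0, pos1, yaw1, symmetry):
    # translational error on the table plane (x, y only)
    dist_pos = np.linalg.norm(np.float32(pos0[:2]) - np.float32(pos1[:2]))
    # rotational error about z, folded into the object's symmetry interval
    diff_rot = 0.0
    if symmetry > 0:
        diff_rot = abs(yaw0 - yaw1) % symmetry
        if diff_rot > symmetry / 2:
            diff_rot = symmetry - diff_rot
    return dist_pos < POS_EPS and diff_rot < ROT_EPS

print(is_match((0.30, 0.10, 0.02), np.pi / 2,
               (0.305, 0.102, 0.02), -np.pi / 2, symmetry=np.pi))  # True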
TheAlgorithms/Python
maths/euclidean_gcd.py
1
1272
""" https://en.wikipedia.org/wiki/Euclidean_algorithm """ def euclidean_gcd(a: int, b: int) -> int: """ Examples: >>> euclidean_gcd(3, 5) 1 >>> euclidean_gcd(6, 3) 3 """ while b: a, b = b, a % b return a def euclidean_gcd_recursive(a: int, b: int) -> int: """ Recursive method for euclicedan gcd algorithm Examples: >>> euclidean_gcd_recursive(3, 5) 1 >>> euclidean_gcd_recursive(6, 3) 3 """ return a if b == 0 else euclidean_gcd_recursive(b, a % b) def main(): print(f"euclidean_gcd(3, 5) = {euclidean_gcd(3, 5)}") print(f"euclidean_gcd(5, 3) = {euclidean_gcd(5, 3)}") print(f"euclidean_gcd(1, 3) = {euclidean_gcd(1, 3)}") print(f"euclidean_gcd(3, 6) = {euclidean_gcd(3, 6)}") print(f"euclidean_gcd(6, 3) = {euclidean_gcd(6, 3)}") print(f"euclidean_gcd_recursive(3, 5) = {euclidean_gcd_recursive(3, 5)}") print(f"euclidean_gcd_recursive(5, 3) = {euclidean_gcd_recursive(5, 3)}") print(f"euclidean_gcd_recursive(1, 3) = {euclidean_gcd_recursive(1, 3)}") print(f"euclidean_gcd_recursive(3, 6) = {euclidean_gcd_recursive(3, 6)}") print(f"euclidean_gcd_recursive(6, 3) = {euclidean_gcd_recursive(6, 3)}") if __name__ == "__main__": main()
mit
-7,999,576,104,044,941,000
26.06383
77
0.584906
false
2.795604
false
false
false
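A quick property check for the two implementations in the record above: both variants should agree, and the result should divide both inputs. The functions are repeated here so the snippet runs on its own.

from random import randint

def euclidean_gcd(a: int, b: int) -> int:
    while b:
        a, b = b, a % b
    return a

def euclidean_gcd_recursive(a: int, b: int) -> int:
    return a if b == 0 else euclidean_gcd_recursive(b, a % b)

for _ in range(1000):
    a, b = randint(1, 10**6), randint(1, 10**6)
    g = euclidean_gcd(a, b)
    # Iterative and recursive variants agree, and g divides both inputs.
    assert g == euclidean_gcd_recursive(a, b)
    assert a % g == 0 and b % g == 0
print("ok")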
XefPatterson/INF8225_Project
Model/model_flex.py
1
12332
import numpy as np import tensorflow as tf from queues import create_queues_for_bucket from termcolor import cprint FLAGS = None class Seq2Seq(object): def __init__(self, buckets, forward_only=False): """ Seq2Seq model :param buckets: List of pairs Each pair correspond to (max_size_in_bucket_for_encoder_sentence, max_size_in_bucket_for_decoder_sentence) :param forward_only: Boolean (False) Whether to update the model, or only predict. Now it only supports False, but it should not be a big deal """ self.max_gradient_norm = FLAGS.max_gradient_norm self.learning_rate = tf.Variable(float(FLAGS.learning_rate), trainable=False) self.global_step = tf.Variable(0, trainable=False) self.learning_rate_decay_op = tf.train.exponential_decay(FLAGS.learning_rate, self.global_step, FLAGS.decay_learning_rate_step, FLAGS.learning_rate_decay_factor, staircase=True) self.buckets = buckets self.encoder_inputs = [] self.decoder_inputs = [] self.targets = [] self.target_weights = [] for i in range(self.buckets[-1][0]): self.encoder_inputs.append(tf.placeholder(tf.int32, shape=[None], name="encoder{0}".format(i))) for i in range(self.buckets[-1][1]): self.targets.append(tf.placeholder(tf.int32, shape=[None], name="decoder{0}".format(i))) # decoder inputs : 'GO' + [ y1, y2, ... y_t-1 ] self.decoder_inputs = [tf.zeros_like(self.targets[0], dtype=tf.int64, name='GO')] + self.targets[:-1] #Binary mask useful for padded sequences. self.target_weights = [tf.ones_like(label, dtype=tf.float32) for label in self.targets] self.gradient_norms = [] self.updates = [] self.forward_only = forward_only # Which bucket to extract examples self.bucket_id = tf.placeholder_with_default(0, [], name="bucket_id") def _build_queues(self): """ Build the queues :return: """ self.queues, self.op_starting_queue = create_queues_for_bucket(FLAGS.batch_size, "train", self.buckets) q = tf.QueueBase.from_list(self.bucket_id, self.queues) inputs = tf.squeeze(q.dequeue()) self._questions = inputs[0] self._answers = inputs[1] def build(self): """ Build the model :return: """ cprint("[*] Building model (G)", color="yellow") single_cell = tf.contrib.rnn.GRUCell(FLAGS.hidden_size) cell = tf.contrib.rnn.DropoutWrapper(single_cell, output_keep_prob=FLAGS.keep_prob) if FLAGS.num_layers > 1: cell = tf.contrib.rnn.MultiRNNCell([single_cell] * FLAGS.num_layers) def seq2seq_f(encoder_inputs, decoder_inputs, do_decode): return tf.contrib.legacy_seq2seq.embedding_attention_seq2seq( encoder_inputs, decoder_inputs, cell, num_encoder_symbols=FLAGS.vocab_size, num_decoder_symbols=FLAGS.vocab_size, embedding_size=128, output_projection=None, feed_previous=do_decode) with tf.variable_scope("seq2seq") as scope: self.train_outputs, self.train_losses = tf.contrib.legacy_seq2seq.model_with_buckets( self.encoder_inputs, self.decoder_inputs, self.targets, self.target_weights, self.buckets, lambda x, y: seq2seq_f(x, y, False)) scope.reuse_variables() self.test_outputs, self.test_losses = tf.contrib.legacy_seq2seq.model_with_buckets( self.encoder_inputs, self.decoder_inputs, self.targets, self.target_weights, self.buckets, lambda x, y: seq2seq_f(x, y, True)) cprint("[*] Building model (D)", color="yellow") # Question encoder for D. # Should be similar to the one used for G. 
single_cell = tf.contrib.rnn.GRUCell(FLAGS.hidden_size) disc_q_cell = tf.contrib.rnn.DropoutWrapper(single_cell, output_keep_prob=FLAGS.keep_prob) if FLAGS.num_layers > 1: disc_q_cell = tf.contrib.rnn.MultiRNNCell([single_cell] * FLAGS.num_layers) disc_q_outputs, disc_q_states = tf.nn.dynamic_rnn( cell=disc_q_cell, inputs=self.encoder_inputs, # TODO, we need a 2D tensor instead of list, should be this with Louis'code. sequence_length=question_length, #TODO use length dtype=tf.float32, swap_memory=True) # TODO: Combine Real and Fake answers into a single minibatch. Use DECODER max length. # TODO: Should we use vs.get_variable? # Another "classic" dynamic RNN, therefore, transfer LIST inputs into 2D tensor inputs. real_fake_answers = tf.placeholder(tf.int32, shape=[self.batch_size*2, self.max_decoder_sequence_length], name="real_and_fake_answers") for t in range(self.max_decoder_sequence_length): real_fake_answers[:self.batch_size, t] = self.decoder_inputs[t] real_fake_answers[self.batch_size:, t] = self.decoder_outputs[0][t] #TODO use eventual G_decoder outputs # TODO: Produce target weights (binary mask) for this too! # TODO: get char/word embeddings of real+fake inputs - use Louis' function : # We assume d_embeddings.shape = (batch, T, embedding_size) - similar to dynamic RNN outputs. d_inputs, d_embeddings = embedded_sequence(real_fake_answers, num_encoder_symbols, embedding_size_encoder) # TODO: Get length of minibatch for dynamic RNN - use Louis's function: answer_length = length_sequence(d_inputs) # TODO: take last state or output question encoder ... # TODO: Do we need a call to tf.expand_dims to keep the time dimension (to further allow broadcast durin concat)? self.question_representation = disc_q_outputs[0][:, question_length-1, :] #shape = (batch, 1, cell_size) # TODO: ... and double the values for the double batch (real - fake) ... self.question_representation = tf.concat(0, [question_representation, question_representation], name="question_representation" ) # TODO: ... and concat to each embedding_t self.question_representation = tf.concat(2, [question_representation, d_embeddings], name="question_representation" ) # TODO: Feed this to a "basic" stacked gru/lstm single_cell = tf.contrib.rnn.GRUCell(FLAGS.hidden_size) disc_a_cell = tf.contrib.rnn.DropoutWrapper(single_cell, output_keep_prob=FLAGS.keep_prob) last_cell = tf.contrib.rnn.GRUCell(1) if FLAGS.num_layers > 1: disc_a_cell = tf.contrib.rnn.MultiRNNCell([single_cell] * FLAGS.num_layers + [last_cell]) else: disc_a_cell = tf.contrib.rnn.MultiRNNCell([single_cell, last_cell]) disc_a_outputs, disc_a_states = tf.nn.dynamic_rnn( cell=disc_a_cell, inputs=self.question_representation, sequence_length=answer_length, dtype=tf.float32, swap_memory=True) # TODO: use sigmoid to classify each timestep as real/fake. 
d_probs = tf.sigmoid(disc_a_outputs) # TODO: define optimization for D and for G d_loss = - tf.reduce_mean(tf.log(d_probs)) g_rewards = 1.0 - d_probs[self.batch_size:] g_loss = tf.reduce_mean(tf.log(d_probs[self.batch_size:])) # Optimization : params = tf.trainable_variables() if not self.forward_only: opt = tf.train.AdamOptimizer(self.learning_rate) for b in range(len(self.buckets)): cprint("Constructing the forward pass for bucket {}".format(b)) gradients = tf.gradients(self.train_losses[b], params, aggregation_method=2) clipped_gradients, norm = tf.clip_by_global_norm(gradients, self.max_gradient_norm) self.gradient_norms.append(norm) self.updates.append(opt.apply_gradients( zip(clipped_gradients, params), global_step=self.global_step)) self._summary() cprint("[!] Model built", color="green") def _summary(self, scope_name="summary"): """ Create an operation to retrieve all summaries added to the graph :return: """ with tf.variable_scope(scope_name) as _: self.merged_summary = [] for bucket_id in range(len(self.buckets)): self.merged_summary.append( tf.summary.scalar(name="training_loss_bucket_{}".format(bucket_id), tensor=self.train_losses[bucket_id])) tf.summary.scalar(name="bucket_id", tensor=self.bucket_id) def forward_with_feed_dict(self, bucket_id, session, questions, answers): encoder_size, decoder_size = self.buckets[bucket_id] input_feed = {self.bucket_id: bucket_id} # Instead of an array of dim (batch_size, bucket_length), # the model is passed a list of sized batch_size, containing vector of size bucket_length for l in range(encoder_size): input_feed[self.encoder_inputs[l].name] = questions[:, l] # Same for decoder_input for l in range(decoder_size): input_feed[self.targets[l].name] = answers[:, l] input_feed[self.target_weights[l].name] = np.not_equal(answers[:, l], 0).astype(np.float32) #input_feed[self.decoder_inputs[decoder_size].name] = np.zeros_like(answers[:, 0], dtype=np.int64) output_feed = [self.merged_summary[bucket_id], # Summary operation self.global_step, # Current global step self.updates[bucket_id], # Nothing self.gradient_norms[bucket_id], # A scalar the gradient norm self.train_losses[bucket_id]] # Training loss, a scalar for l in range(decoder_size): # Will return a numpy array [batch_size x size_vocab x 1]. Value are not restricted to [-1, 1] output_feed.append(self.train_outputs[bucket_id][l]) # outputs is a list of size (3 + decoder_size) outputs = session.run(output_feed, input_feed) return outputs def predict(self, bucket_id, session, questions, answers): """ Forward pass and backward :param bucket_id: :param session: :return: """ # Retrieve size of sentence for this bucket encoder_size, decoder_size = self.buckets[bucket_id] input_feed = {self.bucket_id: bucket_id} # questions, answers = session.run([self._questions, self._answers], input_feed) # Instead of an array of dim (batch_size, bucket_length), # the model is passed a list of sized batch_size, containing vector of size bucket_length for l in range(encoder_size): input_feed[self.encoder_inputs[l].name] = questions[:, l] # Same for decoder_input for l in range(decoder_size): input_feed[self.targets[l].name] = answers[:, l] input_feed[self.target_weights[l].name] = np.not_equal(answers[:, l], 0).astype(np.float32) output_feed = [] for l in range(decoder_size): output_feed.append(self.test_outputs[bucket_id][l]) outputs = session.run(output_feed, input_feed) return outputs, questions, answers
mit
-1,201,654,647,298,813,000
45.014925
121
0.577522
false
4.0078
false
false
false
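The discriminator section of the record above is still marked TODO (it tries to assign into a tf.placeholder, which TensorFlow does not allow). The sketch below shows, in plain NumPy and under the assumption that answers are padded with token id 0 (the same convention used by forward_with_feed_dict()), how the target_weights mask and a combined real/fake discriminator minibatch can be built by concatenation instead; it is a data-shaping illustration, not the project's implementation.

import numpy as np

batch_size, max_decoder_len = 4, 6
real_answers = np.array([[5, 8, 2, 0, 0, 0],
                         [7, 7, 9, 3, 2, 0],
                         [4, 2, 0, 0, 0, 0],
                         [6, 1, 5, 9, 8, 2]], dtype=np.int32)
fake_answers = np.random.randint(0, 10, size=(batch_size, max_decoder_len)).astype(np.int32)

# Binary mask: 1.0 for real tokens, 0.0 for PAD (id 0), as fed to target_weights.
target_weights = np.not_equal(real_answers, 0).astype(np.float32)

# Real answers in the first half of the batch, generated ones in the second half.
real_fake_answers = np.concatenate([real_answers, fake_answers], axis=0)
labels = np.concatenate([np.ones(batch_size), np.zeros(batch_size)])
print(real_fake_answers.shape, target_weights.shape, labels.shape)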
osmanbaskaya/mapping-impact
run/dataset-stats.py
1
1312
# -*- coding: utf-8 -*- __author__ = "Osman Baskaya" import sys from collections import Counter, defaultdict as dd import os from nlp_utils import calc_perp key_file = sys.argv[1] d = dd(list) all_senses = [] total_num_instance = 0 for line in open(key_file): total_num_instance += 1 word, instance_id, sense = line.split() d[word].append(sense) all_senses.append(sense) basename = os.path.splitext(os.path.basename(key_file))[0] print basename with open("instance-num-%s.txt" % basename, 'w') as f: f.write("{}\t{}\t{}\t{}\n".format("Word", "Number of Instance", "Number of Sense", "Perplexity")) sense_dist = dd(int) for word, senses in d.iteritems(): num_sense = len(set(senses)) perplexity = calc_perp(senses) f.write("{}\t{}\t{}\t{}\n".format(word, len(senses), num_sense, perplexity)) sense_dist[num_sense] += 1 with open("sense-dist-%s.txt" % basename, 'w') as f: f.write("Total # of Instance: {}\tTotal # of words: {}\n".format(total_num_instance, len(d))) f.write("Mean # of instance: {}\n".format(float(total_num_instance) / len(d))) f.write("Dataset Perplexity: {}\n".format(calc_perp(all_senses))) for n, c in sorted(sense_dist.items()): f.write("{}\t{}\n".format(n, c))
mit
1,706,787,379,362,280,400
33.526316
97
0.608232
false
2.981818
false
false
false
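calc_perp is imported from nlp_utils and is not included in the record above. The sketch below assumes the common convention that perplexity is the exponential of the entropy of the sense distribution; the project's actual implementation may differ.

import math
from collections import Counter

def calc_perp(senses):
    # Perplexity of the empirical distribution over sense labels.
    counts = Counter(senses)
    total = sum(counts.values())
    entropy = -sum((c / total) * math.log(c / total) for c in counts.values())
    return math.exp(entropy)

# Higher values mean the instances are spread more evenly across senses.
print(calc_perp(["s1", "s1", "s2", "s3"]))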
Arkapravo/morse-0.6
src/morse/actuators/kuka_lwr.py
1
3893
import logging; logger = logging.getLogger("morse." + __name__) import math import morse.actuators.armature_actuator from morse.helpers.morse_math import normalise_angle from morse.core.services import service from morse.core.exceptions import MorseRPCInvokationError class KukaActuatorClass(morse.actuators.armature_actuator.ArmatureActuatorClass): """ Class to control the KUKA LWR arm using Blender armatures. Sub class of ArmatureActuatorClass, that considers the specific case of objects that can be mounted on the arm. This class has many MORSE Services that you can access via sockets/telnet. """ def __init__(self, obj, parent=None): """ Constructor method. Receives the reference to the Blender object. """ logger.info('%s initialization' % obj.name) # Call the constructor of the parent class super(self.__class__,self).__init__(obj, parent) # Get the references to the segment at the tip of the arm for child in self.blender_obj.childrenRecursive: if 'kuka_7' in child.name: self._arm_tip = child # Any other objects children of the Kuka arm are assumed # to be mounted on the tip of the arm for child in self.blender_obj.children: if not 'kuka' in child.name: child.setParent(self._arm_tip) #self._tolerance = math.radians(0.5) logger.info('Component initialized') def default_action(self): """ Apply rotation angles to the segments of the arm """ armature = self.blender_obj logger.debug("The armature is: '%s' (%s)" % (armature, type(armature))) for channel in armature.channels: segment_angle = channel.joint_rotation logger.debug("Channel '%s' st: [%.4f, %.4f, %.4f]" % (channel, segment_angle[0], segment_angle[1], segment_angle[2])) # Get the normalised angle for this segment target_angle = normalise_angle(self.local_data[channel.name]) # Use the corresponding direction for each rotation if self._dofs[channel.name][1] == 1: segment_angle[1] = target_angle elif self._dofs[channel.name][2] == 1: segment_angle[2] = target_angle logger.debug("Channel '%s' fn: [%.4f, %.4f, %.4f]" % (channel, segment_angle[0], segment_angle[1], segment_angle[2])) channel.joint_rotation = segment_angle armature.update() @service def set_rotation(self, channel_name, rotation): """ MORSE Service to set the rotation angle of the given channel_name to the angles list (x,y,z). Overrides the default method of the ArmatureActuatorClass, so that data is stored in the expected format of a list of angles for each joint. """ try: for i in range(3): if self._dofs[channel_name][i] != 0: self.local_data[channel_name] = rotation[i] return None except KeyError: msg = str(channel_name) + " is not a valid channel name " raise MorseRPCInvokationError(msg) @service def set_rotation_array(self, *rotation_array): """ MORSE service to set the rotation for each of the arm joints. It receives an array containing the angle to give to each of the robot articulations. The array contains only one angle for each joint. """ i = 0 for channel in self.blender_obj.channels: try: self.local_data[channel.name] = rotation_array[i] i += 1 # If there are no more arguments, set the rotation values to zero except IndexError: self.local_data[channel.name] = 0.0 return None
bsd-3-clause
-9,090,081,137,052,656,000
38.323232
129
0.610326
false
4.042575
false
false
false
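The actuator above relies on normalise_angle from morse.helpers.morse_math, which is not part of the record. The sketch below is a conventional wrap of an angle into [-pi, pi] and only approximates what the MORSE helper does.

import math

def normalise_angle(angle):
    # Wrap any angle (radians) into the [-pi, pi] interval.
    wrapped = math.fmod(angle + math.pi, 2 * math.pi)
    if wrapped < 0:
        wrapped += 2 * math.pi
    return wrapped - math.pi

print(normalise_angle(3 * math.pi / 2))   # -> -pi/2
print(normalise_angle(-3 * math.pi / 2))  # ->  pi/2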
Nate-Devv/Tuxemon
tuxemon/core/components/controller.py
3
6080
#!/usr/bin/python # -*- coding: utf-8 -*- # # Tuxemon # Copyright (C) 2014, William Edwards <[email protected]>, # Benjamin Bean <[email protected]> # # This file is part of Tuxemon. # # Tuxemon is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Tuxemon is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Tuxemon. If not, see <http://www.gnu.org/licenses/>. # # Contributor(s): # # William Edwards <[email protected]> # # # core.components.controller Controller overlay functions for mobile. # # import logging import pygame from . import screen # Create a logger for optional handling of debug messages. logger = logging.getLogger(__name__) logger.debug("core.controller successfully imported") class Controller(object): """Handles the controller overlay functionality for mobile versions of the game. This includes detecting screen touches of on-screen buttons so they can be translated to keystrokes as well as drawing the controller overlay itself. .. image:: images/menu/controller_overlay01.png :param game: The main game object that contains all the game’s variables. :type game: tuxemon.Game """ def __init__(self, game): self.game = game self.dpad = {} def load(self): from core import prepare self.dpad["surface"] = pygame.image.load("resources/gfx/d-pad.png").convert_alpha() self.dpad["surface"] = pygame.transform.scale(self.dpad["surface"], (self.dpad["surface"].get_width() * prepare.SCALE, self.dpad["surface"].get_height() * prepare.SCALE)) self.dpad["position"] = (0, prepare.SCREEN_SIZE[1] - self.dpad["surface"].get_height() ) # Create the collision rectangle objects for the dpad so we can see if we're pressing a button self.dpad["rect"] = {} self.dpad["rect"]["up"] = pygame.Rect(self.dpad["position"][0] + (self.dpad["surface"].get_width() /3), self.dpad["position"][1], # Rectangle position_y self.dpad["surface"].get_width() /3, # Rectangle size_x self.dpad["surface"].get_height() /2) # Rectangle size_y self.dpad["rect"]["down"] = pygame.Rect(self.dpad["position"][0] + (self.dpad["surface"].get_width() /3), self.dpad["position"][1] + (self.dpad["surface"].get_height() /2), self.dpad["surface"].get_width() /3, self.dpad["surface"].get_height() /2) self.dpad["rect"]["left"] = pygame.Rect(self.dpad["position"][0], self.dpad["position"][1] + (self.dpad["surface"].get_height() /3), self.dpad["surface"].get_width() /2, self.dpad["surface"].get_height() /3) self.dpad["rect"]["right"] = pygame.Rect(self.dpad["position"][0] + (self.dpad["surface"].get_width() /2), self.dpad["position"][1] + (self.dpad["surface"].get_height() /3), self.dpad["surface"].get_width() /2, self.dpad["surface"].get_height() /3) # Create the buttons self.a_button = {} self.a_button["surface"] = pygame.image.load("resources/gfx/a-button.png").convert_alpha() self.a_button["surface"] = pygame.transform.scale(self.a_button["surface"], (self.a_button["surface"].get_width() * prepare.SCALE, self.a_button["surface"].get_height() * prepare.SCALE)) self.a_button["position"] = (prepare.SCREEN_SIZE[0] - int( self.a_button["surface"].get_width() * 1.0 ), (self.dpad["position"][1] + 
(self.dpad["surface"].get_height() / 2) - (self.a_button["surface"].get_height() / 2))) self.a_button["rect"] = pygame.Rect( self.a_button["position"][0], self.a_button["position"][1], self.a_button["surface"].get_width(), self.a_button["surface"].get_height()) self.b_button = {} self.b_button["surface"] = pygame.image.load("resources/gfx/b-button.png").convert_alpha() self.b_button["surface"] = pygame.transform.scale(self.b_button["surface"], (self.b_button["surface"].get_width() * prepare.SCALE, self.b_button["surface"].get_height() * prepare.SCALE)) self.b_button["position"] = (prepare.SCREEN_SIZE[0] - int( self.b_button["surface"].get_width() * 2.1 ), (self.dpad["position"][1] + (self.dpad["surface"].get_height() / 2) - (self.b_button["surface"].get_height() / 2))) self.b_button["rect"] = pygame.Rect( self.b_button["position"][0], self.b_button["position"][1], self.b_button["surface"].get_width(), self.b_button["surface"].get_height()) def draw(self, game): """Draws the controller overlay to the screen. :param game: The main game object that contains all the game’s variables. :type game: tuxemon.Game :rtype: None :returns: None """ screen.blit_alpha(game.screen, self.dpad["surface"], self.dpad["position"], game.config.controller_transparency) screen.blit_alpha(game.screen, self.a_button["surface"], self.a_button["position"], game.config.controller_transparency) screen.blit_alpha(game.screen, self.b_button["surface"], self.b_button["position"], game.config.controller_transparency) #game.screen.fill((122,122,122, 22), self.dpad["rect"]["right"], special_flags=pygame.BLEND_RGBA_MIN)
gpl-3.0
3,111,139,258,999,061,000
44.684211
127
0.60237
false
3.603796
false
false
false
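The rectangles built in Controller.load() are only useful once touch positions are tested against them. The sketch below shows that hit test with plain (x, y, w, h) tuples and made-up coordinates; in the game itself the equivalent check is pygame's rect.collidepoint(pos) against self.dpad["rect"] and the button rects.

def hit_test(pos, rects):
    # Return the name of the on-screen button containing pos, or None.
    x, y = pos
    for name, (rx, ry, rw, rh) in rects.items():
        if rx <= x < rx + rw and ry <= y < ry + rh:
            return name
    return None

# Hypothetical d-pad layout, for illustration only.
dpad_rects = {
    "up":    (40, 300, 40, 60),
    "down":  (40, 360, 40, 60),
    "left":  (0, 340, 60, 40),
    "right": (60, 340, 60, 40),
}
print(hit_test((50, 320), dpad_rects))   # 'up'
print(hit_test((500, 500), dpad_rects))  # None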
bureaucratic-labs/natasha-factRuEval-2016
main.py
1
2053
import os import re from collections import namedtuple from random import seed, sample from tqdm import tqdm as log_progress from natasha import NamesExtractor from natasha.markup import show_markup from natasha.grammars.name import Name FACTRU_DIR = 'factRuEval-2016' DEVSET_DIR = os.path.join(FACTRU_DIR, 'devset') TESTSET_DIR = os.path.join(FACTRU_DIR, 'testset') RESULTS_DIR = os.path.join(FACTRU_DIR, 'results') TAGS = { Name: 'PER' } Factru = namedtuple( 'Factru', ['id', 'text'] ) Task1Record = namedtuple( 'Task1Record', ['tag', 'start', 'size'] ) def load_text(path): with open(path) as file: return file.read() def load_factru(dir): for filename in os.listdir(dir): match = re.match('book_(\d+)\.txt', filename) if match: id = int(match.group(1)) path = os.path.join(dir, filename) text = load_text(path) yield Factru(id, text) def get_task1_path(id): filename = 'book_{id}.task1'.format(id=id) return os.path.join( RESULTS_DIR, filename ) def format_task1(result): for record in result: yield '{record.tag} {record.start} {record.size}'.format( record=record ) def dump_lines(lines, path): with open(path, 'w') as file: for line in lines: file.write(line + '\n') def dump_task1(id, result): path = get_task1_path(id) lines = format_task1(result) dump_lines(lines, path) def match_to_task1(match): start, stop = match.span size = stop - start fact = match.fact tag = TAGS[type(fact)] return Task1Record(tag, start, size) def maybe_mkdir(dir): if not os.path.exists(dir): os.mkdir(dir) if __name__ == '__main__': factru = list(load_factru(TESTSET_DIR)) extractor = NamesExtractor() maybe_mkdir(RESULTS_DIR) for record in log_progress(factru): matches = extractor(record.text) guess = [match_to_task1(_) for _ in matches] dump_task1(record.id, guess)
mit
-2,350,521,041,399,897,600
20.385417
65
0.617633
false
3.163328
false
false
false
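A worked example of the task1 serialisation used by match_to_task1() and format_task1() above: a matched name span becomes a "TAG start size" line. The span values here are invented for illustration.

from collections import namedtuple

Task1Record = namedtuple('Task1Record', ['tag', 'start', 'size'])

def to_task1(tag, start, stop):
    # A span [start, stop) is stored as its start offset and its length.
    return Task1Record(tag, start, stop - start)

record = to_task1('PER', 17, 31)
print('{record.tag} {record.start} {record.size}'.format(record=record))  # PER 17 14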
magfest/panels
panels/models/__init__.py
1
7130
from panels import * from panels.config import panels_config as config def url_domain(url): url = url.strip().replace('//', '/') url = re.sub(r'^https?:/', '', url) url = re.sub(r'^www\.', '', url) return url.split('/', 1)[0].strip('@#?=. ') @Session.model_mixin class SessionMixin: def panel_apps(self): return self.query(PanelApplication).order_by('applied').all() def panel_applicants(self): return self.query(PanelApplicant).options(joinedload(PanelApplicant.application)).order_by('first_name', 'last_name') class SocialMediaMixin(JSONColumnMixin('social_media', c.SOCIAL_MEDIA)): _social_media_urls = config.get('social_media_urls', {}) _social_media_placeholders = config.get('social_media_placeholders', {}) @classmethod def get_placeholder(cls, name): name = cls.unqualify(name) return cls._social_media_placeholders.get(name, '') @property def has_social_media(self): return any(getattr(self, f) for f in self._social_media_fields.keys()) def __getattr__(self, name): if name.endswith('_url'): field_name = self.unqualify(name[:-4]) if field_name in self._social_media_fields: attr = super(SocialMediaMixin, self).__getattr__(field_name) attr = attr.strip('@#?=. ') if attr else '' if attr: if attr.startswith('http:') or attr.startswith('https:'): return attr else: url = self._social_media_urls.get(field_name, '{}') if url_domain(url.format('')) in url_domain(attr): return attr return url.format(attr) return '' else: return super(SocialMediaMixin, self).__getattr__(name) elif name.endswith('_placeholder'): return self.get_placeholder(name[:-12]) else: return super(SocialMediaMixin, self).__getattr__(name) @Session.model_mixin class Attendee: assigned_panelists = relationship('AssignedPanelist', backref='attendee') panel_applicants = relationship('PanelApplicant', backref='attendee') panel_applications = relationship('PanelApplication', backref='poc') panel_feedback = relationship('EventFeedback', backref='attendee') class Event(MagModel): location = Column(Choice(c.EVENT_LOCATION_OPTS)) start_time = Column(UTCDateTime) duration = Column(Integer) # half-hour increments name = Column(UnicodeText, nullable=False) description = Column(UnicodeText) assigned_panelists = relationship('AssignedPanelist', backref='event') applications = relationship('PanelApplication', backref='event') panel_feedback = relationship('EventFeedback', backref='event') @property def half_hours(self): half_hours = set() for i in range(self.duration): half_hours.add(self.start_time + timedelta(minutes=30 * i)) return half_hours @property def minutes(self): return (self.duration or 0) * 30 @property def start_slot(self): if self.start_time: return int((self.start_time_local - c.EPOCH).total_seconds() / (60 * 30)) @property def end_time(self): return self.start_time + timedelta(minutes=self.minutes) class AssignedPanelist(MagModel): attendee_id = Column(UUID, ForeignKey('attendee.id', ondelete='cascade')) event_id = Column(UUID, ForeignKey('event.id', ondelete='cascade')) def __repr__(self): if self.attendee: return '<{self.attendee.full_name} panelisting {self.event.name}>'.format(self=self) else: return super(AssignedPanelist, self).__repr__() class PanelApplication(MagModel): event_id = Column(UUID, ForeignKey('event.id', ondelete='SET NULL'), nullable=True) poc_id = Column(UUID, ForeignKey('attendee.id', ondelete='SET NULL'), nullable=True) name = Column(UnicodeText) length = Column(Choice(c.PANEL_LENGTH_OPTS), default=c.SIXTY_MIN) length_text = Column(UnicodeText) length_reason = Column(UnicodeText) description = 
Column(UnicodeText) unavailable = Column(UnicodeText) available = Column(UnicodeText) affiliations = Column(UnicodeText) past_attendance = Column(UnicodeText) presentation = Column(Choice(c.PRESENTATION_OPTS)) other_presentation = Column(UnicodeText) tech_needs = Column(MultiChoice(c.TECH_NEED_OPTS)) other_tech_needs = Column(UnicodeText) need_tables = Column(Boolean, default=False) tables_desc = Column(UnicodeText) has_cost = Column(Boolean, default=False) cost_desc = Column(UnicodeText) livestream = Column(Choice(c.LIVESTREAM_OPTS), default=c.DONT_CARE) panelist_bringing = Column(UnicodeText) extra_info = Column(UnicodeText) applied = Column(UTCDateTime, server_default=utcnow()) status = Column(Choice(c.PANEL_APP_STATUS_OPTS), default=c.PENDING, admin_only=True) comments = Column(UnicodeText, admin_only=True) applicants = relationship('PanelApplicant', backref='application') email_model_name = 'app' @property def email(self): return self.submitter and self.submitter.email @property def submitter(self): for a in self.applicants: if a.submitter: return a return None @property def other_panelists(self): return [a for a in self.applicants if not a.submitter] @property def matched_attendees(self): return [a.attendee for a in self.applicants if a.attendee_id] @property def unmatched_applicants(self): return [a for a in self.applicants if not a.attendee_id] class PanelApplicant(SocialMediaMixin, MagModel): app_id = Column(UUID, ForeignKey('panel_application.id', ondelete='cascade')) attendee_id = Column(UUID, ForeignKey('attendee.id', ondelete='cascade'), nullable=True) submitter = Column(Boolean, default=False) first_name = Column(UnicodeText) last_name = Column(UnicodeText) email = Column(UnicodeText) cellphone = Column(UnicodeText) communication_pref = Column(MultiChoice(c.COMMUNICATION_PREF_OPTS)) other_communication_pref = Column(UnicodeText) occupation = Column(UnicodeText) website = Column(UnicodeText) other_credentials = Column(UnicodeText) @property def has_credentials(self): return any([self.occupation, self.website, self.other_credentials]) @property def full_name(self): return self.first_name + ' ' + self.last_name class EventFeedback(MagModel): event_id = Column(UUID, ForeignKey('event.id')) attendee_id = Column(UUID, ForeignKey('attendee.id', ondelete='cascade')) headcount_starting = Column(Integer, default=0) headcount_during = Column(Integer, default=0) comments = Column(UnicodeText) rating = Column(Choice(c.PANEL_RATING_OPTS), default=c.UNRATED) from panels.models.attraction import * # noqa: F401,E402,F403
agpl-3.0
8,330,503,066,072,709,000
34.472637
125
0.651192
false
3.82716
false
false
false
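A quick demonstration of the url_domain() helper defined at the top of the record above, which is what SocialMediaMixin uses to decide whether a stored handle is already a full URL for the expected site. The function is repeated verbatim so the snippet runs on its own.

import re

def url_domain(url):
    url = url.strip().replace('//', '/')
    url = re.sub(r'^https?:/', '', url)
    url = re.sub(r'^www\.', '', url)
    return url.split('/', 1)[0].strip('@#?=. ')

print(url_domain('https://www.twitter.com/somebody'))  # twitter.com
print(url_domain('@somebody'))                         # somebody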
samba-team/samba
python/samba/tests/dns_aging.py
1
111372
# Unix SMB/CIFS implementation. # Copyright (C) Kai Blin <[email protected]> 2011 # Copyright (C) Catalyst.NET 2021 # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # import sys from samba import dsdb from samba import dsdb_dns from samba.ndr import ndr_unpack, ndr_pack from samba.samdb import SamDB from samba.auth import system_session import ldb from samba import credentials from samba.dcerpc import dns, dnsp, dnsserver from samba.dnsserver import TXTRecord, ARecord from samba.dnsserver import recbuf_from_string, ipv6_normalise from samba.tests.subunitrun import SubunitOptions, TestProgram from samba import werror, WERRORError from samba.tests.dns_base import DNSTest import samba.getopt as options import optparse import time from samba.colour import c_RED, c_GREEN, c_DARK_YELLOW parser = optparse.OptionParser( "dns_aging.py <server name> <server ip> [options]") sambaopts = options.SambaOptions(parser) parser.add_option_group(sambaopts) # use command line creds if available credopts = options.CredentialsOptions(parser) parser.add_option_group(credopts) subunitopts = SubunitOptions(parser) parser.add_option_group(subunitopts) opts, args = parser.parse_args() if len(args) < 2: parser.print_usage() sys.exit(1) LP = sambaopts.get_loadparm() CREDS = credopts.get_credentials(LP) SERVER_NAME = args[0] SERVER_IP = args[1] CREDS.set_krb_forwardable(credentials.NO_KRB_FORWARDABLE) DOMAIN = CREDS.get_realm().lower() # Unix time start, in DNS timestamp (24 * 365.25 * 369) # These are ballpark extremes for the timestamp. DNS_TIMESTAMP_1970 = 3234654 DNS_TIMESTAMP_2101 = 4383000 DNS_TIMESTAMP_1981 = 3333333 # a middling timestamp IPv4_ADDR = "127.0.0.33" IPv6_ADDR = "::1" IPv4_ADDR_2 = "127.0.0.66" IPv6_ADDR_2 = "1::1" def get_samdb(): return SamDB(url=f"ldap://{SERVER_IP}", lp=LP, session_info=system_session(), credentials=CREDS) def get_file_samdb(): # For Samba only direct file access, needed for the tombstoning functions. # (For Windows, we instruct it to tombstone over RPC). 
return SamDB(url=LP.samdb_url(), lp=LP, session_info=system_session(), credentials=CREDS) def get_rpc(): return dnsserver.dnsserver(f"ncacn_ip_tcp:{SERVER_IP}[sign]", LP, CREDS) def create_zone(name, rpc=None, aging=True): if rpc is None: rpc = get_rpc() z = dnsserver.DNS_RPC_ZONE_CREATE_INFO_LONGHORN() z.pszZoneName = name z.dwZoneType = dnsp.DNS_ZONE_TYPE_PRIMARY z.fAging = int(bool(aging)) z.dwDpFlags = dnsserver.DNS_DP_DOMAIN_DEFAULT z.fDsIntegrated = 1 z.fLoadExisting = 1 z.fAllowUpdate = dnsp.DNS_ZONE_UPDATE_UNSECURE rpc.DnssrvOperation2(dnsserver.DNS_CLIENT_VERSION_LONGHORN, 0, SERVER_IP, None, 0, 'ZoneCreate', dnsserver.DNSSRV_TYPEID_ZONE_CREATE, z) def delete_zone(name, rpc=None): if rpc is None: rpc = get_rpc() rpc.DnssrvOperation2(dnsserver.DNS_CLIENT_VERSION_LONGHORN, 0, SERVER_IP, name, 0, 'DeleteZoneFromDs', dnsserver.DNSSRV_TYPEID_NULL, None) def txt_s_list(txt): """Construct a txt record string list, which is a fiddly matter.""" if isinstance(txt, str): txt = [txt] s_list = dnsp.string_list() s_list.count = len(txt) s_list.str = txt return s_list def make_txt_record(txt): r = dns.txt_record() r.txt = txt_s_list(txt) return r def copy_rec(rec): copy = dnsserver.DNS_RPC_RECORD() copy.wType = rec.wType copy.dwFlags = rec.dwFlags copy.dwSerial = rec.dwSerial copy.dwTtlSeconds = rec.dwTtlSeconds copy.data = rec.data copy.dwTimeStamp = rec.dwTimeStamp return copy def guess_wtype(data): if isinstance(data, list): data = make_txt_record(data) return (data, dnsp.DNS_TYPE_TXT) if ":" in data: return (data, dnsp.DNS_TYPE_AAAA) return (data, dnsp.DNS_TYPE_A) class TestDNSAging(DNSTest): """Probe DNS aging and scavenging, using LDAP and RPC to set and test the timestamps behind DNS's back.""" server = SERVER_NAME server_ip = SERVER_IP creds = CREDS def setUp(self): super().setUp() self.rpc_conn = get_rpc() self.samdb = get_samdb() # We always have a zone of our own named after the test function. self.zone = self.id().rsplit('.', 1)[1] self.addCleanup(delete_zone, self.zone, self.rpc_conn) try: create_zone(self.zone, self.rpc_conn) except WERRORError as e: if e.args[0] != werror.WERR_DNS_ERROR_ZONE_ALREADY_EXISTS: raise print(f"zone {self.zone} already exists") # Though we set this in create_zone(), that doesn't work on # Windows, so we repeat again here. self.set_zone_int_params(AllowUpdate=dnsp.DNS_ZONE_UPDATE_UNSECURE) self.zone_dn = (f"DC={self.zone},CN=MicrosoftDNS,DC=DomainDNSZones," f"{self.samdb.get_default_basedn()}") def set_zone_int_params(self, zone=None, **kwargs): """Keyword arguments set parameters on the zone. e.g.: self.set_zone_int_params(Aging=1, RefreshInterval=222) See [MS-DNSP] 3.1.1.2.1 "DNS Zone Integer Properties" for names. 
""" if zone is None: zone = self.zone for key, val in kwargs.items(): name_param = dnsserver.DNS_RPC_NAME_AND_PARAM() name_param.dwParam = val name_param.pszNodeName = key try: self.rpc_conn.DnssrvOperation2( dnsserver.DNS_CLIENT_VERSION_LONGHORN, 0, SERVER_IP, zone, 0, 'ResetDwordProperty', dnsserver.DNSSRV_TYPEID_NAME_AND_PARAM, name_param) except WERRORError as e: self.fail(str(e)) def rpc_replace(self, name, old=None, new=None): """Replace a DNS_RPC_RECORD or DNS_RPC_RECORD_BUF""" # wrap our recs, if necessary if isinstance(new, dnsserver.DNS_RPC_RECORD): rec = new new = dnsserver.DNS_RPC_RECORD_BUF() new.rec = rec if isinstance(old, dnsserver.DNS_RPC_RECORD): rec = old old = dnsserver.DNS_RPC_RECORD_BUF() old.rec = rec try: self.rpc_conn.DnssrvUpdateRecord2( dnsserver.DNS_CLIENT_VERSION_LONGHORN, 0, SERVER_IP, self.zone, name, new, old) except WERRORError as e: self.fail(f"could not replace record ({e})") def get_unique_txt_record(self, name, txt): """Get the TXT record on Name with value txt, asserting that there is only one.""" if isinstance(txt, str): txt = [txt] recs = self.ldap_get_records(name) match = None for r in recs: if r.wType != dnsp.DNS_TYPE_TXT: continue txt2 = [x for x in r.data.str] if txt2 == txt: self.assertIsNone(match) match = r return match def get_unique_ip_record(self, name, addr, wtype=None): """Get an A or AAAA record on name with the matching data.""" if wtype is None: addr, wtype = guess_wtype(addr) recs = self.ldap_get_records(name) # We need to use the internal dns_record_match because not all # forms always match on strings (e.g. IPv6) rec = dnsp.DnssrvRpcRecord() rec.wType = wtype rec.data = addr match = None for r in recs: if dsdb_dns.records_match(r, rec): self.assertIsNone(match) match = r return match def dns_query(self, name, qtype=dns.DNS_QTYPE_ALL): """make a query, which might help Windows notice LDAP changes""" p = self.make_name_packet(dns.DNS_OPCODE_QUERY) fullname = "%s.%s" % (name, self.zone) q = self.make_name_question(fullname, qtype, dns.DNS_QCLASS_IN) self.finish_name_packet(p, [q]) r, rp = self.dns_transaction_udp(p, host=SERVER_IP) return r def dns_update_non_text(self, name, data, wtype=None, qclass=dns.DNS_QCLASS_IN): if wtype is None: data, wtype = guess_wtype(data) if qclass == dns.DNS_QCLASS_IN: ttl = 123 else: ttl = 0 fullname = "%s.%s" % (name, self.zone) p = self.make_name_packet(dns.DNS_OPCODE_UPDATE) u = self.make_name_question(self.zone, dns.DNS_QTYPE_SOA, dns.DNS_QCLASS_IN) self.finish_name_packet(p, [u]) r = dns.res_rec() r.name = fullname r.rr_type = wtype r.rr_class = qclass r.ttl = ttl if data is not None: r.length = 0xffff r.rdata = data else: r.length = 0 p.nscount = 1 p.nsrecs = [r] (code, response) = self.dns_transaction_udp(p, host=SERVER_IP) self.assert_dns_rcode_equals(code, dns.DNS_RCODE_OK) return response def dns_delete(self, name, data, wtype=None): return self.dns_update_non_text(name, data, wtype, qclass=dns.DNS_QCLASS_NONE) def dns_delete_type(self, name, wtype): return self.dns_update_non_text(name, None, wtype, qclass=dns.DNS_QCLASS_ANY) def dns_update_record(self, name, txt, ttl=900): if isinstance(txt, str): txt = [txt] p = self.make_txt_update(name, txt, self.zone, ttl=ttl) (code, response) = self.dns_transaction_udp(p, host=SERVER_IP) if code.operation & dns.DNS_RCODE == dns.DNS_RCODE_REFUSED: # sometimes you might forget this print("\n\ngot DNS_RCODE_REFUSED\n") print("Are you running this in the fl2003 environment?\n") print("try `SELFTEST_TESTENV='fl2003dc:local' make testenv`\n\n") 
self.assert_dns_rcode_equals(code, dns.DNS_RCODE_OK) return self.get_unique_txt_record(name, txt) def rpc_update_record(self, name, txt, **kwargs): """Add the record that self.dns_update_record() would add, via the dnsserver RPC pipe. As with DNS update, if the record already exists, we replace it. """ if isinstance(txt, str): txt = [txt] old = TXTRecord(txt) rec = TXTRecord(txt) for k, v in kwargs.items(): setattr(rec, k, v) try: self.rpc_replace(name, old, rec) except AssertionError as e: # we have caught and wrapped the WERRor inside if 'WERR_DNS_ERROR_RECORD_DOES_NOT_EXIST' not in str(e): raise self.rpc_replace(name, None, rec) return self.get_unique_txt_record(name, txt) def rpc_delete_txt(self, name, txt): if isinstance(txt, str): txt = [txt] old = TXTRecord(txt) self.rpc_replace(name, old, None) def get_one_node(self, name): expr = f"(&(objectClass=dnsNode)(name={name}))" nodes = self.samdb.search(base=self.zone_dn, scope=ldb.SCOPE_SUBTREE, expression=expr, attrs=["dnsRecord", "dNSTombstoned", "name"]) if len(nodes) > 1: self.fail( f"expected 0 or 1 dnsNodes for {name}, found {len(nodes)}") if len(nodes) == 0: return None return nodes[0] def ldap_get_records(self, name): node = self.get_one_node(name) if node is None: return [] records = node.get('dnsRecord') return [ndr_unpack(dnsp.DnssrvRpcRecord, r) for r in records] def ldap_get_non_tombstoned_records(self, name): all_records = self.ldap_get_records(name) records = [] for r in all_records: if r.wType != dnsp.DNS_TYPE_TOMBSTONE: records.append(r) return records def assert_tombstoned(self, name, tombstoned=True, timestamp=None): # If run with tombstoned=False, assert it isn't tombstoned # (and has no traces of tombstone). Otherwise assert it has # all the necessary bits. # # with timestamp=<non-zero number of hours>, we assert that # the nttime timestamp is about that time. # # with timestamp=None, we assert it is within a century or so. # # with timestamp=False (or 0), we don't assert on it. 
node = self.get_one_node(name) if node is None: self.fail(f"no node named {name}") dnsts = node.get("dNSTombstoned") if dnsts is None: is_tombstoned = False else: self.assertEqual(len(dnsts), 1) if dnsts[0] == b'TRUE': is_tombstoned = True else: is_tombstoned = False if tombstoned != is_tombstoned: if is_tombstoned: self.fail(f"{name} is tombstoned") else: self.fail(f"{name} is not tombstoned") recs = self.ldap_get_records(name) if is_tombstoned: self.assertEqual(len(recs), 1) self.assertEqual(recs[0].wType, dnsp.DNS_TYPE_TOMBSTONE) if timestamp is None: self.assert_nttime_in_hour_range(recs[0].data) elif timestamp: self.assert_nttime_in_hour_range(recs[0].data, timestamp - 3, timestamp + 3) else: for r in recs: self.assertNotEqual(recs[0].wType, dnsp.DNS_TYPE_TOMBSTONE) def ldap_replace_records(self, name, records): # We use raw ldap to avoid the "helpfulness" of dsdb_dns.replace() dn = f'DC={name},{self.zone_dn}' msg = ldb.Message.from_dict(self.samdb, {'dn': dn, 'dnsRecord': [ndr_pack(r) for r in records] }, ldb.FLAG_MOD_REPLACE) try: self.samdb.modify(msg) except ldb.LdbError as e: if 'LDAP_NO_SUCH_OBJECT' not in e.args[1]: raise # We need to do an add msg["objectClass"] = ["top", "dnsNode"] msg["dnsRecord"].set_flags(ldb.FLAG_MOD_ADD) self.samdb.add(msg) def ldap_update_core(self, name, wtype, data, **kwargs): """This one is not TXT specific.""" records = self.ldap_get_records(name) # default values rec = dnsp.DnssrvRpcRecord() rec.wType = wtype rec.rank = dnsp.DNS_RANK_ZONE rec.dwTtlSeconds = 900 rec.dwSerial = 110 rec.dwTimeStamp = 0 rec.data = data # override defaults, as required for k, v in kwargs.items(): setattr(rec, k, v) for i, r in enumerate(records[:]): if dsdb_dns.records_match(r, rec): records[i] = rec break else: # record not found records.append(rec) self.ldap_replace_records(name, records) return rec def ldap_update_record(self, name, txt, **kwargs): """Add the record that self.dns_update_record() would add, via ldap, thus allowing us to set additional dnsRecord features like dwTimestamp. 
""" rec = self.ldap_update_core(name, dnsp.DNS_TYPE_TXT, txt_s_list(txt), **kwargs) recs = self.ldap_get_records(name) match = None for r in recs: if r.wType != rec.wType: continue if r.data.str == rec.data.str: self.assertIsNone(match, f"duplicate records for {name}") match = r self.assertEqual(match.rank, rec.rank & 255) self.assertEqual(match.dwTtlSeconds, rec.dwTtlSeconds) self.assert_timestamps_equal(match.dwTimeStamp, rec.dwTimeStamp) return match def ldap_delete_record(self, name, data, wtype=dnsp.DNS_TYPE_TXT): rec = dnsp.DnssrvRpcRecord() if wtype == dnsp.DNS_TYPE_TXT: data = txt_s_list(data) rec.wType = wtype rec.data = data records = self.ldap_get_records(name) for i, r in enumerate(records[:]): if dsdb_dns.records_match(r, rec): del records[i] break else: self.fail(f"record {data} not found") self.ldap_replace_records(name, records) def add_ip_record(self, name, addr, wtype=None, **kwargs): if wtype is None: addr, wtype = guess_wtype(addr) rec = self.ldap_update_core(name, wtype, addr, **kwargs) recs = self.ldap_get_records(name) match = None for r in recs: if dsdb_dns.records_match(r, rec): self.assertIsNone(match, f"duplicate records for {name}") match = r self.assertEqual(match.rank, rec.rank & 255) self.assertEqual(match.dwTtlSeconds, rec.dwTtlSeconds) self.assert_timestamps_equal(match.dwTimeStamp, rec.dwTimeStamp) return match def ldap_modify_timestamps(self, name, delta): records = self.ldap_get_records(name) for rec in records: rec.dwTimeStamp += delta self.ldap_replace_records(name, records) def get_rpc_records(self, name, dns_type=None): if dns_type is None: dns_type = dnsp.DNS_TYPE_ALL select_flags = dnsserver.DNS_RPC_VIEW_AUTHORITY_DATA buflen, res = self.rpc_conn.DnssrvEnumRecords2( dnsserver.DNS_CLIENT_VERSION_LONGHORN, 0, SERVER_IP, self.zone, name, None, dns_type, select_flags, None, None) recs = [] if not res or res.count == 0: return [] for rec in res.rec: recs.extend(rec.records) return recs def dns_tombstone(self, name, epoch_hours=DNS_TIMESTAMP_1981, epoch_nttime=None): dn = f'DC={name},{self.zone_dn}' r = dnsp.DnssrvRpcRecord() r.wType = dnsp.DNS_TYPE_TOMBSTONE # r.dwTimeStamp is a 32 bit value in hours, and r.data is an # NTTIME (100 nanosecond intervals), both in the 1601 epoch. A # tombstome will have both, but expiration calculations use # the r.data NTTIME EntombedTime timestamp (see [MS-DNSP]). r.dwTimeStamp = epoch_hours if epoch_nttime is None: r.data = epoch_hours * 3600 * 10 * 1000 * 1000 else: r.data = epoch_nttime msg = ldb.Message.from_dict(self.samdb, {'dn': dn, 'dnsRecord': [ndr_pack(r)], 'dnsTombstoned': 'TRUE' }, ldb.FLAG_MOD_REPLACE) try: self.samdb.modify(msg) except ldb.LdbError as e: if 'LDAP_NO_SUCH_OBJECT' not in e.args[1]: raise # We need to do an add msg["objectClass"] = ["top", "dnsNode"] self.samdb.add(msg) def set_aging(self, enable=False): self.set_zone_int_params(Aging=int(bool(enable))) def assert_timestamp_in_ballpark(self, rec): self.assertGreater(rec.dwTimeStamp, DNS_TIMESTAMP_1970) self.assertLess(rec.dwTimeStamp, DNS_TIMESTAMP_2101) def assert_nttime_in_hour_range(self, t, hour_min=DNS_TIMESTAMP_1970, hour_max=DNS_TIMESTAMP_2101): t //= int(3600 * 1e7) self.assertGreater(t, hour_min) self.assertLess(t, hour_max) def assert_soon_after(self, timestamp, reference): """Assert that a timestamp is the same or very slightly higher than a reference timestamp. Typically we expect the timestamps to be identical, unless an hour has clicked over since the reference was taken. 
However we allow one more hour in case it happens during a daylight savings transition or something. """ if hasattr(timestamp, 'dwTimeStamp'): timestamp = timestamp.dwTimeStamp if hasattr(reference, 'dwTimeStamp'): reference = reference.dwTimeStamp diff = timestamp - reference days = abs(diff / 24.0) if diff < 0: msg = f"timestamp is {days} days ({abs(diff)} hours) before reference" elif diff > 2: msg = f"timestamp is {days} days ({diff} hours) after reference" else: return raise AssertionError(msg) def assert_timestamps_equal(self, ts1, ts2): """Just like assertEqual(), but tells us the difference, not the absolute values. e.g: self.assertEqual(a, b) AssertionError: 3685491 != 3685371 self.assert_timestamps_equal(a, b) AssertionError: -120 (first is 5.0 days earlier than second) Also, we turn a record into a timestamp if we need """ if hasattr(ts1, 'dwTimeStamp'): ts1 = ts1.dwTimeStamp if hasattr(ts2, 'dwTimeStamp'): ts2 = ts2.dwTimeStamp if ts1 == ts2: return diff = ts1 - ts2 days = abs(diff / 24.0) if ts1 == 0 or ts2 == 0: # when comparing to zero we don't want the number of days. msg = f"timestamp {ts1} != {ts2}" elif diff > 0: msg = f"{ts1} is {days} days ({diff} hours) after {ts2}" else: msg = f"{ts1} is {days} days ({abs(diff)} hours) before {ts2}" raise AssertionError(msg) def test_update_timestamps_aging_off_then_on(self): # we will add a record with aging off # it will have the current timestamp self.set_aging(False) name = 'timestamp-now' name2 = 'timestamp-eightdays' rec = self.dns_update_record(name, [name]) start_time = rec.dwTimeStamp self.assert_timestamp_in_ballpark(rec) # alter the timestamp -8 days using RPC # with aging turned off, we expect no change # when aging is on, we expect change eight_days_ago = start_time - 8 * 24 rec = self.ldap_update_record(name2, [name2], dwTimeStamp=eight_days_ago) self.assert_timestamps_equal(rec.dwTimeStamp, eight_days_ago) # if aging was on, this would change rec = self.dns_update_record(name2, [name2]) self.assert_timestamps_equal(rec.dwTimeStamp, eight_days_ago) self.set_aging(True) rec = self.dns_update_record(name2, [name2]) self.assertGreaterEqual(rec.dwTimeStamp, start_time) def test_rpc_update_timestamps(self): # RPC always sets timestamps to zero on Windows. self.set_aging(False) name = 'timestamp-now' rec = self.dns_update_record(name, [name]) start_time = rec.dwTimeStamp self.assert_timestamp_in_ballpark(rec) # attempt to alter the timestamp to something close by. eight_days_ago = start_time - 8 * 24 rec = self.rpc_update_record(name, [name], dwTimeStamp=eight_days_ago) self.assertEqual(rec.dwTimeStamp, 0) # try again, with aging on self.set_aging(True) rec = self.rpc_update_record(name, [name], dwTimeStamp=eight_days_ago) self.assertEqual(rec.dwTimeStamp, 0) # now that the record is static, a dns update won't change it rec = self.dns_update_record(name, [name]) self.assertEqual(rec.dwTimeStamp, 0) # but another record on the same node will behave normally # i.e. the node is not static, the record is. name2 = 'timestamp-eightdays' rec = self.dns_update_record(name2, [name2]) self.assert_soon_after(rec.dwTimeStamp, start_time) def get_txt_timestamps(self, name, *txts): records = self.ldap_get_records(name) ret = [] for t in txts: for r in records: t2 = [x for x in r.data.str] if t == t2: ret.append(r.dwTimeStamp) return ret def test_update_aging_disabled_2(self): # With aging disabled, Windows updates the timestamps of all # records when one is updated. 
name = 'test' txt1 = ['test txt'] txt2 = ['test', 'txt2'] txt3 = ['test', 'txt3'] self.set_aging(False) current_time = self.dns_update_record(name, txt1).dwTimeStamp six_days_ago = current_time - 6 * 24 eight_days_ago = current_time - 8 * 24 fifteen_days_ago = current_time - 15 * 24 hundred_days_ago = current_time - 100 * 24 thousand_days_ago = current_time - 1000 * 24 for timestamp in (current_time, six_days_ago, eight_days_ago, fifteen_days_ago, hundred_days_ago, thousand_days_ago): # wind back self.ldap_update_record(name, txt1, dwTimeStamp=timestamp) self.assertEqual(self.get_txt_timestamps(name, txt1), [timestamp]) # no change here update_timestamp = self.dns_update_record(name, txt1).dwTimeStamp self.assert_timestamps_equal(update_timestamp, timestamp) # adding a fresh record for timestamp in (current_time, six_days_ago, eight_days_ago, fifteen_days_ago, hundred_days_ago, thousand_days_ago, 100000, 100): # wind back timestamp1 = self.ldap_update_record( name, txt1, dwTimeStamp=timestamp).dwTimeStamp self.assert_timestamps_equal(timestamp1, timestamp) self.dns_update_record(name, txt2) timestamps = self.get_txt_timestamps(name, txt1, txt2) self.assertEqual(timestamps, [timestamp, current_time]) self.ldap_delete_record(name, txt2) timestamps = self.get_txt_timestamps(name, txt1) self.assertEqual(timestamps, [timestamp]) # add record 2. timestamp2 = self.dns_update_record(name, txt2).dwTimeStamp self.assert_soon_after(timestamp2, current_time) for timestamp in (current_time, six_days_ago, eight_days_ago, fifteen_days_ago, hundred_days_ago, thousand_days_ago, 100000, 100): # wind back self.ldap_update_record(name, txt1, dwTimeStamp=timestamp) timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp self.assert_timestamps_equal(timestamp1, timestamp) timestamp2 = self.dns_update_record(name, txt2).dwTimeStamp # txt1 timestamp is now current time timestamps = self.get_txt_timestamps(name, txt1, txt2) self.assertEqual(timestamps, [timestamp, current_time]) # with 3 records, no change for timestamp in (current_time, six_days_ago, eight_days_ago, fifteen_days_ago, hundred_days_ago, thousand_days_ago, 100000, 10): # wind back self.ldap_update_record(name, txt1, dwTimeStamp=timestamp) self.ldap_update_record(name, txt2, dwTimeStamp=timestamp) self.ldap_update_record(name, txt3, dwTimeStamp=(timestamp + 30)) timestamp3 = self.get_unique_txt_record(name, txt3).dwTimeStamp self.assert_timestamps_equal(timestamp3, timestamp + 30) self.dns_update_record(name, txt2).dwTimeStamp timestamps = self.get_txt_timestamps(name, txt1, txt2, txt3) self.assertEqual(timestamps, [timestamp, timestamp, timestamp + 30]) # with 3 records, one of which is static # first we set the updatee's timestamp to a recognisable number self.ldap_update_record(name, txt2, dwTimeStamp=999999) for timestamp in (current_time, six_days_ago, eight_days_ago, fifteen_days_ago, hundred_days_ago, thousand_days_ago, 100000, 10): # wind back self.ldap_update_record(name, txt1, dwTimeStamp=0) self.ldap_update_record(name, txt3, dwTimeStamp=(timestamp - 9)) timestamp3 = self.get_unique_txt_record(name, txt3).dwTimeStamp self.assert_timestamps_equal(timestamp3, timestamp - 9) self.dns_update_record(name, txt2) timestamps = self.get_txt_timestamps(name, txt1, txt2, txt3) self.assertEqual(timestamps, [0, 999999, timestamp - 9]) # with 3 records, updating one which is static timestamp3 = self.dns_update_record(name, txt3).dwTimeStamp for timestamp in (current_time, six_days_ago, eight_days_ago, fifteen_days_ago, hundred_days_ago, 
thousand_days_ago, 100000, 10): # wind back self.ldap_update_record(name, txt1, dwTimeStamp=0) self.ldap_update_record(name, txt2, dwTimeStamp=0) self.ldap_update_record(name, txt3, dwTimeStamp=(timestamp + 30)) timestamp3 = self.get_unique_txt_record(name, txt3).dwTimeStamp self.assert_timestamps_equal(timestamp3, timestamp + 30) self.dns_update_record(name, txt2).dwTimeStamp timestamps = self.get_txt_timestamps(name, txt1, txt2, txt3) self.assertEqual(timestamps, [0, 0, timestamp + 30]) # with 3 records, after the static nodes have been replaced self.ldap_update_record(name, txt1, dwTimeStamp=777777) self.ldap_update_record(name, txt2, dwTimeStamp=888888) timestamp3 = self.dns_update_record(name, txt3).dwTimeStamp for timestamp in (current_time, six_days_ago, eight_days_ago, fifteen_days_ago, hundred_days_ago, thousand_days_ago, 100000, 10): # wind back self.ldap_update_record(name, txt3, dwTimeStamp=(timestamp)) timestamp3 = self.get_unique_txt_record(name, txt3).dwTimeStamp self.assert_timestamps_equal(timestamp3, timestamp) self.dns_update_record(name, txt2) timestamps = self.get_txt_timestamps(name, txt1, txt2, txt3) self.assertEqual(timestamps, [777777, 888888, timestamp]) def _test_update_aging_disabled_n_days_ago(self, n_days): name = 'test' txt1 = ['1'] txt2 = ['2'] self.set_aging(False) current_time = self.dns_update_record(name, txt1).dwTimeStamp # rewind timestamp using ldap self.ldap_modify_timestamps(name, n_days * -24) n_days_ago = self.get_unique_txt_record(name, txt1).dwTimeStamp self.assertGreater(current_time, n_days_ago) # no change when updating this record update_timestamp = self.dns_update_record(name, txt1).dwTimeStamp self.assert_timestamps_equal(update_timestamp, n_days_ago) # add another record, which should have the current timestamp timestamp2 = self.dns_update_record(name, txt2).dwTimeStamp self.assert_soon_after(timestamp2, current_time) # get the original record timestamp. NOW it matches current_time timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp self.assert_timestamps_equal(timestamp1, timestamp2) # let's repeat that, this time with txt2 existing self.ldap_update_record(name, txt1, dwTimeStamp=n_days_ago) timestamp1 = self.dns_update_record(name, txt1).dwTimeStamp self.assert_timestamps_equal(timestamp1, n_days_ago) # this update is not an add timestamp2 = self.dns_update_record(name, txt2).dwTimeStamp self.assert_soon_after(timestamp2, current_time) # now timestamp1 is not changed timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp self.assert_timestamps_equal(timestamp1, n_days_ago) # delete record2, try again self.ldap_delete_record(name, txt2) self.ldap_update_record(name, txt1, dwTimeStamp=n_days_ago) timestamp1 = self.dns_update_record(name, txt1).dwTimeStamp self.assert_timestamps_equal(timestamp1, n_days_ago) # here we are re-adding the deleted record timestamp2 = self.dns_update_record(name, txt2).dwTimeStamp self.assert_soon_after(timestamp2, current_time) timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp # It gets weird HERE. # note how the SIBLING of the deleted, re-added record differs # from the sibling of freshly added record, depending on the # time difference. 
if n_days <= 7: self.assert_timestamps_equal(timestamp1, n_days_ago) else: self.assert_timestamps_equal(timestamp1, timestamp2) # re-timestamp record2, try again self.ldap_update_record(name, txt2, dwTimeStamp=n_days_ago) self.ldap_update_record(name, txt1, dwTimeStamp=n_days_ago) timestamp1 = self.dns_update_record(name, txt1).dwTimeStamp self.assert_timestamps_equal(timestamp1, n_days_ago) # no change timestamp2 = self.dns_update_record(name, txt2).dwTimeStamp self.assert_timestamps_equal(timestamp2, n_days_ago) # also no change timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp self.assert_timestamps_equal(timestamp1, timestamp2) # let's introduce another record txt3 = ['3'] self.ldap_update_record(name, txt2, dwTimeStamp=n_days_ago) self.ldap_update_record(name, txt1, dwTimeStamp=n_days_ago) timestamp3 = self.dns_update_record(name, txt3).dwTimeStamp self.assert_soon_after(timestamp3, current_time) timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp timestamp2 = self.get_unique_txt_record(name, txt2).dwTimeStamp if n_days <= 7: self.assert_timestamps_equal(timestamp1, n_days_ago) else: self.assert_timestamps_equal(timestamp1, timestamp3) self.assert_timestamps_equal(timestamp2, timestamp3) self.ldap_delete_record(name, txt3) timestamp3 = self.dns_update_record(name, txt3).dwTimeStamp self.assert_soon_after(timestamp3, current_time) timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp timestamp2 = self.get_unique_txt_record(name, txt2).dwTimeStamp if n_days <= 7: self.assert_timestamps_equal(timestamp1, n_days_ago) else: self.assert_timestamps_equal(timestamp1, timestamp3) self.assert_timestamps_equal(timestamp2, timestamp3) # and here we'll make txt3 static txt4 = ['4'] # and here we'll make txt1 static self.ldap_update_record(name, txt1, dwTimeStamp=0) self.ldap_update_record(name, txt2, dwTimeStamp=n_days_ago) self.ldap_update_record(name, txt3, dwTimeStamp=n_days_ago) timestamp1 = self.dns_update_record(name, txt1).dwTimeStamp timestamp2 = self.get_unique_txt_record(name, txt2).dwTimeStamp timestamp3 = self.get_unique_txt_record(name, txt3).dwTimeStamp timestamp4 = self.dns_update_record(name, txt4).dwTimeStamp self.assertEqual(timestamp1, 0) self.assert_timestamps_equal(timestamp2, n_days_ago) self.assert_timestamps_equal(timestamp3, n_days_ago) self.assert_soon_after(timestamp4, current_time) def test_update_aging_disabled_in_no_refresh_window(self): self._test_update_aging_disabled_n_days_ago(4) def test_update_aging_disabled_on_no_refresh_boundary(self): self._test_update_aging_disabled_n_days_ago(7) def test_update_aging_disabled_in_refresh_window(self): self._test_update_aging_disabled_n_days_ago(9) def test_update_aging_disabled_beyond_refresh_window(self): self._test_update_aging_disabled_n_days_ago(16) def test_update_aging_disabled_in_eighteenth_century(self): self._test_update_aging_disabled_n_days_ago(100000) def test_update_aging_disabled_static(self): name = 'test' txt1 = ['1'] txt2 = ['2'] self.set_aging(False) current_time = self.dns_update_record(name, txt1).dwTimeStamp self.ldap_update_record(name, txt1, dwTimeStamp=0) # no change when updating this record timestamp1 = self.dns_update_record(name, txt1).dwTimeStamp self.assertEqual(timestamp1, 0) # add another record, which should have the current timestamp timestamp2 = self.dns_update_record(name, txt2).dwTimeStamp self.assert_soon_after(timestamp2, current_time) timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp self.assert_soon_after(timestamp1, current_time) # 
let's repeat that, this time with txt2 existing timestamp1 = self.dns_update_record(name, txt1).dwTimeStamp self.assert_soon_after(timestamp2, current_time) timestamp2 = self.dns_update_record(name, txt2).dwTimeStamp self.assert_soon_after(timestamp2, current_time) # delete record2, try again self.ldap_delete_record(name, txt2) self.ldap_update_record(name, txt1, dwTimeStamp=0) # no change when updating this record timestamp1 = self.dns_update_record(name, txt1).dwTimeStamp self.assertEqual(timestamp1, 0) timestamp2 = self.dns_update_record(name, txt2).dwTimeStamp self.assertEqual(timestamp2, 0) timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp self.assertEqual(timestamp1, 0) # re-timestamp record2, try again self.ldap_update_record(name, txt2, dwTimeStamp=1) self.ldap_update_record(name, txt1, dwTimeStamp=0) # no change when updating this record timestamp2 = self.dns_update_record(name, txt2).dwTimeStamp self.assert_timestamps_equal(timestamp2, 1) def test_update_aging_disabled(self): # With aging disabled, Windows updates the timestamps of all # records when one is updated. name = 'test' txt1 = ['test txt'] txt2 = ['test', 'txt2'] txt3 = ['test', 'txt3'] minus_6 = -6 * 24 minus_8 = -8 * 24 self.set_aging(False) current_time = self.dns_update_record(name, txt1).dwTimeStamp # rewind timestamp using ldap self.ldap_modify_timestamps(name, minus_6) after_mod = self.get_unique_txt_record(name, txt1) six_days_ago = after_mod.dwTimeStamp self.assert_timestamps_equal(six_days_ago, current_time + minus_6) # no change update_timestamp = self.dns_update_record(name, txt1).dwTimeStamp self.assert_timestamps_equal(update_timestamp, six_days_ago) self.check_query_txt(name, txt1, zone=self.zone) # another record timestamp2 = self.dns_update_record(name, txt2).dwTimeStamp self.assert_soon_after(timestamp2, current_time) timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp # without aging, timestamp1 is changed!! self.assert_timestamps_equal(timestamp1, timestamp2) # Set both records back to 8 days ago. self.ldap_modify_timestamps(name, minus_8) eight_days_ago = self.get_unique_txt_record(name, txt1).dwTimeStamp self.assert_timestamps_equal(eight_days_ago, current_time + minus_8) update2 = self.dns_update_record(name, txt2) # Without aging on, an update should not change the timestamps. self.assert_timestamps_equal(update2.dwTimeStamp, eight_days_ago) timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp self.assert_timestamps_equal(timestamp1, eight_days_ago) # Add another txt record. The new record should have the now # timestamp, and drag the others up with it. timestamp3 = self.dns_update_record(name, txt3).dwTimeStamp self.assert_soon_after(timestamp3, current_time) timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp timestamp2 = self.get_unique_txt_record(name, txt2).dwTimeStamp self.assert_timestamps_equal(timestamp1, timestamp3) self.assert_timestamps_equal(timestamp2, timestamp3) hundred_days_ago = current_time - 100 * 24 thousand_days_ago = current_time - 1000 * 24 record = self.ldap_update_record(name, txt1, dwTimeStamp=hundred_days_ago) self.assert_timestamps_equal(record.dwTimeStamp, hundred_days_ago) record = self.ldap_update_record(name, txt2, dwTimeStamp=thousand_days_ago) self.assert_timestamps_equal(record.dwTimeStamp, thousand_days_ago) # update 3, will others change (because beyond RefreshInterval)? yes. 
timestamp3 = self.dns_update_record(name, txt3).dwTimeStamp timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp timestamp2 = self.get_unique_txt_record(name, txt2).dwTimeStamp self.assert_soon_after(timestamp3, current_time) self.assert_timestamps_equal(timestamp1, hundred_days_ago) self.assert_timestamps_equal(timestamp2, thousand_days_ago) fifteen_days_ago = current_time - 15 * 24 self.ldap_update_record(name, txt3, dwTimeStamp=fifteen_days_ago) timestamp2 = self.dns_update_record(name, txt2).dwTimeStamp timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp timestamp3 = self.get_unique_txt_record(name, txt3).dwTimeStamp # DNS update has no effect because all records are old self.assert_timestamps_equal(timestamp2, thousand_days_ago) self.assert_timestamps_equal(timestamp1, hundred_days_ago) self.assert_timestamps_equal(timestamp3, fifteen_days_ago) # Does update of old record affect timestamp of refreshable record? No. self.ldap_update_record(name, txt3, dwTimeStamp=eight_days_ago) timestamp2 = self.dns_update_record(name, txt2).dwTimeStamp timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp timestamp3 = self.get_unique_txt_record(name, txt3).dwTimeStamp # DNS update has no effect because all records are old self.assert_timestamps_equal(timestamp2, thousand_days_ago) self.assert_timestamps_equal(timestamp1, hundred_days_ago) self.assert_timestamps_equal(timestamp3, eight_days_ago) # RPC zeros timestamp, after which updates won't change it. # BUT it refreshes all others! self.rpc_update_record(name, txt2) timestamp2 = self.dns_update_record(name, txt3).dwTimeStamp timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp timestamp2 = self.get_unique_txt_record(name, txt2).dwTimeStamp self.assertEqual(timestamp2, 0) self.assert_soon_after(timestamp1, current_time) self.assert_timestamps_equal(timestamp3, eight_days_ago) def test_update_aging_enabled(self): name = 'test' txt1 = ['test txt'] txt2 = ['test', 'txt2'] txt3 = ['test', 'txt3'] txt4 = ['4'] self.set_aging(True) current_time = self.dns_update_record(name, txt2).dwTimeStamp six_days_ago = current_time - 6 * 24 eight_days_ago = current_time - 8 * 24 fifteen_days_ago = current_time - 15 * 24 hundred_days_ago = current_time - 100 * 24 self.ldap_update_record(name, txt1, dwTimeStamp=six_days_ago) # with or without aging, a delta of -6 days does not affect # timestamps, because dwNoRefreshInterval is 7 days. timestamp1 = self.dns_update_record(name, txt1).dwTimeStamp timestamp2 = self.get_unique_txt_record(name, txt2).dwTimeStamp self.assert_timestamps_equal(timestamp1, six_days_ago) self.assert_soon_after(timestamp2, current_time) self.ldap_update_record(name, txt3, dwTimeStamp=eight_days_ago) timestamp3 = self.get_unique_txt_record(name, txt3).dwTimeStamp self.assert_timestamps_equal(timestamp3, eight_days_ago) # update 1, what happens to 2 and 3? Nothing? timestamp1 = self.dns_update_record(name, txt1).dwTimeStamp timestamp2 = self.get_unique_txt_record(name, txt2).dwTimeStamp timestamp3 = self.get_unique_txt_record(name, txt3).dwTimeStamp self.assert_timestamps_equal(timestamp1, six_days_ago) self.assert_soon_after(timestamp2, current_time) self.assert_timestamps_equal(timestamp3, eight_days_ago) # now set 1 to 8 days, and we should see changes self.ldap_update_record(name, txt1, dwTimeStamp=eight_days_ago) # update 1, what happens to 2 and 3? Nothing? 
timestamp1 = self.dns_update_record(name, txt1).dwTimeStamp timestamp2 = self.get_unique_txt_record(name, txt2).dwTimeStamp timestamp3 = self.get_unique_txt_record(name, txt3).dwTimeStamp self.assert_soon_after(timestamp1, current_time) self.assert_soon_after(timestamp2, current_time) self.assert_timestamps_equal(timestamp3, eight_days_ago) # next few ones use these numbers self.ldap_update_record(name, txt1, dwTimeStamp=fifteen_days_ago) self.ldap_update_record(name, txt2, dwTimeStamp=six_days_ago) self.ldap_update_record(name, txt3, dwTimeStamp=eight_days_ago) # change even though 1 is outside the window timestamp1 = self.dns_update_record(name, txt1).dwTimeStamp timestamp2 = self.get_unique_txt_record(name, txt2).dwTimeStamp timestamp3 = self.get_unique_txt_record(name, txt3).dwTimeStamp self.assert_soon_after(timestamp1, current_time) self.assert_timestamps_equal(timestamp2, six_days_ago) self.assert_timestamps_equal(timestamp3, eight_days_ago) # reset 1 self.ldap_update_record(name, txt1, dwTimeStamp=fifteen_days_ago) # no change, because 2 is outside the window timestamp2 = self.dns_update_record(name, txt2).dwTimeStamp timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp timestamp3 = self.get_unique_txt_record(name, txt3).dwTimeStamp self.assert_timestamps_equal(timestamp1, fifteen_days_ago) self.assert_timestamps_equal(timestamp2, six_days_ago) self.assert_timestamps_equal(timestamp3, eight_days_ago) # 3 changes, others do not timestamp3 = self.dns_update_record(name, txt3).dwTimeStamp timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp timestamp2 = self.get_unique_txt_record(name, txt2).dwTimeStamp self.assert_timestamps_equal(timestamp1, fifteen_days_ago) self.assert_timestamps_equal(timestamp2, six_days_ago) self.assert_soon_after(timestamp3, current_time) # reset 3 to 100 days self.ldap_update_record(name, txt3, dwTimeStamp=hundred_days_ago) # 3 changes, others do not timestamp3 = self.dns_update_record(name, txt3).dwTimeStamp timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp timestamp2 = self.get_unique_txt_record(name, txt2).dwTimeStamp self.assert_timestamps_equal(timestamp1, fifteen_days_ago) self.assert_timestamps_equal(timestamp2, six_days_ago) self.assert_soon_after(timestamp3, current_time) # reset 1 and 3 to 8 days. does update of 1 affect 3? self.ldap_update_record(name, txt1, dwTimeStamp=eight_days_ago) self.ldap_update_record(name, txt3, dwTimeStamp=eight_days_ago) # 1 changes, others do not timestamp1 = self.dns_update_record(name, txt1).dwTimeStamp timestamp2 = self.get_unique_txt_record(name, txt2).dwTimeStamp timestamp3 = self.get_unique_txt_record(name, txt3).dwTimeStamp self.assert_soon_after(timestamp1, current_time) self.assert_timestamps_equal(timestamp2, six_days_ago) self.assert_timestamps_equal(timestamp3, eight_days_ago) # Try an RPC update, zeroing 1 --> what happens to 3? timestamp1 = self.rpc_update_record(name, txt1).dwTimeStamp timestamp2 = self.get_unique_txt_record(name, txt2).dwTimeStamp timestamp3 = self.get_unique_txt_record(name, txt3).dwTimeStamp self.assertEqual(timestamp1, 0) self.assert_timestamps_equal(timestamp2, six_days_ago) self.assert_timestamps_equal(timestamp3, eight_days_ago) # with 2 and 3 at 8 days, does static record change things? self.ldap_update_record(name, txt2, dwTimeStamp=eight_days_ago) # 2 changes, but to zero! 
timestamp2 = self.dns_update_record(name, txt2).dwTimeStamp timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp timestamp3 = self.get_unique_txt_record(name, txt3).dwTimeStamp self.assert_timestamps_equal(timestamp1, 0) self.assert_timestamps_equal(timestamp2, 0) self.assert_timestamps_equal(timestamp3, eight_days_ago) self.ldap_update_record(name, txt2, dwTimeStamp=six_days_ago) self.ldap_update_record(name, txt1, dwTimeStamp=3000000) timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp self.assert_timestamps_equal(timestamp1, 3000000) # dns update remembers that node is static, even with no # static records. timestamp1 = self.dns_update_record(name, txt1).dwTimeStamp self.assertEqual(timestamp1, 0) # Add another txt record. The new record should have the now # timestamp, and the others should remain unchanged. # BUT somehow record 1 is static!? timestamp4 = self.dns_update_record(name, txt4).dwTimeStamp timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp timestamp2 = self.get_unique_txt_record(name, txt2).dwTimeStamp timestamp3 = self.get_unique_txt_record(name, txt3).dwTimeStamp self.assert_timestamps_equal(timestamp1, 0) self.assert_timestamps_equal(timestamp2, six_days_ago) self.assert_timestamps_equal(timestamp3, eight_days_ago) self.assert_timestamps_equal(timestamp4, 0) def _test_update_aging_enabled_n_days_ago(self, n_days): name = 'test' txt1 = ['1'] txt2 = ['2'] delta = n_days * -24 self.set_aging(True) current_time = self.dns_update_record(name, txt1).dwTimeStamp # rewind timestamp using ldap self.ldap_modify_timestamps(name, delta) n_days_ago = self.get_unique_txt_record(name, txt1).dwTimeStamp self.assertGreater(current_time, n_days_ago) # update changes timestamp depending on time. timestamp1 = self.dns_update_record(name, txt1).dwTimeStamp if n_days <= 7: self.assert_timestamps_equal(timestamp1, n_days_ago) else: self.assert_soon_after(timestamp1, current_time) # add another record, which should have the current timestamp timestamp2 = self.dns_update_record(name, txt2).dwTimeStamp self.assert_soon_after(timestamp2, current_time) # first record should not have changed timestamp1_b = self.get_unique_txt_record(name, txt1).dwTimeStamp self.assert_timestamps_equal(timestamp1, timestamp1_b) # let's repeat that, this time with txt2 existing self.ldap_update_record(name, txt1, dwTimeStamp=n_days_ago) timestamp1 = self.dns_update_record(name, txt1).dwTimeStamp self.assert_timestamps_equal(timestamp1, timestamp1_b) # this update is not an add. record 2 is already up-to-date timestamp2 = self.dns_update_record(name, txt2).dwTimeStamp self.assert_soon_after(timestamp2, current_time) # now timestamp1 is not changed timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp self.assert_timestamps_equal(timestamp1, timestamp1_b) # delete record2, try again self.ldap_delete_record(name, txt2) self.ldap_update_record(name, txt1, dwTimeStamp=n_days_ago) timestamp1 = self.dns_update_record(name, txt1).dwTimeStamp if n_days <= 7: self.assert_timestamps_equal(timestamp1, n_days_ago) else: self.assert_soon_after(timestamp1, current_time) # here we are re-adding the deleted record timestamp2 = self.dns_update_record(name, txt2).dwTimeStamp self.assert_soon_after(timestamp2, current_time) timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp # It gets weird HERE. # note how the SIBLING of the deleted, re-added record differs # from the sibling of freshly added record, depending on the # time difference. 
if n_days <= 7: self.assert_timestamps_equal(timestamp1, n_days_ago) else: self.assert_timestamps_equal(timestamp1, timestamp2) # re-timestamp record2, try again self.ldap_update_record(name, txt2, dwTimeStamp=n_days_ago) self.ldap_update_record(name, txt1, dwTimeStamp=n_days_ago) # this should make no difference timestamp1_b = self.dns_update_record(name, txt1).dwTimeStamp self.assert_timestamps_equal(timestamp1, timestamp1_b) # no change timestamp2 = self.dns_update_record(name, txt2).dwTimeStamp self.assert_timestamps_equal(timestamp2, timestamp1) # also no change timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp self.assert_timestamps_equal(timestamp1, timestamp2) # let's introduce another record txt3 = ['3'] self.ldap_update_record(name, txt2, dwTimeStamp=n_days_ago) self.ldap_update_record(name, txt1, dwTimeStamp=n_days_ago) timestamp3 = self.dns_update_record(name, txt3).dwTimeStamp self.assert_soon_after(timestamp3, current_time) timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp timestamp2 = self.get_unique_txt_record(name, txt2).dwTimeStamp self.assert_timestamps_equal(timestamp1, n_days_ago) self.assert_timestamps_equal(timestamp2, n_days_ago) self.ldap_delete_record(name, txt3) timestamp3 = self.dns_update_record(name, txt3).dwTimeStamp self.assert_soon_after(timestamp3, current_time) timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp timestamp2 = self.get_unique_txt_record(name, txt2).dwTimeStamp self.assert_timestamps_equal(timestamp1, n_days_ago) self.assert_timestamps_equal(timestamp2, n_days_ago) txt4 = ['4'] # Because txt1 is static, txt4 is static self.ldap_update_record(name, txt1, dwTimeStamp=0) self.ldap_update_record(name, txt2, dwTimeStamp=n_days_ago) self.ldap_update_record(name, txt3, dwTimeStamp=n_days_ago) timestamp1 = self.dns_update_record(name, txt1).dwTimeStamp timestamp2 = self.get_unique_txt_record(name, txt2).dwTimeStamp timestamp3 = self.get_unique_txt_record(name, txt3).dwTimeStamp timestamp4 = self.dns_update_record(name, txt4).dwTimeStamp self.assert_timestamps_equal(timestamp1, 0) self.assert_timestamps_equal(timestamp2, n_days_ago) self.assert_timestamps_equal(timestamp3, n_days_ago) self.assert_timestamps_equal(timestamp4, 0) longer_ago = n_days_ago // 2 # remove all static records. self.ldap_delete_record(name, txt4) self.ldap_update_record(name, txt1, dwTimeStamp=longer_ago) self.ldap_update_record(name, txt2, dwTimeStamp=n_days_ago) self.ldap_update_record(name, txt3, dwTimeStamp=n_days_ago) timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp self.assert_timestamps_equal(timestamp1, longer_ago) timestamp4 = self.dns_update_record(name, txt4).dwTimeStamp timestamp2 = self.get_unique_txt_record(name, txt2).dwTimeStamp timestamp3 = self.get_unique_txt_record(name, txt3).dwTimeStamp timestamp1 = self.get_unique_txt_record(name, txt1).dwTimeStamp # Here, although there is no record frm which to get the zero # timestamp, record 4 does it anyway. self.assert_timestamps_equal(timestamp1, longer_ago) self.assert_timestamps_equal(timestamp2, n_days_ago) self.assert_timestamps_equal(timestamp3, n_days_ago) self.assert_timestamps_equal(timestamp4, 0) # and now record 1 wants to be static. 
self.ldap_update_record(name, txt4, dwTimeStamp=longer_ago) timestamp4 = self.get_unique_txt_record(name, txt4).dwTimeStamp self.assert_timestamps_equal(timestamp4, longer_ago) timestamp1 = self.dns_update_record(name, txt1).dwTimeStamp timestamp4 = self.get_unique_txt_record(name, txt4).dwTimeStamp self.assert_timestamps_equal(timestamp1, 0) self.assert_timestamps_equal(timestamp4, longer_ago) def test_update_aging_enabled_in_no_refresh_window(self): self._test_update_aging_enabled_n_days_ago(4) def test_update_aging_enabled_on_no_refresh_boundary(self): self._test_update_aging_enabled_n_days_ago(7) def test_update_aging_enabled_in_refresh_window(self): self._test_update_aging_enabled_n_days_ago(9) def test_update_aging_enabled_beyond_refresh_window(self): self._test_update_aging_enabled_n_days_ago(16) def test_update_aging_enabled_in_eighteenth_century(self): self._test_update_aging_enabled_n_days_ago(100000) def test_update_static_stickiness(self): name = 'test' A = ['A'] B = ['B'] C = ['C'] D = ['D'] self.set_aging(False) self.dns_update_record(name, A).dwTimeStamp self.ldap_update_record(name, B, dwTimeStamp=0) self.dns_update_record(name, B) self.dns_update_record(name, C) ctime = self.get_unique_txt_record(name, C).dwTimeStamp self.assertEqual(ctime, 0) btime = self.get_unique_txt_record(name, B).dwTimeStamp self.assertEqual(btime, 0) self.ldap_replace_records(name, []) self.dns_update_record(name, D) dtime = self.get_unique_txt_record(name, D).dwTimeStamp self.assertEqual(dtime, 0) def _test_update_timestamp_weirdness(self, n_days, aging=True): name = 'test' A = ['A'] B = ['B'] self.set_aging(aging) current_time = self.dns_update_record(name, A).dwTimeStamp # rewind timestamp using ldap self.ldap_modify_timestamps(name, n_days * -24) n_days_ago = self.get_unique_txt_record(name, A).dwTimeStamp time_A = self.dns_update_record(name, A).dwTimeStamp # that dns_update should have reset the timestamp ONLY if # aging is on and the old timestamp is > noRefresh period (7 # days) if n_days > 7 and aging: self.assert_soon_after(time_A, current_time) else: self.assert_timestamps_equal(time_A, n_days_ago) # add another record, which should have the current timestamp time_B = self.dns_update_record(name, B).dwTimeStamp self.assert_soon_after(time_B, current_time) time_A = self.get_unique_txt_record(name, A).dwTimeStamp if aging and n_days <= 7: self.assert_timestamps_equal(time_A, n_days_ago) else: self.assert_soon_after(time_A, current_time) # delete B, try again self.ldap_delete_record(name, B) self.ldap_update_record(name, A, dwTimeStamp=n_days_ago) time_A = self.dns_update_record(name, A).dwTimeStamp # here we are re-adding the deleted record time_B = self.dns_update_record(name, B).dwTimeStamp self.assert_soon_after(time_B, current_time) time_A = self.get_unique_txt_record(name, A).dwTimeStamp return n_days_ago, time_A, time_B def test_update_timestamp_weirdness_no_refresh_no_aging(self): n_days_ago, time_A, time_B = \ self._test_update_timestamp_weirdness(5, False) # the timestamp of the SIBLING of the deleted, re-added record # differs from the sibling of freshly added record. self.assert_timestamps_equal(time_A, n_days_ago) def test_update_timestamp_weirdness_no_refresh_aging(self): n_days_ago, time_A, time_B = \ self._test_update_timestamp_weirdness(5, True) # the timestamp of the SIBLING of the deleted, re-added record # differs from the sibling of freshly added record. 
self.assert_timestamps_equal(time_A, n_days_ago) def test_update_timestamp_weirdness_refresh_no_aging(self): n_days_ago, time_A, time_B = \ self._test_update_timestamp_weirdness(9, False) self.assert_timestamps_equal(time_A, time_B) def test_update_timestamp_weirdness_refresh_aging(self): n_days_ago, time_A, time_B = \ self._test_update_timestamp_weirdness(9, True) self.assert_timestamps_equal(time_A, time_B) def test_aging_refresh(self): name, txt = 'agingtest', ['test txt'] no_refresh = 200 refresh = 160 self.set_zone_int_params(NoRefreshInterval=no_refresh, RefreshInterval=refresh, Aging=1) before_mod = self.dns_update_record(name, txt) start_time = before_mod.dwTimeStamp # go back 86 hours, which is in the no-refresh time (but # wouldn't be if we had stuck to the default of 168). self.ldap_modify_timestamps(name, -170) rec = self.dns_update_record(name, txt) self.assert_timestamps_equal(rec.dwTimeStamp, start_time - 170) # back to -202 hours, into the refresh zone # the update should reset the timestamp to now. self.ldap_modify_timestamps(name, -32) rec = self.dns_update_record(name, txt) self.assert_soon_after(rec.dwTimeStamp, start_time) # back to -362 hours, beyond the end of the refresh period. # Actually nothing changes at this time -- we can still # refresh, but the record is liable for scavenging. self.ldap_modify_timestamps(name, -160) rec = self.dns_update_record(name, txt) self.assert_soon_after(rec.dwTimeStamp, start_time) def test_add_no_timestamp(self): # check zero timestamp is implicit self.set_aging(True) rec = self.ldap_update_record('ldap', 'test') self.assertEqual(rec.dwTimeStamp, 0) rec = self.rpc_update_record('rpc', 'test') self.assertEqual(rec.dwTimeStamp, 0) def test_add_zero_timestamp(self): rec = self.ldap_update_record('ldap', 'test', dwTimeStamp=0) self.assertEqual(rec.dwTimeStamp, 0) rec = self.rpc_update_record('rpc', 'test', dwTimeStamp=0) self.assertEqual(rec.dwTimeStamp, 0) def test_add_update_timestamp(self): # LDAP can change timestamp, RPC can't rec = self.ldap_update_record('ldap', 'test', dwTimeStamp=123456) self.assertEqual(rec.dwTimeStamp, 123456) rec = self.rpc_update_record('rpc', 'test', dwTimeStamp=123456) self.assertEqual(rec.dwTimeStamp, 0) # second time is a different code path (add vs update) rec = self.rpc_update_record('rpc', 'test', dwTimeStamp=123456) self.assertEqual(rec.dwTimeStamp, 0) # RPC update the one with timestamp, zeroing it. rec = self.rpc_update_record('ldap', 'test', dwTimeStamp=123456) self.assertEqual(rec.dwTimeStamp, 0) def test_add_update_ttl(self): # RPC *can* set dwTtlSeconds. rec = self.ldap_update_record('ldap', 'test', dwTtlSeconds=1234) self.assertEqual(rec.dwTtlSeconds, 1234) rec = self.rpc_update_record('rpc', 'test', dwTtlSeconds=1234) self.assertEqual(rec.dwTtlSeconds, 1234) # does update work like add? rec = self.rpc_update_record('rpc', 'test', dwTtlSeconds=4321) self.assertEqual(rec.dwTtlSeconds, 4321) rec = self.rpc_update_record('ldap', 'test', dwTtlSeconds=5678) self.assertEqual(rec.dwTtlSeconds, 5678) def test_add_update_ttl_serial(self): # when setting dwTtlSeconds, what happens to serial number? 
rec = self.ldap_update_record('ldap', 'test', dwTtlSeconds=1234, dwSerial=123) self.assertEqual(rec.dwTtlSeconds, 1234) self.assertEqual(rec.dwSerial, 123) rec = self.rpc_update_record('rpc', 'test', dwTtlSeconds=1234) self.assertEqual(rec.dwTtlSeconds, 1234) serial = rec.dwSerial self.assertLess(serial, 4) rec = self.rpc_update_record('rpc', 'test', dwTtlSeconds=4321) self.assertEqual(rec.dwTtlSeconds, 4321) self.assertEqual(rec.dwSerial, serial + 1) rec = self.rpc_update_record('ldap', 'test', dwTtlSeconds=5678) self.assertEqual(rec.dwTtlSeconds, 5678) self.assertEqual(rec.dwSerial, 124) def test_add_update_dwFlags(self): # dwFlags splits into rank and flags. # according to [MS-DNSP] 2.3.2.2, flags MUST be zero rec = self.ldap_update_record('ldap', 'test', flags=22222, rank=222) self.assertEqual(rec.flags, 22222) self.assertEqual(rec.rank, 222) rec = self.rpc_update_record('ldap', 'test', dwFlags=3333333) # rank != 3333333 & 0xff == 213 self.assertEqual(rec.rank, 240) # RPC fixes rank self.assertEqual(rec.flags, 0) self.assertRaises(OverflowError, self.ldap_update_record, 'ldap', 'test', flags=777777777, rank=777) # reset to no default (rank overflows) rec = self.ldap_update_record('ldap', 'test', flags=7777, rank=777) self.assertEqual(rec.flags, 7777) self.assertEqual(rec.rank, 9) # DNS update zeros flags, sets rank to 240 (RANK_ZONE) rec = self.dns_update_record('ldap', 'test', ttl=999) self.assertEqual(rec.flags, 0) self.assertEqual(rec.rank, 240) rec = self.rpc_update_record('ldap', 'test', dwFlags=321) self.assertEqual(rec.flags, 0) self.assertEqual(rec.rank, 240) # RPC adding a new record: fixed rank, zero flags rec = self.rpc_update_record('ldap', 'test 2', dwFlags=12345) self.assertEqual(rec.rank, 240) self.assertEqual(rec.flags, 0) def test_add_update_dwReserved(self): # RPC does not change dwReserved. rec = self.ldap_update_record('ldap', 'test', dwReserved=54321) self.assertEqual(rec.dwReserved, 54321) rec = self.rpc_update_record('rpc', 'test', dwReserved=54321) self.assertEqual(rec.dwReserved, 0) rec = self.rpc_update_record('rpc', 'test', dwReserved=54321) self.assertEqual(rec.dwReserved, 0) rec = self.rpc_update_record('ldap', 'test', dwReserved=12345) self.assertEqual(rec.dwReserved, 54321) def test_add_update_dwSerial(self): # On Windows the RPC record ends up with serial 2, on Samba # serial 3. Rather than knownfail this, we accept anything # below 4 (for now). rec = self.ldap_update_record('ldap', 'test', dwSerial=123) self.assertEqual(rec.dwSerial, 123) rec = self.rpc_update_record('rpc', 'test', dwSerial=123) self.assertLess(rec.dwSerial, 4) rec = self.rpc_update_record('rpc', 'test', dwSerial=123) self.assertLess(rec.dwSerial, 4) rec = self.dns_update_record('rpc', 'test') self.assertLess(rec.dwSerial, 4) rec = self.dns_update_record('dns-0', 'test') self.assertLess(rec.dwSerial, 5) rec = self.dns_update_record('ldap', 'test') self.assertEqual(rec.dwSerial, 123) rec = self.rpc_update_record('ldap', 'test', dwSerial=123) self.assertEqual(rec.dwSerial, 123) rec = self.ldap_update_record('ldap', 'test', dwSerial=12) self.assertEqual(rec.dwSerial, 12) # when we dns-updated ldap/test, we alerted Windows to 123 as # a high water mark for the zone. (even though we have since # dropped the serial to 12, 123 is the base serial for new # records). 
rec = self.dns_update_record('dns', 'test') self.assertEqual(rec.dwSerial, 124) rec = self.dns_update_record('dns2', 'test') self.assertEqual(rec.dwSerial, 125) rec = self.rpc_update_record('rpc2', 'test') self.assertEqual(rec.dwSerial, 126) rec = self.dns_update_record('dns', 'test 2') self.assertEqual(rec.dwSerial, 127) def test_add_update_dwSerial_2(self): # On Samba the RPC update resets the serial to a low number, # while Windows leaves it high. rec = self.ldap_update_record('ldap', 'test', dwSerial=123) self.assertEqual(rec.dwSerial, 123) rec = self.rpc_update_record('ldap', 'test', dwSerial=321) self.assertEqual(rec.dwSerial, 123) rec = self.dns_update_record('ldap', 'test') self.assertEqual(rec.dwSerial, 123) def test_rpc_update_disparate_types(self): """Can we use update to replace a TXT with an AAAA?""" name = 'x' old = TXTRecord("x") new = ARecord("127.0.0.111") self.rpc_replace(name, None, old) recs = self.ldap_get_records(name) self.assertEqual(len(recs), 1) self.assertEqual(recs[0].wType, old.wType) self.rpc_replace(name, old, new) recs = self.ldap_get_records(name) self.assertEqual(len(recs), 1) self.assertEqual(recs[0].wType, new.wType) def test_add_update_many(self): # Samba fails often in this set, but we want to see how it # goes further down, so we print the problems and defer the # failure. failures = 0 total = 0 def _defer_wrap(f): def _defer(*args): nonlocal failures, total total += 1 try: f(*args) except self.failureException as e: from traceback import format_stack print(f"{format_stack()[-2]} {e}\n") failures += 1 return _defer defer_assertEqual = _defer_wrap(self.assertEqual) defer_assert_timestamp_in_ballpark = \ _defer_wrap(self.assert_timestamp_in_ballpark) self.set_aging(False) rec = self.ldap_update_record('ldap', 'test', version=11, rank=22, flags=33, dwSerial=44, dwTtlSeconds=55, dwReserved=66, dwTimeStamp=77) self.assertEqual(rec.version, 5) # disobeys request self.assertEqual(rec.rank, 22) self.assertEqual(rec.flags, 33) self.assertEqual(rec.dwSerial, 44) self.assertEqual(rec.dwTtlSeconds, 55) self.assertEqual(rec.dwReserved, 66) self.assertEqual(rec.dwTimeStamp, 77) # DNS updates first rec = self.dns_update_record('ldap', 'test', ttl=999) self.assertEqual(rec.version, 5) self.assertEqual(rec.rank, 240) # rank gets fixed by DNS update defer_assertEqual(rec.flags, 0) # flags gets fixed defer_assertEqual(rec.dwSerial, 45) # serial increments self.assertEqual(rec.dwTtlSeconds, 999) # TTL set defer_assertEqual(rec.dwReserved, 0) # reserved fixed defer_assert_timestamp_in_ballpark(rec) # changed on Windows ?! 
self.set_aging(True) rec = self.dns_update_record('ldap', 'test', ttl=1111) self.assertEqual(rec.version, 5) self.assertEqual(rec.rank, 240) defer_assertEqual(rec.flags, 0) defer_assertEqual(rec.dwSerial, 46) self.assertEqual(rec.dwTtlSeconds, 1111) # TTL set defer_assertEqual(rec.dwReserved, 0) self.assert_timestamp_in_ballpark(rec) # RPC update rec = self.rpc_update_record('ldap', 'test', version=111, dwFlags=333, dwSerial=444, dwTtlSeconds=555, dwReserved=666, dwTimeStamp=777) self.assertEqual(rec.version, 5) # no change self.assertEqual(rec.rank, 240) # no change defer_assertEqual(rec.flags, 0) # no change defer_assertEqual(rec.dwSerial, 47) # Serial increments self.assertEqual(rec.dwTtlSeconds, 555) # TTL set defer_assertEqual(rec.dwReserved, 0) # no change self.assertEqual(rec.dwTimeStamp, 0) # timestamp zeroed # RPC update, using default values rec = self.rpc_update_record('ldap', 'test') self.assertEqual(rec.version, 5) self.assertEqual(rec.rank, 240) defer_assertEqual(rec.flags, 0) defer_assertEqual(rec.dwSerial, 48) # serial increments self.assertEqual(rec.dwTtlSeconds, 900) # TTL changed defer_assertEqual(rec.dwReserved, 0) self.assertEqual(rec.dwTimeStamp, 0) self.set_aging(False) rec = self.dns_update_record('ldap', 'test', ttl=888) self.assertEqual(rec.version, 5) self.assertEqual(rec.rank, 240) defer_assertEqual(rec.flags, 0) defer_assertEqual(rec.dwSerial, 49) # serial increments self.assertEqual(rec.dwTtlSeconds, 888) # TTL set defer_assertEqual(rec.dwReserved, 0) self.assertEqual(rec.dwTimeStamp, 0) # timestamp stays zero if failures: self.fail(f"failed {failures}/{total} defered assertions") def test_static_record_dynamic_update(self): """Add a static record, then a dynamic record. The dynamic record should have a timestamp set.""" name = 'test' txt = ['static txt'] txt2 = ['dynamic txt'] self.set_aging(True) rec = self.ldap_update_record(name, txt, dwTimeStamp=0) rec2 = self.dns_update_record(name, txt2) self.assert_timestamp_in_ballpark(rec2) ts2 = rec2.dwTimeStamp # update the first record. It should stay static (timestamp 0) rec = self.dns_update_record(name, txt) self.assertEqual(rec.dwTimeStamp, 0) # and rec2 should be unchanged. self.assertEqual(rec2.dwTimeStamp, ts2) def test_dynamic_record_static_update(self): name = 'agingtest' txt1 = ['dns update before'] txt2 = ['ldap update'] txt3 = ['dns update after'] self.set_aging(True) self.dns_update_record(name, txt1) self.ldap_update_record(name, txt2) self.dns_update_record(name, txt3) recs = self.get_rpc_records(name) for r in recs: d = [x.str for x in r.data.str] if d == txt1: self.assertNotEqual(r.dwTimeStamp, 0) elif d == txt2: self.assertEqual(r.dwTimeStamp, 0) elif d == txt3: self.assertNotEqual(r.dwTimeStamp, 0) def test_tombstone_in_hours_and_nttime(self): # Until now Samba has measured tombstone timestamps in hours, # not ten-millionths of a second. After now, we want Samba to # handle both. nh, oh, nn, on, on0, onf, nn0, nnf, _1601 = 'abcdefgij' now_hours = dsdb_dns.unix_to_dns_timestamp(int(time.time())) old_hours = now_hours - 24 * 90 now_nttime = dsdb_dns.dns_timestamp_to_nt_time(now_hours) old_nttime = dsdb_dns.dns_timestamp_to_nt_time(old_hours) # calculations on hours might be based on the lower 32 bits, # so we test with these forced to extremes (the maximum change # is 429 seconds in NTTIME). 
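        # (For scale: dwTimeStamp values count hours since 1601, while
        # NTTIME counts 100-nanosecond ticks since 1601; 2**32 ticks is
        # roughly 429 seconds, hence the "maximum change" noted above.)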
old_nttime0 = old_nttime & 0xffffffff00000000 old_nttimef = old_nttime | 0xffffffff now_nttime0 = now_nttime & 0xffffffff00000000 now_nttimef = now_nttime | 0xffffffff self.dns_tombstone(nh, epoch_nttime=now_hours) self.dns_tombstone(oh, epoch_nttime=old_hours) self.dns_tombstone(nn, epoch_nttime=now_nttime) self.dns_tombstone(on, epoch_nttime=old_nttime) self.dns_tombstone(nn0, epoch_nttime=now_nttime0) self.dns_tombstone(nnf, epoch_nttime=now_nttimef) self.dns_tombstone(on0, epoch_nttime=old_nttime0) self.dns_tombstone(onf, epoch_nttime=old_nttimef) # this is our (arbitrary) threshold that will make us think in # NTTIME, not hours. self.dns_tombstone(_1601, epoch_nttime=(10 * 1000 * 1000 + 1)) try: file_samdb = get_file_samdb() except ldb.LdbError as e: raise AssertionError( f"failing because '{e}': this is Windows?") from None dsdb._dns_delete_tombstones(file_samdb) # nh and nn should not be deleted for name in nh, nn, nn0, nnf: recs = self.ldap_get_records(name) self.assertEqual(len(recs), 1) self.assert_tombstoned(name, timestamp=False) # oh and on should be GONE for name in oh, on, on0, onf, _1601: recs = self.ldap_get_records(name) self.assertEqual(len(recs), 0) def test_dns_query_for_tombstoned_results(self): # This one fails on Windows, because the dns cache holds B # after it has been tombstoned behind its back. A = 'a' B = 'b' self.dns_tombstone(A) self.assert_tombstoned(A) r = self.dns_query(A, qtype=dns.DNS_QTYPE_TXT) self.assertEqual(r.ancount, 0) self.dns_update_record(B, B) self.dns_tombstone(B) self.assert_tombstoned(B) r = self.dns_query(B, qtype=dns.DNS_QTYPE_TXT) self.assertEqual(r.ancount, 0) def test_basic_scavenging(self): # NOTE: This one fails on Windows, because the RPC call to # prompt scavenging is not immediate. On Samba, in the # testenv, we don't have the RPC call but we can connect to # the database directly. # just to be sure we have the right limits. self.set_zone_int_params(NoRefreshInterval=168, RefreshInterval=168, Aging=1) ts1, ts2, ts3, ts4, ts5, ts6 = ('1', '2', '3', '4', '5', '6') self.dns_update_record(ts1, ts1) self.dns_update_record(ts2, ts2) # ts2 is tombstoned and timestamped in 1981 self.dns_tombstone(ts2) # ts3 is tombstoned and timestamped in the future self.dns_tombstone(ts3, epoch_hours=(DNS_TIMESTAMP_2101 - 1)) # ts4 is tombstoned and timestamped in the past self.dns_tombstone(ts4, epoch_hours=1111111) # ts5 is tombstoned in the past and timestamped in the future self.dns_tombstone(ts5, epoch_hours=5555555, epoch_nttime=int(1e10)) # ts2 and ts3 should now be tombstoned. self.assert_tombstoned(ts2) self.assert_tombstoned(ts3) # let's un-tombstone ts2 # ending up with dnsTombstoned: FALSE in Samba # and no dNSTombstoned in Windows. self.dns_update_record(ts2, "ts2 untombstoned") ts2_node = self.get_one_node(ts2) ts2_tombstone = ts2_node.get("dNSTombstoned") if ts2_tombstone is not None: self.assertEqual(ts2_tombstone[0], b"FALSE") self.assert_tombstoned(ts2, tombstoned=False) r = self.dns_update_record(ts6, ts6) # put some records into the death zone. self.ldap_modify_timestamps(ts1, -15 * 24) self.ldap_modify_timestamps(ts2, -14 * 24 - 2) self.ldap_modify_timestamps(ts6, -14 * 24 + 2) # ts1 will be saved by this record self.dns_update_record(ts1, "another record") try: # Tell the server to clean-up records. 
            # This is how it *should* work on Windows:
            self.rpc_conn.DnssrvOperation2(
                dnsserver.DNS_CLIENT_VERSION_LONGHORN,
                0, SERVER_IP, None, 0, "StartScavenging",
                dnsserver.DNSSRV_TYPEID_NULL, None)
            # Samba won't get here (NOT_IMPLEMENTED error)
            # wait for Windows to do its cleanup.
            time.sleep(2)
        except WERRORError as e:
            if e.args[0] == werror.WERR_CALL_NOT_IMPLEMENTED:
                # This is the Samba way, talking to the file directly,
                # as if we were the server process. The direct
                # connection is needed because the tombstoning search
                # involves a magic system only filter.
                file_samdb = get_file_samdb()
                dsdb._scavenge_dns_records(file_samdb)
                dsdb._dns_delete_tombstones(file_samdb)
            else:
                raise

        # Now what we should have:
        # ts1: alive: the old record is deleted, the new one not.
        # ts2: tombstoned
        # ts3: tombstoned
        # ts4: deleted. gone.
        # ts5: deleted. timestamp affects tombstoning, but not deletion.
        # ts6: alive
        #
        # We order our assertions to make the windows test
        # fail as late as possible (on ts4, ts5, ts2).
        r = self.get_unique_txt_record(ts1, ["another record"])
        self.assertIsNotNone(r)
        r = self.get_unique_txt_record(ts6, [ts6])
        self.assertIsNotNone(r)
        self.assert_tombstoned(ts3)
        n = self.get_one_node(ts4)
        self.assertIsNone(n)
        n = self.get_one_node(ts5)
        self.assertIsNone(n)
        self.assert_tombstoned(ts2)

    def test_samba_scavenging(self):
        # We expect this one to fail on Windows, because scavenging
        # and tombstoning cannot be performed on demand.
        try:
            file_samdb = get_file_samdb()
        except ldb.LdbError as e:
            raise AssertionError(
                f"failing because '{e}': this is Windows?") from None

        # let's try different limits.
        self.set_zone_int_params(NoRefreshInterval=30,
                                 RefreshInterval=20,
                                 Aging=1)

        now = dsdb_dns.unix_to_dns_timestamp(int(time.time()))

        A, B, C, D = 'ABCD'
        # A has current time
        # B has safe, non-updateable time
        # C has safe time
        # D is scavengeable
        atime = self.dns_update_record(A, A).dwTimeStamp
        btime = self.ldap_update_record(B, B, dwTimeStamp=now-20).dwTimeStamp
        ctime = self.ldap_update_record(C, C, dwTimeStamp=now-40).dwTimeStamp
        dtime = self.ldap_update_record(D, D, dwTimeStamp=now-60).dwTimeStamp
        self.assert_soon_after(atime, now)
        self.assert_timestamps_equal(btime, now-20)
        self.assert_timestamps_equal(ctime, now-40)
        self.assert_timestamps_equal(dtime, now-60)

        dsdb._scavenge_dns_records(file_samdb)

        # D should be gone (tombstoned)
        r = self.get_unique_txt_record(D, D)
        self.assertIsNone(r)
        r = self.dns_query(D, qtype=dns.DNS_QTYPE_TXT)
        self.assertEqual(r.ancount, 0)
        recs = self.ldap_get_records(D)
        self.assertEqual(len(recs), 1)
        self.assert_tombstoned(recs[0])

        # others unchanged.
        atime = self.get_unique_txt_record(A, A).dwTimeStamp
        btime = self.get_unique_txt_record(B, B).dwTimeStamp
        ctime = self.get_unique_txt_record(C, C).dwTimeStamp
        self.assert_soon_after(atime, now)
        self.assert_timestamps_equal(btime, now-20)
        self.assert_timestamps_equal(ctime, now-40)

        # B is still inside the no-refresh window, so only C gets refreshed.
        btime = self.dns_update_record(B, B).dwTimeStamp
        ctime = self.dns_update_record(C, C).dwTimeStamp
        self.assert_timestamps_equal(btime, now-20)
        self.assert_soon_after(ctime, now)

        # after this, D *should* still be a tombstone, because its
        # tombstone timestamp is not very old.
dsdb._dns_delete_tombstones(file_samdb) recs = self.ldap_get_records(D) self.assertEqual(len(recs), 1) self.assert_tombstoned(recs[0]) # Let's delete C using rpc, and ensure it survives dns_delete_tombstones self.rpc_delete_txt(C, C) recs = self.ldap_get_records(C) self.assertEqual(len(recs), 1) self.assert_tombstoned(recs[0]) dsdb._dns_delete_tombstones(file_samdb) recs = self.ldap_get_records(C) self.assertEqual(len(recs), 1) self.assert_tombstoned(recs[0]) # now let's wind A and B back to either side of the two week # threshold. A should survive, B should not. self.dns_tombstone(A, (now - 166)) self.dns_tombstone(B, (now - 170)) dsdb._dns_delete_tombstones(file_samdb) recs = self.ldap_get_records(A) self.assertEqual(len(recs), 1) self.assert_tombstoned(recs[0]) recs = self.ldap_get_records(B) self.assertEqual(len(recs), 0) def _test_A_and_AAAA_records(self, A, B, a_days, b_days, aging): self.set_aging(aging) name = 'aargh' now = dsdb_dns.unix_to_dns_timestamp(int(time.time())) a_initial = now - 24 * a_days b_initial = now - 24 * b_days self.dns_update_non_text(name, A) self.ldap_modify_timestamps(name, a_days * -24) rec_a = self.get_unique_ip_record(name, A) rec_b = self.add_ip_record(name, B, dwTimeStamp=b_initial) self.assert_timestamps_equal(rec_a, a_initial) self.assert_timestamps_equal(rec_b, b_initial) # touch the A record. self.dns_update_non_text(name, A) # check the A timestamp, depending on norefresh rec_a = self.get_unique_ip_record(name, A) if aging and a_days > 7: time_a = now self.assert_soon_after(rec_a, now) elif a_days > 7: # when we have NO aging and are in the refresh window, the # timestamp now reads as a_initial, but will become now # after we manipulate B for a bit. time_a = now self.assert_timestamps_equal(rec_a, a_initial) else: time_a = a_initial self.assert_timestamps_equal(rec_a, a_initial) # B timestamp should be unchanged? rec_b = self.get_unique_ip_record(name, B) self.assert_timestamps_equal(rec_b, b_initial) # touch the B record. self.dns_update_non_text(name, B) # check the B timestamp rec_b = self.get_unique_ip_record(name, B) if not aging: self.windows_variation( self.assert_soon_after, rec_b, now, msg="windows updates non-aging, samba does not") else: self.assert_soon_after(rec_b, now) # rewind B rec_b = self.add_ip_record(name, B, dwTimeStamp=b_initial) # NOW rec A might have changed! with no aging, and out of refresh. 
rec_a = self.get_unique_ip_record(name, A) self.assert_timestamps_equal(rec_a, time_a) self.dns_update_non_text(name, A) rec_a = self.get_unique_ip_record(name, B) self.assert_timestamps_equal(rec_b, b_initial) # now delete A _, wtype = guess_wtype(A) self.ldap_delete_record(name, A, wtype=wtype) # re-add it self.dns_update_non_text(name, A) rec_a = self.get_unique_ip_record(name, A) self.assert_soon_after(rec_a, now) rec_b = self.get_unique_ip_record(name, B) self.assert_timestamps_equal(rec_b, b_initial) def test_A_5_days_AAAA_5_days_aging(self): self._test_A_and_AAAA_records(IPv4_ADDR, IPv6_ADDR, 5, 5, aging=True) def test_A_5_days_AAAA_5_days_no_aging(self): self._test_A_and_AAAA_records(IPv4_ADDR, IPv6_ADDR, 5, 5, aging=False) def test_A_5_days_AAAA_10_days_aging(self): self._test_A_and_AAAA_records(IPv4_ADDR, IPv6_ADDR, 5, 10, aging=True) def test_A_5_days_AAAA_10_days_no_aging(self): self._test_A_and_AAAA_records(IPv4_ADDR, IPv6_ADDR, 5, 10, aging=False) def test_A_10_days_AAAA_5_days_aging(self): self._test_A_and_AAAA_records(IPv4_ADDR, IPv6_ADDR, 10, 5, aging=True) def test_A_10_days_AAAA_5_days_no_aging(self): self._test_A_and_AAAA_records(IPv4_ADDR, IPv6_ADDR, 10, 5, aging=False) def test_A_10_days_AAAA_9_days_aging(self): self._test_A_and_AAAA_records(IPv4_ADDR, IPv6_ADDR, 10, 9, aging=True) def test_A_9_days_AAAA_10_days_no_aging(self): self._test_A_and_AAAA_records(IPv4_ADDR, IPv6_ADDR, 9, 10, aging=False) def test_A_20_days_AAAA_2_days_aging(self): self._test_A_and_AAAA_records(IPv4_ADDR, IPv6_ADDR, 20, 2, aging=True) def test_A_6_days_AAAA_40_days_no_aging(self): self._test_A_and_AAAA_records(IPv4_ADDR, IPv6_ADDR, 6, 40, aging=False) def test_A_5_days_A_5_days_aging(self): self._test_A_and_AAAA_records(IPv4_ADDR, IPv4_ADDR_2, 5, 5, aging=True) def test_A_5_days_A_10_days_no_aging(self): self._test_A_and_AAAA_records(IPv4_ADDR, IPv4_ADDR_2, 5, 10, aging=False) def test_AAAA_5_days_AAAA_6_days_aging(self): self._test_A_and_AAAA_records(IPv6_ADDR, IPv6_ADDR_2, 5, 6, aging=True) def test_AAAA_5_days_AAAA_6_days_no_aging(self): self._test_A_and_AAAA_records(IPv6_ADDR, IPv6_ADDR_2, 5, 6, aging=False) def _test_multi_records_delete(self, aging): # Batch deleting a type doesn't update other types timestamps. self.set_aging(aging) name = 'aargh' now = dsdb_dns.unix_to_dns_timestamp(int(time.time())) back_5_days = now - 5 * 24 back_10_days = now - 10 * 24 back_25_days = now - 25 * 24 ip4s = { '1.1.1.1': now, '2.2.2.2': back_5_days, '3.3.3.3': back_10_days, } ip6s = { '::1': now, '::2': back_5_days, '::3': back_25_days, } txts = { '1': now, '2': back_5_days, '3': back_25_days, } # For windows, if we don't DNS update something, it won't know # there's anything. 
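        # ('3' also appears in txts above, so the ldap_update_record loop
        # below overwrites this seed record's timestamp with back_25_days.)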
self.dns_update_record(name, '3') for k, v in ip4s.items(): r = self.add_ip_record(name, k, wtype=dns.DNS_QTYPE_A, dwTimeStamp=v) for k, v in ip6s.items(): r = self.add_ip_record(name, k, wtype=dns.DNS_QTYPE_AAAA, dwTimeStamp=v) for k, v in txts.items(): r = self.ldap_update_record(name, k, dwTimeStamp=v) self.dns_delete_type(name, dnsp.DNS_TYPE_A) r = self.dns_query(name, dns.DNS_QTYPE_A) self.assertEqual(r.ancount, 0) r = self.dns_query(name, dns.DNS_QTYPE_TXT) self.assertEqual(r.ancount, 3) rset = set(x.rdata.txt.str[0] for x in r.answers) self.assertEqual(rset, set(txts)) r = self.dns_query(name, dns.DNS_QTYPE_AAAA) self.assertEqual(r.ancount, 3) rset = set(ipv6_normalise(x.rdata) for x in r.answers) self.assertEqual(rset, set(ip6s)) recs = self.ldap_get_records(name) self.assertEqual(len(recs), 6) for r in recs: if r.wType == dns.DNS_QTYPE_AAAA: k = ipv6_normalise(r.data) expected = ip6s[k] elif r.wType == dns.DNS_QTYPE_TXT: k = r.data.str[0] expected = txts[k] else: self.fail(f"unexpected wType {r.wType}") self.assert_timestamps_equal(r.dwTimeStamp, expected) def test_multi_records_delete_aging(self): self._test_multi_records_delete(True) def test_multi_records_delete_no_aging(self): self._test_multi_records_delete(False) def _test_dns_delete_times(self, n_days, aging=True): # In these tests, Windows replaces the records with # tombstones, while Samba just removes them. Both are # reasonable approaches (there is no reanimation pathway for # tombstones), but this means self.ldap_get_records() gets # different numbers for each. So we use # self.ldap_get_non_tombstoned_record(). name = 'test' A = ['A'] B = ['B'] C = ['C'] D = ['D'] self.set_aging(aging) now = dsdb_dns.unix_to_dns_timestamp(int(time.time())) n_days_ago = max(now - n_days * 24, 0) self.dns_update_record(name, A) self.ldap_update_record(name, A, dwTimeStamp=n_days_ago) self.ldap_update_record(name, B, dwTimeStamp=n_days_ago) self.ldap_update_record(name, C, dwTimeStamp=n_days_ago) self.dns_update_record(name, D) r = self.dns_query(name, dns.DNS_QTYPE_TXT) rset = set(x.rdata.txt.str[0] for x in r.answers) self.assertEqual(rset, set('ABCD')) atime = self.get_unique_txt_record(name, A).dwTimeStamp btime = self.get_unique_txt_record(name, B).dwTimeStamp ctime = self.get_unique_txt_record(name, C).dwTimeStamp dtime = self.get_unique_txt_record(name, D).dwTimeStamp recs = self.ldap_get_records(name) self.assertEqual(len(recs), 4) r = self.dns_query(name, dns.DNS_QTYPE_TXT) rset = set(x.rdata.txt.str[0] for x in r.answers) self.assertEqual(rset, set('ABCD')) self.dns_delete(name, D) self.assert_timestamps_equal(atime, self.get_unique_txt_record(name, A)) self.assert_timestamps_equal(btime, self.get_unique_txt_record(name, B)) self.assert_timestamps_equal(ctime, self.get_unique_txt_record(name, C)) recs = self.ldap_get_non_tombstoned_records(name) self.assertEqual(len(recs), 3) r = self.dns_query(name, dns.DNS_QTYPE_TXT) rset = set(x.rdata.txt.str[0] for x in r.answers) self.assertEqual(rset, set('ABC')) self.rpc_delete_txt(name, C) self.assert_timestamps_equal(atime, self.get_unique_txt_record(name, A)) self.assert_timestamps_equal(btime, self.get_unique_txt_record(name, B)) recs = self.ldap_get_non_tombstoned_records(name) self.assertEqual(len(recs), 2) r = self.dns_query(name, dns.DNS_QTYPE_TXT) rset = set(x.rdata.txt.str[0] for x in r.answers) self.assertEqual(rset, set('AB')) self.dns_delete(name, A) self.assert_timestamps_equal(btime, self.get_unique_txt_record(name, B)) recs = self.ldap_get_records(name) 
self.assertEqual(len(recs), 1) r = self.dns_query(name, dns.DNS_QTYPE_TXT) rset = set(x.rdata.txt.str[0] for x in r.answers) self.assertEqual(rset, {'B'}) self.dns_delete(name, B) recs = self.ldap_get_non_tombstoned_records(name) # Windows leaves the node with zero records. Samba ends up # with a tombstone. self.assertEqual(len(recs), 0) r = self.dns_query(name, dns.DNS_QTYPE_TXT) rset = set(x.rdata.txt.str[0] for x in r.answers) self.assertEqual(len(rset), 0) def test_dns_delete_times_5_days_aging(self): self._test_dns_delete_times(5, True) def test_dns_delete_times_11_days_aging(self): self._test_dns_delete_times(11, True) def test_dns_delete_times_366_days_aging(self): self._test_dns_delete_times(366, True) def test_dns_delete_times_static_aging(self): self._test_dns_delete_times(1e10, True) def test_dns_delete_times_5_days_no_aging(self): self._test_dns_delete_times(5, False) def test_dns_delete_times_11_days_no_aging(self): self._test_dns_delete_times(11, False) def test_dns_delete_times_366_days_no_aging(self): self._test_dns_delete_times(366, False) def test_dns_delete_times_static_no_aging(self): self._test_dns_delete_times(1e10, False) def _test_dns_delete_simple(self, a_days, b_days, aging=True, touch=False): # Here we show that with aging enabled, the timestamp of # sibling records is *not* modified when a record is deleted. # # With aging disabled, it *is* modified, if the dns server has # seen it updated before ldap set the time (that is, probably # the dns server overwrites AD). This happens even if AD # thinks the record is static. name = 'test' A = ['A'] B = ['B'] self.set_aging(aging) now = dsdb_dns.unix_to_dns_timestamp(int(time.time())) a_days_ago = max(now - a_days * 24, 0) b_days_ago = max(now - b_days * 24, 0) if touch: self.dns_update_record(name, A) self.dns_update_record(name, B) self.ldap_update_record(name, A, dwTimeStamp=a_days_ago) self.ldap_update_record(name, B, dwTimeStamp=b_days_ago) atime = self.get_unique_txt_record(name, A).dwTimeStamp self.dns_delete(name, B) if not aging and touch: # this resets the timestamp even if it is a static record. 
            self.assert_soon_after(self.get_unique_txt_record(name, A), now)
        else:
            self.assert_timestamps_equal(self.get_unique_txt_record(name, A),
                                         atime)

    def test_dns_delete_simple_2_3_days_aging(self):
        self._test_dns_delete_simple(2, 3, True)

    def test_dns_delete_simple_2_3_days_no_aging(self):
        self._test_dns_delete_simple(2, 3, False)

    def test_dns_delete_simple_2_13_days_aging(self):
        self._test_dns_delete_simple(2, 13, True)

    def test_dns_delete_simple_2_13_days_no_aging(self):
        self._test_dns_delete_simple(2, 13, False)

    def test_dns_delete_simple_12_13_days_aging(self):
        self._test_dns_delete_simple(12, 13, True)

    def test_dns_delete_simple_12_13_days_no_aging(self):
        self._test_dns_delete_simple(12, 13, False)

    def test_dns_delete_simple_112_113_days_aging(self):
        self._test_dns_delete_simple(112, 113, True)

    def test_dns_delete_simple_112_113_days_no_aging(self):
        self._test_dns_delete_simple(112, 113, False)

    def test_dns_delete_simple_112_113_days_aging(self):
        self._test_dns_delete_simple(112, 113, True)

    def test_dns_delete_simple_112_113_days_no_aging(self):
        self._test_dns_delete_simple(112, 113, False)

    def test_dns_delete_simple_0_113_days_aging(self):
        # 1e9 hours ago evaluates to 0, i.e static
        self._test_dns_delete_simple(1e9, 113, True)

    def test_dns_delete_simple_0_113_days_no_aging(self):
        self._test_dns_delete_simple(1e9, 113, False)

    def test_dns_delete_simple_0_0_days_aging(self):
        self._test_dns_delete_simple(1e9, 1e9, True)

    def test_dns_delete_simple_0_0_days_no_aging(self):
        self._test_dns_delete_simple(1e9, 1e9, False)

    def test_dns_delete_simple_10_0_days_aging(self):
        self._test_dns_delete_simple(10, 1e9, True)

    def test_dns_delete_simple_10_0_days_no_aging(self):
        self._test_dns_delete_simple(10, 1e9, False)

    def test_dns_delete_simple_2_3_days_aging_touch(self):
        self._test_dns_delete_simple(2, 3, True, True)

    def test_dns_delete_simple_2_3_days_no_aging_touch(self):
        self._test_dns_delete_simple(2, 3, False, True)

    def test_dns_delete_simple_2_13_days_aging_touch(self):
        self._test_dns_delete_simple(2, 13, True, True)

    def test_dns_delete_simple_2_13_days_no_aging_touch(self):
        self._test_dns_delete_simple(2, 13, False, True)

    def test_dns_delete_simple_12_13_days_aging_touch(self):
        self._test_dns_delete_simple(12, 13, True, True)

    def test_dns_delete_simple_12_13_days_no_aging_touch(self):
        self._test_dns_delete_simple(12, 13, False, True)

    def test_dns_delete_simple_112_113_days_aging_touch(self):
        self._test_dns_delete_simple(112, 113, True, True)

    def test_dns_delete_simple_112_113_days_no_aging_touch(self):
        self._test_dns_delete_simple(112, 113, False, True)

    def test_dns_delete_simple_112_113_days_aging_touch(self):
        self._test_dns_delete_simple(112, 113, True, True)

    def test_dns_delete_simple_112_113_days_no_aging_touch(self):
        self._test_dns_delete_simple(112, 113, False, True)

    def test_dns_delete_simple_0_113_days_aging_touch(self):
        # 1e9 hours ago evaluates to 0, i.e static
        self._test_dns_delete_simple(1e9, 113, True, True)

    def test_dns_delete_simple_0_113_days_no_aging_touch(self):
        self._test_dns_delete_simple(1e9, 113, False, True)

    def test_dns_delete_simple_0_0_days_aging_touch(self):
        self._test_dns_delete_simple(1e9, 1e9, True, True)

    def test_dns_delete_simple_0_0_days_no_aging_touch(self):
        self._test_dns_delete_simple(1e9, 1e9, False, True)

    def test_dns_delete_simple_10_0_days_aging_touch(self):
        self._test_dns_delete_simple(10, 1e9, True, True)

    def test_dns_delete_simple_10_0_days_no_aging_touch(self):
        self._test_dns_delete_simple(10, 1e9, False, True)

    def windows_variation(self, fn, *args,
                          msg=None, **kwargs):
        try:
            fn(*args, **kwargs)
        except AssertionError as e:
            print("Expected success on Windows only, failed as expected:\n" +
                  c_GREEN(e))
            return
        print(c_RED("known Windows failure"))
        if msg is not None:
            print(c_DARK_YELLOW(msg))
        print("Expected success on Windows:\n" +
              c_GREEN(f"{fn.__name__} {args} {kwargs}"))

    def _test_dns_add_sibling(self, a_days, refresh, aging=True, touch=False):
        # Here we show that with aging enabled, the timestamp of
        # sibling records *is* modified when a record is added.
        #
        # With aging disabled, it *is* modified, if the dns server has
        # seen it updated before ldap set the time (that is, probably
        # the dns server overwrites AD). This happens even if AD
        # thinks the record is static.
        name = 'test'
        A = ['A']
        B = ['B']
        self.set_zone_int_params(RefreshInterval=int(refresh),
                                 NoRefreshInterval=7,
                                 Aging=int(aging))
        now = dsdb_dns.unix_to_dns_timestamp(int(time.time()))
        a_days_ago = max(now - a_days * 24, 0)

        if touch:
            self.dns_update_record(name, A)

        self.ldap_update_record(name, A, dwTimeStamp=a_days_ago)
        atime = self.get_unique_txt_record(name, A).dwTimeStamp

        self.dns_update_record(name, B)

        a_rec = self.get_unique_txt_record(name, A)
        if not aging and touch:
            # On Windows, this resets the timestamp even if it is a
            # static record, though in that case it may be a
            # transitory effect of the DNS cache. We will insist on
            # the Samba behaviour of not changing (that is
            # un-static-ing) a zero timestamp, because that is the
            # sensible thing.
            if a_days_ago == 0:
                self.windows_variation(
                    self.assert_soon_after, a_rec, now,
                    msg="Windows resets static siblings (cache effect?)")
                self.assert_timestamps_equal(a_rec, 0)
            else:
                self.assert_soon_after(a_rec, now)
        else:
            self.assert_timestamps_equal(a_rec, atime)

        b_rec = self.get_unique_txt_record(name, B)
        self.assert_soon_after(b_rec, now)

    def test_dns_add_sibling_2_7_days_aging(self):
        self._test_dns_add_sibling(2, 7, True)

    def test_dns_add_sibling_2_7_days_no_aging(self):
        self._test_dns_add_sibling(2, 7, False)

    def test_dns_add_sibling_12_7_days_aging(self):
        self._test_dns_add_sibling(12, 7, True)

    def test_dns_add_sibling_12_7_days_no_aging(self):
        self._test_dns_add_sibling(12, 7, False)

    def test_dns_add_sibling_12_3_days_aging(self):
        self._test_dns_add_sibling(12, 3, True)

    def test_dns_add_sibling_12_3_days_no_aging(self):
        self._test_dns_add_sibling(12, 3, False)

    def test_dns_add_sibling_112_7_days_aging(self):
        self._test_dns_add_sibling(112, 7, True)

    def test_dns_add_sibling_112_7_days_no_aging(self):
        self._test_dns_add_sibling(112, 7, False)

    def test_dns_add_sibling_12_113_days_aging(self):
        self._test_dns_add_sibling(12, 113, True)

    def test_dns_add_sibling_12_113_days_no_aging(self):
        self._test_dns_add_sibling(12, 113, False)

    def test_dns_add_sibling_0_7_days_aging(self):
        # 1e9 days ago evaluates to 0, i.e static
        self._test_dns_add_sibling(1e9, 7, True)

    def test_dns_add_sibling_0_7_days_no_aging(self):
        self._test_dns_add_sibling(1e9, 7, False)

    def test_dns_add_sibling_0_0_days_aging(self):
        self._test_dns_add_sibling(1e9, 0, True)

    def test_dns_add_sibling_0_0_days_no_aging(self):
        self._test_dns_add_sibling(1e9, 0, False)

    def test_dns_add_sibling_10_0_days_aging(self):
        self._test_dns_add_sibling(10, 0, True)

    def test_dns_add_sibling_10_0_days_no_aging(self):
        self._test_dns_add_sibling(10, 0, False)

    def test_dns_add_sibling_2_7_days_aging_touch(self):
        self._test_dns_add_sibling(2, 7, True, True)

    def test_dns_add_sibling_2_7_days_no_aging_touch(self):
        self._test_dns_add_sibling(2, 7, False, True)

    def test_dns_add_sibling_12_7_days_aging_touch(self):
        self._test_dns_add_sibling(12, 7, True, True)

    def test_dns_add_sibling_12_7_days_no_aging_touch(self):
        self._test_dns_add_sibling(12, 7, False, True)

    def test_dns_add_sibling_12_3_days_aging_touch(self):
        self._test_dns_add_sibling(12, 3, True, True)

    def test_dns_add_sibling_12_3_days_no_aging_touch(self):
        self._test_dns_add_sibling(12, 3, False, True)

    def test_dns_add_sibling_112_7_days_aging_touch(self):
        self._test_dns_add_sibling(112, 7, True, True)

    def test_dns_add_sibling_112_7_days_no_aging_touch(self):
        self._test_dns_add_sibling(112, 7, False, True)

    def test_dns_add_sibling_12_113_days_aging_touch(self):
        self._test_dns_add_sibling(12, 113, True, True)

    def test_dns_add_sibling_12_113_days_no_aging_touch(self):
        self._test_dns_add_sibling(12, 113, False, True)

    def test_dns_add_sibling_0_7_days_aging_touch(self):
        self._test_dns_add_sibling(1e9, 7, True, True)

    def test_dns_add_sibling_0_7_days_no_aging_touch(self):
        self._test_dns_add_sibling(1e9, 7, False, True)

    def test_dns_add_sibling_0_0_days_aging_touch(self):
        self._test_dns_add_sibling(1e9, 0, True, True)

    def test_dns_add_sibling_0_0_days_no_aging_touch(self):
        self._test_dns_add_sibling(1e9, 0, False, True)

    def test_dns_add_sibling_10_0_days_aging_touch(self):
        self._test_dns_add_sibling(10, 0, True, True)

    def test_dns_add_sibling_10_0_days_no_aging_touch(self):
        self._test_dns_add_sibling(10, 0, False, True)


TestProgram(module=__name__, opts=subunitopts)
gpl-3.0
5,310,065,596,641,548,000
38.975592
84
0.60095
false
3.524765
true
false
false
jnishi/chainer
tests/chainer_tests/test_variable.py
1
97383
import copy import inspect import platform import re import sys import unittest import warnings import mock import numpy as np import pytest import six import chainer from chainer import backend from chainer.backends import cuda from chainer.backends import intel64 import chainer.functions as F from chainer import initializers from chainer import testing from chainer.testing import attr import chainer.testing.backend from chainer import variable import chainerx if chainerx.is_available(): import chainerx.testing class Constant(chainer.Function): def __init__(self, outputs): self.__outputs = outputs def forward_cpu(self, inputs): return self.__outputs def forward_gpu(self, inputs): return tuple(map(cuda.to_gpu, self.__outputs)) def backward_cpu(self, inputs, grad_outputs): return tuple(map(np.zeros_like, inputs)) def backward_gpu(self, inputs, grad_outputs): return tuple(map(cuda.cupy.zeros_like, inputs)) def constant(xs, value): return Constant(value)(*xs) def get_array(xp, arr): if xp is np: return arr if xp is cuda.cupy: return cuda.to_gpu(arr) if xp is chainerx: return chainerx.array(arr) assert False def get_variable(xp, arr): return chainer.Variable(get_array(xp, arr)) class MulAdd(chainer.FunctionNode): def forward(self, inputs): self.retain_inputs((0, 1)) a, b, c = inputs return a * b + c, def backward_accumulate(self, target_input_indexes, grad_outputs, grad_inputs): a, b = self.get_retained_inputs() g, = grad_outputs ret = [] for i, g_in in zip(target_input_indexes, grad_inputs): if i == 0: ret.append( g * b if g_in is None else muladd(g, b, g_in) ) elif i == 1: ret.append( a * g if g_in is None else muladd(a, g, g_in) ) elif i == 2: ret.append( g if g_in is None else g + g_in ) else: assert False return tuple(ret) def muladd(a, b, c): return MulAdd().apply((a, b, c))[0] @testing.parameterize(*( testing.product({ 'var_mapping': [(0, 1, 2)], # distinct 'in0_isvar_hasgrad': [(False, False), (True, False), (True, True)], 'in1_isvar_hasgrad': [(False, False), (True, False), (True, True)], 'in2_isvar_hasgrad': [(False, False), (True, False), (True, True)], }) + testing.product({ 'var_mapping': [ (0, 0, 1), # a == b != c (0, 1, 0), (0, 1, 1), ], 'in0_isvar_hasgrad': [(False, False), (True, False), (True, True)], 'in1_isvar_hasgrad': [(False, False), (True, False), (True, True)], }) + testing.product({ 'var_mapping': [(0, 0, 0)], # a == b == c 'in0_isvar_hasgrad': [(False, False), (True, False), (True, True)], }) )) class TestBackwardAccumulate(unittest.TestCase): shape = 3, def setUp(self): n = max(self.var_mapping) + 1 self.inputs_isvar_hasgrad = [ getattr(self, 'in{}_isvar_hasgrad'.format(i)) for i in range(n)] shape = self.shape self.inputs_data = [ np.random.randn(*shape).astype(np.float32) for _ in range(n)] self.inputs_grad = [ np.random.randn(*shape).astype(np.float32) if hasgrad else None for _, hasgrad in self.inputs_isvar_hasgrad] self.gy = np.random.randn(*shape).astype(np.float32) def _get_inputs(self): copied_data = [x.copy() for x in self.inputs_data] copied_grad = [ None if g is None else g.copy() for g in self.inputs_data] return [ chainer.Variable(x, grad=g) if isvar else x for x, g, (isvar, _) in zip( copied_data, copied_grad, self.inputs_isvar_hasgrad ) ] def check_backward_accumulate(self, xp): inputs = self._get_inputs() a, b, c = [inputs[i] for i in self.var_mapping] y = muladd(a, b, c) y.grad = self.gy y.backward() inputs2 = self._get_inputs() a2, b2, c2 = [inputs2[i] for i in self.var_mapping] y2 = chainer.as_variable(a2 * b2 + c2) y2.grad = self.gy y2.backward() 
tol = {'atol': 1e-4, 'rtol': 1e-4} for x, x2, (isvar, _) in zip( inputs, inputs2, self.inputs_isvar_hasgrad): if isvar: xp.testing.assert_allclose(x.grad, x2.grad, **tol) def test_backward_accumulate_cpu(self): self.check_backward_accumulate(np) def _to_gpu(self): self.inputs_data = [cuda.to_gpu(x) for x in self.inputs_data] self.inputs_grad = [ None if g is None else cuda.to_gpu(g) for g in self.inputs_grad] self.gy = cuda.to_gpu(self.gy) @attr.gpu def test_backward_accumulate_gpu(self): self._to_gpu() self.check_backward_accumulate(cuda.cupy) class TestVariableNode(unittest.TestCase): def test_grad(self): with pytest.raises(ValueError): variable.VariableNode(chainer.Variable(), '', grad=None) @testing.parameterize( {'x_shape': (10,), 'c_shape': (2, 5), 'label': '(2, 5), float32'}, {'x_shape': (), 'c_shape': (1,), 'label': '(1), float32'}, ) class TestVariable(unittest.TestCase): def setUp(self): self.x = np.random.uniform(-1, 1, self.x_shape).astype(np.float32) self.a = np.random.uniform(0.1, 10, self.x_shape).astype(np.float32) self.size = int(np.prod(self.x_shape)) self.c = np.arange(self.size).reshape(self.c_shape).astype(np.float32) @attr.chainerx def test_chainerx_init(self): a = chainerx.asarray(self.x) x = chainer.Variable(a) chainerx.testing.assert_array_equal(x.array, a) def check_attributes(self, xp): a = get_array(xp, self.x) x = chainer.Variable(a) xp.testing.assert_array_equal(x.array, a) assert x.array is x.data assert x.shape == self.x.shape assert x.ndim == self.x.ndim assert x.size == self.x.size assert x.dtype == self.x.dtype assert x.requires_grad @attr.chainerx def test_attributes_chainerx(self): self.check_attributes(chainerx) def test_attributes_cpu(self): self.check_attributes(np) @attr.gpu def test_attributes_gpu(self): self.check_attributes(cuda.cupy) def test_uninitialized(self): a = chainer.Variable(None) assert a.xp is np def check_grad(self, xp, x, g): v = chainer.Variable(x) v.grad = g xp.testing.assert_array_equal(v.grad, g) def test_grad_cpu(self): self.check_grad(np, self.x, self.a) @attr.gpu def test_grad_gpu(self): self.check_grad(cuda.cupy, cuda.to_gpu(self.x), cuda.to_gpu(self.a)) @attr.chainerx def test_grad_chainerx(self): self.check_grad( chainerx, chainerx.array(self.x), chainerx.array(self.a)) def check_grad_var(self, xp, x, g): v = chainer.Variable(x) gv = chainer.Variable(g) v.grad_var = gv xp.testing.assert_array_equal(v.grad, g) # Same instance should be returned each time. 
assert v.grad_var is gv def test_grad_var_cpu(self): self.check_grad_var(np, self.x, self.a) @attr.gpu def test_grad_var_gpu(self): self.check_grad_var( cuda.cupy, cuda.to_gpu(self.x), cuda.to_gpu(self.a)) @attr.chainerx def test_grad_var_chainerx(self): self.check_grad_var( chainerx, chainerx.array(self.x), chainerx.array(self.a)) def check_len(self, a): x = chainer.Variable(a) if x.ndim == 0: pytest.raises(TypeError, x.__len__) else: assert len(x) == self.x_shape[0] def test_len_cpu(self): self.check_len(self.x) @attr.gpu def test_len_gpu(self): self.check_len(cuda.to_gpu(self.x)) @attr.chainerx def test_len_chainerx(self): self.check_len(chainerx.array(self.x)) def check_get_item(self, a): x = chainer.Variable(a) if len(self.x_shape) > 0: slices = slice(2, 5) np.testing.assert_equal(backend.CpuDevice().send(x[slices].data), backend.CpuDevice().send(self.x[slices])) slices = slice(2, 5), np.testing.assert_equal(backend.CpuDevice().send(x[slices].data), backend.CpuDevice().send(self.x[slices])) def test_get_item_cpu(self): self.check_get_item(self.x) @attr.gpu def test_get_item_gpu(self): self.check_get_item(cuda.to_gpu(self.x)) def check_label(self, expected, c): c = chainer.Variable(c) assert c.label == expected def test_label_cpu(self): self.check_label(self.label, self.c) @attr.gpu def test_label_gpu(self): self.check_label(self.label, cuda.to_gpu(self.c)) def check_backward(self, inputs, intermediates, outputs, retain_grad): for o in outputs: o.backward(retain_grad) assert all([x.grad_var is not None for x in inputs]) if retain_grad: assert all([x.grad_var is not None for x in intermediates]) else: assert all([x.grad_var is None for x in intermediates]) assert any([x.grad_var is not None for x in outputs]) # length is number of edges. So, # of Variables created is length+1 def create_linear_chain(self, length, xp): x = get_variable(xp, self.x) ret = [x] for i in six.moves.range(length): ret.append(constant((ret[i], ), (self.a, ))) if xp is cuda.cupy: ret[-1].grad = cuda.cupy.zeros_like(ret[-1].data) elif xp is np: ret[-1].grad = np.zeros_like(ret[-1].data) else: assert False return ret def test_backward_cpu(self): ret = self.create_linear_chain(2, np) self.check_backward((ret[0], ), (ret[1], ), (ret[2], ), False) @attr.gpu def test_backward_gpu(self): ret = self.create_linear_chain(2, np) self.check_backward((ret[0], ), (ret[1], ), (ret[2], ), False) @attr.chainerx def test_backward_chainerx(self): ret = self.create_linear_chain(2, np) self.check_backward((ret[0], ), (ret[1], ), (ret[2], ), False) def check_backward_accumulate(self, xp): x = get_variable(xp, self.x) y = x * x y.grad = xp.zeros_like(y.data) y.backward() assert x.grad_var.shape == self.x_shape def test_backward_accumulate_cpu(self): self.check_backward_accumulate(np) @attr.gpu def test_backward_accumulate_gpu(self): self.check_backward_accumulate(cuda.cupy) @attr.chainerx def test_backward_accumulate_chainerx(self): self.check_backward_accumulate(chainerx) def test_backward_cpu_retain_grad(self): ret = self.create_linear_chain(2, np) self.check_backward((ret[0], ), (ret[1], ), (ret[2], ), True) @attr.gpu def test_backward_gpu_retain_grad(self): ret = self.create_linear_chain(2, cuda.cupy) self.check_backward((ret[0], ), (ret[1], ), (ret[2], ), True) def check_double_backprop(self, xp): x = get_variable(xp, self.x) x.grad_var = None y = x * x * x y.grad = xp.ones_like(y.data) y.backward(enable_double_backprop=True) gx = x.grad_var x.grad_var = None # clear grad gx.grad = xp.ones_like(x.data) gx.backward() expect = 6 * x 
testing.assert_allclose(x.grad_var.data, expect.data) def test_double_backprop_cpu(self): self.check_double_backprop(np) @attr.gpu def test_double_backprop_gpu(self): self.check_double_backprop(cuda.cupy) @attr.chainerx def test_double_backprop_chainerx(self): self.check_double_backprop(chainerx) def test_backward_no_grad_required(self): class DummyId(chainer.functions.math.identity.Identity): def backward(self, a, b): raise Exception('backward should not be called on inputs that ' 'do not require grads') x = chainer.Variable(self.x) y1, y2 = DummyId().apply((x, x)) x.node._requires_grad = False y1.backward() def test_unchain(self): ret = self.create_linear_chain(3, np) old_rank = ret[1].rank ret[1].unchain() assert ret[1].creator is None assert ret[1].rank == old_rank self.check_backward((ret[1],), (ret[2],), (ret[3],), False) def check_set_none_to_creator(self, use_creator_node): ret = self.create_linear_chain(3, np) old_rank = ret[1].rank if use_creator_node: ret[1].creator_node = None else: ret[1].creator = None assert ret[1].creator is None assert ret[1].creator_node is None assert ret[1].rank == old_rank self.check_backward((ret[1],), (ret[2],), (ret[3],), False) def test_set_none_to_creator(self): self.check_set_none_to_creator(False) def test_set_none_to_creator_node(self): self.check_set_none_to_creator(True) def test_set_none_and_original_to_creator(self): ret = self.create_linear_chain(2, np) old_rank = ret[1].rank creator_node = ret[1].creator_node ret[1].creator = None assert ret[1].creator is None assert ret[1].rank == old_rank ret[1].node._rank = -1 ret[1].creator_node = creator_node assert ret[1].creator_node is creator_node assert ret[1].rank == creator_node.rank + 1 self.check_backward((ret[0],), (ret[1],), (ret[2],), False) def test_set_fresh_creator(self): v = chainer.Variable() f = chainer.Function() v.creator = f assert v.creator is f assert v.creator_node is f.node assert v.rank == 1 def test_set_fresh_creator_node(self): v = chainer.Variable() f = chainer.FunctionNode() v.creator_node = f assert v.creator is f assert v.creator_node is f assert v.rank == 1 def test_unchain_backward_cpu(self): ret = self.create_linear_chain(3, np) ret[1].unchain_backward() self.check_backward((ret[1], ), (ret[2], ), (ret[3], ), False) @attr.gpu def test_unchain_backward_gpu(self): ret = self.create_linear_chain(3, cuda.cupy) ret[1].unchain_backward() self.check_backward((ret[1], ), (ret[2], ), (ret[3], ), False) def test_unchain_backward_cpu_retain_grad(self): ret = self.create_linear_chain(3, np) ret[1].unchain_backward() self.check_backward((ret[1], ), (ret[2], ), (ret[3], ), False) @attr.gpu def test_unchain_backward_gpu_retain_grad(self): ret = self.create_linear_chain(3, np) ret[1].unchain_backward() self.check_backward((ret[1], ), (ret[2], ), (ret[3], ), False) def test_invalid_value_type(self): with six.assertRaisesRegex(self, TypeError, 'int'): chainer.Variable(1) def test_grad_type_check_pass(self): a = chainer.Variable(np.empty((3,), dtype=np.float32)) a.grad = np.ndarray((3,), dtype=np.float32) def test_grad_type_check_pass_type(self): a = chainer.Variable(np.empty((), dtype=np.float32)) with pytest.raises(TypeError): a.grad = np.float32() @attr.gpu def test_grad_type_check_type_cpu_gpu_mixture(self): a = chainer.Variable(np.empty((3,), dtype=np.float32)) with pytest.raises(TypeError): a.grad = cuda.cupy.empty((3,), dtype=np.float32) def test_grad_type_check_dtype(self): a = chainer.Variable(np.empty((3,), dtype=np.float32)) with pytest.raises(TypeError): a.grad = 
np.empty((3,), dtype=np.float64) def test_grad_type_check_shape(self): a = chainer.Variable(np.empty((3,), dtype=np.float32)) with pytest.raises(ValueError): a.grad = np.empty((2,), dtype=np.float32) def check_cleargrad(self, a_data, fill=False): xp = backend.get_array_module(a_data) a = chainer.Variable(a_data) if fill: a.grad = xp.full_like(a_data, np.nan) a.cleargrad() assert a.grad is None def test_cleargrad_cpu(self): self.check_cleargrad(np.empty(3, dtype=np.float32)) def test_cleargrad_fill_cpu(self): self.check_cleargrad(np.empty(3, dtype=np.float32), fill=True) @attr.gpu def test_cleargrad_gpu(self): self.check_cleargrad(cuda.cupy.empty(3, dtype=np.float32)) @attr.gpu def test_cleargrad_fill_gpu(self): self.check_cleargrad(cuda.cupy.empty(3, dtype=np.float32), fill=True) @attr.chainerx def test_cleargrad_chainerx(self): # TODO(hvy): Simplify to chainerx.empty(int, ...) when supported. self.check_cleargrad(chainerx.empty((3,), dtype=np.float32)) @attr.chainerx def test_cleargrad_fill_chainerx(self): # TODO(hvy): Simplify to chainerx.empty(int, ...) when supported. self.check_cleargrad(chainerx.empty((3,), dtype=np.float32), fill=True) def check_zerograd(self, a_data, fill=False, grad_var_requires_grad=True, expect_error=False): xp = backend.get_array_module(a_data) a = chainer.Variable(a_data) if fill: a.grad_var = chainer.Variable(xp.full_like(a_data, np.nan), requires_grad=grad_var_requires_grad) if xp is not chainerx: a.grad_var.creator_node = chainer.FunctionNode() with testing.assert_warns(DeprecationWarning): if expect_error: with pytest.raises(Exception): a.zerograd() return a.zerograd() assert a.grad is not None if fill and xp is not chainerx: assert a.grad_var.creator_node is None xp.testing.assert_array_equal(a.grad, xp.zeros_like(a.grad)) def test_zerograd_cpu(self): self.check_zerograd(np.empty(3, dtype=np.float32)) def test_zerograd_fill_cpu(self): self.check_zerograd(np.empty(3, dtype=np.float32), fill=True) @attr.multi_gpu(2) def test_zerograds_multi_gpu(self): cupy = cuda.cupy with cuda.get_device_from_id(1): a = chainer.Variable(cupy.empty(3, dtype=np.float32)) with testing.assert_warns(DeprecationWarning): a.zerograd() assert a.grad is not None assert int(a.grad.device) == 1 with cuda.get_device_from_id(1): g_expect = cupy.zeros_like(a.data) cupy.testing.assert_array_equal(a.grad, g_expect) @attr.multi_gpu(2) def test_zerograds_fill_multi_gpu(self): cupy = cuda.cupy with cuda.get_device_from_id(1): a = chainer.Variable(cupy.empty(3, dtype=np.float32)) a.grad = cupy.empty_like(a.data) with testing.assert_warns(DeprecationWarning): a.zerograd() assert int(a.grad.device) == 1 with cuda.get_device_from_id(1): g_expect = cupy.zeros_like(a.data) cupy.testing.assert_array_equal(a.grad, g_expect) @attr.gpu def test_zerograd_gpu(self): self.check_zerograd(cuda.cupy.empty(3, dtype=np.float32)) @attr.gpu def test_zerograd_fill_gpu(self): self.check_zerograd(cuda.cupy.empty(3, dtype=np.float32), fill=True) @attr.chainerx def test_zerograd_chainerx(self): # TODO(hvy): Simplify to chainerx.empty(int, ...) when supported. self.check_zerograd(chainerx.empty((3,), dtype=np.float32)) @attr.chainerx def test_zerograd_fill_chainerx(self): # TODO(hvy): Simplify to chainerx.empty(int, ...) when supported. self.check_zerograd(chainerx.empty((3,), dtype=np.float32), fill=True, grad_var_requires_grad=False) @attr.chainerx def test_zerograd_fill_chainerx_requiring_grad(self): # TODO(hvy): Simplify to chainerx.empty(int, ...) when supported. 
self.check_zerograd(chainerx.empty((3,), dtype=np.float32), fill=True, grad_var_requires_grad=True, expect_error=True) def check_copydata(self, data1, data2, expect): xp = backend.get_array_module(data1) v = chainer.Variable(data1) w = chainer.Variable(data2) v.copydata(w) xp.testing.assert_array_equal(v.data, expect) def test_copydata_cpu_to_cpu(self): self.check_copydata(np.zeros(3, dtype=np.float32), np.ones(3, dtype=np.float32), np.ones(3, dtype=np.float32)) @attr.gpu def test_copydata_cpu_to_gpu(self): cp = cuda.cupy self.check_copydata(cp.zeros(3, dtype=np.float32), np.ones(3, dtype=np.float32), cp.ones(3, dtype=np.float32)) @attr.ideep def test_copydata_cpu_to_ideep(self): self.check_copydata(intel64.ideep.array(np.zeros(3, dtype=np.float32)), np.ones(3, dtype=np.float32), np.ones(3, dtype=np.float32)) @attr.gpu def test_copydata_gpu_to_gpu(self): cp = cuda.cupy self.check_copydata(cp.zeros(3, dtype=np.float32), cp.ones(3, dtype=np.float32), cp.ones(3, dtype=np.float32)) @attr.gpu def test_copydata_gpu_to_cpu(self): cp = cuda.cupy self.check_copydata(np.zeros(3, dtype=np.float32), cp.ones(3, dtype=np.float32), np.ones(3, dtype=np.float32)) @attr.ideep def test_copydata_ideep_to_cpu(self): self.check_copydata(np.zeros(3, dtype=np.float32), intel64.ideep.array(np.ones(3, dtype=np.float32)), np.ones(3, dtype=np.float32)) @attr.multi_gpu(2) def test_copydata_gpu_to_another_gpu(self): cp = cuda.cupy with cuda.get_device_from_id(0): data1 = cp.zeros(3, dtype=np.float32) expect = cp.ones(3, dtype=np.float32) with cuda.get_device_from_id(1): data2 = cp.ones(3, dtype=np.float32) self.check_copydata(data1, data2, expect) def check_addgrad(self, src, dst, expect, clear_src_grad=False, clear_dst_grad=False): xp = backend.get_array_module(dst) a = chainer.Variable(src) a.grad = src b = chainer.Variable(dst) b.grad = dst if clear_src_grad: a.cleargrad() if clear_dst_grad: b.cleargrad() b.addgrad(a) xp.testing.assert_array_equal(b.grad, expect) assert cuda.get_device_from_array(b.data) \ == cuda.get_device_from_array(b.grad) def test_addgrad_cpu_to_cpu(self): self.check_addgrad(np.full(3, 10, dtype=np.float32), np.full(3, 20, dtype=np.float32), np.full(3, 30, dtype=np.float32)) @attr.gpu def test_addgrad_cpu_to_gpu(self): cp = cuda.cupy self.check_addgrad(np.full(3, 10, dtype=np.float32), cp.full(3, 20, dtype=np.float32), cp.full(3, 30, dtype=np.float32)) @attr.gpu def test_addgrad_gpu_to_gpu(self): cp = cuda.cupy self.check_addgrad(cp.full(3, 10, dtype=np.float32), cp.full(3, 20, dtype=np.float32), cp.full(3, 30, dtype=np.float32)) @attr.gpu def test_addgrad_gpu_to_cpu(self): cp = cuda.cupy self.check_addgrad(cp.full(3, 10, dtype=np.float32), np.full(3, 20, dtype=np.float32), np.full(3, 30, dtype=np.float32)) @attr.multi_gpu(2) def test_addgrad_gpu_to_gpu_multi(self): cp = cuda.cupy with cuda.get_device_from_id(1): a = cp.full(3, 10, dtype=np.float32) b = cp.full(3, 20, dtype=np.float32) c = cp.full(3, 30, dtype=np.float32) with cuda.get_device_from_id(0): self.check_addgrad(a, b, c) @attr.multi_gpu(2) def test_addgrad_gpu_to_another_gpu(self): cp = cuda.cupy with cuda.get_device_from_id(1): a = cp.full(3, 10, dtype=np.float32) with cuda.get_device_from_id(0): b = cp.full(3, 20, dtype=np.float32) c = cp.full(3, 30, dtype=np.float32) self.check_addgrad(a, b, c) def test_addgrad_cpu_to_cpu_none_src(self): self.check_addgrad(np.full(3, 10, dtype=np.float32), np.full(3, 20, dtype=np.float32), np.full(3, 20, dtype=np.float32), clear_src_grad=True) @attr.gpu def test_addgrad_gpu_to_gpu_none_src(self): 
cp = cuda.cupy self.check_addgrad(cp.full(3, 10, dtype=np.float32), cp.full(3, 20, dtype=np.float32), cp.full(3, 20, dtype=np.float32), clear_src_grad=True) @attr.multi_gpu(2) def test_addgrad_gpu_to_another_gpu_none_src_dev0(self): cp = cuda.cupy with cuda.get_device_from_id(1): a = cp.full(3, 10, dtype=np.float32) with cuda.get_device_from_id(0): b = cp.full(3, 20, dtype=np.float32) c = cp.full(3, 20, dtype=np.float32) with cuda.get_device_from_id(0): self.check_addgrad(a, b, c, clear_src_grad=True) @attr.multi_gpu(2) def test_addgrad_gpu_to_another_gpu_none_src_dev1(self): cp = cuda.cupy with cuda.get_device_from_id(1): a = cp.full(3, 10, dtype=np.float32) with cuda.get_device_from_id(0): b = cp.full(3, 20, dtype=np.float32) c = cp.full(3, 20, dtype=np.float32) with cuda.get_device_from_id(1): self.check_addgrad(a, b, c, clear_src_grad=True) def test_addgrad_cpu_to_cpu_none_dst(self): self.check_addgrad(np.full(3, 20, dtype=np.float32), np.full(3, 10, dtype=np.float32), np.full(3, 20, dtype=np.float32), clear_dst_grad=True) @attr.gpu def test_addgrad_gpu_to_gpu_none_dst(self): cp = cuda.cupy self.check_addgrad(cp.full(3, 20, dtype=np.float32), cp.full(3, 10, dtype=np.float32), cp.full(3, 20, dtype=np.float32), clear_dst_grad=True) @attr.multi_gpu(2) def test_addgrad_gpu_to_another_gpu_none_dst_dev0(self): cp = cuda.cupy with cuda.get_device_from_id(1): a = cp.full(3, 20, dtype=np.float32) with cuda.get_device_from_id(0): b = cp.full(3, 10, dtype=np.float32) c = cp.full(3, 20, dtype=np.float32) with cuda.get_device_from_id(0): self.check_addgrad(a, b, c, clear_dst_grad=True) @attr.multi_gpu(2) def test_addgrad_gpu_to_another_gpu_none_dst_dev1(self): cp = cuda.cupy with cuda.get_device_from_id(1): a = cp.full(3, 20, dtype=np.float32) with cuda.get_device_from_id(0): b = cp.full(3, 10, dtype=np.float32) c = cp.full(3, 20, dtype=np.float32) with cuda.get_device_from_id(1): self.check_addgrad(a, b, c, clear_dst_grad=True) def test_addgrad_none_src_dst(self): x = chainer.Variable(self.x) y = chainer.Variable(self.x) y.addgrad(x) assert y.grad is None def test_pickle_cpu(self): x = chainer.Variable(self.x) x.grad = np.ones_like(x.data) binary = six.moves.cPickle.dumps(x) d = six.moves.cPickle.loads(binary) np.testing.assert_array_equal(x.data, d.data) np.testing.assert_array_equal(x.grad, d.grad) @attr.gpu def test_pickle_gpu(self): cp = cuda.cupy x = chainer.Variable(self.x) x.grad = np.ones_like(x.data) x.to_gpu() binary = six.moves.cPickle.dumps(x) d = six.moves.cPickle.loads(binary) cp.testing.assert_array_equal(x.data, d.data) cp.testing.assert_array_equal(x.grad, d.grad) @testing.parameterize( {'array_require_grad': False, 'requires_grad': 'default', 'expected': True}, {'array_require_grad': False, 'requires_grad': False, 'expected': False}, {'array_require_grad': False, 'requires_grad': True, 'expected': True}, {'array_require_grad': True, 'requires_grad': 'default', 'expected': True}, {'array_require_grad': True, 'requires_grad': False, 'expected': 'raise'}, {'array_require_grad': True, 'requires_grad': True, 'expected': True}, ) @attr.chainerx class TestVariableChainerXInitRequiresGrad(unittest.TestCase): def test_chainerx_init_requires_grad(self): x = chainerx.ones((2,), dtype=np.float32) if self.array_require_grad: x.require_grad() def v(): if self.requires_grad == 'default': return chainer.Variable(x) else: return chainer.Variable(x, requires_grad=self.requires_grad) if self.expected == 'raise': with pytest.raises(ValueError): v() else: assert v().requires_grad is self.expected 
@testing.parameterize( {'x_shape': (10,)}, {'x_shape': ()}, ) class TestVariableToCpu(unittest.TestCase): def setUp(self): self.x = np.zeros(self.x_shape, dtype=np.float32) self.gx = np.ones_like(self.x) def check_to_cpu(self, x, gx, requires_grad=True): x_var = chainer.Variable(x, requires_grad=requires_grad) set_grad_var = requires_grad or not isinstance(x, chainerx.ndarray) if set_grad_var: x_var.grad_var = chainer.Variable(gx, requires_grad=requires_grad) x_var.to_cpu() assert x_var.xp is np assert x_var.node is not None assert isinstance(x_var.data, np.ndarray) assert x.shape == x_var.shape assert x.dtype == x_var.dtype np.testing.assert_array_equal( backend.CpuDevice().send(x_var.data), backend.CpuDevice().send(x)) if set_grad_var: assert isinstance(x_var.grad, np.ndarray) assert gx.shape == x_var.grad.shape assert gx.dtype == x_var.grad.dtype np.testing.assert_array_equal( backend.CpuDevice().send(x_var.grad), backend.CpuDevice().send(gx)) assert x_var.grad_var is not None assert x_var.grad_var.node is not None else: assert x_var.grad is None assert x_var.grad_var is None orig_xp = backend.get_array_module(x, gx) if orig_xp is np: assert x_var.data is x assert x_var.grad is gx else: assert x_var.data is not x assert not set_grad_var or x_var.grad is not gx assert x_var.xp is not chainerx def test_to_cpu_from_cpu(self): self.check_to_cpu(self.x, self.gx) @attr.gpu def test_to_cpu_from_gpu(self): self.check_to_cpu(cuda.to_gpu(self.x), cuda.to_gpu(self.gx)) @attr.chainerx def test_to_cpu_from_chainerx(self): self.check_to_cpu( chainerx.array(self.x), chainerx.array(self.gx), requires_grad=False) @attr.chainerx def test_to_cpu_from_chainerx_requiring_grad(self): with self.assertRaises(RuntimeError): self.check_to_cpu( chainerx.array(self.x), chainerx.array(self.gx), requires_grad=True) @testing.parameterize( {'x_shape': (10,)}, {'x_shape': ()}, ) @attr.gpu class TestVariableToGpu(unittest.TestCase): def setUp(self): self.x = np.zeros(self.x_shape, dtype=np.float32) self.gx = np.ones_like(self.x) def check_to_gpu(self, x, gx, device=None, requires_grad=True): x_var = chainer.Variable(x, requires_grad=requires_grad) set_grad_var = requires_grad or not isinstance(x, chainerx.ndarray) if set_grad_var: x_var.grad_var = chainer.Variable(gx, requires_grad=requires_grad) x_var.to_gpu(device) assert x_var.xp is cuda.cupy assert x_var.node is not None assert isinstance(x_var.data, cuda.cupy.ndarray) assert x.shape == x_var.shape assert x.dtype == x_var.dtype device = cuda.Device(device) assert cuda.get_device_from_array(x_var.data) == device np.testing.assert_array_equal( backend.CpuDevice().send(x_var.data), backend.CpuDevice().send(x)) if set_grad_var: assert isinstance(x_var.grad, cuda.cupy.ndarray) assert gx.shape == x_var.grad.shape assert gx.dtype == x_var.grad.dtype assert cuda.get_device_from_array(x_var.grad) == device np.testing.assert_array_equal( backend.CpuDevice().send(x_var.grad), backend.CpuDevice().send(gx)) assert x_var.grad_var is not None assert x_var.grad_var.node is not None else: assert x_var.grad is None assert x_var.grad_var is None orig_xp = backend.get_array_module(x, gx) orig_device = cuda.get_device_from_array(x) if orig_xp is cuda.cupy and orig_device == device: assert x_var.data is x assert x_var.grad is gx else: assert x_var.data is not x assert not set_grad_var or x_var.grad is not gx assert x_var.xp is not chainerx def test_to_gpu_from_cpu(self): self.check_to_gpu(self.x, self.gx) def test_to_gpu_from_gpu(self): self.check_to_gpu(cuda.to_gpu(self.x), 
cuda.to_gpu(self.gx)) @attr.multi_gpu(2) def test_to_gpu_from_another_gpu(self): self.check_to_gpu(cuda.to_gpu(self.x), cuda.to_gpu(self.gx), 1) @attr.chainerx def test_to_gpu_from_chainerx(self): self.check_to_gpu( chainerx.array(self.x), chainerx.array(self.gx), requires_grad=False) @attr.chainerx def test_to_gpu_from_chainerx_requiring_grad(self): with self.assertRaises(RuntimeError): self.check_to_gpu( chainerx.array(self.x), chainerx.array(self.gx), requires_grad=True) @testing.parameterize( {'x_shape': (10,)}, {'x_shape': ()}, ) @attr.chainerx class TestVariableToChainerX(unittest.TestCase): def setUp(self): self.x = np.zeros(self.x_shape, dtype=np.float32) self.gx = np.ones_like(self.x) def infer_expected_device(self, *arrays): xp = backend.get_array_module(*arrays) if xp is np: return chainerx.get_device('native', 0) elif xp is cuda.cupy: return chainerx.get_device('cuda', arrays[0].device.id) elif xp is chainerx: return arrays[0].device assert False def check_to_chainerx(self, x, gx, requires_grad=True): x_var = chainer.Variable(x, requires_grad=requires_grad) x_var.grad_var = chainer.Variable(gx, requires_grad=requires_grad) x_var.to_chainerx() expected_device = self.infer_expected_device(x, gx) assert x_var.xp is chainerx with pytest.raises(RuntimeError): x_var.node assert isinstance(x_var.array, chainerx.ndarray) assert x.shape == x_var.shape assert x.dtype == x_var.dtype assert x_var.data.device is expected_device np.testing.assert_array_equal( backend.CpuDevice().send(x_var.data), backend.CpuDevice().send(x)) if requires_grad: assert isinstance(x_var.grad, chainerx.ndarray) assert gx.shape == x_var.grad.shape assert gx.dtype == x_var.grad.dtype assert x_var.grad.device is expected_device np.testing.assert_array_equal( backend.CpuDevice().send(x_var.grad), backend.CpuDevice().send(gx)) assert x_var.grad_var is not None with pytest.raises(RuntimeError): x_var.grad_var.node else: assert x_var.grad is None assert x_var.grad_var is None assert x_var.xp is chainerx def test_to_chainerx_from_numpy(self): self.check_to_chainerx(self.x, self.gx) @attr.gpu def test_to_chainerx_from_cupy(self): self.check_to_chainerx(cuda.to_gpu(self.x), cuda.to_gpu(self.gx)) # TODO(hvy): Write test when implemented. 
@attr.ideep def test_ideep_to_chainerx(self): raise unittest.SkipTest('Not yet supported') def test_to_chainerx_from_chainerx(self): self.check_to_chainerx( chainerx.array(self.x), chainerx.array(self.gx)) def test_to_chainerx_from_another_device(self): self.check_to_chainerx( chainerx.array(self.x), chainerx.array(self.gx)) def test_to_chainerx_not_requiring_grad(self): self.check_to_chainerx(self.x, self.gx, requires_grad=False) def test_to_chainerx_with_creator(self): x = chainer.Variable(self.x) y = x * x with self.assertRaises(RuntimeError): y.to_chainerx() @testing.parameterize( {'x_shape': (10,)}, {'x_shape': ()}, ) @chainer.testing.backend.inject_backend_tests( ['test_from_chainerx'], [ # NumPy {}, # CuPy {'use_cuda': True, 'cuda_device': 0}, {'use_cuda': True, 'cuda_device': 1}, # ChainerX {'use_chainerx': True, 'chainerx_device': 'native:0'}, {'use_chainerx': True, 'chainerx_device': 'cuda:0'}, {'use_chainerx': True, 'chainerx_device': 'cuda:1'}, ]) @attr.chainerx class TestVariableFromChainerX(unittest.TestCase): def setUp(self): self.x = chainerx.zeros(self.x_shape, dtype=np.float32) def infer_expected_xp_and_device(self, x): xp = backend.get_array_module(x) if xp is np: return xp, None elif xp is cuda.cupy: return xp, x.device elif xp is chainerx: backend_name = x.device.backend.name if backend_name == 'native': return np, None elif backend_name == 'cuda': return cuda.cupy, cuda.cupy.cuda.Device(x.device.index) assert False def test_from_chainerx(self, backend_config): x = backend_config.get_array(self.x) x_var = chainer.Variable(x, requires_grad=False) x_var.from_chainerx() expected_xp, expected_device = self.infer_expected_xp_and_device(x) assert x_var.xp is expected_xp assert x_var.node is not None assert isinstance(x_var.array, expected_xp.ndarray) assert expected_device is None or x_var.array.device == expected_device assert x.shape == x_var.shape assert x.dtype == x_var.dtype assert x_var.grad is None assert x_var.grad_var is None np.testing.assert_array_equal( backend.CpuDevice().send(x_var.array), backend.CpuDevice().send(x)) def test_invalid_from_chainerx_requires_grad(self): x = chainer.Variable(self.x, requires_grad=True) with self.assertRaises(RuntimeError): x.from_chainerx() @testing.parameterize( {'x_shape': (10,)}, {'x_shape': ()}, ) @attr.chainerx class TestVariableToDevice(unittest.TestCase): def setUp(self): self.x = np.zeros(self.x_shape, dtype=np.float32) self.gx = np.ones_like(self.x) def check_to_device(self, x, gx, device_spec, expected_xp): x_var = chainer.Variable(x) x_var.grad_var = chainer.Variable(gx) x_var.to_device(device_spec) assert x_var.xp is expected_xp assert x_var.grad_var.xp is expected_xp def test_to_device_numpy(self): self.check_to_device(self.x, self.gx, np, np) @attr.gpu def test_to_device_cupy(self): self.check_to_device(self.x, self.gx, (cuda.cupy, 0), cuda.cupy) @attr.chainerx def test_to_device_chainerx(self): self.check_to_device(self.x, self.gx, 'native:0', chainerx) _to_device_twice_backend_params = [ # NumPy {}, # CuPy {'use_cuda': True, 'cuda_device': 0}, {'use_cuda': True, 'cuda_device': 1}, # ChainerX {'use_chainerx': True, 'chainerx_device': 'native:0'}, {'use_chainerx': True, 'chainerx_device': 'cuda:0'}, {'use_chainerx': True, 'chainerx_device': 'cuda:1'}, ] @testing.parameterize(*testing.product( { 'x_shape': [(10,), (), None], 'requires_grad': [True, False], })) @testing.backend.inject_backend_tests(None, _to_device_twice_backend_params) @testing.backend.inject_backend_tests(None, _to_device_twice_backend_params) class 
TestVariableToDeviceTwice(unittest.TestCase): def setUp(self): if self.x_shape is None: self.x = None else: self.x = np.zeros(self.x_shape, dtype=np.float32) def test_to_device_twice(self, backend_config1, backend_config2): device1 = backend_config1.device device2 = backend_config2.device var = chainer.Variable(self.x, requires_grad=self.requires_grad) # Transfer to device 1 var.to_device(device1) # Transfer to device 2 should_fail = ( self.requires_grad and self.x is not None and device1.xp is chainerx and device2.xp is not chainerx) if should_fail: # Non-ChainerX device to ChainerX device should fail if # requires_grad with pytest.raises(RuntimeError): var.to_device(device2) else: # Should succeed var.to_device(device2) assert var.requires_grad == self.requires_grad if self.x is None: assert var.array is None assert var.data is None else: assert isinstance(var.array, device2.xp.ndarray) assert backend.get_device_from_array(var.array) == device2 np.testing.assert_array_equal( self.x, backend.CpuDevice().send(var.array)) class TestVariableBasic(unittest.TestCase): def test_unhashable(self): a = chainer.Variable(np.ones((2,))) with six.assertRaisesRegex(self, TypeError, '^unhashable type: '): hash(a) def test_unequatable(self): a = chainer.Variable(np.ones((2,))) b = chainer.Variable(np.ones((2,))) with pytest.raises(NotImplementedError): a == b with pytest.raises(NotImplementedError): a == a with pytest.raises(NotImplementedError): a != b with pytest.raises(NotImplementedError): a != a def test_uncomparable(self): a = chainer.Variable(np.ones((2,))) b = chainer.Variable(np.ones((2,))) with pytest.raises(NotImplementedError): a < b with pytest.raises(NotImplementedError): a <= b with pytest.raises(NotImplementedError): a > b with pytest.raises(NotImplementedError): a >= b def test_bool_inconvertible(self): a = chainer.Variable(np.ones((2,))) with pytest.raises(NotImplementedError): if a: pass with pytest.raises(NotImplementedError): if not a: pass class TestVariableDataAssign(unittest.TestCase): def test_variable_data_assign(self): x = chainer.Variable(np.ones((3, 2), np.float32)) chainer.functions.sin(x) x.data = np.ones((2, 4), np.float64) assert x.data.shape == (2, 4) assert x.data.dtype == np.float64 assert x.shape == (2, 4) assert x.dtype == np.float64 assert x.node.shape == (2, 4) assert x.node.dtype == np.float64 assert x.node.data.shape == (2, 4) assert x.node.data.dtype == np.float64 @attr.gpu def test_to_gpu(self): x = chainer.Variable(np.ones((3, 2), np.float32)) chainer.functions.sin(x) x.to_gpu() assert x.data is x.node.data x.to_cpu() assert x.data is x.node.data @attr.ideep def test_to_intel64(self): x = chainer.Variable(np.ones((3, 2), np.float32)) chainer.functions.sin(x) x.to_intel64() assert x.data is x.node.data x.to_cpu() assert x.data is x.node.data class TestParameter(unittest.TestCase): def setUp(self): self.a = np.random.rand(3, 2).astype(np.float32) def test_initializer(self): x = chainer.Parameter(shape=(1,)) assert x.initializer is not None def test_initialize_by_scalar(self): x = chainer.Parameter(2., (3,)) np.testing.assert_array_equal(x.data, np.array([2., 2., 2.])) def test_initialize_by_initializer(self): x = chainer.Parameter(initializers.One(), (3,)) np.testing.assert_array_equal( x.data, np.array([1., 1., 1.], dtype='f')) def test_initialize_by_none(self): x = chainer.Parameter(None, (3,)) np.testing.assert_array_equal( x.data, np.full((3,), np.nan, dtype='f')) def test_initialize_by_array(self): data = np.array([1., 2., 3.], dtype='f') x = 
chainer.Parameter(data) assert x.data is data @attr.gpu def test_initialize_by_cupy_array(self): data = cuda.cupy.array([1., 2., 3.], dtype='f') x = chainer.Parameter(data, (3,)) assert isinstance(x.data, cuda.cupy.ndarray) cuda.cupy.testing.assert_array_equal(x.data, data) @attr.chainerx def test_initialize_by_chainerx_array(self): data = chainerx.array([1., 2., 3.], dtype='f') x = chainer.Parameter(data) assert isinstance(x.data, chainerx.ndarray) chainerx.testing.assert_array_equal(x.data, data) def test_update_rule(self): update_rule = mock.MagicMock() g = self.a.copy() x = chainer.Parameter(self.a) x.grad = g x.update_rule = update_rule x.update() assert update_rule.update.call_count == 1 assert update_rule.update.call_args_list[0] == [(x,), {}] def test_update_rule_without_grad(self): update_rule = mock.MagicMock() x = chainer.Parameter(self.a) x.update_rule = update_rule x.update() assert update_rule.update.call_count == 1 @testing.parameterize( {'x_shape': (10,)}, {'x_shape': ()}, ) class TestParameterToDevice(unittest.TestCase): def check_to_device(self, x, device_spec, expected_xp): assert isinstance(x, chainer.Parameter) x.to_device(device_spec) assert x.xp is expected_xp def check_initializer(self, shape, device_spec, expected_xp): x = chainer.Parameter(shape=shape) self.check_to_device(x, device_spec, expected_xp) def check_initialize_by_scalar(self, shape, device_spec, expected_xp): x = chainer.Parameter(2., shape) self.check_to_device(x, device_spec, expected_xp) def check_initialize_by_initializer(self, shape, device_spec, expected_xp): x = chainer.Parameter(initializers.One(), shape) self.check_to_device(x, device_spec, expected_xp) def check_initialize_by_none(self, shape, device_spec, expected_xp): x = chainer.Parameter(None, shape) self.check_to_device(x, device_spec, expected_xp) def check_initialize_by_array(self, shape, device_spec, expected_xp): data = np.random.uniform(-1, 1, shape).astype('f') x = chainer.Parameter(data) self.check_to_device(x, device_spec, expected_xp) def test_initializer_to_device_numpy(self): self.check_initializer(self.x_shape, np, np) @attr.gpu def test_initializer_to_device_cupy(self): self.check_initializer(self.x_shape, (cuda.cupy, 0), cuda.cupy) @attr.chainerx def test_initializer_to_device_chainerx(self): self.check_initializer(self.x_shape, 'native:0', chainerx) def test_initialize_by_scalar_to_device_numpy(self): self.check_initialize_by_scalar(self.x_shape, np, np) @attr.gpu def test_initialize_by_scalar_to_device_cupy(self): self.check_initialize_by_scalar( self.x_shape, (cuda.cupy, 0), cuda.cupy) @attr.chainerx def test_initialize_by_scalar_to_device_chainerx(self): self.check_initialize_by_scalar(self.x_shape, 'native:0', chainerx) def test_initialize_by_initializer_to_device_numpy(self): self.check_initialize_by_initializer(self.x_shape, np, np) @attr.gpu def test_initialize_by_initializer_to_device_cupy(self): self.check_initialize_by_initializer( self.x_shape, (cuda.cupy, 0), cuda.cupy) @attr.chainerx def test_initialize_by_initializer_to_device_chainerx(self): self.check_initialize_by_initializer( self.x_shape, 'native:0', chainerx) def test_initialize_by_none_to_device_numpy(self): self.check_initialize_by_none(self.x_shape, np, np) @attr.gpu def test_initialize_by_none_to_device_cupy(self): self.check_initialize_by_none(self.x_shape, (cuda.cupy, 0), cuda.cupy) @attr.chainerx def test_initialize_by_none_to_device_chainerx(self): self.check_initialize_by_none(self.x_shape, 'native:0', chainerx) def 
test_initialize_by_array_to_device_numpy(self): self.check_initialize_by_array(self.x_shape, np, np) @attr.gpu def test_initialize_by_array_to_device_cupy(self): self.check_initialize_by_array(self.x_shape, (cuda.cupy, 0), cuda.cupy) @attr.chainerx def test_initialize_by_array_to_device_chainerx(self): self.check_initialize_by_array(self.x_shape, 'native:0', chainerx) @testing.parameterize( {'x_shape': (10,)}, {'x_shape': ()}, ) @attr.chainerx class TestParameterToChainerX(unittest.TestCase): def check_to_chainerx(self, x): assert isinstance(x, chainer.Parameter) x.to_chainerx() assert x.xp is chainerx def check_initializer(self, shape): x = chainer.Parameter(shape=shape) self.check_to_chainerx(x) def check_initialize_by_scalar(self, shape): x = chainer.Parameter(2., shape) self.check_to_chainerx(x) def check_initialize_by_initializer(self, shape): x = chainer.Parameter(initializers.One(), shape) self.check_to_chainerx(x) def check_initialize_by_none(self, shape): x = chainer.Parameter(None, shape) self.check_to_chainerx(x) def check_initialize_by_array(self, shape, xp, device=None): if device is not None: data = xp.random.uniform(-1, 1, shape, device=device).astype('f') else: data = xp.random.uniform(-1, 1, shape).astype('f') x = chainer.Parameter(data) self.check_to_chainerx(x) def test_initializer_to_chainerx(self): self.check_initializer(self.x_shape) def test_initialize_by_scalar_to_chainerx(self): self.check_initialize_by_scalar(self.x_shape) def test_initialize_by_initializer_to_chainerx(self): self.check_initialize_by_initializer(self.x_shape) def test_initialize_by_none_to_chainerx(self): self.check_initialize_by_none(self.x_shape) def test_initialize_by_array_to_chainerx_numpy(self): self.check_initialize_by_array(self.x_shape, np) @attr.gpu def test_initialize_by_array_to_chainerx_cupy(self): self.check_initialize_by_array(self.x_shape, cuda.cupy) @attr.chainerx def test_initialize_by_array_to_chainerx_chainerx_native(self): self.check_initialize_by_array(self.x_shape, chainerx, 'native:0') @attr.gpu @attr.chainerx def test_initialize_by_array_to_chainerx_chainerx_cuda(self): self.check_initialize_by_array(self.x_shape, chainerx, 'cuda:0') @testing.parameterize( {'x_shape': (10,)}, {'x_shape': ()}, ) @attr.chainerx class TestParameterFromChainerX(unittest.TestCase): def check_from_chainerx(self, x, expected_xp): assert isinstance(x, chainer.Parameter) x.from_chainerx() assert x.xp is expected_xp def check_initializer(self, shape, expected_xp): x = chainer.Parameter(shape=shape) self.check_from_chainerx(x, expected_xp) def check_initialize_by_scalar(self, shape, expected_xp): x = chainer.Parameter(2., shape) self.check_from_chainerx(x, expected_xp) def check_initialize_by_initializer(self, shape, expected_xp): x = chainer.Parameter(initializers.One(), shape) self.check_from_chainerx(x, expected_xp) def check_initialize_by_none(self, shape, expected_xp): x = chainer.Parameter(None, shape) self.check_from_chainerx(x, expected_xp) def check_initialize_by_array(self, shape, xp, expected_xp, device=None): if device is not None: data = xp.random.uniform(-1, 1, shape, device=device).astype('f') else: data = xp.random.uniform(-1, 1, shape).astype('f') x = chainer.Parameter(data) self.check_from_chainerx(x, expected_xp) def test_initializer_from_chainerx(self): self.check_initializer(self.x_shape, np) def test_initialize_by_scalar_from_chainerx(self): self.check_initialize_by_scalar(self.x_shape, np) def test_initialize_by_initializer_from_chainerx(self): 
self.check_initialize_by_initializer(self.x_shape, np) def test_initialize_by_none_from_chainerx(self): self.check_initialize_by_none(self.x_shape, np) def test_initialize_by_array_from_chainerx_numpy(self): self.check_initialize_by_array(self.x_shape, np, np) @attr.gpu def test_initialize_by_array_from_chainerx_cupy(self): self.check_initialize_by_array(self.x_shape, cuda.cupy, cuda.cupy) @attr.chainerx def test_initialize_by_array_from_chainerx_chainerx_native(self): self.check_initialize_by_array(self.x_shape, chainerx, np, 'native:0') @attr.gpu @attr.chainerx def test_initialize_by_array_from_chainerx_chainerx_cuda(self): self.check_initialize_by_array( self.x_shape, chainerx, cuda.cupy, 'cuda:0') class TestUninitializedParameter(unittest.TestCase): def setUp(self): self.a = np.random.rand(3, 2).astype(np.float32) self.b = np.random.rand(*self.a.shape).astype(self.a.dtype) def test_init_without_data(self): x = chainer.Parameter() assert x.data is None assert x.grad is None def test_initialize(self): x = chainer.Parameter() x.initialize((3, 2)) assert x.shape == (3, 2) assert x.dtype == np.float32 np.testing.assert_array_equal(x.data, np.float32('nan')) np.testing.assert_array_equal(x.grad, np.float32('nan')) assert backend.get_device_from_array(x.data).xp is np assert backend.get_device_from_array(x.grad).xp is np def check_constant_initialization(self, x, a, xp, expected_device): x.initialize(a.shape) assert isinstance(x.data, xp.ndarray) xp.testing.assert_array_equal(x.data, xp.asarray(a)) xp.testing.assert_array_equal(x.grad, np.float32('nan')) assert backend.get_device_from_array(x.data) == expected_device assert backend.get_device_from_array(x.grad) == expected_device def test_initialize_with_initializer(self): x = chainer.Parameter(initializers.Constant(self.a)) self.check_constant_initialization( x, self.a, np, chainer.get_device(np)) def test_initialize_dtype(self): initializer = initializers.Zero(np.float64) x = chainer.Parameter(initializer=initializer) x.initialize((2, 3)) assert x.data.dtype == np.float64 assert x.grad.dtype == np.float64 def test_initialize_by_callable_default_dtype(self): def initializer(array): array.fill(1.0) x = chainer.Parameter(initializer=initializer) with chainer.using_config('dtype', np.float16): x.initialize((3, 2)) assert x.data.dtype == np.float16 assert x.grad.dtype == np.float16 def test_initialize_node(self): initializer = initializers.Zero(np.float64) x = chainer.Parameter(initializer=initializer) x.initialize((2, 3)) assert x.node.shape == (2, 3) assert x.node.dtype == np.float64 @attr.gpu def test_initialize_to_gpu(self): x = chainer.Parameter(initializer=initializers.Constant(self.a)) x.to_gpu() self.check_constant_initialization( x, self.a, cuda.cupy, chainer.get_device((cuda.cupy, 0))) @attr.multi_gpu(2) def test_initialize_to_noncurrent_gpu(self): x = chainer.Parameter(initializer=initializers.Constant(self.a)) x.to_gpu(1) self.check_constant_initialization( x, self.a, cuda.cupy, chainer.get_device((cuda.cupy, 1))) @attr.gpu def test_initialize_to_cpu(self): x = chainer.Parameter(initializer=initializers.Constant(self.a)) x.to_gpu() x.to_cpu() self.check_constant_initialization( x, self.a, np, chainer.get_device(np)) @attr.ideep def test_initialize_to_intel64(self): x = chainer.Parameter(initializer=initializers.Constant(self.a)) assert x.data is None x.to_intel64() x.initialize(self.a.shape) assert isinstance(x.data, intel64.mdarray) np.testing.assert_array_equal(x.data, self.a) np.testing.assert_array_equal(x.grad, 
np.float32('nan')) @attr.chainerx def test_initialize_to_chainerx_native(self): x = chainer.Parameter(initializer=initializers.Constant(self.a)) x.to_device(np) x.to_chainerx() self.check_constant_initialization( x, self.a, chainerx, chainer.get_device('native:0')) @attr.chainerx @attr.gpu def test_initialize_to_chainerx_cuda(self): x = chainer.Parameter(initializer=initializers.Constant(self.a)) x.to_device((cuda.cupy, 0)) x.to_chainerx() self.check_constant_initialization( x, self.a, chainerx, chainer.get_device('cuda:0')) @attr.chainerx @attr.multi_gpu(2) def test_initialize_to_chainerx_cuda_noncurrent_gpu(self): x = chainer.Parameter(initializer=initializers.Constant(self.a)) x.to_device((cuda.cupy, 1)) x.to_chainerx() self.check_constant_initialization( x, self.a, chainerx, chainer.get_device('cuda:1')) def test_copy_to_initialize(self): # This test intends the use case of link.copy() method. x = chainer.Parameter() y = copy.copy(x) x.initialize((3, 2)) assert x.data is y.data def test_cleargrad(self): x = chainer.Parameter() x.cleargrad() x.initialize((3, 2)) assert x.grad is None def check_zerograd(self, x, xp): assert isinstance(x.grad, xp.ndarray) assert x.grad.shape == x.data.shape assert x.grad.dtype == x.data.dtype xp.testing.assert_array_equal(x.grad, 0) def test_zerograd(self): x = chainer.Parameter() with testing.assert_warns(DeprecationWarning): x.zerograd() x.initialize((3, 2)) self.check_zerograd(x, np) @attr.gpu def test_zerograd_to_gpu(self): x = chainer.Parameter() with testing.assert_warns(DeprecationWarning): x.zerograd() x.to_gpu() x.initialize((3, 2)) self.check_zerograd(x, cuda.cupy) @attr.gpu def test_to_gpu_zerograd(self): x = chainer.Parameter() x.to_gpu() with testing.assert_warns(DeprecationWarning): x.zerograd() x.initialize((3, 2)) self.check_zerograd(x, cuda.cupy) @attr.chainerx def test_zerograd_to_chainerx(self): x = chainer.Parameter() with testing.assert_warns(DeprecationWarning): x.zerograd() x.to_device(np) x.to_chainerx() x.initialize((3, 2)) self.check_zerograd(x, chainerx) @attr.chainerx def test_to_chainerx_zerograd(self): x = chainer.Parameter() x.to_device(np) x.to_chainerx() with testing.assert_warns(DeprecationWarning): x.zerograd() x.initialize((3, 2)) self.check_zerograd(x, chainerx) def test_zerograd_dtype(self): x = chainer.Parameter(initializers.Zero(dtype=np.float16)) with testing.assert_warns(DeprecationWarning): x.zerograd() x.initialize((3, 2)) assert x.grad.dtype == x.data.dtype def test_copydata_to_uninitialized_parameter(self): x = chainer.Parameter() y = chainer.Parameter(self.a) x.copydata(y) np.testing.assert_array_equal(x.data, self.a) @attr.gpu def test_copydata_to_uninitialized_parameter_gpu(self): x = chainer.Parameter() y = chainer.Parameter(self.a) x.to_gpu() x.copydata(y) cp = cuda.cupy assert isinstance(x.data, cp.ndarray) cp.testing.assert_array_equal(x.data, self.a) @attr.chainerx def test_copydata_to_uninitialized_parameter_chainerx(self): # TODO(sonots): Support copyto with ChainerX raise unittest.SkipTest('ChainerX does not support copyto') def test_copydata_from_uninitialized_parameter(self): initializer = initializers.Zero() x = chainer.Parameter(self.a) y = chainer.Parameter(initializer) x.copydata(y) assert isinstance(x.data, np.ndarray) assert isinstance(y.data, np.ndarray) np.testing.assert_array_equal(x.data, y.data) @attr.gpu def test_copydata_from_uninitialized_parameter_gpu(self): initializer = initializers.Zero() x = chainer.Parameter(self.a) y = chainer.Parameter(initializer) y.to_gpu() x.copydata(y) cp 
= cuda.cupy assert isinstance(x.data, np.ndarray) assert isinstance(y.data, cp.ndarray) cp.testing.assert_array_equal(x.data, y.data) @attr.chainerx def test_copydata_from_uninitialized_parameter_chainerx(self): # TODO(sonots): Support copydata with ChainerX raise unittest.SkipTest('ChainerX does not support copydata') def test_copydata_from_to_uninitialized_parameters(self): x = chainer.Parameter() y = chainer.Parameter() x.copydata(y) assert x.data is None assert y.data is None def test_addgrad_to_uninitialized_parameter(self): x = chainer.Parameter() y = chainer.Parameter(self.a) y.grad = self.b x.cleargrad() x.addgrad(y) assert isinstance(x.data, np.ndarray) assert isinstance(x.grad, np.ndarray) np.testing.assert_array_equal(x.grad, self.b) @attr.gpu def test_addgrad_to_uninitialized_parameter_cpu_to_gpu(self): x = chainer.Parameter() y = chainer.Parameter(self.a) y.grad = self.b x.to_gpu() x.cleargrad() x.addgrad(y) cp = cuda.cupy assert isinstance(x.data, cp.ndarray) assert isinstance(x.grad, cp.ndarray) cp.testing.assert_array_equal(x.grad, self.b) @attr.gpu def test_addgrad_to_uninitialized_parameter_gpu_to_cpu(self): x = chainer.Parameter() y = chainer.Parameter(self.a) y.grad = self.b y.to_gpu() x.cleargrad() x.addgrad(y) assert isinstance(x.data, np.ndarray) assert isinstance(x.grad, np.ndarray) np.testing.assert_array_equal(x.grad, self.b) @attr.gpu def test_addgrad_to_uninitialized_parameter_gpu_to_gpu(self): x = chainer.Parameter() y = chainer.Parameter(self.a) y.grad = self.b x.to_gpu() y.to_gpu() x.cleargrad() x.addgrad(y) cp = cuda.cupy assert isinstance(x.data, cp.ndarray) assert isinstance(x.grad, cp.ndarray) cp.testing.assert_array_equal(x.grad, self.b) @attr.multi_gpu(2) def test_addgrad_to_uninitialized_parameter_gpu_to_another_gpu(self): x = chainer.Parameter() y = chainer.Parameter(self.a) y.grad = self.b x.to_gpu(1) y.to_gpu(0) x.cleargrad() x.addgrad(y) cp = cuda.cupy assert isinstance(x.data, cp.ndarray) assert isinstance(x.grad, cp.ndarray) assert int(x.data.device) == 1 assert int(x.grad.device) == 1 cp.testing.assert_array_equal(x.grad, self.b) @attr.chainerx def test_addgrad_to_uninitialized_parameter_cpu_to_chainerx(self): # TODO(sonots): Support addgrad with ChainerX raise unittest.SkipTest('ChainerX does not support addgrad') class TestDebugPrint(unittest.TestCase): def setUp(self): self.arr = np.random.randn(5, 3, 5, 5).astype(np.float32) def check_debug_print(self, v, mean, std): result = v.debug_print() assert v.summary() in result assert 'dtype: float32' in result # py2.7 on win64 returns shape as long assert re.match(r'- shape: \(5L?, 3L?, 5L?, 5L?\)', result.splitlines()[3]) # no grad msg = 'statistics: mean={mean:.8f}, std={std:.8f}' msg = msg.format(mean=mean, std=std) assert msg in result assert 'grad: None' in result # zero grad with testing.assert_warns(DeprecationWarning): v.zerograd() result = v.debug_print() assert 'grad: 0' in result # add grad v.grad = v.data result = v.debug_print() msg = 'grad: mean={mean:.8f}, std={std:.8f}'.format(mean=mean, std=std) assert msg in result def check_debug_print_empty(self, v): result = v.debug_print() assert 'device: None' in result assert 'backend: None' in result assert 'shape: None' in result assert 'dtype: None' in result assert 'statistics: None' in result assert 'grad: None' in result def test_debug_print_cpu(self): v = chainer.Variable(self.arr) result = v.debug_print() assert 'device: CPU' in result assert 'numpy.ndarray' in result self.check_debug_print(v, mean=float(np.mean(v.data)), 
std=float(np.std(v.data))) @attr.gpu def test_debug_print_gpu(self): v = chainer.Variable(self.arr) v.to_gpu(0) result = v.debug_print() assert 'device: <CUDA Device 0>' in result assert 'cupy.core.core.ndarray' in result self.check_debug_print(v, mean=float(cuda.cupy.mean(v.data)), std=float(cuda.cupy.std(v.data))) def test_debug_print_empty(self): v = chainer.Variable() self.check_debug_print_empty(v) class TestVariableSetCreator(unittest.TestCase): class MockFunction(chainer.Function): pass def setUp(self): self.x = np.random.uniform(-1, 1, (2, 5)).astype(np.float32) self.f = self.MockFunction() self.node = self.f.node self.node.rank = 10 def check_set_creator(self, x): x = chainer.Variable(x) x.set_creator(self.f) assert x.creator == self.f assert x.rank == 11 def test_set_creator_cpu(self): self.check_set_creator(self.x) @attr.gpu def test_set_creator_gpu(self): self.check_set_creator(cuda.to_gpu(self.x)) def check_set_creator_node(self, x): x = chainer.Variable(x) x.set_creator_node(self.node) assert x.creator_node == self.node assert x.rank == 11 def test_set_creator_node_cpu(self): self.check_set_creator_node(self.x) @attr.gpu def test_set_creator_node_gpu(self): self.check_set_creator_node(cuda.to_gpu(self.x)) class TestVariableBackwardError(unittest.TestCase): def setUp(self): self.x = np.array([1], np.float32) def check_type_mismatch(self, x_data, retain): xp = backend.get_array_module(x_data) class DummyFunction(chainer.Function): label = 'dummy_function' def forward(self, inputs): if not retain: self.retain_inputs(()) return xp.array(1, np.float32), def backward(self, inputs, grads): return [1] x = chainer.Variable(x_data) y = DummyFunction()(x) with six.assertRaisesRegex(self, TypeError, 'dummy_function'): y.backward() def test_type_mismatch_cpu(self): self.check_type_mismatch(self.x, True) def test_type_mismatch_unretain_cpu(self): self.check_type_mismatch(self.x, False) @attr.gpu def test_type_mismatch_gpu(self): self.check_type_mismatch(cuda.to_gpu(self.x), True) @attr.gpu def test_type_mismatch_unretain_gpu(self): self.check_type_mismatch(cuda.to_gpu(self.x), False) def check_dtype_mismatch(self, x_data, retain): xp = backend.get_array_module(x_data) class DummyFunction(chainer.Function): label = 'dummy_function' def forward(self, inputs): if not retain: self.retain_inputs(()) return xp.array(1, np.float32), def backward(self, inputs, grads): return xp.array([1], np.int32), x = chainer.Variable(x_data) y = DummyFunction()(x) with six.assertRaisesRegex(self, TypeError, 'dummy_function'): y.backward() def test_dtype_mismatch_cpu(self): self.check_dtype_mismatch(self.x, True) def test_dtype_mismatch_unretain_cpu(self): self.check_dtype_mismatch(self.x, False) @attr.gpu def test_dtype_mismatch_gpu(self): self.check_dtype_mismatch(cuda.to_gpu(self.x), True) @attr.gpu def test_dtype_mismatch_unretain_gpu(self): self.check_dtype_mismatch(cuda.to_gpu(self.x), False) def check_shape_mismatch(self, x_data, retain): xp = backend.get_array_module(x_data) class DummyFunction(chainer.Function): label = 'dummy_function' def forward(self, inputs): if not retain: self.retain_inputs(()) return xp.array(1, np.float32), def backward(self, inputs, grads): return xp.array([1, 2], np.float32), x = chainer.Variable(x_data) y = DummyFunction()(x) with six.assertRaisesRegex(self, ValueError, 'dummy_function'): y.backward() def test_shape_mismatch_cpu(self): self.check_shape_mismatch(self.x, True) def test_shape_mismatch_unretain_cpu(self): self.check_shape_mismatch(self.x, False) @attr.gpu def 
test_shape_mismatch_gpu(self): self.check_shape_mismatch(cuda.to_gpu(self.x), True) @attr.gpu def test_shape_mismatch_unretain_gpu(self): self.check_shape_mismatch(cuda.to_gpu(self.x), False) class TestVariableBackwardErrorTraceback(unittest.TestCase): def setUp(self): self.x = np.array([1], np.float32) chainer.set_debug(True) def tearDown(self): chainer.set_debug(False) def check_traceback(self, x_data): xp = backend.get_array_module(x_data) class DummyFunction(chainer.Function): label = 'dummy_function' def forward(self, inputs): return xp.array(1, np.float32), def backward(self, inputs, grads): return xp.array([1, 2], np.float32), x = chainer.Variable(x_data) line = inspect.currentframe().f_lineno + 1 y = DummyFunction()(x) # `line` is THIS line try: y.backward() self.fail() except ValueError as e: assert 'Stacktrace' in str(e) assert 'line %d' % line in str(e) def test_traceback_cpu(self): self.check_traceback(self.x) @attr.gpu def test_traceback_gpu(self): self.check_traceback(cuda.to_gpu(self.x)) def test_raise(self): x = np.array([1], np.float32) x = chainer.Variable(x) y = F.identity(x) y.grad = np.array([np.nan], np.float32) with pytest.raises(RuntimeError): y.backward() def test_int(self): x = np.array([1], np.int) x = chainer.Variable(x) y = F.identity(x) y.grad = np.array([0], np.int) y.backward() @testing.parameterize(*testing.product({ 'in_shape': [(4, 3, 2)], 'out_shape': [(2, 2, 6), (2, -1, 6), 24, (-1,), [2, 12]], 'dtype': [np.float16, np.float32, np.float64], })) class TestReshape(unittest.TestCase): def setUp(self): self.x = np.random.uniform(-1, 1, self.in_shape).astype(self.dtype) def check_forward(self, x_data): shape = self.out_shape x = chainer.Variable(x_data) y = x.reshape(shape) assert y.data.dtype == self.dtype assert (self.x.reshape(shape) == backend.CpuDevice().send(y.data)).all() def test_forward_cpu(self): self.check_forward(self.x) @attr.gpu def test_forward_gpu(self): self.check_forward(cuda.to_gpu(self.x)) @attr.chainerx def test_forward_chainerx(self): # TODO(imanishi): chainerx does not support fp16 yet if self.dtype == np.float16: raise unittest.SkipTest('ChainerX does not support float16') self.check_forward(chainerx.array(self.x)) def check_backward(self, x_data): x = chainer.Variable(x_data) y = x.reshape(self.out_shape) y.grad = y.data y.backward() testing.assert_allclose(backend.CpuDevice().send(x.data), backend.CpuDevice().send(x.grad), atol=0, rtol=0) def test_backward_cpu(self): self.check_backward(self.x) @attr.gpu def test_backward_gpu(self): self.check_backward(cuda.to_gpu(self.x)) @attr.chainerx def test_backward_chainerx(self): # TODO(niboshi): Support it if self.dtype == np.float16: raise unittest.SkipTest('ChainerX does not support float16') self.check_backward(chainerx.array(self.x)) @testing.parameterize(*testing.product({ 'in_shape': [(4, 3, 2)], 'axes': [[], [(-1, 0, 1)], [[-1, 0, 1]], [None], [-1, 0, 1]], 'dtype': [np.float16, np.float32, np.float32], })) class TestTranspose(unittest.TestCase): def setUp(self): self.x = np.random.uniform(-1, 1, self.in_shape).astype(self.dtype) def check_forward(self, x_data): axes = self.axes x = chainer.Variable(x_data) y = x.transpose(*axes) assert y.data.dtype == self.dtype assert (self.x.transpose(*axes) == backend.CpuDevice().send(y.data)).all() def test_forward_cpu(self): self.check_forward(self.x) @attr.gpu def test_forward_gpu(self): self.check_forward(cuda.to_gpu(self.x)) @attr.chainerx def test_forward_chainerx(self): # TODO(hvy): chainerx does not support fp16 yet if self.dtype == 
np.float16: raise unittest.SkipTest('ChainerX does not support float16') self.check_forward(chainerx.array(self.x)) def check_backward(self, x_data): x = chainer.Variable(x_data) y = x.transpose(*self.axes) y.grad = y.data y.backward() testing.assert_allclose(x.data, x.grad, atol=0, rtol=0) def test_backward_cpu(self): self.check_backward(self.x) @attr.gpu def test_backward_gpu(self): self.check_backward(cuda.to_gpu(self.x)) @attr.chainerx def test_backward_chainerx(self): # TODO(niboshi): Support it if self.dtype == np.float16: raise unittest.SkipTest('ChainerX does not support float16') self.check_backward(chainerx.array(self.x)) class UnnamedVariableToStringTestBase(object): def setUp(self): if self.x_shape is None: self.x = chainer.Variable() else: x = np.empty(self.x_shape) x = np.arange(x.size).reshape(self.x_shape) x = x.astype(self.dtype) self.x = chainer.Variable(x) def test_repr_cpu(self): assert repr(self.x) == self.repr def test_str_cpu(self): assert str(self.x) == self.str @attr.gpu def test_repr_gpu(self): self.x.to_gpu() assert repr(self.x) == self.repr @attr.gpu def test_str_gpu(self): self.x.to_gpu() assert str(self.x) == self.str def _skip_chainerx_unsupported_dtype(self): supported_dtypes = chainerx.testing.dtypes.all_dtypes if (self.dtype is not None and self.dtype.__name__ not in supported_dtypes): raise unittest.SkipTest( 'ChainerX does not support {} dtype'.format( self.dtype.__name__)) @attr.chainerx def test_repr_chainerx_cpu(self): self._skip_chainerx_unsupported_dtype() self.x.to_chainerx() assert repr(self.x) == self.repr @attr.chainerx def test_str_chainerx_cpu(self): self._skip_chainerx_unsupported_dtype() self.x.to_chainerx() assert str(self.x) == self.str @attr.chainerx @attr.gpu def test_repr_chainerx_gpu(self): self._skip_chainerx_unsupported_dtype() self.x.to_gpu() self.x.to_chainerx() assert repr(self.x) == self.repr @attr.chainerx @attr.gpu def test_str_chainerx_gpu(self): self._skip_chainerx_unsupported_dtype() self.x.to_gpu() self.x.to_chainerx() assert str(self.x) == self.str @testing.parameterize( {'x_shape': None, 'dtype': None, 'repr': 'variable(None)', 'str': 'variable(None)'}, {'x_shape': (2, 2,), 'dtype': np.float16, 'repr': 'variable([[ 0., 1.],\n [ 2., 3.]])', 'str': 'variable([[ 0. 1.]\n [ 2. 3.]])'}, {'x_shape': (2, 2,), 'dtype': np.float32, 'repr': 'variable([[ 0., 1.],\n [ 2., 3.]])', 'str': 'variable([[ 0. 1.]\n [ 2. 3.]])'}, {'x_shape': (2, 2,), 'dtype': np.float64, 'repr': 'variable([[ 0., 1.],\n [ 2., 3.]])', 'str': 'variable([[ 0. 1.]\n [ 2. 3.]])'}, {'x_shape': (3,), 'dtype': np.float32, 'repr': 'variable([ 0., 1., 2.])', 'str': 'variable([ 0. 1. 2.])'}, ) @testing.with_requires('numpy<1.14') class TestUnnamedVariableToStringLegacy( UnnamedVariableToStringTestBase, unittest.TestCase): # Textual representation of arrays in NumPy 1.13 or earlier. pass @testing.parameterize( {'x_shape': None, 'dtype': None, 'repr': 'variable(None)', 'str': 'variable(None)'}, {'x_shape': (2, 2,), 'dtype': np.float16, 'repr': 'variable([[0., 1.],\n [2., 3.]])', 'str': 'variable([[0. 1.]\n [2. 3.]])'}, {'x_shape': (2, 2,), 'dtype': np.float32, 'repr': 'variable([[0., 1.],\n [2., 3.]])', 'str': 'variable([[0. 1.]\n [2. 3.]])'}, {'x_shape': (2, 2,), 'dtype': np.float64, 'repr': 'variable([[0., 1.],\n [2., 3.]])', 'str': 'variable([[0. 1.]\n [2. 3.]])'}, {'x_shape': (3,), 'dtype': np.float32, 'repr': 'variable([0., 1., 2.])', 'str': 'variable([0. 1. 
2.])'}, ) @testing.with_requires('numpy>=1.14') class TestUnnamedVariableToStringModern( UnnamedVariableToStringTestBase, unittest.TestCase): # Textual representation of arrays in NumPy 1.14 or later. pass class TestUnnamedVariableDim2Size0ToString(unittest.TestCase): def setUp(self): x = np.empty((0, 0)) x = x.astype(np.float32) self.x = chainer.Variable(x) if (sys.version_info < (3,) and sys.maxsize > 2**32 and platform.system() == 'Windows'): self.repr = 'variable([], shape=(0L, 0L))' else: self.repr = 'variable([], shape=(0, 0))' self.str = 'variable([])' def test_repr_cpu(self): assert repr(self.x) == self.repr def test_str_cpu(self): assert str(self.x) == self.str @attr.gpu def test_repr_gpu(self): self.x.to_gpu() assert repr(self.x) == self.repr @attr.gpu def test_str_gpu(self): self.x.to_gpu() assert str(self.x) == self.str class NamedVariableToStringTestBase(object): def setUp(self): if self.x_shape is None: self.x = chainer.Variable(name='x') else: x = np.empty(self.x_shape) x = np.arange(x.size).reshape(self.x_shape) x = x.astype(self.dtype) self.x = chainer.Variable(x, name='x') def test_named_repr(self): assert repr(self.x) == self.repr def test_named_str(self): assert str(self.x) == self.str @attr.gpu def test_repr_gpu(self): self.x.to_gpu() assert repr(self.x) == self.repr @attr.gpu def test_str_gpu(self): self.x.to_gpu() assert str(self.x) == self.str @testing.parameterize( {'x_shape': None, 'dtype': None, 'repr': 'variable x(None)', 'str': 'variable x(None)'}, {'x_shape': (2, 2,), 'dtype': np.float32, 'repr': 'variable x([[ 0., 1.],\n [ 2., 3.]])', 'str': 'variable x([[ 0. 1.]\n [ 2. 3.]])'}, {'x_shape': (), 'dtype': np.float32, 'repr': 'variable x(0.0)', 'str': 'variable x(0.0)'}, ) @testing.with_requires('numpy<1.14') class TestNamedVariableToStringLegacy( NamedVariableToStringTestBase, unittest.TestCase): # Textual representation of arrays in NumPy 1.13 or earlier. pass @testing.parameterize( {'x_shape': None, 'dtype': None, 'repr': 'variable x(None)', 'str': 'variable x(None)'}, {'x_shape': (2, 2,), 'dtype': np.float32, 'repr': 'variable x([[0., 1.],\n [2., 3.]])', 'str': 'variable x([[0. 1.]\n [2. 3.]])'}, {'x_shape': (), 'dtype': np.float32, 'repr': 'variable x(0.)', 'str': 'variable x(0.)'}, ) @testing.with_requires('numpy>=1.14') class TestNamedVariableToStringModern( NamedVariableToStringTestBase, unittest.TestCase): # Textual representation of arrays in NumPy 1.14 or later. 
pass class TestNamedVariableDim2Size0ToString(unittest.TestCase): def setUp(self): x = np.empty((0, 0)) x = x.astype(np.float32) self.x = chainer.Variable(x, name='x') if (sys.version_info < (3,) and sys.maxsize > 2**32 and platform.system() == 'Windows'): self.repr = 'variable x([], shape=(0L, 0L))' else: self.repr = 'variable x([], shape=(0, 0))' self.str = 'variable x([])' def test_named_repr(self): assert repr(self.x) == self.repr def test_named_str(self): assert str(self.x) == self.str @attr.gpu def test_repr_gpu(self): self.x.to_gpu() assert repr(self.x) == self.repr @attr.gpu def test_str_gpu(self): self.x.to_gpu() assert str(self.x) == self.str class IdentityFunction(chainer.Function): def forward(self, inputs): return inputs def backward(self, inputs, grad_outputs): return grad_outputs class TestVariableDoubleBackward(unittest.TestCase): def test_default_backward(self): x = chainer.Variable(np.empty((), np.float32)) y = x * 2 # x.grad_var will be different from y.grad_var y.backward() assert x.grad_var is not y.grad_var assert x.grad_var.creator is None x.grad_var.backward() assert y.grad_var.grad_var is None def test_raise_double_backprop(self): x = chainer.Variable(np.empty((), np.float32)) y = IdentityFunction()(x) y.backward(enable_double_backprop=True) with pytest.raises(RuntimeError): x.grad_var.backward() def test_raise_double_backprop_2(self): x = chainer.Variable(np.empty((), np.float32)) z = F.identity(x) # new style y = IdentityFunction()(z) # old style y.backward(enable_double_backprop=True) with pytest.raises(RuntimeError): x.grad_var.backward() def test_grad_raise_double_backprop(self): x = chainer.Variable(np.empty((), np.float32)) y = IdentityFunction()(x) y.backward(enable_double_backprop=True) with pytest.raises(RuntimeError): chainer.grad([x.grad_var], [y.grad_var]) def test_grad_raise_double_backprop_2(self): x = chainer.Variable(np.empty((), np.float32)) z = F.identity(x) # new style y = IdentityFunction()(z) # old style y.backward(enable_double_backprop=True) with pytest.raises(RuntimeError): chainer.grad([x.grad_var], [y.grad_var]) class TestVariableDoubleBackwardOneElementScalar(unittest.TestCase): # Tests for old-styled (1-element array) scalar. 
# See: https://github.com/chainer/chainer/pull/4199 def test_default_backward(self): x = chainer.Variable(np.empty(1, np.float32)) y = x * 2 # x.grad_var will be different from y.grad_var with testing.assert_warns(DeprecationWarning): y.backward() assert x.grad_var.creator is None with warnings.catch_warnings(): # ok to be warned that x.grad_var is old-styled scalar warnings.simplefilter('ignore', DeprecationWarning) x.grad_var.backward() assert y.grad_var.grad_var is None def test_raise_double_backprop(self): x = chainer.Variable(np.empty(1, np.float32)) y = IdentityFunction()(x) with testing.assert_warns(DeprecationWarning): y.backward(enable_double_backprop=True) with pytest.raises(RuntimeError): with warnings.catch_warnings(): # ok to be warned that x.grad_var is old-styled scalar warnings.simplefilter('ignore', DeprecationWarning) x.grad_var.backward() def test_raise_double_backprop_2(self): x = chainer.Variable(np.empty(1, np.float32)) z = F.identity(x) # new style y = IdentityFunction()(z) # old style with testing.assert_warns(DeprecationWarning): y.backward(enable_double_backprop=True) with pytest.raises(RuntimeError): with warnings.catch_warnings(): # ok to be warned that x.grad_var is old-styled scalar warnings.simplefilter('ignore', DeprecationWarning) x.grad_var.backward() def test_grad_raise_double_backprop(self): x = chainer.Variable(np.empty(1, np.float32)) y = IdentityFunction()(x) with testing.assert_warns(DeprecationWarning): y.backward(enable_double_backprop=True) with pytest.raises(RuntimeError): chainer.grad([x.grad_var], [y.grad_var]) def test_grad_raise_double_backprop_2(self): x = chainer.Variable(np.empty(1, np.float32)) z = F.identity(x) # new style y = IdentityFunction()(z) # old style with testing.assert_warns(DeprecationWarning): y.backward(enable_double_backprop=True) with pytest.raises(RuntimeError): chainer.grad([x.grad_var], [y.grad_var]) class TestAsVariable(unittest.TestCase): def check_to_variable_from_array(self, x): y = chainer.as_variable(x) assert isinstance(y, chainer.Variable) assert y.data is x assert not y.requires_grad def test_to_variable_from_numpy(self): self.check_to_variable_from_array(np.empty(1, np.float32)) @attr.gpu def test_to_variable_from_cupy(self): self.check_to_variable_from_array(cuda.cupy.empty(1, np.float32)) def test_to_variable_from_variable(self): x = chainer.Variable(np.array(1, np.float32)) y = chainer.as_variable(x) assert x is y assert y.requires_grad @testing.parameterize(*testing.product({ 'in_shape': [(4, 3, 2)], 'dtype': [np.float16, np.float32, np.float64], 'loss_scale': [None, 1, 10], })) class TestLossScale(unittest.TestCase): def setUp(self): self.x = np.random.uniform(-1, 1, self.in_shape).astype(self.dtype) self.y = np.random.uniform(-1, 1, self.in_shape).astype(self.dtype) def check_loss_scale(self, x_data, y_data): x = chainer.Variable(x_data) y = chainer.Variable(y_data) z = x * y loss = F.sum(z) loss.backward(loss_scale=self.loss_scale) if self.loss_scale is not None: x.grad /= self.loss_scale y.grad /= self.loss_scale rtol, atol = 1e-4, 1e-5 if self.dtype is np.float16: rtol, atol = 1e-1, 1e-2 testing.assert_allclose(x.data, y.grad, rtol=rtol, atol=atol) testing.assert_allclose(y.data, x.grad, rtol=rtol, atol=atol) def test_loss_scale_cpu(self): self.check_loss_scale(self.x, self.y) @attr.gpu def test_loss_scale_gpu(self): self.check_loss_scale(cuda.to_gpu(self.x), cuda.to_gpu(self.y)) @testing.parameterize(*testing.product({ # ideep2.0.0 not support shape 0 'shape': [(1,), (3, 2), (2, 3, 4, 3)], 'dtype': 
[ np.int8, np.int16, np.int32, np.int64, np.uint8, np.uint16, np.uint32, np.uint64, np.float16, np.float32, np.float64], })) @attr.ideep class TestIntel64(unittest.TestCase): def setUp(self): self.x_data = np.random.uniform(-1, 1, self.shape).astype(self.dtype) def _check_variable_shape_and_dtype(self, var): assert var.data.shape == self.shape assert var.data.dtype == self.dtype assert var.shape == self.shape assert var.dtype == self.dtype def test_cpu_to_intel64(self): x = chainer.Variable(self.x_data) assert x.xp is np prev_x_data = x.data x.to_intel64() assert x.xp is np # Converted to mdarray only if dtype == float32. # Otherwise, data should be left untouched. if self.dtype == np.float32: assert isinstance(x.data, intel64.ideep.mdarray) else: assert x.data is prev_x_data self._check_variable_shape_and_dtype(x) def test_intel64_to_intel64(self): x = chainer.Variable(self.x_data) x.to_intel64() prev_x_data = x.data x.to_intel64() # Data should be left untouched assert x.data is prev_x_data @attr.gpu def test_gpu_to_intel64(self): x = chainer.Variable(self.x_data) x.to_gpu() x.to_intel64() # Converted to mdarray only if dtype == float32. # Otherwise, data should be converted to numpy.ndarray. if self.dtype == np.float32: assert isinstance(x.data, intel64.ideep.mdarray) else: assert isinstance(x.data, np.ndarray) self._check_variable_shape_and_dtype(x) @attr.gpu def test_intel64_to_gpu(self): x = chainer.Variable(self.x_data) x.to_intel64() x.to_gpu() # Data should be converted to cuda.ndarray assert isinstance(x.data, cuda.cupy.ndarray) self._check_variable_shape_and_dtype(x) def test_intel64_to_cpu(self): x = chainer.Variable(self.x_data) x.to_intel64() x.to_cpu() # Data should be converted to numpy.ndarray assert isinstance(x.data, np.ndarray) self._check_variable_shape_and_dtype(x) @testing.parameterize(*testing.product({ 'shape': [(), (3, 2, 3), (4, 4, 3, 2, 3)], 'dtype': [ np.int8, np.int16, np.int32, np.int64, np.uint8, np.uint16, np.uint32, np.uint64, np.float16, np.float32, np.float64, ], })) @attr.ideep class TestIntel64Unsupported(unittest.TestCase): """Tests for arrays that should not be converted to iDeep array.""" def setUp(self): self.x_data = np.random.uniform(-1, 1, self.shape).astype(self.dtype) def test_cpu_to_intel64(self): x = chainer.Variable(self.x_data) x.to_intel64() assert isinstance(x.data, np.ndarray) @attr.gpu def test_gpu_to_intel64(self): x = chainer.Variable(self.x_data) x.to_gpu() x.to_intel64() assert isinstance(x.data, np.ndarray) @testing.parameterize(*testing.product({ 'shape': [(3,), (3, 2), (3, 2, 2), (3, 2, 2, 3)], 'dtype': [ np.int8, np.int16, np.int32, np.int64, np.uint8, np.uint16, np.uint32, np.uint64, np.float16, np.float32, np.float64], })) class TestLazyGradSum(unittest.TestCase): def setUp(self): self.x = np.random.uniform(-1, 1, self.shape).astype(self.dtype) y10 = np.random.uniform(-1, 1, self.shape).astype(self.dtype) gy00 = chainer.Variable( np.random.uniform(-1, 1, self.shape).astype(self.dtype)) f10 = chainer.FunctionNode() f10.check_type_forward = mock.MagicMock() f10.forward_cpu = mock.MagicMock(return_value=(y10,)) f10.retain_outputs((0,)) f10.backward = mock.MagicMock(return_value=(gy00,)) self.y10 = y10 self.f10 = f10 self.gy00 = gy00 y11 = np.random.uniform(-1, 1, self.shape).astype(self.dtype) gy01 = chainer.Variable( np.random.uniform(-1, 1, self.shape).astype(self.dtype)) f11 = chainer.FunctionNode() f11.check_type_forward = mock.MagicMock() f11.forward_cpu = mock.MagicMock(return_value=(y11,)) f11.retain_outputs((0,)) 
f11.backward = mock.MagicMock(return_value=(gy01,)) self.y11 = y11 self.f11 = f11 self.gy01 = gy01 y12 = np.random.uniform(-1, 1, self.shape).astype(self.dtype) gy02 = chainer.Variable( np.random.uniform(-1, 1, self.shape).astype(self.dtype)) f12 = chainer.FunctionNode() f12.check_type_forward = mock.MagicMock() f12.forward_cpu = mock.MagicMock(return_value=(y12,)) f12.retain_outputs((0,)) f12.backward = mock.MagicMock(return_value=(gy02,)) self.y12 = y12 self.f12 = f12 self.gy02 = gy02 y = np.random.uniform(-1, 1, self.shape).astype(self.dtype) gy10 = chainer.Variable( np.random.uniform(-1, 1, self.shape).astype(self.dtype)) gy11 = chainer.Variable( np.random.uniform(-1, 1, self.shape).astype(self.dtype)) gy12 = chainer.Variable( np.random.uniform(-1, 1, self.shape).astype(self.dtype)) f2 = chainer.FunctionNode() f2.check_type_forward = mock.MagicMock() f2.forward_cpu = mock.MagicMock(return_value=(y,)) f12.retain_outputs((0,)) f2.backward = mock.MagicMock(return_value=(gy10, gy11, gy12)) self.y = y self.f2 = f2 self.gy10 = gy10 self.gy11 = gy11 self.gy12 = gy12 self.gx = gy00 + gy01 + gy02 def forward(self, x): y0 = F.identity(x) y10 = self.f10.apply((y0,)) y11 = self.f11.apply((y0,)) y12 = self.f12.apply((y0,)) y = self.f2.apply((y10[0], y11[0], y12[0])) return y def check_backward(self): x = chainer.Variable(self.x) y = self.forward(x) y[0].grad = np.ones(y[0].shape, y[0].dtype) y[0].backward() testing.assert_allclose(self.gx.data, x.grad, atol=1e-3, rtol=1e-2) def test_backward_cpu(self): with chainer.using_config('lazy_grad_sum', False): self.check_backward() def test_backward_cpu_lazy_grad_sum(self): with chainer.using_config('lazy_grad_sum', True): self.check_backward() testing.run_module(__name__, __file__)
mit
-47,175,618,758,789,790
32.270584
79
0.586776
false
3.345001
true
false
false
barbagroup/pygbe
pygbe/tree/cuda_kernels.py
1
83039
try: from pycuda.compiler import SourceModule except ImportError: pass def kernels(BSZ, Nm, K_fine, P, REAL): mod = SourceModule( """ #define REAL %(precision)s #define BSZ %(blocksize)d #define Nm %(Nmult)d #define K_fine %(K_near)d #define P %(Ptree)d /* __device__ int getIndex(int P, int i, int j, int k) { int I=0, ii, jj; for (ii=0; ii<i; ii++) { for (jj=1; jj<P+2-ii; jj++) { I+=jj; } } for (jj=P+2-j; jj<P+2; jj++) { I+=jj-i; } I+=k; return I; } */ __device__ int getIndex(int i, int j, int k, int *Index) { return Index[(P+1)*(P+1)*i + (P+1)*j + k]; } __device__ void getCoeff(REAL *a, REAL dx, REAL dy, REAL dz, REAL kappa, int *index, int LorY) { REAL b[Nm]; REAL R = sqrt(dx*dx+dy*dy+dz*dz); REAL R2 = R*R; REAL R3 = R2*R; int i,j,k,I,Im1x,Im2x,Im1y,Im2y,Im1z,Im2z; REAL C,C1,C2,Cb; if (LorY==2) // if Yukawa { b[0] = exp(-kappa*R); a[0] = b[0]/R; } if (LorY==1) // if Laplace { a[0] = 1/R; } // Two indices = 0 I = getIndex(1,0,0, index); if (LorY==2) // if Yukawa { b[I] = -kappa * (dx*a[0]); // 1,0,0 b[P+1] = -kappa * (dy*a[0]); // 0,1,0 b[1] = -kappa * (dz*a[0]); // 0,0,1 a[I] = -1/R2*(kappa*dx*b[0]+dx*a[0]); a[P+1] = -1/R2*(kappa*dy*b[0]+dy*a[0]); a[1] = -1/R2*(kappa*dz*b[0]+dz*a[0]); } if (LorY==1) // if Laplace { a[I] = -dx/R3; a[P+1] = -dy/R3; a[1] = -dz/R3; } for (i=2; i<P+1; i++) { Cb = -kappa/i; C = 1./(i*R2); I = getIndex(i,0,0, index); Im1x = getIndex(i-1,0,0, index); Im2x = getIndex(i-2,0,0, index); if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + a[Im2x]); a[I] = C * ( -kappa*(dx*b[Im1x] + b[Im2x]) -(2*i-1)*dx*a[Im1x] - (i-1)*a[Im2x] ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*i-1)*dx*a[Im1x] - (i-1)*a[Im2x] ); } I = getIndex(0,i,0, index); Im1y = I-(P+2-i); Im2y = Im1y-(P+2-i+1); if (LorY==2) // if Yukawa { b[I] = Cb * (dy*a[Im1y] + a[Im2y]); a[I] = C * ( -kappa*(dy*b[Im1y] + b[Im2y]) -(2*i-1)*dy*a[Im1y] - (i-1)*a[Im2y] ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*i-1)*dy*a[Im1y] - (i-1)*a[Im2y] ); } I = i; Im1z = I-1; Im2z = I-2; if (LorY==2) // if Yukawa { b[I] = Cb * (dz*a[Im1z] + a[Im2z]); a[I] = C * ( -kappa*(dz*b[Im1z] + b[Im2z]) -(2*i-1)*dz*a[Im1z] - (i-1)*a[Im2z] ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*i-1)*dz*a[Im1z] - (i-1)*a[Im2z] ); } } // One index = 0, one = 1 other >=1 Cb = -kappa/2; I = getIndex(1,1,0, index); Im1x = P+1; Im1y = I-(P+2-1-1); if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y]); a[I] = 1./(2*R2) * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]) -(2*2-1)*(dx*a[Im1x]+dy*a[Im1y]) ); } if (LorY==1) // if Laplace { a[I] = 1./(2*R2) * ( -(2*2-1)*(dx*a[Im1x]+dy*a[Im1y]) ); } I = getIndex(1,0,1, index); Im1x = 1; Im1z = I-1; if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dz*a[Im1z]); a[I] = 1./(2*R2) * ( -kappa*(dx*b[Im1x]+dz*b[Im1z]) -(2*2-1)*(dx*a[Im1x]+dz*a[Im1z]) ); } if (LorY==1) // if Laplace { a[I] = 1./(2*R2) * ( -(2*2-1)*(dx*a[Im1x]+dz*a[Im1z]) ); } I = getIndex(0,1,1, index); Im1y = I-(P+2-1); Im1z = I-1; if (LorY==2) // if Yukawa { b[I] = Cb * (dy*a[Im1y] + dz*a[Im1z]); a[I] = 1./(2*R2) * ( -kappa*(dy*b[Im1y]+dz*b[Im1z]) -(2*2-1)*(dy*a[Im1y]+dz*a[Im1z]) ); } if (LorY==1) // if Laplace { a[I] = 1./(2*R2) * ( -(2*2-1)*(dy*a[Im1y]+dz*a[Im1z]) ); } for (i=2; i<P; i++) { Cb = -kappa/(i+1); C = 1./((1+i)*R2); I = getIndex(1,i,0, index); Im1x = getIndex(0,i,0, index); Im1y = I-(P+2-i-1); Im2y = Im1y-(P+2-i); if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y] + a[Im2y]); a[I] = C * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]+b[Im2y]) -(2*(1+i)-1)*(dx*a[Im1x]+dy*a[Im1y]) - (1+i-1)*(a[Im2y]) ); } if (LorY==1) // if 
Laplace { a[I] = C * ( -(2*(1+i)-1)*(dx*a[Im1x]+dy*a[Im1y]) - (1+i-1)*(a[Im2y]) ); } I = getIndex(1,0,i, index); Im1x = getIndex(0,0,i, index); Im1z = I-1; Im2z = I-2; if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dz*a[Im1z] + a[Im2z]); a[I] = C * ( -kappa*(dx*b[Im1x]+dz*b[Im1z]+b[Im2z]) -(2*(1+i)-1)*(dx*a[Im1x]+dz*a[Im1z]) - (1+i-1)*(a[Im2z]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(1+i)-1)*(dx*a[Im1x]+dz*a[Im1z]) - (1+i-1)*(a[Im2z]) ); } I = getIndex(0,1,i, index); Im1y = I-(P+2-1); Im1z = I-1; Im2z = I-2; if (LorY==2) // if Yukawa { b[I] = Cb * (dy*a[Im1y] + dz*a[Im1z] + a[Im2z]); a[I] = C * ( -kappa*(dy*b[Im1y]+dz*b[Im1z]+b[Im2z]) -(2*(1+i)-1)*(dy*a[Im1y]+dz*a[Im1z]) - (1+i-1)*(a[Im2z]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(1+i)-1)*(dy*a[Im1y]+dz*a[Im1z]) - (1+i-1)*(a[Im2z]) ); } I = getIndex(i,1,0, index); Im1y = I-(P+2-1-i); Im1x = getIndex(i-1,1,0, index); Im2x = getIndex(i-2,1,0, index); if (LorY==2) // if Yukawa { b[I] = Cb * (dy*a[Im1y] + dx*a[Im1x] + a[Im2x]); a[I] = C * ( -kappa*(dy*b[Im1y]+dx*b[Im1x]+b[Im2x]) -(2*(1+i)-1)*(dy*a[Im1y]+dx*a[Im1x]) - (1+i-1)*(a[Im2x]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(1+i)-1)*(dy*a[Im1y]+dx*a[Im1x]) - (1+i-1)*(a[Im2x]) ); } I = getIndex(i,0,1, index); Im1z = I-1; Im1x = getIndex(i-1,0,1, index); Im2x = getIndex(i-2,0,1, index); if (LorY==2) // if Yukawa { b[I] = Cb * (dz*a[Im1z] + dx*a[Im1x] + a[Im2x]); a[I] = C * ( -kappa*(dz*b[Im1z]+dx*b[Im1x]+b[Im2x]) -(2*(1+i)-1)*(dz*a[Im1z]+dx*a[Im1x]) - (1+i-1)*(a[Im2x]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(1+i)-1)*(dz*a[Im1z]+dx*a[Im1x]) - (1+i-1)*(a[Im2x]) ); } I = getIndex(0,i,1, index); Im1z = I-1; Im1y = I-(P+2-i); Im2y = Im1y-(P+2-i+1); if (LorY==2) // if Yukawa { b[I] = Cb * (dz*a[Im1z] + dy*a[Im1y] + a[Im2y]); a[I] = C * ( -kappa*(dz*b[Im1z]+dy*b[Im1y]+b[Im2y]) -(2*(1+i)-1)*(dz*a[Im1z]+dy*a[Im1y]) - (1+i-1)*(a[Im2y]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(1+i)-1)*(dz*a[Im1z]+dy*a[Im1y]) - (1+i-1)*(a[Im2y]) ); } } // One index 0, others >=2 for (i=2; i<P+1; i++) { for (j=2; j<P+1-i; j++) { Cb = -kappa/(i+j); C = 1./((i+j)*R2); I = getIndex(i,j,0, index); Im1x = getIndex(i-1,j,0, index); Im2x = getIndex(i-2,j,0, index); Im1y = I-(P+2-j-i); Im2y = Im1y-(P+3-j-i); if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y] + a[Im2x] + a[Im2y]); a[I] = C * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]+b[Im2x]+b[Im2y]) -(2*(i+j)-1)*(dx*a[Im1x]+dy*a[Im1y]) -(i+j-1)*(a[Im2x]+a[Im2y]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(i+j)-1)*(dx*a[Im1x]+dy*a[Im1y]) -(i+j-1)*(a[Im2x]+a[Im2y]) ); } I = getIndex(i,0,j, index); Im1x = getIndex(i-1,0,j, index); Im2x = getIndex(i-2,0,j, index); Im1z = I-1; Im2z = I-2; if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dz*a[Im1z] + a[Im2x] + a[Im2z]); a[I] = C * ( -kappa*(dx*b[Im1x]+dz*b[Im1z]+b[Im2x]+b[Im2z]) -(2*(i+j)-1)*(dx*a[Im1x]+dz*a[Im1z]) -(i+j-1)*(a[Im2x]+a[Im2z]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(i+j)-1)*(dx*a[Im1x]+dz*a[Im1z]) -(i+j-1)*(a[Im2x]+a[Im2z]) ); } I = getIndex(0,i,j, index); Im1y = I-(P+2-i); Im2y = Im1y-(P+3-i); Im1z = I-1; Im2z = I-2; if (LorY==2) // if Yukawa { b[I] = Cb * (dy*a[Im1y] + dz*a[Im1z] + a[Im2y] + a[Im2z]); a[I] = C * ( -kappa*(dy*b[Im1y]+dz*b[Im1z]+b[Im2y]+b[Im2z]) -(2*(i+j)-1)*(dy*a[Im1y]+dz*a[Im1z]) -(i+j-1)*(a[Im2y]+a[Im2z]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(i+j)-1)*(dy*a[Im1y]+dz*a[Im1z]) -(i+j-1)*(a[Im2y]+a[Im2z]) ); } } } if (P>2) { // Two index = 1, other>=1 Cb = -kappa/3; I = getIndex(1,1,1, index); Im1x = 
getIndex(0,1,1, index); Im1y = getIndex(1,0,1, index); Im1y = I-(P); Im1z = I-1; if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y] + dz*a[Im1z]); a[I] = 1/(3*R2) * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]+dz*b[Im1z]) -5*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) ); } if (LorY==1) // if Laplace { a[I] = 1/(3*R2) * ( -5*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) ); } for (i=2; i<P-1; i++) { Cb = -kappa/(2+i); C = 1./((i+2)*R2); I = getIndex(i,1,1, index); Im1x = getIndex(i-1,1,1, index); Im1y = I-(P+2-i-1); Im1z = I-1; Im2x = getIndex(i-2,1,1, index); if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y] + dz*a[Im1z] + a[Im2x]); a[I] = C * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]+dz*b[Im1z]+b[Im2x]) -(2*(i+2)-1)*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - (i+1)*(a[Im2x]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(i+2)-1)*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - (i+1)*(a[Im2x]) ); } I = getIndex(1,i,1, index); Im1x = getIndex(0,i,1, index); Im1y = I-(P+2-i-1); Im2y = Im1y-(P+3-i-1); Im1z = I-1 ; if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y] + dz*a[Im1z] + a[Im2y]); a[I] = C * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]+dz*b[Im1z]+b[Im2y]) -(2*(i+2)-1)*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - (i+1)*(a[Im2y]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(i+2)-1)*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - (i+1)*(a[Im2y]) ); } I = getIndex(1,1,i, index); Im1x = getIndex(0,1,i, index); Im1y = I-(P); Im1z = I-1; Im2z = I-2; if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y] + dz*a[Im1z] + a[Im2z]); a[I] = C * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]+dz*b[Im1z]+b[Im2z]) -(2*(i+2)-1)*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - (i+1)*(a[Im2z]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(i+2)-1)*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - (i+1)*(a[Im2z]) ); } } } // One index = 1, others >=2 if (P>4) { for (i=2; i<P-2; i++) { for (j=2; j<P-i; j++) { Cb = -kappa/(1+i+j); C = 1./((1+i+j)*R2); C1 = -(2.*(1+i+j)-1); C2 = (i+j); I = getIndex(1,i,j, index); Im1x = getIndex(0,i,j, index); Im1y = I-(P+2-1-i); Im2y = Im1y-(P+3-1-i); Im1z = I-1; Im2z = I-2; if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y] + dz*a[Im1z] + a[Im2y] + a[Im2z]); a[I] = C * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]+dz*b[Im1z]+b[Im2y]+b[Im2z]) + C1*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - C2*(a[Im2y]+a[Im2z]) ); } if (LorY==1) // if Laplace { a[I] = C * ( C1*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - C2*(a[Im2y]+a[Im2z]) ); } I = getIndex(i,1,j, index); Im1x = getIndex(i-1,1,j, index); Im1y = I-(P+2-i-1); Im2x = getIndex(i-2,1,j, index); Im1z = I-1; Im2z = I-2; if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y] + dz*a[Im1z] + a[Im2x] + a[Im2z]); a[I] = C * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]+dz*b[Im1z]+b[Im2x]+b[Im2z]) + C1*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - C2*(a[Im2x]+a[Im2z]) ); } if (LorY==1) // if Laplace { a[I] = C * ( C1*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - C2*(a[Im2x]+a[Im2z]) ); } I = getIndex(i,j,1, index); Im1x = getIndex(i-1,j,1, index); Im2x = getIndex(i-2,j,1, index); Im1y = I-(P+2-i-j); Im2y = Im1y-(P+3-i-j); Im1z = I-1; if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y] + dz*a[Im1z] + a[Im2x] + a[Im2y]); a[I] = C * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]+dz*b[Im1z]+b[Im2x]+b[Im2y]) + C1*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - C2*(a[Im2x]+a[Im2y]) ); } if (LorY==1) // if Laplace { a[I] = C * ( C1*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - C2*(a[Im2x]+a[Im2y]) ); } } } } // All indices >= 2 if (P>5) { for (i=2;i<P-3;i++) { for (j=2;j<P-1-i;j++) { for (k=2;k<P+1-i-j;k++) { Cb = -kappa/(i+j+k); C = 1./((i+j+k)*R2); C1 = 
-(2.*(i+j+k)-1); C2 = i+j+k-1.; I = getIndex(i,j,k, index); Im1x = getIndex(i-1,j,k, index); Im2x = getIndex(i-2,j,k, index); Im1y = I-(P+2-i-j); Im2y = Im1y-(P+3-i-j); Im1z = I-1; Im2z = I-2; if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y] + dz*a[Im1z] + a[Im2x] + a[Im2y] + a[Im2z]); a[I] = C * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]+dz*b[Im1z]+b[Im2x]+b[Im2y]+b[Im2z]) + C1*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - C2*(a[Im2x]+a[Im2y]+a[Im2z]) ); } if (LorY==1) // if Laplace { a[I] = C * ( C1*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - C2*(a[Im2x]+a[Im2y]+a[Im2z]) ); } } } } } } __device__ void getCoeff_shift(REAL *ax, REAL *ay, REAL *az, REAL dx, REAL dy, REAL dz, REAL kappa, int *index, int LorY) { REAL b[Nm], a[Nm]; REAL R = sqrt(dx*dx+dy*dy+dz*dz); REAL R2 = R*R; REAL R3 = R2*R; int i,j,k,I,Im1x,Im2x,Im1y,Im2y,Im1z,Im2z; REAL C,C1,C2,Cb; if (LorY==2) // if Yukawa { b[0] = exp(-kappa*R); a[0] = b[0]/R; } if (LorY==1) // if Laplace { a[0] = 1/R; } // Two indices = 0 I = getIndex(1,0,0, index); if (LorY==2) // if Yukawa { b[I] = -kappa * (dx*a[0]); // 1,0,0 b[P+1] = -kappa * (dy*a[0]); // 0,1,0 b[1] = -kappa * (dz*a[0]); // 0,0,1 a[I] = -1/R2*(kappa*dx*b[0]+dx*a[0]); a[P+1] = -1/R2*(kappa*dy*b[0]+dy*a[0]); a[1] = -1/R2*(kappa*dz*b[0]+dz*a[0]); } if (LorY==1) // if Laplace { a[I] = -dx/R3; a[P+1] = -dy/R3; a[1] = -dz/R3; } ax[0] = a[I]; ay[0] = a[P+1]; az[0] = a[1]; for (i=2; i<P+1; i++) { Cb = -kappa/i; C = 1./(i*R2); I = getIndex(i,0,0, index); Im1x = getIndex(i-1,0,0, index); Im2x = getIndex(i-2,0,0, index); if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + a[Im2x]); a[I] = C * ( -kappa*(dx*b[Im1x] + b[Im2x]) -(2*i-1)*dx*a[Im1x] - (i-1)*a[Im2x] ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*i-1)*dx*a[Im1x] - (i-1)*a[Im2x] ); } ax[Im1x] = a[I]*i; I = getIndex(0,i,0, index); Im1y = I-(P+2-i); Im2y = Im1y-(P+2-i+1); if (LorY==2) // if Yukawa { b[I] = Cb * (dy*a[Im1y] + a[Im2y]); a[I] = C * ( -kappa*(dy*b[Im1y] + b[Im2y]) -(2*i-1)*dy*a[Im1y] - (i-1)*a[Im2y] ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*i-1)*dy*a[Im1y] - (i-1)*a[Im2y] ); } ay[Im1y] = a[I]*i; I = i; Im1z = I-1; Im2z = I-2; if (LorY==2) // if Yukawa { b[I] = Cb * (dz*a[Im1z] + a[Im2z]); a[I] = C * ( -kappa*(dz*b[Im1z] + b[Im2z]) -(2*i-1)*dz*a[Im1z] - (i-1)*a[Im2z] ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*i-1)*dz*a[Im1z] - (i-1)*a[Im2z] ); } az[Im1z] = a[I]*i; } // One index = 0, one = 1 other >=1 Cb = -kappa/2; I = getIndex(1,1,0, index); Im1x = P+1; Im1y = I-(P+2-1-1); if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y]); a[I] = 1./(2*R2) * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]) -(2*2-1)*(dx*a[Im1x]+dy*a[Im1y]) ); } if (LorY==1) // if Laplace { a[I] = 1./(2*R2) * ( -(2*2-1)*(dx*a[Im1x]+dy*a[Im1y]) ); } ax[Im1x] = a[I]; ay[Im1y] = a[I]; I = getIndex(1,0,1, index); Im1x = 1; Im1z = I-1; if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dz*a[Im1z]); a[I] = 1./(2*R2) * ( -kappa*(dx*b[Im1x]+dz*b[Im1z]) -(2*2-1)*(dx*a[Im1x]+dz*a[Im1z]) ); } if (LorY==1) // if Laplace { a[I] = 1./(2*R2) * ( -(2*2-1)*(dx*a[Im1x]+dz*a[Im1z]) ); } ax[Im1x] = a[I]; az[Im1z] = a[I]; I = getIndex(0,1,1, index); Im1y = I-(P+2-1); Im1z = I-1; if (LorY==2) // if Yukawa { b[I] = Cb * (dy*a[Im1y] + dz*a[Im1z]); a[I] = 1./(2*R2) * ( -kappa*(dy*b[Im1y]+dz*b[Im1z]) -(2*2-1)*(dy*a[Im1y]+dz*a[Im1z]) ); } if (LorY==1) // if Laplace { a[I] = 1./(2*R2) * ( -(2*2-1)*(dy*a[Im1y]+dz*a[Im1z]) ); } ay[Im1y] = a[I]; az[Im1z] = a[I]; for (i=2; i<P; i++) { Cb = -kappa/(i+1); C = 1./((1+i)*R2); I = getIndex(1,i,0, index); Im1x = getIndex(0,i,0, 
index); Im1y = I-(P+2-i-1); Im2y = Im1y-(P+2-i); if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y] + a[Im2y]); a[I] = C * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]+b[Im2y]) -(2*(1+i)-1)*(dx*a[Im1x]+dy*a[Im1y]) - (1+i-1)*(a[Im2y]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(1+i)-1)*(dx*a[Im1x]+dy*a[Im1y]) - (1+i-1)*(a[Im2y]) ); } ax[Im1x] = a[I]; ay[Im1y] = a[I]; I = getIndex(1,0,i, index); Im1x = getIndex(0,0,i, index); Im1z = I-1; Im2z = I-2; if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dz*a[Im1z] + a[Im2z]); a[I] = C * ( -kappa*(dx*b[Im1x]+dz*b[Im1z]+b[Im2z]) -(2*(1+i)-1)*(dx*a[Im1x]+dz*a[Im1z]) - (1+i-1)*(a[Im2z]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(1+i)-1)*(dx*a[Im1x]+dz*a[Im1z]) - (1+i-1)*(a[Im2z]) ); } ax[Im1x] = a[I]; az[Im1z] = a[I]*i; I = getIndex(0,1,i, index); Im1y = I-(P+2-1); Im1z = I-1; Im2z = I-2; if (LorY==2) // if Yukawa { b[I] = Cb * (dy*a[Im1y] + dz*a[Im1z] + a[Im2z]); a[I] = C * ( -kappa*(dy*b[Im1y]+dz*b[Im1z]+b[Im2z]) -(2*(1+i)-1)*(dy*a[Im1y]+dz*a[Im1z]) - (1+i-1)*(a[Im2z]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(1+i)-1)*(dy*a[Im1y]+dz*a[Im1z]) - (1+i-1)*(a[Im2z]) ); } ay[Im1y] = a[I]; az[Im1z] = a[I]*i; I = getIndex(i,1,0, index); Im1y = I-(P+2-1-i); Im1x = getIndex(i-1,1,0, index); Im2x = getIndex(i-2,1,0, index); if (LorY==2) // if Yukawa { b[I] = Cb * (dy*a[Im1y] + dx*a[Im1x] + a[Im2x]); a[I] = C * ( -kappa*(dy*b[Im1y]+dx*b[Im1x]+b[Im2x]) -(2*(1+i)-1)*(dy*a[Im1y]+dx*a[Im1x]) - (1+i-1)*(a[Im2x]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(1+i)-1)*(dy*a[Im1y]+dx*a[Im1x]) - (1+i-1)*(a[Im2x]) ); } ay[Im1y] = a[I]; ax[Im1x] = a[I]*i; I = getIndex(i,0,1, index); Im1z = I-1; Im1x = getIndex(i-1,0,1, index); Im2x = getIndex(i-2,0,1, index); if (LorY==2) // if Yukawa { b[I] = Cb * (dz*a[Im1z] + dx*a[Im1x] + a[Im2x]); a[I] = C * ( -kappa*(dz*b[Im1z]+dx*b[Im1x]+b[Im2x]) -(2*(1+i)-1)*(dz*a[Im1z]+dx*a[Im1x]) - (1+i-1)*(a[Im2x]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(1+i)-1)*(dz*a[Im1z]+dx*a[Im1x]) - (1+i-1)*(a[Im2x]) ); } az[Im1z] = a[I]; ax[Im1x] = a[I]*i; I = getIndex(0,i,1, index); Im1z = I-1; Im1y = I-(P+2-i); Im2y = Im1y-(P+2-i+1); if (LorY==2) // if Yukawa { b[I] = Cb * (dz*a[Im1z] + dy*a[Im1y] + a[Im2y]); a[I] = C * ( -kappa*(dz*b[Im1z]+dy*b[Im1y]+b[Im2y]) -(2*(1+i)-1)*(dz*a[Im1z]+dy*a[Im1y]) - (1+i-1)*(a[Im2y]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(1+i)-1)*(dz*a[Im1z]+dy*a[Im1y]) - (1+i-1)*(a[Im2y]) ); } az[Im1z] = a[I]; ay[Im1y] = a[I]*i; } // One index 0, others >=2 for (i=2; i<P+1; i++) { for (j=2; j<P+1-i; j++) { Cb = -kappa/(i+j); C = 1./((i+j)*R2); I = getIndex(i,j,0, index); Im1x = getIndex(i-1,j,0, index); Im2x = getIndex(i-2,j,0, index); Im1y = I-(P+2-j-i); Im2y = Im1y-(P+3-j-i); if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y] + a[Im2x] + a[Im2y]); a[I] = C * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]+b[Im2x]+b[Im2y]) -(2*(i+j)-1)*(dx*a[Im1x]+dy*a[Im1y]) -(i+j-1)*(a[Im2x]+a[Im2y]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(i+j)-1)*(dx*a[Im1x]+dy*a[Im1y]) -(i+j-1)*(a[Im2x]+a[Im2y]) ); } ax[Im1x] = a[I]*i; ay[Im1y] = a[I]*j; I = getIndex(i,0,j, index); Im1x = getIndex(i-1,0,j, index); Im2x = getIndex(i-2,0,j, index); Im1z = I-1; Im2z = I-2; if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dz*a[Im1z] + a[Im2x] + a[Im2z]); a[I] = C * ( -kappa*(dx*b[Im1x]+dz*b[Im1z]+b[Im2x]+b[Im2z]) -(2*(i+j)-1)*(dx*a[Im1x]+dz*a[Im1z]) -(i+j-1)*(a[Im2x]+a[Im2z]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(i+j)-1)*(dx*a[Im1x]+dz*a[Im1z]) -(i+j-1)*(a[Im2x]+a[Im2z]) ); } 
ax[Im1x] = a[I]*i; az[Im1z] = a[I]*j; I = getIndex(0,i,j, index); Im1y = I-(P+2-i); Im2y = Im1y-(P+3-i); Im1z = I-1; Im2z = I-2; if (LorY==2) // if Yukawa { b[I] = Cb * (dy*a[Im1y] + dz*a[Im1z] + a[Im2y] + a[Im2z]); a[I] = C * ( -kappa*(dy*b[Im1y]+dz*b[Im1z]+b[Im2y]+b[Im2z]) -(2*(i+j)-1)*(dy*a[Im1y]+dz*a[Im1z]) -(i+j-1)*(a[Im2y]+a[Im2z]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(i+j)-1)*(dy*a[Im1y]+dz*a[Im1z]) -(i+j-1)*(a[Im2y]+a[Im2z]) ); } ay[Im1y] = a[I]*i; az[Im1z] = a[I]*j; } } if (P>2) { // Two index = 1, other>=1 Cb = -kappa/3; I = getIndex(1,1,1, index); Im1x = getIndex(0,1,1, index); Im1y = getIndex(1,0,1, index); Im1y = I-(P); Im1z = I-1; if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y] + dz*a[Im1z]); a[I] = 1/(3*R2) * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]+dz*b[Im1z]) -5*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) ); } if (LorY==1) // if Laplace { a[I] = 1/(3*R2) * ( -5*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) ); } ax[Im1x] = a[I]; ay[Im1y] = a[I]; az[Im1z] = a[I]; for (i=2; i<P-1; i++) { Cb = -kappa/(2+i); C = 1./((i+2)*R2); I = getIndex(i,1,1, index); Im1x = getIndex(i-1,1,1, index); Im1y = I-(P+2-i-1); Im1z = I-1; Im2x = getIndex(i-2,1,1, index); if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y] + dz*a[Im1z] + a[Im2x]); a[I] = C * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]+dz*b[Im1z]+b[Im2x]) -(2*(i+2)-1)*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - (i+1)*(a[Im2x]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(i+2)-1)*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - (i+1)*(a[Im2x]) ); } ax[Im1x] = a[I]*i; ay[Im1y] = a[I]; az[Im1z] = a[I]; I = getIndex(1,i,1, index); Im1x = getIndex(0,i,1, index); Im1y = I-(P+2-i-1); Im2y = Im1y-(P+3-i-1); Im1z = I-1 ; if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y] + dz*a[Im1z] + a[Im2y]); a[I] = C * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]+dz*b[Im1z]+b[Im2y]) -(2*(i+2)-1)*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - (i+1)*(a[Im2y]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(i+2)-1)*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - (i+1)*(a[Im2y]) ); } ax[Im1x] = a[I]; ay[Im1y] = a[I]*i; az[Im1z] = a[I]; I = getIndex(1,1,i, index); Im1x = getIndex(0,1,i, index); Im1y = I-(P); Im1z = I-1; Im2z = I-2; if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y] + dz*a[Im1z] + a[Im2z]); a[I] = C * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]+dz*b[Im1z]+b[Im2z]) -(2*(i+2)-1)*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - (i+1)*(a[Im2z]) ); } if (LorY==1) // if Laplace { a[I] = C * ( -(2*(i+2)-1)*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - (i+1)*(a[Im2z]) ); } ax[Im1x] = a[I]; ay[Im1y] = a[I]; az[Im1z] = a[I]*i; } } // One index = 1, others >=2 if (P>4) { for (i=2; i<P-2; i++) { for (j=2; j<P-i; j++) { Cb = -kappa/(1+i+j); C = 1./((1+i+j)*R2); C1 = -(2.*(1+i+j)-1); C2 = (i+j); I = getIndex(1,i,j, index); Im1x = getIndex(0,i,j, index); Im1y = I-(P+2-1-i); Im2y = Im1y-(P+3-1-i); Im1z = I-1; Im2z = I-2; if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y] + dz*a[Im1z] + a[Im2y] + a[Im2z]); a[I] = C * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]+dz*b[Im1z]+b[Im2y]+b[Im2z]) + C1*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - C2*(a[Im2y]+a[Im2z]) ); } if (LorY==1) // if Laplace { a[I] = C * ( C1*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - C2*(a[Im2y]+a[Im2z]) ); } ax[Im1x] = a[I]; ay[Im1y] = a[I]*i; az[Im1z] = a[I]*j; I = getIndex(i,1,j, index); Im1x = getIndex(i-1,1,j, index); Im1y = I-(P+2-i-1); Im2x = getIndex(i-2,1,j, index); Im1z = I-1; Im2z = I-2; if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y] + dz*a[Im1z] + a[Im2x] + a[Im2z]); a[I] = C * ( 
-kappa*(dx*b[Im1x]+dy*b[Im1y]+dz*b[Im1z]+b[Im2x]+b[Im2z]) + C1*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - C2*(a[Im2x]+a[Im2z]) ); } if (LorY==1) // if Laplace { a[I] = C * ( C1*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - C2*(a[Im2x]+a[Im2z]) ); } ax[Im1x] = a[I]*i; ay[Im1y] = a[I]; az[Im1z] = a[I]*j; I = getIndex(i,j,1, index); Im1x = getIndex(i-1,j,1, index); Im2x = getIndex(i-2,j,1, index); Im1y = I-(P+2-i-j); Im2y = Im1y-(P+3-i-j); Im1z = I-1; if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y] + dz*a[Im1z] + a[Im2x] + a[Im2y]); a[I] = C * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]+dz*b[Im1z]+b[Im2x]+b[Im2y]) + C1*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - C2*(a[Im2x]+a[Im2y]) ); } if (LorY==1) // if Laplace { a[I] = C * ( C1*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - C2*(a[Im2x]+a[Im2y]) ); } ax[Im1x] = a[I]*i; ay[Im1y] = a[I]*j; az[Im1z] = a[I]; } } } // All indices >= 2 if (P>5) { for (i=2;i<P-3;i++) { for (j=2;j<P-1-i;j++) { for (k=2;k<P+1-i-j;k++) { Cb = -kappa/(i+j+k); C = 1./((i+j+k)*R2); C1 = -(2.*(i+j+k)-1); C2 = i+j+k-1.; I = getIndex(i,j,k, index); Im1x = getIndex(i-1,j,k, index); Im2x = getIndex(i-2,j,k, index); Im1y = I-(P+2-i-j); Im2y = Im1y-(P+3-i-j); Im1z = I-1; Im2z = I-2; if (LorY==2) // if Yukawa { b[I] = Cb * (dx*a[Im1x] + dy*a[Im1y] + dz*a[Im1z] + a[Im2x] + a[Im2y] + a[Im2z]); a[I] = C * ( -kappa*(dx*b[Im1x]+dy*b[Im1y]+dz*b[Im1z]+b[Im2x]+b[Im2y]+b[Im2z]) + C1*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - C2*(a[Im2x]+a[Im2y]+a[Im2z]) ); } if (LorY==1) // if Laplace { a[I] = C * ( C1*(dx*a[Im1x]+dy*a[Im1y]+dz*a[Im1z]) - C2*(a[Im2x]+a[Im2y]+a[Im2z]) ); } ax[Im1x] = a[I]*i; ay[Im1y] = a[I]*j; az[Im1z] = a[I]*k; } } } } } __device__ void multipole(REAL &K, REAL &V, REAL *M, REAL *Md, REAL *a, int CJ_start, int jblock, int j) { int offset; for (int i=0; i<Nm; i++) { offset = (CJ_start+j)*Nm + jblock*BSZ*Nm + i; V += M[offset] * a[i]; K += Md[offset]* a[i]; } } __device__ void multipoleKt(REAL &Ktx, REAL &Kty, REAL &Ktz, REAL *M, REAL *ax, REAL *ay, REAL *az, int CJ_start, int jblock, int j) { int offset; for (int i=0; i<Nm; i++) { offset = (CJ_start+j)*Nm + jblock*BSZ*Nm + i; Ktx += M[offset] * ax[i]; Kty += M[offset] * ay[i]; Ktz += M[offset] * az[i]; } } __device__ REAL mynorm(REAL *x) { return sqrt(x[0]*x[0] + x[1]*x[1] + x[2]*x[2]); } __device__ void cross(REAL *x, REAL *y, REAL *z) // z is the resulting array { z[0] = x[1]*y[2] - x[2]*y[1]; z[1] = x[2]*y[0] - x[0]*y[2]; z[2] = x[0]*y[1] - x[1]*y[0]; } __device__ void MV(REAL *M, REAL *V, REAL *res) // 3x3 mat-vec { REAL V2[3] = {V[0], V[1], V[2]}; for (int i=0; i<3; i++) { REAL sum = 0.; for (int j=0; j<3; j++) { sum += M[3*i+j]*V2[j]; } res[i] = sum; } } __device__ void MVip(REAL *M, REAL *V) // 3x3 mat-vec in-place { REAL V2[3] = {V[0], V[1], V[2]}; for (int i=0; i<3; i++) { REAL sum = 0.; for (int j=0; j<3; j++) { sum += M[3*i+j]*V2[j]; } V[i] = sum; } } __device__ REAL dot_prod(REAL *x, REAL *y) // len(3) vector dot product { return x[0]*y[0] + x[1]*y[1] + x[2]*y[2]; } __device__ void axpy(REAL *x, REAL *y, REAL *z, REAL alpha, int sign, int N) { for(int i=0; i<N; i++) { z[i] = sign*alpha*x[i] + y[i]; } } __device__ void ax(REAL *x, REAL *y, REAL alpha, int N) { for(int i=0; i<N; i++) { y[i] = alpha*x[i]; } } __device__ void axip(REAL *x, REAL alpha, int N) { for(int i=0; i<N; i++) { x[i] = alpha*x[i]; } } __device__ void lineInt(REAL &PHI_K, REAL &PHI_V, REAL z, REAL x, REAL v1, REAL v2, REAL kappa, REAL *xk, REAL *wk, int K, int LorY) { REAL theta1 = atan2(v1,x); REAL theta2 = atan2(v2,x); REAL absZ = fabs(z), signZ; if (absZ<1e-10) signZ = 
0; else signZ = z/absZ; // Loop over gauss points REAL thetak, Rtheta, R, expKr, expKz = exp(-kappa*absZ); for (int i=0; i<K; i++) { thetak = (theta2 - theta1)/2*xk[i] + (theta2 + theta1)/2; Rtheta = x/cos(thetak); R = sqrt(Rtheta*Rtheta + z*z); expKr = exp(-kappa*R); if (LorY==2) { if (kappa>1e-12) { PHI_V+= -wk[i]*(expKr - expKz)/kappa * (theta2 - theta1)/2; PHI_K+= wk[i]*(z/R*expKr - expKz*signZ) * (theta2 - theta1)/2; } else { PHI_V+= wk[i]*(R-absZ) * (theta2 - theta1)/2; PHI_K+= wk[i]*(z/R - signZ) * (theta2 - theta1)/2; } } if (LorY==1) { PHI_V += wk[i]*(R-absZ) * (theta2 - theta1)/2; PHI_K += wk[i]*(z/R - signZ) * (theta2 - theta1)/2; } } } __device__ void intSide(REAL &PHI_K, REAL &PHI_V, REAL *v1, REAL *v2, REAL p, REAL kappa, REAL *xk, REAL *wk, int K, int LorY) { REAL v21u[3]; for (int i=0; i<3; i++) { v21u[i] = v2[i] - v1[i]; } REAL L21 = mynorm(v21u); axip(v21u, 1/L21, 3); REAL unit[3] = {0.,0.,1.}; REAL orthog[3]; cross(unit, v21u, orthog); REAL v1new_x = dot_prod(orthog, v1); REAL v1new_y = dot_prod(v21u, v1); if (v1new_x<0) { axip(v21u, -1, 3); axip(orthog, -1, 3); v1new_x = dot_prod(orthog, v1); v1new_y = dot_prod(v21u, v1); } REAL v2new_y = dot_prod(v21u, v2); if ((v1new_y>0 && v2new_y<0) || (v1new_y<0 && v2new_y>0)) { lineInt(PHI_K, PHI_V, p, v1new_x, 0, v1new_y, kappa, xk, wk, K, LorY); lineInt(PHI_K, PHI_V, p, v1new_x, v2new_y, 0, kappa, xk, wk, K, LorY); } else { REAL PHI_Kaux = 0., PHI_Vaux = 0.; lineInt(PHI_Kaux, PHI_Vaux, p, v1new_x, v1new_y, v2new_y, kappa, xk, wk, K, LorY); PHI_K -= PHI_Kaux; PHI_V -= PHI_Vaux; } } __device__ void SA(REAL &PHI_K, REAL &PHI_V, REAL *y, REAL x0, REAL x1, REAL x2, REAL K_diag, REAL V_diag, REAL kappa, int same, REAL *xk, REAL *wk, int K, int LorY) { REAL y0_panel[3], y1_panel[3], y2_panel[3], x_panel[3]; REAL X[3], Y[3], Z[3]; x_panel[0] = x0 - y[0]; x_panel[1] = x1 - y[1]; x_panel[2] = x2 - y[2]; for (int i=0; i<3; i++) { y0_panel[i] = 0.; y1_panel[i] = y[3+i] - y[i]; y2_panel[i] = y[6+i] - y[i]; X[i] = y1_panel[i]; } // Find panel coordinate system X: 0->1 cross(y1_panel, y2_panel, Z); REAL Xnorm = mynorm(X); REAL Znorm = mynorm(Z); for (int i=0; i<3; i++) { X[i] /= Xnorm; Z[i] /= Znorm; } cross(Z,X,Y); // Rotate the coordinate system to match panel plane // Multiply y_panel times a rotation matrix [X; Y; Z] REAL x_aux, y_aux, z_aux; x_aux = dot_prod(X, y0_panel); y_aux = dot_prod(Y, y0_panel); z_aux = dot_prod(Z, y0_panel); y0_panel[0] = x_aux; y0_panel[1] = y_aux; y0_panel[2] = z_aux; x_aux = dot_prod(X, y1_panel); y_aux = dot_prod(Y, y1_panel); z_aux = dot_prod(Z, y1_panel); y1_panel[0] = x_aux; y1_panel[1] = y_aux; y1_panel[2] = z_aux; x_aux = dot_prod(X, y2_panel); y_aux = dot_prod(Y, y2_panel); z_aux = dot_prod(Z, y2_panel); y2_panel[0] = x_aux; y2_panel[1] = y_aux; y2_panel[2] = z_aux; x_aux = dot_prod(X, x_panel); y_aux = dot_prod(Y, x_panel); z_aux = dot_prod(Z, x_panel); x_panel[0] = x_aux; x_panel[1] = y_aux; x_panel[2] = z_aux; // Shift origin so it matches collocation point for (int i=0; i<2; i++) { y0_panel[i] -= x_panel[i]; y1_panel[i] -= x_panel[i]; y2_panel[i] -= x_panel[i]; } // Loop over sides intSide(PHI_K, PHI_V, y0_panel, y1_panel, x_panel[2], kappa, xk, wk, K, LorY); // Side 0 intSide(PHI_K, PHI_V, y1_panel, y2_panel, x_panel[2], kappa, xk, wk, K, LorY); // Side 1 intSide(PHI_K, PHI_V, y2_panel, y0_panel, x_panel[2], kappa, xk, wk, K, LorY); // Side 2 if (same==1) { PHI_K += K_diag; PHI_V += V_diag; } } __device__ __inline__ void GQ_fine(REAL &PHI_K, REAL &PHI_V, REAL *panel, int J, REAL xi, REAL yi, REAL 
zi, REAL kappa, REAL *Xk, REAL *Wk, REAL *Area, int LorY) { REAL nx, ny, nz; REAL dx, dy, dz, r, aux; PHI_K = 0.; PHI_V = 0.; int j = J/9; aux = 1/(2*Area[j]); nx = ((panel[J+4]-panel[J+1])*(panel[J+2]-panel[J+8]) - (panel[J+5]-panel[J+2])*(panel[J+1]-panel[J+7])) * aux; ny = ((panel[J+5]-panel[J+2])*(panel[J+0]-panel[J+6]) - (panel[J+3]-panel[J+0])*(panel[J+2]-panel[J+8])) * aux; nz = ((panel[J+3]-panel[J+0])*(panel[J+1]-panel[J+7]) - (panel[J+4]-panel[J+1])*(panel[J+0]-panel[J+6])) * aux; #pragma unroll for (int kk=0; kk<K_fine; kk++) { dx = xi - (panel[J+0]*Xk[3*kk] + panel[J+3]*Xk[3*kk+1] + panel[J+6]*Xk[3*kk+2]); dy = yi - (panel[J+1]*Xk[3*kk] + panel[J+4]*Xk[3*kk+1] + panel[J+7]*Xk[3*kk+2]); dz = zi - (panel[J+2]*Xk[3*kk] + panel[J+5]*Xk[3*kk+1] + panel[J+8]*Xk[3*kk+2]); r = rsqrt(dx*dx + dy*dy + dz*dz); // r is 1/r!!! if (LorY==1) { aux = Wk[kk]*Area[j]*r; PHI_V += aux; PHI_K += aux*(nx*dx+ny*dy+nz*dz)*(r*r); } else { aux = Wk[kk]*Area[j]*exp(-kappa*1/r)*r; PHI_V += aux; PHI_K += aux*(nx*dx+ny*dy+nz*dz)*r*(kappa+r); } } } __device__ __inline__ void GQ_fineKt(REAL &PHI_Ktx, REAL &PHI_Kty, REAL &PHI_Ktz, REAL *panel, int J, REAL xi, REAL yi, REAL zi, REAL kappa, REAL *Xk, REAL *Wk, REAL *Area, int LorY) { REAL dx, dy, dz, r, aux; PHI_Ktx = 0.; PHI_Kty = 0.; PHI_Ktz = 0.; int j = J/9; #pragma unroll for (int kk=0; kk<K_fine; kk++) { dx = xi - (panel[J+0]*Xk[3*kk] + panel[J+3]*Xk[3*kk+1] + panel[J+6]*Xk[3*kk+2]); dy = yi - (panel[J+1]*Xk[3*kk] + panel[J+4]*Xk[3*kk+1] + panel[J+7]*Xk[3*kk+2]); dz = zi - (panel[J+2]*Xk[3*kk] + panel[J+5]*Xk[3*kk+1] + panel[J+8]*Xk[3*kk+2]); r = rsqrt(dx*dx + dy*dy + dz*dz); // r is 1/r!!! if (LorY==1) { aux = Wk[kk]*Area[j]*r*r*r; PHI_Ktx -= aux*dx; PHI_Kty -= aux*dy; PHI_Ktz -= aux*dz; } else { aux = Wk[kk]*Area[j]*exp(-kappa*1/r)*r*r*(kappa+r); PHI_Ktx -= aux*dx; PHI_Kty -= aux*dy; PHI_Ktz -= aux*dz; } } } __global__ void M2P(REAL *K_gpu, REAL *V_gpu, int *offMlt, int *sizeTar, REAL *xc, REAL *yc, REAL *zc, REAL *M, REAL *Md, REAL *xt, REAL *yt, REAL *zt, int *Index, int ptr_off, int ptr_lst, REAL kappa, int BpT, int NCRIT, int LorY) { int I = threadIdx.x + blockIdx.x*NCRIT; int CJ_start = offMlt[ptr_off+blockIdx.x]; int Nmlt = offMlt[ptr_off+blockIdx.x+1] - CJ_start; REAL xi, yi, zi, dx, dy, dz; REAL a[Nm]; __shared__ REAL xc_sh[BSZ], yc_sh[BSZ], zc_sh[BSZ]; __shared__ int Index_sh[(P+1)*(P+1)*(P+1)]; for (int ind=0; ind<((P+1)*(P+1)*(P+1)-1)/BSZ; ind++) { Index_sh[ind*BSZ + threadIdx.x] = Index[ind*BSZ + threadIdx.x]; } int ind = ((P+1)*(P+1)*(P+1)-1)/BSZ; if (threadIdx.x<(P+1)*(P+1)*(P+1)-BSZ*ind) { Index_sh[ind*BSZ + threadIdx.x] = Index[ind*BSZ + threadIdx.x]; } int i; for (int iblock=0; iblock<BpT; iblock++) { i = I + iblock*BSZ; xi = xt[i]; yi = yt[i]; zi = zt[i]; REAL K = 0., V = 0.; for(int jblock=0; jblock<(Nmlt-1)/BSZ; jblock++) { __syncthreads(); xc_sh[threadIdx.x] = xc[ptr_lst + CJ_start + jblock*BSZ + threadIdx.x]; yc_sh[threadIdx.x] = yc[ptr_lst + CJ_start + jblock*BSZ + threadIdx.x]; zc_sh[threadIdx.x] = zc[ptr_lst + CJ_start + jblock*BSZ + threadIdx.x]; __syncthreads(); if (threadIdx.x+iblock*BSZ<sizeTar[blockIdx.x]) { for (int j=0; j<BSZ; j++) { dx = xi - xc_sh[j]; dy = yi - yc_sh[j]; dz = zi - zc_sh[j]; getCoeff(a, dx, dy, dz, kappa, Index_sh, LorY); multipole(K, V, M, Md, a, CJ_start, jblock, j); } } } __syncthreads(); int jblock = (Nmlt-1)/BSZ; xc_sh[threadIdx.x] = xc[ptr_lst + CJ_start + jblock*BSZ + threadIdx.x]; yc_sh[threadIdx.x] = yc[ptr_lst + CJ_start + jblock*BSZ + threadIdx.x]; zc_sh[threadIdx.x] = zc[ptr_lst + CJ_start + 
jblock*BSZ + threadIdx.x]; __syncthreads(); if (threadIdx.x+iblock*BSZ<sizeTar[blockIdx.x]) { for (int j=0; j<Nmlt-(jblock*BSZ); j++) { dx = xi - xc_sh[j]; dy = yi - yc_sh[j]; dz = zi - zc_sh[j]; getCoeff(a, dx, dy, dz, kappa, Index_sh, LorY); multipole(K, V, M, Md, a, CJ_start, jblock, j); } } if (threadIdx.x+iblock*BSZ<sizeTar[blockIdx.x]) { K_gpu[i] += K; V_gpu[i] += V; } } } __global__ void M2PKt(REAL *Ktx_gpu, REAL *Kty_gpu, REAL *Ktz_gpu, int *offMlt, int *sizeTar, REAL *xc, REAL *yc, REAL *zc, REAL *M, REAL *xt, REAL *yt, REAL *zt, int *Index, int ptr_off, int ptr_lst, REAL kappa, int BpT, int NCRIT, int LorY) { int I = threadIdx.x + blockIdx.x*NCRIT; int CJ_start = offMlt[ptr_off+blockIdx.x]; int Nmlt = offMlt[ptr_off+blockIdx.x+1] - CJ_start; REAL xi, yi, zi, dx, dy, dz; REAL ax[Nm], ay[Nm], az[Nm]; __shared__ REAL xc_sh[BSZ], yc_sh[BSZ], zc_sh[BSZ]; __shared__ int Index_sh[(P+1)*(P+1)*(P+1)]; for (int ind=0; ind<((P+1)*(P+1)*(P+1)-1)/BSZ; ind++) { Index_sh[ind*BSZ + threadIdx.x] = Index[ind*BSZ + threadIdx.x]; } int ind = ((P+1)*(P+1)*(P+1)-1)/BSZ; if (threadIdx.x<(P+1)*(P+1)*(P+1)-BSZ*ind) { Index_sh[ind*BSZ + threadIdx.x] = Index[ind*BSZ + threadIdx.x]; } int i; for (int iblock=0; iblock<BpT; iblock++) { i = I + iblock*BSZ; xi = xt[i]; yi = yt[i]; zi = zt[i]; REAL Ktx = 0., Kty = 0., Ktz = 0.; for(int jblock=0; jblock<(Nmlt-1)/BSZ; jblock++) { __syncthreads(); xc_sh[threadIdx.x] = xc[ptr_lst + CJ_start + jblock*BSZ + threadIdx.x]; yc_sh[threadIdx.x] = yc[ptr_lst + CJ_start + jblock*BSZ + threadIdx.x]; zc_sh[threadIdx.x] = zc[ptr_lst + CJ_start + jblock*BSZ + threadIdx.x]; __syncthreads(); if (threadIdx.x+iblock*BSZ<sizeTar[blockIdx.x]) { for (int j=0; j<BSZ; j++) { for (int ii=0; ii<Nm; ii++) { ax[ii] = 0.; ay[ii] = 0.; az[ii] = 0.; } dx = xi - xc_sh[j]; dy = yi - yc_sh[j]; dz = zi - zc_sh[j]; getCoeff_shift(ax, ay, az, dx, dy, dz, kappa, Index_sh, LorY); multipoleKt(Ktx, Kty, Ktz, M, ax, ay, az, CJ_start, jblock, j); } } } __syncthreads(); int jblock = (Nmlt-1)/BSZ; xc_sh[threadIdx.x] = xc[ptr_lst + CJ_start + jblock*BSZ + threadIdx.x]; yc_sh[threadIdx.x] = yc[ptr_lst + CJ_start + jblock*BSZ + threadIdx.x]; zc_sh[threadIdx.x] = zc[ptr_lst + CJ_start + jblock*BSZ + threadIdx.x]; __syncthreads(); if (threadIdx.x+iblock*BSZ<sizeTar[blockIdx.x]) { for (int j=0; j<Nmlt-(jblock*BSZ); j++) { for (int ii=0; ii<Nm; ii++) { ax[ii] = 0.; ay[ii] = 0.; az[ii] = 0.; } dx = xi - xc_sh[j]; dy = yi - yc_sh[j]; dz = zi - zc_sh[j]; getCoeff_shift(ax, ay, az, dx, dy, dz, kappa, Index_sh, LorY); multipoleKt(Ktx, Kty, Ktz, M, ax, ay, az, CJ_start, jblock, j); } } if (threadIdx.x+iblock*BSZ<sizeTar[blockIdx.x]) { Ktx_gpu[i] += Ktx; Kty_gpu[i] += Kty; Ktz_gpu[i] += Ktz; } } } __global__ void P2P(REAL *K_gpu, REAL *V_gpu, int *offSrc, int *offTwg, int *P2P_list, int *sizeTar, int *k, REAL *xj, REAL *yj, REAL *zj, REAL *m, REAL *mx, REAL *my, REAL *mz, REAL *mKc, REAL *mVc, REAL *xt, REAL *yt, REAL *zt, REAL *Area, REAL *sglInt, REAL *vertex, int ptr_off, int ptr_lst, int LorY, REAL kappa, REAL threshold, int BpT, int NCRIT, REAL K_diag, int *AI_int_gpu, REAL *Xsk, REAL *Wsk) { int I = threadIdx.x + blockIdx.x*NCRIT; int list_start = offTwg[ptr_off+blockIdx.x]; int list_end = offTwg[ptr_off+blockIdx.x+1]; REAL xi, yi, zi, dx, dy, dz, r, auxK, auxV; __shared__ REAL ver_sh[9*BSZ], xj_sh[BSZ], yj_sh[BSZ], zj_sh[BSZ], A_sh[BSZ], k_sh[BSZ], sglInt_sh[BSZ], m_sh[BSZ], mx_sh[BSZ], my_sh[BSZ], mz_sh[BSZ], mKc_sh[BSZ], mVc_sh[BSZ], Xsk_sh[K_fine*3], Wsk_sh[K_fine]; if (threadIdx.x<K_fine*3) { 
Xsk_sh[threadIdx.x] = Xsk[threadIdx.x]; if (threadIdx.x<K_fine) Wsk_sh[threadIdx.x] = Wsk[threadIdx.x]; } __syncthreads(); int i, same, near, CJ_start, Nsrc, CJ; for (int iblock=0; iblock<BpT; iblock++) { REAL sum_K = 0., sum_V = 0.; i = I + iblock*BSZ; xi = xt[i]; yi = yt[i]; zi = zt[i]; int an_counter = 0; for (int lst=list_start; lst<list_end; lst++) { CJ = P2P_list[ptr_lst+lst]; CJ_start = offSrc[CJ]; Nsrc = offSrc[CJ+1] - CJ_start; for(int jblock=0; jblock<(Nsrc-1)/BSZ; jblock++) { __syncthreads(); xj_sh[threadIdx.x] = xj[CJ_start + jblock*BSZ + threadIdx.x]; yj_sh[threadIdx.x] = yj[CJ_start + jblock*BSZ + threadIdx.x]; zj_sh[threadIdx.x] = zj[CJ_start + jblock*BSZ + threadIdx.x]; m_sh[threadIdx.x] = m[CJ_start + jblock*BSZ + threadIdx.x]; mx_sh[threadIdx.x] = mx[CJ_start + jblock*BSZ + threadIdx.x]; my_sh[threadIdx.x] = my[CJ_start + jblock*BSZ + threadIdx.x]; mz_sh[threadIdx.x] = mz[CJ_start + jblock*BSZ + threadIdx.x]; mKc_sh[threadIdx.x] = mKc[CJ_start + jblock*BSZ + threadIdx.x]; mVc_sh[threadIdx.x] = mVc[CJ_start + jblock*BSZ + threadIdx.x]; A_sh[threadIdx.x] = Area[CJ_start + jblock*BSZ + threadIdx.x]; sglInt_sh[threadIdx.x] = sglInt[CJ_start + jblock*BSZ + threadIdx.x]; k_sh[threadIdx.x] = k[CJ_start + jblock*BSZ + threadIdx.x]; for (int vert=0; vert<9; vert++) { ver_sh[9*threadIdx.x+vert] = vertex[9*(CJ_start+jblock*BSZ+threadIdx.x)+vert]; } __syncthreads(); if (threadIdx.x+iblock*BSZ<sizeTar[blockIdx.x]) { for (int j=0; j<BSZ; j++) { dx = xi - (ver_sh[9*j] + ver_sh[9*j+3] + ver_sh[9*j+6]) *0.333333333333333333; dy = yi - (ver_sh[9*j+1] + ver_sh[9*j+4] + ver_sh[9*j+7])*0.333333333333333333; dz = zi - (ver_sh[9*j+2] + ver_sh[9*j+5] + ver_sh[9*j+8])*0.333333333333333333; r = 1/(dx*dx + dy*dy + dz*dz); // r is 1/r!!! same = (r>1e12); near = ((2*A_sh[j]*r) > threshold*threshold); auxV = 0.; auxK = 0.; if (near==0) { dx = xi - xj_sh[j]; dy = yi - yj_sh[j]; dz = zi - zj_sh[j]; r = rsqrt(dx*dx + dy*dy + dz*dz); // r is 1/r!!!! 
if (LorY==2) { auxV = exp(-kappa*1/r)*r; auxK = (mx_sh[j]*dx+my_sh[j]*dy+mz_sh[j]*dz)*auxV*(r)*(kappa+r); auxV *= m_sh[j]; } if (LorY==1) { auxV = m_sh[j]*r; auxK = (mx_sh[j]*dx+my_sh[j]*dy+mz_sh[j]*dz)*(r*r*r); } } if ( (near==1) && (k_sh[j]==0)) { if (same==1) { auxK = K_diag; auxV = sglInt_sh[j]; } else { GQ_fine(auxK, auxV, ver_sh, 9*j, xi, yi, zi, kappa, Xsk_sh, Wsk_sh, A_sh, LorY); } auxV *= mVc_sh[j]; auxK *= mKc_sh[j]; an_counter += 1; } sum_V += auxV; sum_K += auxK; } } } __syncthreads(); int jblock = (Nsrc-1)/BSZ; if (jblock*BSZ + threadIdx.x < Nsrc) { xj_sh[threadIdx.x] = xj[CJ_start + jblock*BSZ + threadIdx.x]; yj_sh[threadIdx.x] = yj[CJ_start + jblock*BSZ + threadIdx.x]; zj_sh[threadIdx.x] = zj[CJ_start + jblock*BSZ + threadIdx.x]; m_sh[threadIdx.x] = m[CJ_start + jblock*BSZ + threadIdx.x]; mx_sh[threadIdx.x] = mx[CJ_start + jblock*BSZ + threadIdx.x]; my_sh[threadIdx.x] = my[CJ_start + jblock*BSZ + threadIdx.x]; mz_sh[threadIdx.x] = mz[CJ_start + jblock*BSZ + threadIdx.x]; mKc_sh[threadIdx.x] = mKc[CJ_start + jblock*BSZ + threadIdx.x]; mVc_sh[threadIdx.x] = mVc[CJ_start + jblock*BSZ + threadIdx.x]; A_sh[threadIdx.x] = Area[CJ_start + jblock*BSZ + threadIdx.x]; sglInt_sh[threadIdx.x] = sglInt[CJ_start + jblock*BSZ + threadIdx.x]; k_sh[threadIdx.x] = k[CJ_start + jblock*BSZ + threadIdx.x]; for (int vert=0; vert<9; vert++) { ver_sh[9*threadIdx.x+vert] = vertex[9*(CJ_start+jblock*BSZ+threadIdx.x)+vert]; } } __syncthreads(); if (threadIdx.x+iblock*BSZ<sizeTar[blockIdx.x]) { for (int j=0; j<Nsrc-(jblock*BSZ); j++) { dx = xi - (ver_sh[9*j] + ver_sh[9*j+3] + ver_sh[9*j+6]) *0.3333333333333333333; dy = yi - (ver_sh[9*j+1] + ver_sh[9*j+4] + ver_sh[9*j+7])*0.3333333333333333333; dz = zi - (ver_sh[9*j+2] + ver_sh[9*j+5] + ver_sh[9*j+8])*0.3333333333333333333; r = 1/(dx*dx + dy*dy + dz*dz); // r is 1/r!!! same = (r>1e12); near = ((2*A_sh[j]*r) > threshold*threshold); auxV = 0.; auxK = 0.; if (near==0) { dx = xi - xj_sh[j]; dy = yi - yj_sh[j]; dz = zi - zj_sh[j]; r = rsqrt(dx*dx + dy*dy + dz*dz); // r is 1/r!!! 
if (LorY==2) { auxV = exp(-kappa*1/r)*r; auxK = (mx_sh[j]*dx+my_sh[j]*dy+mz_sh[j]*dz)*auxV*(r)*(kappa+r); auxV *= m_sh[j]; } if (LorY==1) { auxV = m_sh[j]*r; auxK = (mx_sh[j]*dx+my_sh[j]*dy+mz_sh[j]*dz)*(r*r*r); } } if ( (near==1) && (k_sh[j]==0)) { if (same==1) { auxK = K_diag; auxV = sglInt_sh[j]; } else { GQ_fine(auxK, auxV, ver_sh, 9*j, xi, yi, zi, kappa, Xsk_sh, Wsk_sh, A_sh, LorY); } auxV *= mVc_sh[j]; auxK *= mKc_sh[j]; an_counter += 1; } sum_V += auxV; sum_K += auxK; } } } if (threadIdx.x+iblock*BSZ<sizeTar[blockIdx.x]) { K_gpu[i] += sum_K; V_gpu[i] += sum_V; AI_int_gpu[i] = an_counter; } } } __global__ void P2PKt(REAL *Ktx_gpu, REAL *Kty_gpu, REAL *Ktz_gpu, int *offSrc, int *offTwg, int *P2P_list, int *sizeTar, int *k, REAL *xj, REAL *yj, REAL *zj, REAL *m, REAL *mKtc, REAL *xt, REAL *yt, REAL *zt, REAL *Area, REAL *vertex, int ptr_off, int ptr_lst, int LorY, REAL kappa, REAL threshold, int BpT, int NCRIT, int *AI_int_gpu, REAL *Xsk, REAL *Wsk) { int I = threadIdx.x + blockIdx.x*NCRIT; int list_start = offTwg[ptr_off+blockIdx.x]; int list_end = offTwg[ptr_off+blockIdx.x+1]; REAL xi, yi, zi, dx, dy, dz, r, auxKtx, auxKty, auxKtz; __shared__ REAL ver_sh[9*BSZ], xj_sh[BSZ], yj_sh[BSZ], zj_sh[BSZ], A_sh[BSZ], k_sh[BSZ], m_sh[BSZ], mKtc_sh[BSZ], Xsk_sh[K_fine*3], Wsk_sh[K_fine]; if (threadIdx.x<K_fine*3) { Xsk_sh[threadIdx.x] = Xsk[threadIdx.x]; if (threadIdx.x<K_fine) Wsk_sh[threadIdx.x] = Wsk[threadIdx.x]; } __syncthreads(); int i, same, near, CJ_start, Nsrc, CJ; for (int iblock=0; iblock<BpT; iblock++) { REAL sum_Ktx = 0., sum_Kty = 0., sum_Ktz = 0.; i = I + iblock*BSZ; xi = xt[i]; yi = yt[i]; zi = zt[i]; int an_counter = 0; for (int lst=list_start; lst<list_end; lst++) { CJ = P2P_list[ptr_lst+lst]; CJ_start = offSrc[CJ]; Nsrc = offSrc[CJ+1] - CJ_start; for(int jblock=0; jblock<(Nsrc-1)/BSZ; jblock++) { __syncthreads(); xj_sh[threadIdx.x] = xj[CJ_start + jblock*BSZ + threadIdx.x]; yj_sh[threadIdx.x] = yj[CJ_start + jblock*BSZ + threadIdx.x]; zj_sh[threadIdx.x] = zj[CJ_start + jblock*BSZ + threadIdx.x]; m_sh[threadIdx.x] = m[CJ_start + jblock*BSZ + threadIdx.x]; mKtc_sh[threadIdx.x] = mKtc[CJ_start + jblock*BSZ + threadIdx.x]; A_sh[threadIdx.x] = Area[CJ_start + jblock*BSZ + threadIdx.x]; k_sh[threadIdx.x] = k[CJ_start + jblock*BSZ + threadIdx.x]; for (int vert=0; vert<9; vert++) { ver_sh[9*threadIdx.x+vert] = vertex[9*(CJ_start+jblock*BSZ+threadIdx.x)+vert]; } __syncthreads(); if (threadIdx.x+iblock*BSZ<sizeTar[blockIdx.x]) { for (int j=0; j<BSZ; j++) { dx = xi - (ver_sh[9*j] + ver_sh[9*j+3] + ver_sh[9*j+6]) *0.333333333333333333; dy = yi - (ver_sh[9*j+1] + ver_sh[9*j+4] + ver_sh[9*j+7])*0.333333333333333333; dz = zi - (ver_sh[9*j+2] + ver_sh[9*j+5] + ver_sh[9*j+8])*0.333333333333333333; r = 1/(dx*dx + dy*dy + dz*dz); // r is 1/r!!! same = (r>1e12); near = ((2*A_sh[j]*r) > threshold*threshold); auxKtx = 0.; auxKty = 0.; auxKtz = 0.; if (near==0) { dx = xi - xj_sh[j]; dy = yi - yj_sh[j]; dz = zi - zj_sh[j]; r = rsqrt(dx*dx + dy*dy + dz*dz); // r is 1/r!!!! 
if (LorY==2) { auxKtx = -m_sh[j]*exp(-kappa*1/r)*r*r*(kappa+r); auxKty = auxKtx*dy; auxKtz = auxKtx*dz; auxKtx *= dx; } if (LorY==1) { auxKtx = -m_sh[j]*r*r*r; auxKty = auxKtx*dy; auxKtz = auxKtx*dz; auxKtx *= dx; } } if ( (near==1) && (k_sh[j]==0)) { if (same==1) { auxKtx = 0.0; auxKty = 0.0; auxKtz = 0.0; } else { GQ_fineKt(auxKtx, auxKty, auxKtz, ver_sh, 9*j, xi, yi, zi, kappa, Xsk_sh, Wsk_sh, A_sh, LorY); } auxKtx *= mKtc_sh[j]; auxKty *= mKtc_sh[j]; auxKtz *= mKtc_sh[j]; an_counter += 1; } sum_Ktx += auxKtx; sum_Kty += auxKty; sum_Ktz += auxKtz; } } } __syncthreads(); int jblock = (Nsrc-1)/BSZ; if (jblock*BSZ + threadIdx.x < Nsrc) { xj_sh[threadIdx.x] = xj[CJ_start + jblock*BSZ + threadIdx.x]; yj_sh[threadIdx.x] = yj[CJ_start + jblock*BSZ + threadIdx.x]; zj_sh[threadIdx.x] = zj[CJ_start + jblock*BSZ + threadIdx.x]; m_sh[threadIdx.x] = m[CJ_start + jblock*BSZ + threadIdx.x]; mKtc_sh[threadIdx.x] = mKtc[CJ_start + jblock*BSZ + threadIdx.x]; A_sh[threadIdx.x] = Area[CJ_start + jblock*BSZ + threadIdx.x]; k_sh[threadIdx.x] = k[CJ_start + jblock*BSZ + threadIdx.x]; for (int vert=0; vert<9; vert++) { ver_sh[9*threadIdx.x+vert] = vertex[9*(CJ_start+jblock*BSZ+threadIdx.x)+vert]; } } __syncthreads(); if (threadIdx.x+iblock*BSZ<sizeTar[blockIdx.x]) { for (int j=0; j<Nsrc-(jblock*BSZ); j++) { dx = xi - (ver_sh[9*j] + ver_sh[9*j+3] + ver_sh[9*j+6]) *0.3333333333333333333; dy = yi - (ver_sh[9*j+1] + ver_sh[9*j+4] + ver_sh[9*j+7])*0.3333333333333333333; dz = zi - (ver_sh[9*j+2] + ver_sh[9*j+5] + ver_sh[9*j+8])*0.3333333333333333333; r = 1/(dx*dx + dy*dy + dz*dz); // r is 1/r!!! same = (r>1e12); near = ((2*A_sh[j]*r) > threshold*threshold); auxKtx = 0.; auxKty = 0.; auxKtz = 0.; if (near==0) { dx = xi - xj_sh[j]; dy = yi - yj_sh[j]; dz = zi - zj_sh[j]; r = rsqrt(dx*dx + dy*dy + dz*dz); // r is 1/r!!! 
if (LorY==2) { auxKtx = -m_sh[j]*exp(-kappa*1/r)*r*r*(kappa+r); auxKty = auxKtx*dy; auxKtz = auxKtx*dz; auxKtx *= dx; } if (LorY==1) { auxKtx = -m_sh[j]*r*r*r; auxKty = auxKtx*dy; auxKtz = auxKtx*dz; auxKtx *= dx; } } if ( (near==1) && (k_sh[j]==0)) { if (same==1) { auxKtx = 0.0; auxKty = 0.0; auxKtz = 0.0; } else { GQ_fineKt(auxKtx, auxKty, auxKtz, ver_sh, 9*j, xi, yi, zi, kappa, Xsk_sh, Wsk_sh, A_sh, LorY); } auxKtx *= mKtc_sh[j]; auxKty *= mKtc_sh[j]; auxKtz *= mKtc_sh[j]; an_counter += 1; } sum_Ktx += auxKtx; sum_Kty += auxKty; sum_Ktz += auxKtz; } } } if (threadIdx.x+iblock*BSZ<sizeTar[blockIdx.x]) { Ktx_gpu[i] += sum_Ktx; Kty_gpu[i] += sum_Kty; Ktz_gpu[i] += sum_Ktz; AI_int_gpu[i] = an_counter; } } } __global__ void get_phir(REAL *phir, REAL *xq, REAL *yq, REAL *zq, REAL *m, REAL *mx, REAL *my, REAL *mz, REAL *mKc, REAL *mVc, REAL *xj, REAL *yj, REAL *zj, REAL *Area, int *k, REAL *vertex, int Nj, int Nq, int K, REAL *xk, REAL *wk, REAL threshold, int *AI_int_gpu, int Nk, REAL *Xsk, REAL *Wsk) { int i = threadIdx.x + blockIdx.x*BSZ; REAL xi, yi, zi, dx, dy, dz, r; int jblock, triangle; __shared__ REAL ver_sh[9*BSZ], xj_sh[BSZ], yj_sh[BSZ], zj_sh[BSZ], A_sh[BSZ], k_sh[BSZ], m_sh[BSZ], mx_sh[BSZ], my_sh[BSZ], mz_sh[BSZ], mKc_sh[BSZ], mVc_sh[BSZ]; REAL sum_V = 0., sum_K = 0.; xi = xq[i]; yi = yq[i]; zi = zq[i]; int an_counter = 0; for(jblock=0; jblock<(Nj-1)/BSZ; jblock++) { __syncthreads(); xj_sh[threadIdx.x] = xj[jblock*BSZ + threadIdx.x]; yj_sh[threadIdx.x] = yj[jblock*BSZ + threadIdx.x]; zj_sh[threadIdx.x] = zj[jblock*BSZ + threadIdx.x]; m_sh[threadIdx.x] = m[jblock*BSZ + threadIdx.x]; mx_sh[threadIdx.x] = mx[jblock*BSZ + threadIdx.x]; my_sh[threadIdx.x] = my[jblock*BSZ + threadIdx.x]; mz_sh[threadIdx.x] = mz[jblock*BSZ + threadIdx.x]; mKc_sh[threadIdx.x] = mKc[jblock*BSZ + threadIdx.x]; mVc_sh[threadIdx.x] = mVc[jblock*BSZ + threadIdx.x]; k_sh[threadIdx.x] = k[jblock*BSZ + threadIdx.x]; A_sh[threadIdx.x] = Area[(jblock*BSZ + threadIdx.x)]; for (int vert=0; vert<9; vert++) { triangle = jblock*BSZ+threadIdx.x; ver_sh[9*threadIdx.x+vert] = vertex[9*triangle+vert]; } __syncthreads(); for (int j=0; j<BSZ; j++) { dx = xi - (ver_sh[9*j] + ver_sh[9*j+3] + ver_sh[9*j+6])/3; dy = yi - (ver_sh[9*j+1] + ver_sh[9*j+4] + ver_sh[9*j+7])/3; dz = zi - (ver_sh[9*j+2] + ver_sh[9*j+5] + ver_sh[9*j+8])/3; r = sqrt(dx*dx + dy*dy + dz*dz); if((sqrt(2*A_sh[j])/r) < threshold) { dx = xi - xj_sh[j]; dy = yi - yj_sh[j]; dz = zi - zj_sh[j]; r = sqrt(dx*dx + dy*dy + dz*dz); sum_V += m_sh[j]/r; sum_K += (mx_sh[j]*dx+my_sh[j]*dy+mz_sh[j]*dz)/(r*r*r); } else if(k_sh[j]==0) { REAL PHI_K = 0.; REAL PHI_V = 0.; GQ_fine(PHI_K, PHI_V, ver_sh, 9*j, xi, yi, zi, 1e-15, Xsk, Wsk, A_sh, 1); //REAL panel[9] = {ver_sh[9*j], ver_sh[9*j+1], ver_sh[9*j+2], // ver_sh[9*j+3], ver_sh[9*j+4], ver_sh[9*j+5], // ver_sh[9*j+6], ver_sh[9*j+7], ver_sh[9*j+8]}; //SA(PHI_K, PHI_V, panel, xi, yi, zi, // 1., 1., 1e-15, 0, xk, wk, 9, 1); sum_V += PHI_V * mVc_sh[j]; sum_K += PHI_K * mKc_sh[j]; an_counter += 1; } } } __syncthreads(); jblock = (Nj-1)/BSZ; if (threadIdx.x<Nj-jblock*BSZ) { xj_sh[threadIdx.x] = xj[jblock*BSZ + threadIdx.x]; yj_sh[threadIdx.x] = yj[jblock*BSZ + threadIdx.x]; zj_sh[threadIdx.x] = zj[jblock*BSZ + threadIdx.x]; m_sh[threadIdx.x] = m[jblock*BSZ + threadIdx.x]; mx_sh[threadIdx.x] = mx[jblock*BSZ + threadIdx.x]; my_sh[threadIdx.x] = my[jblock*BSZ + threadIdx.x]; mz_sh[threadIdx.x] = mz[jblock*BSZ + threadIdx.x]; mKc_sh[threadIdx.x] = mKc[jblock*BSZ + threadIdx.x]; mVc_sh[threadIdx.x] = mVc[jblock*BSZ + threadIdx.x]; 
k_sh[threadIdx.x] = k[jblock*BSZ + threadIdx.x]; A_sh[threadIdx.x] = Area[jblock*BSZ + threadIdx.x]; for (int vert=0; vert<9; vert++) { triangle = jblock*BSZ+threadIdx.x; ver_sh[9*threadIdx.x+vert] = vertex[9*triangle+vert]; } } __syncthreads(); for (int j=0; j<Nj-(jblock*BSZ); j++) { dx = xi - (ver_sh[9*j] + ver_sh[9*j+3] + ver_sh[9*j+6])/3; dy = yi - (ver_sh[9*j+1] + ver_sh[9*j+4] + ver_sh[9*j+7])/3; dz = zi - (ver_sh[9*j+2] + ver_sh[9*j+5] + ver_sh[9*j+8])/3; r = sqrt(dx*dx + dy*dy + dz*dz); if (i<Nq) { if ((sqrt(2*A_sh[j])/r) < threshold) { dx = xi - xj_sh[j]; dy = yi - yj_sh[j]; dz = zi - zj_sh[j]; r = sqrt(dx*dx + dy*dy + dz*dz); sum_V += m_sh[j]/r; sum_K += (mx_sh[j]*dx+my_sh[j]*dy+mz_sh[j]*dz)/(r*r*r); } else if(k_sh[j]==0) { REAL PHI_K = 0.; REAL PHI_V = 0.; GQ_fine(PHI_K, PHI_V, ver_sh, 9*j, xi, yi, zi, 1e-15, Xsk, Wsk, A_sh, 1); sum_V += PHI_V * mVc_sh[j]; sum_K += PHI_K * mKc_sh[j]; an_counter += 1; } } } if (i<Nq) { phir[i] = (-sum_K + sum_V)/(4*M_PI); AI_int_gpu[i] = an_counter; } } __global__ void compute_RHS(REAL *F, REAL *xq, REAL *yq, REAL *zq, REAL *q, REAL *xi, REAL *yi, REAL *zi, int *sizeTar, int Nq, REAL E_1, int NCRIT, int BpT) { int II = threadIdx.x + blockIdx.x*NCRIT; int I; REAL x, y, z, sum; REAL dx, dy, dz, r; __shared__ REAL xq_sh[BSZ], yq_sh[BSZ], zq_sh[BSZ], q_sh[BSZ]; for (int iblock=0; iblock<BpT; iblock++) { I = II + iblock*BSZ; x = xi[I]; y = yi[I]; z = zi[I]; sum = 0.; for (int block=0; block<(Nq-1)/BSZ; block++) { __syncthreads(); xq_sh[threadIdx.x] = xq[block*BSZ+threadIdx.x]; yq_sh[threadIdx.x] = yq[block*BSZ+threadIdx.x]; zq_sh[threadIdx.x] = zq[block*BSZ+threadIdx.x]; q_sh[threadIdx.x] = q[block*BSZ+threadIdx.x]; __syncthreads(); if (threadIdx.x+iblock*BSZ<sizeTar[blockIdx.x]) { for (int i=0; i<BSZ; i++) { dx = xq_sh[i] - x; dy = yq_sh[i] - y; dz = zq_sh[i] - z; r = sqrt(dx*dx + dy*dy + dz*dz); sum += q_sh[i]/(E_1*r); } } } int block = (Nq-1)/BSZ; __syncthreads(); xq_sh[threadIdx.x] = xq[block*BSZ+threadIdx.x]; yq_sh[threadIdx.x] = yq[block*BSZ+threadIdx.x]; zq_sh[threadIdx.x] = zq[block*BSZ+threadIdx.x]; q_sh[threadIdx.x] = q[block*BSZ+threadIdx.x]; __syncthreads(); if (threadIdx.x+iblock*BSZ<sizeTar[blockIdx.x]) { for (int i=0; i<Nq-block*BSZ; i++) { dx = xq_sh[i] - x; dy = yq_sh[i] - y; dz = zq_sh[i] - z; r = sqrt(dx*dx + dy*dy + dz*dz); sum += q_sh[i]/(E_1*r); } } if (threadIdx.x+iblock*BSZ<sizeTar[blockIdx.x]) { F[I] = sum; } } } __global__ void compute_RHSKt(REAL *Fx, REAL *Fy, REAL *Fz, REAL *xq, REAL *yq, REAL *zq, REAL *q, REAL *xi, REAL *yi, REAL *zi, int *sizeTar, int Nq, REAL E_1, int NCRIT, int BpT) { int II = threadIdx.x + blockIdx.x*NCRIT; int I; REAL x, y, z, sum_x, sum_y, sum_z; REAL dx, dy, dz, r, aux; __shared__ REAL xq_sh[BSZ], yq_sh[BSZ], zq_sh[BSZ], q_sh[BSZ]; for (int iblock=0; iblock<BpT; iblock++) { I = II + iblock*BSZ; x = xi[I]; y = yi[I]; z = zi[I]; sum_x = 0., sum_y = 0, sum_z = 0; for (int block=0; block<(Nq-1)/BSZ; block++) { __syncthreads(); xq_sh[threadIdx.x] = xq[block*BSZ+threadIdx.x]; yq_sh[threadIdx.x] = yq[block*BSZ+threadIdx.x]; zq_sh[threadIdx.x] = zq[block*BSZ+threadIdx.x]; q_sh[threadIdx.x] = q[block*BSZ+threadIdx.x]; __syncthreads(); if (threadIdx.x+iblock*BSZ<sizeTar[blockIdx.x]) { for (int i=0; i<BSZ; i++) { dx = x - xq_sh[i]; dy = y - yq_sh[i]; dz = z - zq_sh[i]; r = sqrt(dx*dx + dy*dy + dz*dz); aux = -q_sh[i]/(r*r*r); sum_x += aux*dx; sum_y += aux*dy; sum_z += aux*dz; } } } int block = (Nq-1)/BSZ; __syncthreads(); xq_sh[threadIdx.x] = xq[block*BSZ+threadIdx.x]; yq_sh[threadIdx.x] = 
yq[block*BSZ+threadIdx.x]; zq_sh[threadIdx.x] = zq[block*BSZ+threadIdx.x]; q_sh[threadIdx.x] = q[block*BSZ+threadIdx.x]; __syncthreads(); if (threadIdx.x+iblock*BSZ<sizeTar[blockIdx.x]) { for (int i=0; i<Nq-block*BSZ; i++) { dx = x - xq_sh[i]; dy = y - yq_sh[i]; dz = z - zq_sh[i]; r = sqrt(dx*dx + dy*dy + dz*dz); aux = -q_sh[i]/(r*r*r); sum_x += aux*dx; sum_y += aux*dy; sum_z += aux*dz; } } if (threadIdx.x+iblock*BSZ<sizeTar[blockIdx.x]) { Fx[I] = sum_x; Fy[I] = sum_y; Fz[I] = sum_z; } } } """%{'blocksize':BSZ, 'Nmult':Nm, 'K_near':K_fine, 'Ptree':P, 'precision':REAL}, nvcc="nvcc", options=["-use_fast_math"]) return mod
bsd-3-clause
-102,341,381,851,496,640
35.088223
180
0.345645
false
2.685521
false
false
false
fajran/kasuari
utils/merge.py
1
2326
from PIL import Image
import sys
import os

class ImageMerger:
    format = "%(z)d.%(x)d.%(y)d.jpg"

    def __init__(self, target):
        self.target = target
        self.size = 256

    def start(self):
        level = 0
        while True:
            total = self._merge(level)
            if total == 1:
                break
            level += 1
        print "done."

    def _merge(self, level):
        print "Merging images on level %d.." % level
        size = self.size
        size2 = size * 2
        target = self.target
        total = 0
        y = 0
        py = 0
        while True:
            x = 0
            px = 0
            first = True
            while True:
                xlist = [x, x+1]
                ylist = [y, y+1]
                im = Image.new("RGB", (size2, size2))
                cnt = 0
                for ix in [0,1]:
                    for iy in [0,1]:
                        sx, sy = xlist[ix], ylist[iy]
                        fname = os.path.join(target,
                            self.format % {'z': level, 'x': sx, 'y': sy})
                        if os.path.isfile(fname):
                            ims = Image.open(fname)
                            w, h = ims.size
                            im.paste(ims, (ix * size, iy * size,
                                ix * size + w, iy * size + h))
                            cnt += 1
                if cnt > 0:
                    fname = os.path.join(target,
                        self.format % {'z': level + 1, 'x': px, 'y': py})
                    resized = im.resize((size, size))
                    resized.save(fname)
                    total += 1
                    print "-", fname
                if cnt == 0:
                    break
                px += 1
                x += 2
                first = False
            if first and cnt == 0:
                break
            py += 1
            y += 2
        return total

if __name__ == '__main__':
    target = sys.argv[1]
    im = ImageMerger(target)
    im.start()
mit
-4,343,508,817,439,789,600
25.735632
73
0.316423
false
4.69899
false
false
false
TrainingB/Clembot
clembot/exts/autorespond/autoresponder.py
1
4240
import json import os import discord from discord.ext import commands from clembot.exts.utils.utilities import Utilities class AutoResponder(commands.Cog): def __init__(self, bot): self.bot = bot self.guild_dict = bot.guild_dict self.utilities = Utilities() self.pokemon_forms = [] with open(os.path.join('data', 'pokemon_forms.json'), 'r') as fd: data = json.load(fd) self.pokemon_forms = data['pokemon_forms'] @commands.group(pass_context=True, hidden=True, aliases=["auto-response", "ar"]) async def _autoresponse(self, ctx): if ctx.invoked_subcommand is None: await self.utilities._send_message(ctx.channel, f"Beep Beep! **{ctx.message.author.display_name}**, **!{ctx.invoked_with}** can be used with various options.") @_autoresponse.command(aliases=["add-image"]) async def _autoresponse_add_image(self, ctx, *, ar_message_text): ar_key, _, ar_message = ar_message_text.partition(' ') ctx.bot.guild_dict[ctx.guild.id].setdefault('auto-responses-image', {}).setdefault(ctx.channel.id,{})[ar_key] = ar_message await self.utilities._send_message(ctx.channel, f"{ar_key} has been set correctly.", user=ctx.message.author) @_autoresponse.command(aliases=["add"]) async def _autoresponse_add(self, ctx, *, ar_message_text): ar_key, _, ar_message = ar_message_text.partition(' ') ctx.bot.guild_dict[ctx.guild.id].setdefault('auto-responses', {}).setdefault(ctx.channel.id,{})[ar_key] = ar_message await self.utilities._send_message(ctx.channel, f"{ar_key} has been set correctly.", user=ctx.message.author) @_autoresponse.command(aliases=["clear-all"]) async def _autoresponse_clear_all(self, ctx): try: for guild_id in list(ctx.bot.guild_dict.keys()): for channel_id in list(ctx.bot.guild_dict[guild_id].get('auto-responses', {}).keys()): if not ctx.bot.guild_dict[guild_id].get('auto-responses', {}).get(channel_id, None) : print(ctx.bot.guild_dict[guild_id].get('auto-responses', {}).pop(channel_id,None)) for channel_id in list(ctx.bot.guild_dict[guild_id].get('auto-responses-image', {}).keys()): if not ctx.bot.guild_dict[guild_id].get('auto-responses-image', {}).get(channel_id, None) : print(ctx.bot.guild_dict[guild_id].get('auto-responses-image', {}).pop(channel_id,None)) await self.utilities._send_message(ctx.channel, f"auto-responses are cleaned up.", user=ctx.message.author) except Exception as error: print(error) beep_notes = ("""**{member}** here are the commands for trade management. **!trade offer <pokemon>** - to add pokemon to your offers list. **!trade request <pokemon>** - to add pokemon to your requests list. **!trade clear <pokemon>** - to remove pokemon from your trade offer or request list. **!trade list** - brings up pokemon in your trade offer/request list. **!trade list @user** - brings up pokemon in user's trade offer/request list. **!trade list pokemon** - filters your trade offer/request list by sepcified pokemon. **!trade search <pokemon>** - brings up a list of 10 users who are offering pokemon with their pokemon request as well. **<pokemon> - can be one or more pokemon or pokedex# separated by space.** """) def get_beep_embed(self, title, description, usage=None, available_value_title=None, available_values=None, footer=None, mode="message"): if mode == "message": color = discord.Colour.green() else: color = discord.Colour.red() help_embed = discord.Embed(title=title, description=f"{description}", colour=color) help_embed.set_footer(text=footer) return help_embed @classmethod async def _help(self, ctx): footer = "Tip: < > denotes required and [ ] denotes optional arguments." 
await ctx.message.channel.send(embed=self.get_beep_embed(self, title="Help - Trade Management", description=self.beep_notes.format(member=ctx.message.author.display_name), footer=footer)) def setup(bot): bot.add_cog(AutoResponder(bot))
gpl-3.0
-8,263,116,281,136,314,000
42.265306
195
0.652594
false
3.530391
false
false
false
mgraffg/simplegp
SimpleGP/tests/test_eval.py
1
1811
# Copyright 2013 Mario Graff Guerrero
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from SimpleGP import GP
import numpy as np


class TestEval(object):
    def __init__(self):
        x = np.linspace(-10, 10, 100)
        pol = np.array([0.2, -0.3, 0.2])
        X = np.vstack((x**2, x, np.ones(x.shape[0])))
        y = (X.T * pol).sum(axis=1)
        x = x[:, np.newaxis]
        self._gp = GP(fname_best=None).train(x, y)
        self._gp.create_population()
        self._cons = 1.2
        self._gp._p_constants[0][0] = self._cons
        self._nfunc = self._gp._nop.shape[0]
        self._nvar = self._nfunc + self._gp._x.shape[1]

    def test_sum(self):
        self._gp._p[0] = np.array([0, self._nfunc, self._nvar], dtype=np.int)
        y = self._gp._x.flatten() + self._cons
        yh = self._gp.eval(0)
        assert np.fabs(y - yh).sum() == 0

    def test_subtract(self):
        self._gp._p[0] = np.array([1, self._nfunc, self._nvar], dtype=np.int)
        y = self._gp._x.flatten() - self._cons
        yh = self._gp.eval(0)
        assert np.fabs(y - yh).sum() == 0

    def test_multiply(self):
        self._gp._p[0] = np.array([2, self._nfunc, self._nvar], dtype=np.int)
        y = self._gp._x.flatten() * self._cons
        yh = self._gp.eval(0)
        assert np.fabs(y - yh).sum() == 0
apache-2.0
7,823,612,286,752,798,000
36.729167
77
0.59746
false
3.090444
false
false
false
HBPNeurorobotics/BlenderRobotDesigner
robot_designer_plugin/export/collada15/file.py
1
8958
# ##### # This file is part of the RobotDesigner of the Neurorobotics subproject (SP10) # in the Human Brain Project (HBP). # It has been forked from the RobotEditor (https://gitlab.com/h2t/roboteditor) # developed at the Karlsruhe Institute of Technology in the # High Performance Humanoid Technologies Laboratory (H2T). # ##### # ##### BEGIN GPL LICENSE BLOCK ##### # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software Foundation, # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # ##### END GPL LICENSE BLOCK ##### # ##### # # Copyright (c) 2015, Karlsruhe Institute of Technology (KIT) # Copyright (c) 2016, FZI Forschungszentrum Informatik # # Changes: # # 2016-01-15: Stefan Ulbrich (FZI), Major refactoring. Integrated into complex plugin framework. # # ###### """ Sphinx-autodoc tag """ # ###### # System imports # import os # import sys # import math # ###### # Blender imports import bpy from bpy.props import StringProperty # import mathutils # ###### # RobotDesigner imports from ...core import config, PluginManager from ...core.operators import RDOperator from ...operators.helpers import ModelSelected, ObjectMode # ###### # Plugin imports from . import fix from . import collada def extractData(segment_name): tree = collada.Tree() arm = bpy.context.active_object bpy.ops.roboteditor.select_segment(segment_name=segment_name) currentBone = bpy.context.active_bone tree.name = segment_name if currentBone.parent: parentName = currentBone.parent.name else: parentName = None if currentBone.RobotEditor.axis_revert: inverted = -1 else: inverted = 1 axis = ["0", "0", "0"] if currentBone.RobotEditor.axis == 'X': axis[0] = str(inverted) elif currentBone.RobotEditor.axis == 'Y': axis[1] = str(inverted) elif currentBone.RobotEditor.axis == 'Z': axis[2] = str(inverted) tree.axis = axis trafo, dummy = currentBone.RobotEditor.getTransform() # translation tree.addTrafo([str(element) for element in trafo.translation]) # rotation rotation = trafo.to_euler() tree.addTrafo([str(element) for element in [0, 0, 1, rotation.z]]) tree.addTrafo([str(element) for element in [0, 1, 0, rotation.y]]) tree.addTrafo([str(element) for element in [1, 0, 0, rotation.x]]) if currentBone.RobotEditor.jointMode == 'REVOLUTE': tree.initialValue = str(currentBone.RobotEditor.theta.offset) tree.min = str(currentBone.RobotEditor.theta.min) tree.max = str(currentBone.RobotEditor.theta.max) tree.axis_type = 'revolute' else: tree.initialValue = str(currentBone.RobotEditor.d.offset) tree.min = str(currentBone.RobotEditor.d.min) tree.max = str(currentBone.RobotEditor.d.max) tree.axis_type = 'prismatic' children = [child.name for child in currentBone.children] tree.meshes = [mesh.name for mesh in bpy.data.objects if mesh.type == 'MESH' and mesh.parent_bone == segment_name] markers = [m for m in bpy.data.objects if m.RobotEditor.tag == 'MARKER' and m.parent_bone == segment_name] # tree.markers = 
[(m.name,(currentBone.matrix_local.inverted()*m.matrix_world.translation).to_tuple()) # for m in markers] # tree.markers = [(m.name,(m.matrix_parent_inverse*m.matrix_world.translation).to_tuple()) for m in markers] poseBone = arm.pose.bones[segment_name] tree.markers = [ (m.name, ( poseBone.matrix.inverted() * arm.matrix_world.inverted() * m.matrix_world.translation).to_tuple()) for m in markers] for child in children: tree.addChild(extractData(child)) return tree # @PluginManager.register_class # class ImportCollada15(RDOperator): # """ # Not implemented # **Preconditions:** # # **Postconditions:** # """ # bl_idname = config.OPERATOR_PREFIX + "selectarmature" # bl_label = "Select model" # # model_name = StringProperty() # # @classmethod # def run(cls, model_name=""): # return super().run(**cls.pass_keywords()) # # @classmethod # def poll(cls, context): # """ # Checks whether preconditions are met for this :ref:`operator` # # :param context: :ref:`context` # :return: True when conditions are met, False otherwise # """ # return check_conditions(ObjectMode) # # @OperatorLogger # @Postconditions(ModelSelected) # def execute(self, context): # self.report({'ERROR'},"Not implemented") @RDOperator.Preconditions(ObjectMode, ModelSelected) @PluginManager.register_class class RobotEditor_exportCollada(RDOperator): """ :term:`operator` for exporting a :term:`robot model` to COLLADA 1.5 **Preconditions:** **Postconditions:** """ bl_idname = "roboteditor.colladaexport" bl_label = "Export to COLLADA 1.5" filepath = StringProperty(subtype='FILE_PATH') @RDOperator.OperatorLogger @RDOperator.Postconditions(ModelSelected) def execute(self, context): bpy.ops.wm.collada_export(filepath=self.filepath, check_existing=False, filter_blender=False, filter_image=False, filter_movie=False, filter_python=False, filter_font=False, filter_sound=False, filter_text=False, filter_btx=False, filter_collada=True, apply_modifiers=True, filter_folder=True) fix.fixCollada(self.filepath, self.filepath, context) handler = collada.COLLADA() handler.import14(self.filepath) arm = context.active_object baseBoneName = arm.data.bones[0].name tree = collada.Tree() tree.name = arm.name tree.addChild(extractData(baseBoneName)) handler.attach(tree) massFrames = [obj for obj in context.scene.objects if obj.RobotEditor.tag == 'PHYSICS_FRAME' and obj.parent_bone is not ''] for frame in massFrames: # transform = frame.parent.data.bones[frame.parent_bone].matrix_local.inverted() * frame.matrix_local segment_name = frame.parent.data.bones[frame.parent_bone].name poseBone = arm.pose.bones[segment_name] transform = poseBone.matrix.inverted() * arm.matrix_world.inverted() * frame.matrix_world frameTrafos = [tuple(v for v in transform.translation)] frameRotation = transform.to_euler() frameTrafos.append(tuple([0, 0, 1, frameRotation.z])) frameTrafos.append(tuple([0, 1, 0, frameRotation.y])) frameTrafos.append(tuple([1, 0, 0, frameRotation.x])) collisionModels = [] collisionModelTransformations = {} for model in [i for i in context.scene.objects if i.parent == frame]: modelName = model.data.name.replace('.', '_') + '-mesh' collisionModels.append(modelName) # matrix = model.parent.data.bones[model.parent_bone].matrix_local.inverted() * model.matrix_local matrix = model.matrix_local collisionModelTransformations[modelName] = [tuple(v for v in matrix.translation)] rotation = matrix.to_euler() collisionModelTransformations[modelName].append(tuple([0, 0, 1, rotation.z])) collisionModelTransformations[modelName].append(tuple([0, 1, 0, rotation.y])) 
collisionModelTransformations[modelName].append(tuple([1, 0, 0, rotation.x])) # TODO also bring the matrix_local to all collisionmodels print("mass frames", frame.name, collisionModels) handler.addMassObject(frame.name, frameTrafos, tuple(v for v in frame.RobotEditor.dynamics.inertiaTensor), frame.RobotEditor.dynamics.mass, collisionModels, collisionModelTransformations) handler.write(self.filepath) return {'FINISHED'} def invoke(self, context, event): context.window_manager.fileselect_add(self) return {'RUNNING_MODAL'} return {'FINISHED'}
gpl-2.0
702,177,518,763,781,600
35.713115
114
0.635298
false
3.687937
false
false
false
rrpg/engine
core/commands/look.py
1
4279
# -*- coding: utf-8 -*- from models import area, character, place, item, item_container import core.command from core.localisation import _ class look(core.command.command): """ Look command """ def run(self): """ c.run() Display some informations about the player's current position (characters arround, availables directions...). """ sections = { _('LOOK_REGION_PARAM'): ['region', self._getRegionInfo], _('LOOK_FIGHT_PARAM'): ['fight', self._getFightInfo], _('LOOK_CHARACTERS_PARAM'): ['characters', self._getCharacters], _('LOOK_DIRECTIONS_PARAM'): ['directions', self._getDirections], _('LOOK_PLACES_PARAM'): ['places', self._getPlaces], _('LOOK_OBJECTS_PARAM'): ['items', self._getObjects], _('LOOK_CONTAINERS_PARAM'): ['item_containers', self._getContainers] } result = dict() areaId = self._player.getAreaId() what = None if len(self._args) > 0: what = self._args[0] if what is not None: if what not in sections.keys(): raise core.command.exception(_('ERROR_LOOK_UNKNOWN_SECTION')) result[sections[what][0]] = sections[what][1](areaId) else: for s in sections: values = sections[s][1](areaId) if values: result[sections[s][0]] = values return result def _getRegionInfo(self, areaId): curArea = area.model.loadById(areaId) return { 'name': area.area.getRegionNameFromAreaId(areaId), 'x': curArea['x'], 'y': curArea['y'], 'has_save_point': area.area.hasSavePoint(areaId) } def _getFightInfo(self, areaId): f = core.fight.fight.getFight() if f is None: return None else: return f.getEnemy() def _getCharacters(self, areaId): characters = list() # Display surrounding characters for c in character.character.searchByIdArea(areaId): if c._model['id_character'] != self._player._model['id_character']: characters.append(c._model['name']) return characters def _getDirections(self, areaId): directions = list() # Display accessible areas areas = area.model.getSurroundingAreas(areaId) for d in area.area.getValidDirections(areas['directions']): directions.append(d) return directions def _getPlaces(self, areaId): places = list() # Display accessible places for p in place.model.getSurroundingPlaces(areaId): places.append(p['name']) return places def _getObjects(self, areaId): objects = list() # Display surrounding objects items = area.area.getItems(areaId) for i in items: it = item.model.loadById(i) objects.append({ 'name': it['name'], 'quantity': items[i]['quantity'] }) return objects def _getContainers(self, areaId): containers = dict() for c in item_container.container.getAllFromIdArea(areaId): try: containers[c['type_label']] = containers[c['type_label']] + 1 except KeyError: containers[c['type_label']] = 1 return containers def render(self, data): sections = data.keys() output = list() if 'region' in sections: o = list() o.append(_('CURRENT_REGION_%s') % data['region']['name']) if data['region']['has_save_point']: o.append(_('AREA_HAS_SAVE_POINT')) output.append('\n'.join(o)) if 'fight' in sections and data['fight'] is not None: output.append(_('CURRENTLY_FIGHTING_%s') % data['fight']['name']) if 'characters' in sections: o = list() o.append(_('PRESENT_CHARACTERS')) for c in data['characters']: o.append(' ' + str(c)) output.append('\n'.join(o)) if 'directions' in sections: o = list() o.append(_('AVAILABLE_DIRECTIONS')) for d in data['directions']: o.append(' ' + d) output.append('\n'.join(o)) if 'places' in sections: o = list() o.append(_('AVAILABLE_PLACES')) for p in data['places']: o.append(' ' + p) output.append('\n'.join(o)) if 'items' in sections: o = list() o.append(_('AVAILABLE_ITEMS')) for i in data['items']: 
o.append(str(i['quantity']).rjust(3) + ' ' + i['name']) output.append('\n'.join(o)) if 'item_containers' in sections: o = list() o.append(_('AVAILABLE_ITEMS_CONTAINERS')) for c in sorted(data['item_containers'].keys()): for nb in range(data['item_containers'][c]): o.append(' ' + c + ' #' + str(nb + 1)) output.append('\n'.join(o)) return '\n\n'.join(output)
mit
7,157,176,980,451,430,000
25.91195
71
0.64057
false
3.021893
false
false
false
alirizakeles/zato
code/zato-server/src/zato/server/service/internal/server.py
1
3943
# -*- coding: utf-8 -*- """ Copyright (C) 2012 Dariusz Suchojad <dsuch at zato.io> Licensed under LGPLv3, see LICENSE.txt for terms and conditions. """ from __future__ import absolute_import, division, print_function, unicode_literals # stdlib from contextlib import closing from traceback import format_exc # Zato from zato.common import ZatoException from zato.common.odb.model import Server from zato.server.service.internal import AdminService, AdminSIO class Edit(AdminService): """ Updates a server. """ class SimpleIO(AdminSIO): request_elem = 'zato_server_edit_request' response_elem = 'zato_server_edit_response' input_required = ('id', 'name') output_required = ('id', 'cluster_id', 'name', 'host') output_optional = ('bind_host', 'bind_port', 'last_join_status', 'last_join_mod_date', 'last_join_mod_by', 'up_status', 'up_mod_date') def handle(self): with closing(self.odb.session()) as session: existing_one = session.query(Server).\ filter(Server.id!=self.request.input.id).\ filter(Server.name==self.request.input.name).\ first() if existing_one: raise Exception('A server of that name [{0}] already exists on this cluster'.format(self.request.input.name)) try: item = session.query(Server).filter_by(id=self.request.input.id).one() item.name = self.request.input.name session.add(item) session.commit() self.response.payload = item except Exception, e: msg = 'Could not update the server, id:[{}], e:[{}]'.format(self.request.input.id, format_exc(e)) self.logger.error(msg) session.rollback() raise class GetByID(AdminService): """ Returns a particular server """ class SimpleIO(AdminSIO): request_elem = 'zato_server_get_by_id_request' response_elem = 'zato_server_get_by_id_response' input_required = ('id',) output_required = ('id', 'cluster_id', 'name', 'host') output_optional = ('bind_host', 'bind_port', 'last_join_status', 'last_join_mod_date', 'last_join_mod_by', 'up_status', 'up_mod_date') def get_data(self, session): return session.query(Server).\ filter(Server.id==self.request.input.id).\ one() def handle(self): with closing(self.odb.session()) as session: self.response.payload = self.get_data(session) for name in('last_join_mod_date', 'up_mod_date'): attr = getattr(self.response.payload, name, None) if attr: setattr(self.response.payload, name, attr.isoformat()) class Delete(AdminService): """ Deletes a server. """ class SimpleIO(AdminSIO): request_elem = 'zato_server_delete_request' response_elem = 'zato_server_delete_response' input_required = ('id',) def handle(self): with closing(self.odb.session()) as session: try: server = session.query(Server).\ filter(Server.id==self.request.input.id).\ one() # Sanity check if server.id == self.server.id: msg = 'A server cannot delete itself, id:[{}], name:[{}]'.format(server.id, server.name) self.logger.error(msg) raise ZatoException(self.cid, msg) # This will cascade and delete every related object session.delete(server) session.commit() except Exception, e: session.rollback() msg = 'Could not delete the server, e:[{e}]'.format(e=format_exc(e)) self.logger.error(msg) raise
gpl-3.0
9,005,205,076,764,030,000
34.205357
125
0.568603
false
3.939061
false
false
false
tapilab/theissier
scriptPredict.py
1
2153
#!/bin/python
import pymongo
from pymongo import MongoClient
client = MongoClient('localhost', 27017)
from sklearn.feature_extraction.text import CountVectorizer
vec = CountVectorizer()
from sklearn.linear_model import LogisticRegression
from elasticsearch import Elasticsearch
es = Elasticsearch()
import zerorpc
import array
import json

db = client.tweetsClassifier
collection = db['scoredTweets']
clf = LogisticRegression(class_weight="auto")

res = es.search(index="test2", body={"query": {"match_all": {}}})
totalHits = res['hits']['total']
res = es.search(index="test2", size=totalHits, body={"query": {"match_all": {}}})
for hit in res['hits']['hits']:
    print("%(text)s" % hit["_source"])

epsilon = 0.5

class TrainClassifier(object):
    def fit(self, object):
        print ("currently fitting")
        print("object :", object)
        textTweetsArray = []
        scores = []
        for document in collection.find({ "sessionname": object }, { "text": 1, "id": 1, "score": 1, "sessionname": 1, "_id": 0 }):
            textTweetsArray.append(document['text'])
            scores.append(document['score'])
        resultMatrix = vec.fit_transform(textTweetsArray).toarray()
        clf.fit(resultMatrix, scores)

    def predict(self, object):
        print ("currently predicting")
        textUnlabledTweets = []
        idTweetsThatMatchTheCriterion = []
        for i in range(len(object)):
            textUnlabledTweets.append(object[i]['fields']['text'])
        matrixUnlabledTweets = vec.transform(textUnlabledTweets).toarray()
        predictUnlabledTweets = clf.predict_proba(matrixUnlabledTweets)
        print("predict : " , predictUnlabledTweets)
        for j in range(len(predictUnlabledTweets)):
            if (predictUnlabledTweets[j][1] >= epsilon):
                idTweetsThatMatchTheCriterion.append(object[j]['_id'])
        #tweets that respect the criterion (more than a certain probability) will be sent to node.js
        print ("id tweets sent back ", idTweetsThatMatchTheCriterion)
        return "%s " % idTweetsThatMatchTheCriterion

s = zerorpc.Server(TrainClassifier())
s.bind("tcp://127.0.0.1:4242")
s.run()
apache-2.0
6,065,205,377,650,612,000
40.403846
131
0.673014
false
3.661565
false
false
false
arvinddoraiswamy/blahblah
cryptopals/Set4/c32_client.py
1
1265
import requests
import datetime
import operator
import sys

url= 'http://localhost:9000/test?'
file_qs='file=fddoo&'
sig_qs ='signature='
siglen= 40
byte_count= 0
guessed_bytes= ''
req_url= ''

''' http://localhost:9000/test?file=foo&signature=24b2d4322e50bf57c88697644e2fd1450794ab5c '''
''' http://localhost:9000/test?file=fddoo&signature=6f62e47b625aeec1cb239258523dd7e03d4cb906 '''

while len(guessed_bytes) < siglen:
    req_time= {}
    byte_count += 1
    for byte in range(0,256):
        req_url= url+file_qs+sig_qs+guessed_bytes
        diff= siglen - len(guessed_bytes)
        req_url += hex(byte)[2:].zfill(2)

        start_time= datetime.datetime.now()
        resp= requests.get(req_url)
        end_time= datetime.datetime.now()

        diff= end_time - start_time
        req_time[hex(byte)]= diff.total_seconds()

    sorted_req_time= []
    for key, value in sorted(req_time.items(), key=operator.itemgetter(1)):
        sorted_req_time.append(key)

    print 'Guessed byte', byte_count, sorted_req_time[-1][2:].zfill(2)
    guessed_bytes += sorted_req_time[-1][2:].zfill(2)

req_url= url+file_qs+sig_qs+guessed_bytes
resp= requests.get(req_url)
if resp.content == '500':
    print 'Match not found'
else:
    print 'Match found', guessed_bytes
mit
8,099,344,932,862,683,000
28.418605
96
0.667194
false
2.829978
false
false
false
optimamodel/Optima
optima/calibration.py
1
13511
""" CALIBRATION Function(s) to perform calibration. """ from optima import OptimaException, Link, Par, dcp, asd, printv, findinds, isnumber, odict from numpy import zeros, array, mean import six if six.PY3: unicode = str def autofit(project=None, name=None, fitwhat=None, fitto=None, method='wape', maxtime=None, maxiters=1000, verbose=2, doplot=False, randseed=None, **kwargs): ''' Function to automatically fit parameters. Parameters: fitwhat = which parameters to vary to improve the fit; these are defined in parameters.py under the 'auto' attribute; default is 'force' (FOI metaparameters only) fitto = what kind of data to fit to; options are anything in results.main; default is 'prev' (prevalence) or everything method = which method of calculating the objective/goodness-of-fit to use; default weighted absolute percentage error to place less weight on outliers Others should be self-explanatory. Version: 2017may22 ''' if doplot: # Store global information for debugging global autofitfig, autofitresults autofitfig, autofitresults = [None]*2 timestr = 'unlimited' if maxtime is None else str(maxtime) itersstr = 'unlimited' if maxiters is None else str(maxiters) printv('Performing automatic fitting for %s seconds/%s iterations...' % (timestr,itersstr), 1, verbose) # Validate input if project is None: raise OptimaException('autofit() requires a project in order to run') if name is None: name = -1 # Calibrate last parameter set elif type(name) not in (str,unicode) and not(isnumber(name)): raise OptimaException('%s must be the name or index of a parameter set' % name) # Initialization parset = project.parsets[name] # Shorten the original parameter set parset.projectref = Link(project) # Try to link the parset back to the project pars = dcp(parset.pars) # Just get a copy of the pars for parsing if fitwhat is None: fitwhat = ['force'] # By default, automatically fit force-of-infection only if type(fitwhat)==str: fitwhat = [fitwhat] if type(fitto)==str: fitto = [fitto] parset.improvement = [] # For storing the improvement for each fit # Create the list of parameters to be fitted and set the limits parlist = makeparlist(pars, fitwhat) parlower = array([item['limits'][0] for item in parlist]) parhigher = array(project.settings.convertlimits([item['limits'][1] for item in parlist])) # Replace text labels with numeric values # Perform fit parvec = convert(pars, parlist) args = {'pars':pars, 'parlist':parlist, 'project':project, 'fitto':fitto, 'method':method, 'doplot':doplot, 'verbose':verbose} res = asd(objectivecalc, parvec, args=args, xmin=parlower, xmax=parhigher, maxtime=maxtime, maxiters=maxiters, randseed=randseed, verbose=verbose, **kwargs) # Save, along with some additional info pars = convert(pars, parlist, res.x) parset.pars = pars parset.improvement.append(res.details.fvals) # Store improvement history parset.autofitsettings = odict([('fitwhat', fitwhat), ('fitto', fitto), ('maxtime', maxtime), ('maxiters', maxiters), ('randseed', randseed)]) return parset ## WARNING -- the following two functions must be updated together! # Populate lists of what to fit def makeparlist(pars, fitwhat): ''' This uses the "manual" attribute to decide what to calibrate (e.g., just the metaparameter or all the values. 
"fitwhat" options (see parameters.py, especially listparattributes()): ['init','popsize','test','treat','force','other','const'] ''' parlist = [] for parname in pars: # Just use first one, since all the same par = pars[parname] if issubclass(type(par), Par): # Check if it's a parameter if par.short in fitwhat: # It's in the list of things to fit if par.manual =='meta': parlist.append({'name':par.short, 'type':par.manual, 'limits':par.limits, 'ind':None}) elif par.manual =='pop': for i in range(len(par.y)): parlist.append({'name':par.short, 'type':par.manual, 'limits':par.limits, 'ind':i}) elif par.manual =='exp': for i in range(len(par.i)): parlist.append({'name':par.short, 'type':par.manual, 'limits':par.limits, 'ind':i}) elif par.manual =='const' or par.manual =='year': parlist.append({'name':par.short, 'type':par.manual, 'limits':par.limits, 'ind':None}) else: raise OptimaException('Parameter "manual" type "%s" not understood' % par.manual) else: pass # It's like popkeys or something -- don't worry, be happy return parlist def convert(pars, parlist, parvec=None): ''' If parvec is not supplied: Take a parameter set (e.g. P.parsets[0].pars), a list of "types" (e.g. 'force'), and a list of keys (e.g. 'hivtest'), and return a vector of values, e.g. "dehydrate" them. If parvec is supplied: Take a vector of parameter values and "hydrate" them into a pars object using a list of "types" (e.g. 'force'), and a list of keys (e.g. 'hivtest'). Relies on the structure of makeparlist() above... ''' # Handle inputs nfitpars = len(parlist) if parvec is None: tv = True # to vector parvec = zeros(nfitpars) else: tv = False # Do the loop for i in range(nfitpars): thistype = parlist[i]['type'] # Should match up with par.manual thisname = parlist[i]['name'] thisind = parlist[i]['ind'] if thistype in ['force', 'pop']: if tv: parvec[i] = pars[thisname].y[thisind] else: pars[thisname].y[thisind] = parvec[i] elif thistype=='exp': if tv: parvec[i] = pars[thisname].i[thisind] # Don't change growth rates, just intercept i else: pars[thisname].i[thisind] = parvec[i] elif thistype=='meta': if tv: parvec[i] = pars[thisname].m else: pars[thisname].m = parvec[i] elif thistype=='const': if tv: parvec[i] = pars[thisname].y else: pars[thisname].y = parvec[i] else: raise OptimaException('Parameter type "%s" not understood' % thistype) # Decide which to return if tv: return parvec else: return pars def extractdata(xdata, ydata): ''' Return the x and y data values for non-nan y data ''' from numpy import isnan, array nonnanx = array(xdata)[~isnan(array(ydata))] nonnany = array(ydata)[~isnan(array(ydata))] return nonnanx, nonnany def objectivecalc(parvec=None, pars=None, parlist=None, project=None, fitto='prev', method='wape', bestindex=0, doplot=False, verbose=2): ''' Calculate the mismatch between the model and the data -- may or may not be related to the likelihood. Either way, it's very uncertain what this function does. 
TODO: replace 'bestindex' with upper and lower limits for the data ''' if doplot: # Store global information for debugging -- TODO, consider better ways of doing this global autofitfig, autofitresults from pylab import figure, ceil, sqrt, subplot, scatter, xlabel, ylabel, plot, show, pause, ylim, bar, arange if autofitfig is None: autofitfig = figure(figsize=(16,12), facecolor=(1,1,1)) autofitfig.subplots_adjust(left=0.05, right=0.95, bottom=0.1, top=0.95, wspace=0.3, hspace=0.4) if autofitresults is None: autofitresults = {'count':[], 'mismatch':[], 'allmismatches':[]} # Validate input -- check everything in one go if any([arg is None for arg in [parvec, pars, parlist, project]]): raise OptimaException('objectivecalc() requires parvec, pars, parlist, and project inputs') eps = project.settings.eps # Specify absolute error -- can't be larger than ~0.001 because then general population prevalence might be weighted incorrectly pars = convert(pars, parlist, parvec) results = project.runsim(pars=pars, start=project.data['years'][0], end=project.data['years'][-1], verbose=0, resultname=project.name+'-autofit', addresult=False) ## Loop over all results allmismatches = [] count = 0 mismatch = 0 if doplot: debugdata = [] if fitto in [None, 'all', ['all']]: fitto = list(results.main.keys()) # If not specified, use everything for key in fitto: # The results! e.g. key='prev' try: this = results.main[key] except: errormsg = 'autofit(): Key to fit "%s" not found; valid keys are:\n%s' % (key, results.main.keys()) raise OptimaException(errormsg) for attr in ['tot', 'pops']: # Loop over either total or by population denominators tmpdata = getattr(this, 'data'+attr) # Get this data, e.g. results.main['prev'].datatot if tmpdata is not None: # If it actually exists, proceed tmpmodel = getattr(this, attr) # Get this result, e.g. results.main['prev'].tot datarows = tmpdata[bestindex] # Pull out data without uncertainty modelrows = tmpmodel[bestindex] # Pull out just the best result (likely only 1 index) -- TODO: check if should be another index nrows = len(datarows) for row in range(nrows): # Loop over each available row datarow = datarows[row] if len(modelrows.shape)>1: modelrow = modelrows[row] else: modelrow = modelrows datax, datay = extractdata(results.datayears, datarow) # Pull out the not-NaN values if doplot: rowname = 'total' if nrows==1 else pars['popkeys'][row] for i,year in enumerate(datax): # Loop over each data point available count += 1 modelx = findinds(results.tvec, year) # Find the index of the corresponding time point modely = modelrow[modelx] # Finally, extract the model result! if method=='wape': thismismatch = abs(modely - datay[i]) / mean(datay+eps) elif method=='mape': thismismatch = abs(modely - datay[i]) / (datay[i]+eps) elif method=='mad': thismismatch = abs(modely - datay[i]) elif method=='mse': thismismatch = (modely - datay[i])**2 else: errormsg = 'autofit(): "method" not known; you entered "%s", but must be one of:\n' % method errormsg += '"wape" = weighted absolute percentage error (default)\n' errormsg += '"mape" = mean absolute percentage error\n' errormsg += '"mad" = mean absolute difference\n' errormsg += '"mse" = mean squared error' raise OptimaException(errormsg) allmismatches.append(thismismatch) mismatch += thismismatch if doplot: tmpdebugdata = (count, key, rowname, year, datay[i], modely, thismismatch, mismatch) printv('#%i. 
Key="%s" pop="%s" year=%f datay=%f modely=%f thismis=%f totmis=%f' % tmpdebugdata, 4, verbose) debugdata.append(tmpdebugdata) if doplot: autofitresults['count'].append(len(autofitresults['count'])) # Append new count autofitresults['mismatch'].append(mismatch) # Append mismatch autofitresults['allmismatches'].append(array(allmismatches).flatten()) # Append mismatch autofitfig.clear() nplots = len(set([d[1]+d[2] for d in debugdata]))+2 # 1 is the mismatch rows = ceil(sqrt(nplots)) cols = rows-1 if rows*(rows-1)>=nplots else rows subplot(rows,cols,1) scatter(autofitresults['count'], autofitresults['mismatch']) xlabel('Count') ylabel('Mismatch') ylim((0,ylim()[1])) subplot(rows,cols,2) allmis = autofitresults['allmismatches'][-1] # Shorten bar(arange(len(allmis)), allmis) xlabel('Data point') ylabel('Mismatch') ylim((0,ylim()[1])) # Process data for plotting plotdata = odict() for count,key,rowname,year,datay,modely,thismismatch,mismatch in debugdata: if key not in plotdata: plotdata[key] = odict() if rowname not in plotdata[key]: plotdata[key][rowname] = odict([('x',[]), ('datay',[]), ('modely',[])]) plotdata[key][rowname]['x'].append(year) plotdata[key][rowname]['datay'].append(datay) plotdata[key][rowname]['modely'].append(modely) count = 0 for key1,tmp1 in plotdata.items(): for key2,tmp2 in tmp1.items(): count += 1 subplot(rows, cols, count+2) scatter(tmp2['x'], tmp2['datay']) plot(tmp2['x'], tmp2['modely']) ylabel(key1+' - '+key2) ylim((0,ylim()[1])) show() pause(0.001) printv('Current mismatch: %s' % array(mismatch), 4, verbose=verbose) return mismatch
lgpl-3.0
3,351,361,125,149,846,000
49.044444
170
0.6041
false
3.767708
false
false
false
waxa/GCM-Push-Class-for-python
GCMPush.py
2
1398
# #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @author waxa <[email protected]>

import json
import requests

class GCMPush :

    # @param servKey : A serer key from google console
    # @param verbose : optional param to print info from push request
    def __init__(self, servKey, verbose = False) :
        self.servKey = servKey
        self.url = "https://android.googleapis.com/gcm/send"
        self.header = {"Authorization": "key="+self.servKey, "Content-Type" : "application/json", "Accept-Encoding" : "application/json" }
        self.verbose = verbose

    # @param regIds : A list with registers ID's from android devices. Must be strings
    # @param title : A title for you notification, for basic usage on cordova push plugin, don't need on android native
    # @param message : The text on you notification, for basic usage on cordova push plugin, don't need on android native
    # @param extras : You can pass extra data (string, json, etc) on this param
    def push(self, regIds, title, message, extras = None) :
        data = { "registration_ids" : regIds, "data" : { "message" : message, "title" : title } }
        if extras is not None:
            data["data"]["extras"] = extras
        r = requests.post(self.url, data = json.dumps(data), headers = self.header)
        if self.verbose :
            print "-------------------------------"
            print "push info"
            print r.text
            print "-------------------------------"
gpl-2.0
1,831,681,067,588,702,500
30.772727
132
0.642346
false
3.320665
false
false
false
chromium/chromium
third_party/blink/renderer/bindings/scripts/bind_gen/interface.py
1
299474
# Copyright 2019 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import itertools import web_idl from . import name_style from .blink_v8_bridge import blink_class_name from .blink_v8_bridge import blink_type_info from .blink_v8_bridge import make_default_value_expr from .blink_v8_bridge import make_v8_to_blink_value from .blink_v8_bridge import make_v8_to_blink_value_variadic from .blink_v8_bridge import native_value_tag from .blink_v8_bridge import v8_bridge_class_name from .code_node import EmptyNode from .code_node import FormatNode from .code_node import ListNode from .code_node import SequenceNode from .code_node import SymbolDefinitionNode from .code_node import SymbolNode from .code_node import SymbolScopeNode from .code_node import TextNode from .code_node import WeakDependencyNode from .code_node_cxx import CxxBlockNode from .code_node_cxx import CxxBreakableBlockNode from .code_node_cxx import CxxClassDefNode from .code_node_cxx import CxxFuncDeclNode from .code_node_cxx import CxxFuncDefNode from .code_node_cxx import CxxLikelyIfNode from .code_node_cxx import CxxMultiBranchesNode from .code_node_cxx import CxxNamespaceNode from .code_node_cxx import CxxSwitchNode from .code_node_cxx import CxxUnlikelyIfNode from .codegen_accumulator import CodeGenAccumulator from .codegen_context import CodeGenContext from .codegen_expr import CodeGenExpr from .codegen_expr import expr_and from .codegen_expr import expr_from_exposure from .codegen_expr import expr_or from .codegen_format import format_template as _format from .codegen_utils import component_export from .codegen_utils import component_export_header from .codegen_utils import enclose_with_header_guard from .codegen_utils import make_copyright_header from .codegen_utils import make_forward_declarations from .codegen_utils import make_header_include_directives from .codegen_utils import write_code_node_to_file from .mako_renderer import MakoRenderer from .package_initializer import package_initializer from .path_manager import PathManager from .task_queue import TaskQueue def _is_none_or_str(arg): return arg is None or isinstance(arg, str) def backward_compatible_api_func(cg_context): """ Returns the Blink function name compatible with the old bindings generator. """ assert isinstance(cg_context, CodeGenContext) name = (cg_context.member_like.code_generator_info.property_implemented_as or cg_context.member_like.identifier or cg_context.property_.identifier) if cg_context.attribute_get: # modules/webaudio/biquad_filter_node.idl has readonly attribute "Q" # and somehow it's implemented as "q" in Blink. if name == "Q": name = "q" if cg_context.attribute_set: tokens = name_style.raw.tokenize(name) if tokens[0] in ("IDL", "css", "xml"): tokens[0] = tokens[0].upper() else: tokens[0] = tokens[0].capitalize() tokens.insert(0, "set") name = "".join(tokens) if cg_context.indexed_property_getter and not name: name = "AnonymousIndexedGetter" if cg_context.indexed_property_setter and not name: name = "AnonymousIndexedSetter" if cg_context.named_property_getter and not name: name = "AnonymousNamedGetter" if cg_context.named_property_setter and not name: name = "AnonymousNamedSetter" if cg_context.named_property_deleter and not name: name = "AnonymousNamedDeleter" return name def callback_function_name(cg_context, overload_index=None, argument_count=None, for_cross_origin=False): """ Args: cg_context: A CodeGenContext of the target IDL construct. 
overload_index: An overload index if the target is an overloaded IDL operation. argument_count: When the target is an IDL operation that has optional arguments and is annotated with [NoAllocDirectCall], the value is the number of arguments that V8 passes in (excluding the fixed arguments like the receiver object and the v8::FastApiCallbackOptions.) for_cross_origin: True if the target is the cross origin accessible version. """ assert isinstance(cg_context, CodeGenContext) assert overload_index is None or isinstance(overload_index, int) assert argument_count is None or isinstance(argument_count, int) assert isinstance(for_cross_origin, bool) def _cxx_name(name): """ Returns a property name that the bindings generator can use in generated code. Note that Web IDL allows '-' (hyphen-minus) and '_' (low line) in identifiers but C++ does not allow or recommend them. This function encodes these characters. """ # In Python3, we can use str.maketrans and str.translate. # # We're optimistic about name conflict. It's highly unlikely that # these replacements will cause a conflict. assert "Dec45" not in name assert "Dec95" not in name name = name.replace("-", "Dec45") name = name.replace("_", "Dec95") return name if cg_context.constant: property_name = cg_context.property_.identifier else: property_name = _cxx_name(cg_context.property_.identifier) if cg_context.attribute_get: kind = "AttributeGet" elif cg_context.attribute_set: kind = "AttributeSet" elif cg_context.constant: kind = "Constant" elif cg_context.constructor_group: if cg_context.is_named_constructor: kind = "NamedConstructor" else: property_name = "" kind = "Constructor" elif cg_context.exposed_construct: if cg_context.is_named_constructor: kind = "NamedConstructorProperty" elif cg_context.legacy_window_alias: kind = "LegacyWindowAlias" else: kind = "ExposedConstruct" elif cg_context.operation_group: kind = "Operation" elif cg_context.stringifier: kind = "Operation" if cg_context.no_alloc_direct_call: nadc = "NoAllocDirectCall" elif cg_context.no_alloc_direct_call_for_testing: nadc = "NoAllocDirectCallForTesting" else: nadc = "" overload = "" if overload_index is not None and (len(cg_context.constructor_group or cg_context.operation_group) > 1): overload += "Overload{}".format(overload_index + 1) if argument_count is not None: overload += "Arg{}".format(argument_count) if for_cross_origin: suffix = "CrossOrigin" elif nadc or overload: suffix = nadc + overload else: suffix = "Callback" if cg_context.for_world == CodeGenContext.MAIN_WORLD: world_suffix = "ForMainWorld" elif cg_context.for_world == CodeGenContext.NON_MAIN_WORLDS: world_suffix = "ForNonMainWorlds" elif cg_context.for_world == CodeGenContext.ALL_WORLDS: world_suffix = "" return name_style.func(property_name, kind, suffix, world_suffix) def constant_name(cg_context): assert isinstance(cg_context, CodeGenContext) assert cg_context.constant property_name = cg_context.property_.identifier.lower() return name_style.constant(property_name) def custom_function_name(cg_context): assert isinstance(cg_context, CodeGenContext) if cg_context.named_property_getter: return "NamedPropertyGetterCustom" if cg_context.named_property_setter: return "NamedPropertySetterCustom" if cg_context.named_property_deleter: return "NamedPropertyDeleterCustom" if cg_context.attribute_get: suffix = "AttributeGetterCustom" elif cg_context.attribute_set: suffix = "AttributeSetterCustom" elif cg_context.operation_group: suffix = "MethodCustom" else: assert False return 
name_style.func(cg_context.property_.identifier, suffix) # ---------------------------------------------------------------------------- # Callback functions # ---------------------------------------------------------------------------- def bind_blink_api_arguments(code_node, cg_context): assert isinstance(code_node, SymbolScopeNode) assert isinstance(cg_context, CodeGenContext) if cg_context.attribute_get: return if cg_context.attribute_set: real_type = cg_context.attribute.idl_type.unwrap(typedef=True) if real_type.is_enumeration: pattern = """\ // https://heycam.github.io/webidl/#dfn-attribute-setter // step 4.6.1. Let S be ? ToString(V). const auto&& arg1_value_string = NativeValueTraits<IDLStringV2>::NativeValue( ${isolate}, ${v8_property_value}, ${exception_state}); if (${exception_state}.HadException()) return; // step 4.6.2. If S is not one of the enumeration's values, then return // undefined. const auto arg1_value_maybe_enum = {enum_type}::Create(arg1_value_string); if (!arg1_value_maybe_enum) {{ bindings::ReportInvalidEnumSetToAttribute( ${isolate}, arg1_value_string, "{enum_type_name}", ${exception_state}); return; // Return undefined. }} const auto ${arg1_value} = arg1_value_maybe_enum.value(); """ text = _format(pattern, enum_type=blink_class_name( real_type.type_definition_object), enum_type_name=real_type.identifier) code_node.register_code_symbol(SymbolNode("arg1_value", text)) return name = "arg1_value" v8_value = "${v8_property_value}" code_node.register_code_symbol( make_v8_to_blink_value(name, v8_value, cg_context.attribute.idl_type)) return for argument in cg_context.function_like.arguments: name = name_style.arg_f("arg{}_{}", argument.index + 1, argument.identifier) if argument.is_variadic: code_node.register_code_symbol( make_v8_to_blink_value_variadic(name, "${info}", argument.index, argument.idl_type)) else: v8_value = "${{info}}[{}]".format(argument.index) code_node.register_code_symbol( make_v8_to_blink_value(name, v8_value, argument.idl_type, argument=argument, cg_context=cg_context)) def bind_callback_local_vars(code_node, cg_context): assert isinstance(code_node, SymbolScopeNode) assert isinstance(cg_context, CodeGenContext) S = SymbolNode T = TextNode F = FormatNode local_vars = [] template_vars = {} local_vars.extend([ S("blink_property_name", ("const AtomicString& ${blink_property_name} = " "ToCoreAtomicString(${v8_property_name}.As<v8::String>());")), S("class_like_name", ("const char* const ${class_like_name} = " "\"${class_like.identifier}\";")), S("current_context", ("v8::Local<v8::Context> ${current_context} = " "${isolate}->GetCurrentContext();")), S("current_script_state", ("ScriptState* ${current_script_state} = " "ScriptState::From(${current_context});")), S("isolate", "v8::Isolate* ${isolate} = ${info}.GetIsolate();"), S("non_undefined_argument_length", ("const int ${non_undefined_argument_length} = " "bindings::NonUndefinedArgumentLength(${info});")), S("per_context_data", ("V8PerContextData* ${per_context_data} = " "${script_state}->PerContextData();")), S("per_isolate_data", ("V8PerIsolateData* ${per_isolate_data} = " "V8PerIsolateData::From(${isolate});")), S("property_name", "const char* const ${property_name} = \"${property.identifier}\";"), S("receiver_context", ("v8::Local<v8::Context> ${receiver_context} = " "${v8_receiver}->CreationContext();")), S("receiver_script_state", ("ScriptState* ${receiver_script_state} = " "ScriptState::From(${receiver_context});")), ]) is_receiver_context = not ( (cg_context.member_like and 
cg_context.member_like.is_static) or cg_context.constructor) # creation_context pattern = "const v8::Local<v8::Context>& ${creation_context} = {_1};" _1 = "${receiver_context}" if is_receiver_context else "${current_context}" local_vars.append(S("creation_context", _format(pattern, _1=_1))) # creation_context_object text = ("${v8_receiver}" if is_receiver_context else "${current_context}->Global()") template_vars["creation_context_object"] = T(text) # script_state pattern = "ScriptState* ${script_state} = {_1};" _1 = ("${receiver_script_state}" if is_receiver_context else "${current_script_state}") local_vars.append(S("script_state", _format(pattern, _1=_1))) # execution_context pattern = "ExecutionContext* ${execution_context} = {_1};" _1 = ("${receiver_execution_context}" if is_receiver_context else "${current_execution_context}") local_vars.append(S("execution_context", _format(pattern, _1=_1))) node = S("current_execution_context", ("ExecutionContext* ${current_execution_context} = " "ExecutionContext::From(${current_context});")) node.accumulate( CodeGenAccumulator.require_include_headers([ "third_party/blink/renderer/core/execution_context/execution_context.h" ])) local_vars.append(node) node = S("receiver_execution_context", ("ExecutionContext* ${receiver_execution_context} = " "ExecutionContext::From(${receiver_context});")) node.accumulate( CodeGenAccumulator.require_include_headers([ "third_party/blink/renderer/core/execution_context/execution_context.h" ])) local_vars.append(node) # execution_context_of_document_tree pattern = "ExecutionContext* ${execution_context_of_document_tree} = {_1};" if is_receiver_context: _1 = "bindings::ExecutionContextFromV8Wrappable(${blink_receiver})" else: _1 = "${current_execution_context}" text = _format(pattern, _1=_1) local_vars.append(S("execution_context_of_document_tree", text)) # exception_state_context_type pattern = ( "const ExceptionState::ContextType ${exception_state_context_type} = " "{_1};") if cg_context.attribute_get: _1 = "ExceptionState::kGetterContext" elif cg_context.attribute_set: _1 = "ExceptionState::kSetterContext" elif cg_context.constructor_group: _1 = "ExceptionState::kConstructionContext" elif cg_context.indexed_property_getter: _1 = "ExceptionState::kIndexedGetterContext" elif cg_context.indexed_property_setter: _1 = "ExceptionState::kIndexedSetterContext" elif cg_context.named_property_getter: _1 = "ExceptionState::kNamedGetterContext" elif cg_context.named_property_setter: _1 = "ExceptionState::kNamedSetterContext" elif cg_context.named_property_deleter: _1 = "ExceptionState::kNamedDeletionContext" else: _1 = "ExceptionState::kExecutionContext" local_vars.append( S("exception_state_context_type", _format(pattern, _1=_1))) # exception_state def create_exception_state(symbol_node): node = SymbolDefinitionNode(symbol_node) pattern = ("{exception_state_type} ${exception_state}({init_args});" "{exception_to_reject_promise}") exception_state_type = "ExceptionState" init_args = ["${isolate}", "${exception_state_context_type}"] exception_to_reject_promise = "" if (cg_context.no_alloc_direct_call or cg_context.no_alloc_direct_call_for_testing): exception_state_type = "NoAllocDirectCallExceptionState" init_args.insert(0, "${blink_receiver}") node.accumulate( CodeGenAccumulator.require_include_headers([ "third_party/blink/renderer/platform/bindings/no_alloc_direct_call_exception_state.h" ])) if cg_context.is_named_constructor: init_args.append("\"{}\"".format(cg_context.property_.identifier)) else: 
init_args.append("${class_like_name}") if (cg_context.property_ and cg_context.property_.identifier and not cg_context.constructor_group): init_args.append("${property_name}") if cg_context.is_return_type_promise_type: exception_to_reject_promise = ( "\n" "ExceptionToRejectPromiseScope reject_promise_scope" "(${info}, ${exception_state});") node.append( F(pattern, exception_state_type=exception_state_type, init_args=", ".join(init_args), exception_to_reject_promise=exception_to_reject_promise)) return node local_vars.append( S("exception_state", definition_constructor=create_exception_state)) # blink_receiver if cg_context.class_like.identifier == "Window": # TODO(yukishiino): Window interface should be # [ImplementedAs=LocalDOMWindow] instead of [ImplementedAs=DOMWindow], # and [CrossOrigin] properties should be implemented specifically with # DOMWindow class. Then, we'll have less hacks. if (not cg_context.member_like or "CrossOrigin" in cg_context.member_like.extended_attributes): text = ("DOMWindow* ${blink_receiver} = " "${class_name}::ToWrappableUnsafe(${v8_receiver});") else: text = ("LocalDOMWindow* ${blink_receiver} = To<LocalDOMWindow>(" "${class_name}::ToWrappableUnsafe(${v8_receiver}));") else: pattern = ("{_1}* ${blink_receiver} = " "${class_name}::ToWrappableUnsafe(${v8_receiver});") _1 = blink_class_name(cg_context.class_like) text = _format(pattern, _1=_1) local_vars.append(S("blink_receiver", text)) # v8_property_value if cg_context.v8_callback_type == CodeGenContext.V8_FUNCTION_CALLBACK: # In case of V8_ACCESSOR_NAME_SETTER_CALLBACK, |v8_property_value| is # defined as an argument. In case of V8_FUNCTION_CALLBACK (of IDL # attribute set function), |info[0]| is the value to be set. local_vars.append( S("v8_property_value", "v8::Local<v8::Value> ${v8_property_value} = ${info}[0];")) # v8_receiver if cg_context.v8_callback_type == CodeGenContext.V8_FUNCTION_CALLBACK: # In case of v8::FunctionCallbackInfo, This() is the receiver object. local_vars.append( S("v8_receiver", "v8::Local<v8::Object> ${v8_receiver} = ${info}.This();")) else: # In case of v8::PropertyCallbackInfo, Holder() is the object that has # the property being processed. local_vars.append( S("v8_receiver", "v8::Local<v8::Object> ${v8_receiver} = ${info}.Holder();")) # v8_return_value def create_v8_return_value(symbol_node): return SymbolDefinitionNode( symbol_node, [ T("v8::Local<v8::Value> ${v8_return_value};"), CxxUnlikelyIfNode( # cond=F( "!ToV8Traits<{}>::ToV8" "(${script_state}, ${return_value})" ".ToLocal(&${v8_return_value})", native_value_tag(cg_context.return_type)), body=T("return;")), ]) local_vars.append( S("v8_return_value", definition_constructor=create_v8_return_value)) # throw_security_error template_vars["throw_security_error"] = T( "BindingSecurity::FailedAccessCheckFor(" "${info}.GetIsolate(), " "${class_name}::GetWrapperTypeInfo(), " "${info}.Holder());") code_node.add_template_vars(template_vars) # Allow implementation-specific symbol definitions to have priority. 
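    # (A symbol registered above is skipped when |code_node| already defines a
    # template variable of the same name, so caller-provided definitions win.)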
for symbol_node in local_vars: if symbol_node.name not in code_node.own_template_vars: code_node.register_code_symbol(symbol_node) def _make_reflect_content_attribute_key(code_node, cg_context): assert isinstance(code_node, SymbolScopeNode) assert isinstance(cg_context, CodeGenContext) name = (cg_context.attribute.extended_attributes.value_of("Reflect") or cg_context.attribute.identifier.lower()) if cg_context.attribute_get and name in ("class", "id", "name"): return None if cg_context.class_like.identifier.startswith("SVG"): namespace = "svg_names" code_node.accumulate( CodeGenAccumulator.require_include_headers( ["third_party/blink/renderer/core/svg_names.h"])) else: namespace = "html_names" code_node.accumulate( CodeGenAccumulator.require_include_headers( ["third_party/blink/renderer/core/html_names.h"])) return "{}::{}".format(namespace, name_style.constant(name, "attr")) def _make_reflect_accessor_func_name(cg_context): assert isinstance(cg_context, CodeGenContext) assert cg_context.attribute_get or cg_context.attribute_set if cg_context.attribute_get: name = (cg_context.attribute.extended_attributes.value_of("Reflect") or cg_context.attribute.identifier.lower()) if name in ("class", "id", "name"): return name_style.func("get", name, "attribute") if "URL" in cg_context.attribute.extended_attributes: return "GetURLAttribute" FAST_ACCESSORS = { "boolean": ("FastHasAttribute", "SetBooleanAttribute"), "long": ("GetIntegralAttribute", "SetIntegralAttribute"), "unsigned long": ("GetUnsignedIntegralAttribute", "SetUnsignedIntegralAttribute"), } idl_type = cg_context.attribute.idl_type.unwrap() accessors = FAST_ACCESSORS.get(idl_type.keyword_typename) if accessors: return accessors[0 if cg_context.attribute_get else 1] if (idl_type.is_interface and idl_type.type_definition_object.does_implement("Element")): if cg_context.attribute_get: return "GetElementAttribute" else: return "SetElementAttribute" if idl_type.element_type: element_type = idl_type.element_type.unwrap() if (element_type.is_interface and element_type.type_definition_object.does_implement("Element")): if cg_context.attribute_get: return "GetElementArrayAttribute" else: return "SetElementArrayAttribute" if cg_context.attribute_get: return "FastGetAttribute" else: return "setAttribute" def _make_reflect_process_keyword_state(cg_context): # https://html.spec.whatwg.org/C/#keywords-and-enumerated-attributes assert isinstance(cg_context, CodeGenContext) assert cg_context.attribute_get or cg_context.attribute_set T = TextNode F = FormatNode if not cg_context.attribute_get: return None ext_attrs = cg_context.attribute.extended_attributes keywords = ext_attrs.values_of("ReflectOnly") missing_default = ext_attrs.value_of("ReflectMissing") empty_default = ext_attrs.value_of("ReflectEmpty") invalid_default = ext_attrs.value_of("ReflectInvalid") def constant(keyword): if not keyword: return "g_empty_atom" return "keywords::{}".format(name_style.constant(keyword)) branches = CxxMultiBranchesNode() branches.accumulate( CodeGenAccumulator.require_include_headers( ["third_party/blink/renderer/core/keywords.h"])) nodes = [ T("// [ReflectOnly]"), T("const AtomicString reflect_value(${return_value}.LowerASCII());"), branches, ] if missing_default is not None: branches.append( cond="reflect_value.IsNull()", body=F("${return_value} = {};", constant(missing_default))) elif cg_context.return_type.unwrap(nullable=False).is_nullable: branches.append( cond="reflect_value.IsNull()", body=T("// Null string to IDL null.")) if empty_default is not None: 
branches.append( cond="reflect_value.IsEmpty()", body=F("${return_value} = {};", constant(empty_default))) expr = " || ".join( map(lambda keyword: "reflect_value == {}".format(constant(keyword)), keywords)) branches.append(cond=expr, body=T("${return_value} = reflect_value;")) if invalid_default is not None: branches.append( cond=True, body=F("${return_value} = {};", constant(invalid_default))) else: branches.append( cond=True, body=F("${return_value} = {};", constant(""))) return SequenceNode(nodes) def _make_blink_api_call(code_node, cg_context, num_of_args=None, overriding_args=None): assert isinstance(code_node, SymbolScopeNode) assert isinstance(cg_context, CodeGenContext) assert num_of_args is None or isinstance(num_of_args, int) assert (overriding_args is None or (isinstance(overriding_args, (list, tuple)) and all(isinstance(arg, str) for arg in overriding_args))) arguments = [] ext_attrs = cg_context.member_like.extended_attributes values = ext_attrs.values_of("CallWith") + ( ext_attrs.values_of("GetterCallWith") if cg_context.attribute_get else ext_attrs.values_of("SetterCallWith") if cg_context.attribute_set else ()) if "Isolate" in values: arguments.append("${isolate}") if "ScriptState" in values: arguments.append("${script_state}") if "ExecutionContext" in values: arguments.append("${execution_context}") if "Document" in values: arguments.append( "*bindings::ToDocumentFromExecutionContext(${execution_context})") if "ThisValue" in values: arguments.append("ScriptValue(${isolate}, ${v8_receiver})") code_generator_info = cg_context.member_like.code_generator_info is_partial = code_generator_info.defined_in_partial if (is_partial and not (cg_context.constructor or cg_context.member_like.is_static)): arguments.append("*${blink_receiver}") if "Reflect" in ext_attrs: # [Reflect] key = _make_reflect_content_attribute_key(code_node, cg_context) if key: arguments.append(key) if overriding_args is not None: arguments.extend(overriding_args) elif cg_context.attribute_get: pass elif cg_context.attribute_set: arguments.append("${arg1_value}") else: for index, argument in enumerate(cg_context.function_like.arguments): if num_of_args is not None and index == num_of_args: break name = name_style.arg_f("arg{}_{}", index + 1, argument.identifier) arguments.append(_format("${{{}}}", name)) if cg_context.is_return_by_argument: arguments.append("${return_value}") if cg_context.may_throw_exception: arguments.append("${exception_state}") func_name = backward_compatible_api_func(cg_context) if cg_context.constructor: if cg_context.is_named_constructor: func_name = "CreateForJSConstructor" else: func_name = "Create" if "Reflect" in ext_attrs: # [Reflect] func_name = _make_reflect_accessor_func_name(cg_context) if (cg_context.constructor or cg_context.member_like.is_static or is_partial): class_like = cg_context.member_like.owner_mixin or cg_context.class_like class_name = (code_generator_info.receiver_implemented_as or name_style.class_(class_like.identifier)) func_designator = "{}::{}".format(class_name, func_name) else: func_designator = _format("${blink_receiver}->{}", func_name) expr = _format("{_1}({_2})", _1=func_designator, _2=", ".join(arguments)) if cg_context.no_alloc_direct_call_for_testing: expr = "\n".join([ # GCC extension: a compound statement enclosed in parentheses "({", "ThreadState::NoAllocationScope nadc_no_allocation_scope" "(ThreadState::Current());", "v8::Isolate::DisallowJavascriptExecutionScope " "nadc_disallow_js_exec_scope" "(${isolate}, " 
"v8::Isolate::DisallowJavascriptExecutionScope::CRASH_ON_FAILURE);", "blink::NoAllocDirectCallScope nadc_nadc_scope" "(${blink_receiver}, &${v8_fast_api_callback_options});", _format("{};", expr), "})", ]) return expr def bind_return_value(code_node, cg_context, overriding_args=None): assert isinstance(code_node, SymbolScopeNode) assert isinstance(cg_context, CodeGenContext) assert (overriding_args is None or (isinstance(overriding_args, (list, tuple)) and all(isinstance(arg, str) for arg in overriding_args))) T = TextNode F = FormatNode def create_definition(symbol_node): api_calls = [] # Pairs of (num_of_args, api_call_text) if overriding_args is None: arguments = (cg_context.function_like.arguments if cg_context.function_like else []) for index, arg in enumerate(arguments): if arg.is_optional and not arg.default_value: api_calls.append((index, _make_blink_api_call( code_node, cg_context, index))) api_calls.append((None, _make_blink_api_call( code_node, cg_context))) else: api_calls.append((None, _make_blink_api_call( code_node, cg_context, overriding_args=overriding_args))) nodes = [] is_return_type_void = ((not cg_context.return_type or cg_context.return_type.unwrap().is_void) and not cg_context.does_override_idl_return_type) if not (is_return_type_void or cg_context.does_override_idl_return_type): return_type = blink_type_info(cg_context.return_type).value_t if len(api_calls) == 1: _, api_call = api_calls[0] if is_return_type_void: nodes.append(F("{};", api_call)) elif cg_context.is_return_by_argument: nodes.append(F("{} ${return_value};", return_type)) nodes.append(F("{};", api_call)) elif "ReflectOnly" in cg_context.member_like.extended_attributes: # [ReflectOnly] nodes.append(F("auto ${return_value} = {};", api_call)) else: nodes.append(F("auto&& ${return_value} = {};", api_call)) else: branches = SequenceNode() for index, api_call in api_calls: if is_return_type_void or cg_context.is_return_by_argument: assignment = "{};".format(api_call) else: assignment = _format("${return_value} = {};", api_call) if index is not None: branches.append( CxxLikelyIfNode( cond=_format( "${non_undefined_argument_length} <= {}", index), body=[ T(assignment), T("break;"), ])) else: branches.append(T(assignment)) if not is_return_type_void: nodes.append(F("{} ${return_value};", return_type)) nodes.append(CxxBreakableBlockNode(branches)) if cg_context.may_throw_exception: nodes.append( CxxUnlikelyIfNode( cond="${exception_state}.HadException()", body=T("return;"))) if "ReflectOnly" in cg_context.member_like.extended_attributes: # [ReflectOnly] node = _make_reflect_process_keyword_state(cg_context) if node: nodes.append(EmptyNode()) nodes.append(node) return SymbolDefinitionNode(symbol_node, nodes) code_node.register_code_symbol( SymbolNode("return_value", definition_constructor=create_definition)) def make_bindings_trace_event(cg_context): assert isinstance(cg_context, CodeGenContext) event_name = "{}.{}".format(cg_context.class_like.identifier, cg_context.property_.identifier) if cg_context.attribute_get: event_name = "{}.{}".format(event_name, "get") elif cg_context.attribute_set: event_name = "{}.{}".format(event_name, "set") elif cg_context.constructor_group and not cg_context.is_named_constructor: event_name = "{}.{}".format(cg_context.class_like.identifier, "constructor") return TextNode("BLINK_BINDINGS_TRACE_EVENT(\"{}\");".format(event_name)) def make_check_argument_length(cg_context): assert isinstance(cg_context, CodeGenContext) T = TextNode F = FormatNode if cg_context.v8_callback_type != 
CodeGenContext.V8_FUNCTION_CALLBACK: return None if cg_context.attribute_get: num_of_required_args = 0 elif cg_context.attribute_set: idl_type = cg_context.attribute.idl_type if not (idl_type.does_include_nullable_or_dict or idl_type.unwrap().is_any or "LegacyTreatNonObjectAsNull" in idl_type.unwrap().extended_attributes or "PutForwards" in cg_context.attribute.extended_attributes or "Replaceable" in cg_context.attribute.extended_attributes): # ES undefined in ${v8_property_value} will cause a TypeError # anyway, so omit the check against the number of arguments. return None num_of_required_args = 1 elif cg_context.function_like: num_of_required_args = ( cg_context.function_like.num_of_required_arguments) elif isinstance(cg_context.property_, web_idl.OverloadGroup): num_of_required_args = ( cg_context.property_.min_num_of_required_arguments) else: assert False if num_of_required_args == 0: return None return CxxUnlikelyIfNode( cond=_format("UNLIKELY(${info}.Length() < {})", num_of_required_args), body=[ F(("${exception_state}.ThrowTypeError(" "ExceptionMessages::NotEnoughArguments" "({}, ${info}.Length()));"), num_of_required_args), T("return;"), ]) def make_check_constructor_call(cg_context): assert isinstance(cg_context, CodeGenContext) T = TextNode node = SequenceNode([ CxxUnlikelyIfNode( cond="!${info}.IsConstructCall()", body=T("${exception_state}.ThrowTypeError(" "ExceptionMessages::ConstructorCalledAsFunction());\n" "return;")), ]) if not cg_context.is_named_constructor: node.append( CxxLikelyIfNode( cond=("ConstructorMode::Current(${isolate}) == " "ConstructorMode::kWrapExistingObject"), body=T("bindings::V8SetReturnValue(${info}, ${v8_receiver});\n" "return;"))) node.accumulate( CodeGenAccumulator.require_include_headers([ "third_party/blink/renderer/platform/bindings/v8_object_constructor.h" ])) return node def make_check_receiver(cg_context): assert isinstance(cg_context, CodeGenContext) T = TextNode if cg_context.member_like.is_static: return None if (cg_context.attribute and "LegacyLenientThis" in cg_context.attribute.extended_attributes): return SequenceNode([ T("// [LegacyLenientThis]"), CxxUnlikelyIfNode( cond="!${class_name}::HasInstance(${isolate}, ${v8_receiver})", body=T("return;")), ]) if cg_context.is_return_type_promise_type: return SequenceNode([ T("// Promise returning function: " "Convert a TypeError to a reject promise."), CxxUnlikelyIfNode( cond="!${class_name}::HasInstance(${isolate}, ${v8_receiver})", body=[ T("${exception_state}.ThrowTypeError(" "\"Illegal invocation\");"), T("return;"), ]) ]) return None def make_check_security_of_return_value(cg_context): assert isinstance(cg_context, CodeGenContext) T = TextNode check_security = cg_context.member_like.extended_attributes.value_of( "CheckSecurity") if check_security != "ReturnValue": return None web_feature = _format( "WebFeature::{}", name_style.constant("CrossOrigin", cg_context.class_like.identifier, cg_context.property_.identifier)) use_counter = _format( "UseCounter::Count(${current_execution_context}, {});", web_feature) cond = T("!BindingSecurity::ShouldAllowAccessTo(" "ToLocalDOMWindow(${current_context}), ${return_value}, " "BindingSecurity::ErrorReportOption::kDoNotReport)") body = [ T(use_counter), T("bindings::V8SetReturnValue(${info}, nullptr);\n" "return;"), ] node = SequenceNode([ T("// [CheckSecurity=ReturnValue]"), CxxUnlikelyIfNode(cond=cond, body=body), ]) node.accumulate( CodeGenAccumulator.require_include_headers([ "third_party/blink/renderer/bindings/core/v8/binding_security.h", 
"third_party/blink/renderer/core/frame/web_feature.h", "third_party/blink/renderer/platform/instrumentation/use_counter.h", ])) return node def make_cooperative_scheduling_safepoint(cg_context): assert isinstance(cg_context, CodeGenContext) node = TextNode("scheduler::CooperativeSchedulingManager::Instance()" "->Safepoint();") node.accumulate( CodeGenAccumulator.require_include_headers([ "third_party/blink/renderer/platform/scheduler/public/cooperative_scheduling_manager.h" ])) return node def make_log_activity(cg_context): assert isinstance(cg_context, CodeGenContext) target = cg_context.member_like or cg_context.property_ ext_attrs = target.extended_attributes if "LogActivity" not in ext_attrs: return None target = ext_attrs.value_of("LogActivity") if target: assert target in ("GetterOnly", "SetterOnly") if ((target == "GetterOnly" and not cg_context.attribute_get) or (target == "SetterOnly" and not cg_context.attribute_set)): return None if (cg_context.for_world == cg_context.MAIN_WORLD and "LogAllWorlds" not in ext_attrs): return None pattern = "{_1}${per_context_data} && ${per_context_data}->ActivityLogger()" _1 = "" if (cg_context.attribute and "PerWorldBindings" not in ext_attrs and "LogAllWorlds" not in ext_attrs): _1 = "${script_state}->World().IsIsolatedWorld() && " cond = _format(pattern, _1=_1) pattern = "${per_context_data}->ActivityLogger()->{_1}(\"{_2}.{_3}\"{_4});" _2 = cg_context.class_like.identifier _3 = cg_context.property_.identifier if cg_context.attribute_get: _1 = "LogGetter" _4 = "" elif cg_context.attribute_set: _1 = "LogSetter" _4 = ", ${v8_property_value}" elif cg_context.operation_group: _1 = "LogMethod" _4 = ", ${info}" body = _format(pattern, _1=_1, _2=_2, _3=_3, _4=_4) pattern = ("// [LogActivity], [LogAllWorlds]\n" "if ({_1}) {{ {_2} }}") node = TextNode(_format(pattern, _1=cond, _2=body)) node.accumulate( CodeGenAccumulator.require_include_headers([ "third_party/blink/renderer/platform/bindings/v8_dom_activity_logger.h", "third_party/blink/renderer/platform/bindings/v8_per_context_data.h", ])) return node def _make_overload_dispatcher_per_arg_size(cg_context, items): """ https://heycam.github.io/webidl/#dfn-overload-resolution-algorithm Args: items: Partial list of an "effective overload set" with the same type list size. Returns: A pair of a resulting CodeNode and a boolean flag that is True if there exists a case that overload resolution will fail, i.e. a bailout that throws a TypeError is necessary. """ assert isinstance(cg_context, CodeGenContext) assert isinstance(items, (list, tuple)) assert all( isinstance(item, web_idl.OverloadGroup.EffectiveOverloadItem) for item in items) # Variables shared with nested functions if len(items) > 1: arg_index = web_idl.OverloadGroup.distinguishing_argument_index(items) else: arg_index = None func_like = None dispatcher_nodes = SequenceNode() # True if there exists a case that overload resolution will fail. can_fail = True def find_test(item, test): # |test| is a callable that takes (t, u) where: # t = the idl_type (in the original form) # u = the unwrapped version of t idl_type = item.type_list[arg_index] t = idl_type u = idl_type.unwrap() return test(t, u) or (u.is_union and any( [test(m, m.unwrap()) for m in u.flattened_member_types])) def find(test): for item in items: if find_test(item, test): return item.function_like return None def find_all_interfaces(): result = [] # [(func_like, idl_type), ...] 
for item in items: idl_type = item.type_list[arg_index].unwrap() if idl_type.is_interface: result.append((item.function_like, idl_type)) if idl_type.is_union: for member_type in idl_type.flattened_member_types: if member_type.unwrap().is_interface: result.append((item.function_like, member_type.unwrap())) return result def make_node(pattern): value = _format("${info}[{}]", arg_index) func_name = callback_function_name( cg_context, overload_index=func_like.overload_index) return TextNode(_format(pattern, value=value, func_name=func_name)) def dispatch_if(expr): if expr is True: pattern = "return {func_name}(${info});" else: pattern = ("if (" + expr + ") {{\n" " return {func_name}(${info});\n" "}}") node = make_node(pattern) conditional = expr_from_exposure(func_like.exposure) if not conditional.is_always_true: node = CxxUnlikelyIfNode(cond=conditional, body=node) dispatcher_nodes.append(node) return expr is True and conditional.is_always_true if len(items) == 1: func_like = items[0].function_like can_fail = False return make_node("return {func_name}(${info});"), can_fail # 12.2. If V is undefined, ... func_like = find(lambda t, u: t.is_optional) if func_like: dispatch_if("{value}->IsUndefined()") # 12.3. if V is null or undefined, ... func_like = find(lambda t, u: t.does_include_nullable_or_dict) if func_like: dispatch_if("{value}->IsNullOrUndefined()") # 12.4. if V is a platform object, ... def inheritance_length(func_and_type): return (len(func_and_type[1].type_definition_object. inclusive_inherited_interfaces), func_and_type[1].type_definition_object.identifier) # Attempt to match from most derived to least derived. for func_like, idl_type in sorted( find_all_interfaces(), key=inheritance_length, reverse=True): v8_bridge_name = v8_bridge_class_name( idl_type.unwrap().type_definition_object) dispatch_if( _format("{}::HasInstance(${isolate}, {value})", v8_bridge_name)) # V8 specific optimization: BufferSource = ArrayBufferView or ArrayBuffer is_typedef_name = lambda t, name: t.is_typedef and t.identifier == name func_like = find( lambda t, u: is_typedef_name(t.unwrap(typedef=False), "BufferSource")) if func_like: dispatch_if("{value}->IsArrayBufferView() || " "{value}->IsArrayBuffer() || " "{value}->IsSharedArrayBuffer()") else: # 12.5. if Type(V) is Object, V has an [[ArrayBufferData]] internal # slot, ... func_like = find(lambda t, u: u.is_array_buffer) if func_like: dispatch_if("{value}->IsArrayBuffer() || " "{value}->IsSharedArrayBuffer()") # V8 specific optimization: ArrayBufferView func_like = find(lambda t, u: u.is_array_buffer_view) if func_like: dispatch_if("{value}->IsArrayBufferView()") # 12.6. if Type(V) is Object, V has a [[DataView]] internal slot, ... func_like = find(lambda t, u: u.is_data_view) if func_like: dispatch_if("{value}->IsDataView()") # 12.7. if Type(V) is Object, V has a [[TypedArrayName]] internal slot, ... typed_array_types = ("Int8Array", "Int16Array", "Int32Array", "Uint8Array", "Uint16Array", "Uint32Array", "Uint8ClampedArray", "Float32Array", "Float64Array") for typed_array_type in typed_array_types: func_like = find(lambda t, u: u.keyword_typename == typed_array_type) if func_like: dispatch_if(_format("{value}->Is{}()", typed_array_type)) # 12.8. if IsCallable(V) is true, ... func_like = find(lambda t, u: u.is_callback_function) if func_like: dispatch_if("{value}->IsFunction()") # 12.9. if Type(V) is Object and ... @@iterator ... 
func_like = find(lambda t, u: u.is_sequence or u.is_frozen_array) if func_like: dispatch_if("{value}->IsArray() || " # Excessive optimization "bindings::IsEsIterableObject" "(${isolate}, {value}, ${exception_state})") dispatcher_nodes.append( CxxUnlikelyIfNode(cond="${exception_state}.HadException()", body=TextNode("return;"))) # 12.10. if Type(V) is Object and ... func_like = find(lambda t, u: u.is_callback_interface or u.is_dictionary or u.is_record or u.is_object) if func_like: dispatch_if("{value}->IsObject()") # 12.11. if Type(V) is Boolean and ... func_like = find(lambda t, u: u.is_boolean) if func_like: dispatch_if("{value}->IsBoolean()") # 12.12. if Type(V) is Number and ... func_like = find(lambda t, u: u.is_numeric) if func_like: dispatch_if("{value}->IsNumber()") # 12.13. if there is an entry in S that has ... a string type ... # 12.14. if there is an entry in S that has ... a numeric type ... # 12.15. if there is an entry in S that has ... boolean ... # 12.16. if there is an entry in S that has any ... func_likes = [ find(lambda t, u: u.is_enumeration), find(lambda t, u: u.is_string), find(lambda t, u: u.is_numeric), find(lambda t, u: u.is_boolean), find(lambda t, u: u.is_any), ] for func_like in func_likes: if func_like: if dispatch_if(True): can_fail = False break return dispatcher_nodes, can_fail def make_overload_dispatcher(cg_context): # https://heycam.github.io/webidl/#dfn-overload-resolution-algorithm assert isinstance(cg_context, CodeGenContext) T = TextNode F = FormatNode overload_group = cg_context.property_ items = overload_group.effective_overload_set() args_size = lambda item: len(item.type_list) items_grouped_by_arg_size = itertools.groupby( sorted(items, key=args_size, reverse=True), key=args_size) # TODO(yukishiino): Runtime-enabled features should be taken into account # when calculating the max argument size. 
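    # |max_arg_size| is the largest type-list length in the effective overload
    # set; the runtime |arg_count| below is clamped to it.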
max_arg_size = max(map(args_size, items)) arg_count_def = F("const int arg_count = std::min(${info}.Length(), {});", max_arg_size) branches = SequenceNode() did_use_break = False for arg_size, items in items_grouped_by_arg_size: items = list(items) node, can_fail = _make_overload_dispatcher_per_arg_size( cg_context, items) if arg_size > 0: node = CxxLikelyIfNode( cond="arg_count == {}".format(arg_size), body=[node, T("break;") if can_fail else None]) did_use_break = did_use_break or can_fail conditional = expr_or( list( map( lambda item: expr_from_exposure(item.function_like.exposure ), items))) if not conditional.is_always_true: node = CxxUnlikelyIfNode(cond=conditional, body=node) branches.append(node) if did_use_break: branches = CxxBreakableBlockNode(branches) branches = SequenceNode([ arg_count_def, branches, ]) if not did_use_break and arg_size == 0 and conditional.is_always_true: return branches return SequenceNode([ branches, EmptyNode(), make_check_argument_length(cg_context), T("${exception_state}.ThrowTypeError" "(\"Overload resolution failed.\");\n" "return;"), ]) def make_report_coop_access(cg_context): assert isinstance(cg_context, CodeGenContext) if cg_context.class_like.identifier != "Window": return None ext_attrs = cg_context.member_like.extended_attributes if "CrossOrigin" not in ext_attrs: return None values = ext_attrs.values_of("CrossOrigin") if (cg_context.attribute_get and not (not values or "Getter" in values)): return None elif (cg_context.attribute_set and not ("Setter" in values)): return None return TextNode("${blink_receiver}->ReportCoopAccess(${property_name});") def make_report_deprecate_as(cg_context): assert isinstance(cg_context, CodeGenContext) target = cg_context.member_like or cg_context.property_ name = target.extended_attributes.value_of("DeprecateAs") if not name: return None pattern = ("// [DeprecateAs]\n" "Deprecation::CountDeprecation(" "${current_execution_context}, WebFeature::k{_1});") _1 = name node = TextNode(_format(pattern, _1=_1)) node.accumulate( CodeGenAccumulator.require_include_headers( ["third_party/blink/renderer/core/frame/deprecation.h"])) return node def _make_measure_web_feature_constant(cg_context): assert isinstance(cg_context, CodeGenContext) target = cg_context.member_like or cg_context.property_ ext_attrs = target.extended_attributes suffix = "" if cg_context.attribute_get: suffix = "_AttributeGetter" elif cg_context.attribute_set: suffix = "_AttributeSetter" elif cg_context.constructor: suffix = "_Constructor" elif cg_context.exposed_construct: suffix = "_ConstructorGetter" elif cg_context.operation: suffix = "_Method" name = ext_attrs.value_of("MeasureAs") or ext_attrs.value_of("Measure") if name: name = "k{}".format(name) elif cg_context.constructor: name = "kV8{}{}".format(cg_context.class_like.identifier, suffix) else: name = "kV8{}_{}{}".format( cg_context.class_like.identifier, name_style.raw.upper_camel_case(cg_context.property_.identifier), suffix) return "WebFeature::{}".format(name) def make_report_high_entropy(cg_context): assert isinstance(cg_context, CodeGenContext) target = cg_context.member_like or cg_context.property_ ext_attrs = target.extended_attributes if cg_context.attribute_set or "HighEntropy" not in ext_attrs: return None assert "Measure" in ext_attrs or "MeasureAs" in ext_attrs, "{}: {}".format( cg_context.idl_location_and_name, "[HighEntropy] must be specified with either [Measure] or " "[MeasureAs].") if ext_attrs.value_of("HighEntropy") == "Direct": text = _format( "// [HighEntropy=Direct]\n" 
"Dactyloscoper::RecordDirectSurface(" "${current_execution_context}, {measure_constant}, " "${return_value});", measure_constant=_make_measure_web_feature_constant(cg_context)) else: text = _format( "// [HighEntropy]\n" "Dactyloscoper::Record(" "${current_execution_context}, {measure_constant});", measure_constant=_make_measure_web_feature_constant(cg_context)) node = TextNode(text) node.accumulate( CodeGenAccumulator.require_include_headers( ["third_party/blink/renderer/core/frame/dactyloscoper.h"])) return node def make_report_measure_as(cg_context): assert isinstance(cg_context, CodeGenContext) target = cg_context.member_like or cg_context.property_ ext_attrs = target.extended_attributes if not ("Measure" in ext_attrs or "MeasureAs" in ext_attrs): return None text = _format( "// [Measure], [MeasureAs]\n" "UseCounter::Count(${current_execution_context}, {measure_constant});", measure_constant=_make_measure_web_feature_constant(cg_context)) node = TextNode(text) node.accumulate( CodeGenAccumulator.require_include_headers([ "third_party/blink/renderer/core/frame/web_feature.h", "third_party/blink/renderer/platform/instrumentation/use_counter.h", ])) return node def make_return_value_cache_return_early(cg_context): assert isinstance(cg_context, CodeGenContext) pred = cg_context.member_like.extended_attributes.value_of( "CachedAttribute") if pred: return TextNode("""\ // [CachedAttribute] static const V8PrivateProperty::SymbolKey kPrivatePropertyCachedAttribute; auto&& v8_private_cached_attribute = V8PrivateProperty::GetSymbol(${isolate}, kPrivatePropertyCachedAttribute); if (!${blink_receiver}->""" + pred + """()) { v8::Local<v8::Value> v8_value; if (!v8_private_cached_attribute.GetOrUndefined(${v8_receiver}) .ToLocal(&v8_value)) { return; } if (!v8_value->IsUndefined()) { bindings::V8SetReturnValue(${info}, v8_value); return; } }""") if "SaveSameObject" in cg_context.member_like.extended_attributes: return TextNode("""\ // [SaveSameObject] static const V8PrivateProperty::SymbolKey kPrivatePropertySaveSameObject; auto&& v8_private_save_same_object = V8PrivateProperty::GetSymbol(${isolate}, kPrivatePropertySaveSameObject); { v8::Local<v8::Value> v8_value; if (!v8_private_save_same_object.GetOrUndefined(${v8_receiver}) .ToLocal(&v8_value)) { return; } if (!v8_value->IsUndefined()) { bindings::V8SetReturnValue(${info}, v8_value); return; } }""") def make_return_value_cache_update_value(cg_context): assert isinstance(cg_context, CodeGenContext) if "CachedAttribute" in cg_context.member_like.extended_attributes: return TextNode("// [CachedAttribute]\n" "v8_private_cached_attribute.Set" "(${v8_receiver}, ${info}.GetReturnValue().Get());") if "SaveSameObject" in cg_context.member_like.extended_attributes: return TextNode("// [SaveSameObject]\n" "v8_private_save_same_object.Set" "(${v8_receiver}, ${info}.GetReturnValue().Get());") def make_runtime_call_timer_scope(cg_context, overriding_name=None): assert isinstance(cg_context, CodeGenContext) assert _is_none_or_str(overriding_name) target = cg_context.member_like or cg_context.property_ suffix = "" if cg_context.attribute_get: suffix = "_Getter" elif cg_context.attribute_set: suffix = "_Setter" elif cg_context.exposed_construct: suffix = "_ConstructorGetterCallback" counter = (target and target.extended_attributes.value_of("RuntimeCallStatsCounter")) if counter: macro_name = "RUNTIME_CALL_TIMER_SCOPE" counter_name = "RuntimeCallStats::CounterId::k{}{}".format( counter, suffix) else: macro_name = "RUNTIME_CALL_TIMER_SCOPE_DISABLED_BY_DEFAULT" 
counter_name = "\"Blink_{}_{}{}\"".format( blink_class_name(cg_context.class_like), overriding_name or target.identifier, suffix) return TextNode( _format( "{macro_name}(${info}.GetIsolate(), {counter_name});", macro_name=macro_name, counter_name=counter_name)) def make_steps_of_ce_reactions(cg_context): assert isinstance(cg_context, CodeGenContext) assert (cg_context.attribute_set or cg_context.operation or cg_context.indexed_property_setter or cg_context.named_property_setter or cg_context.named_property_deleter) if "CEReactions" not in cg_context.member_like.extended_attributes: return None nodes = [ TextNode("// [CEReactions]"), TextNode("CEReactionsScope ce_reactions_scope;"), ] nodes[-1].accumulate( CodeGenAccumulator.require_include_headers([ "third_party/blink/renderer/core/html/custom/ce_reactions_scope.h" ])) # CEReactions scope is not tolerant of V8 exception, so it's necessary to # invoke custom element reactions before throwing an exception. Thus, put # an ExceptionState before CEReactions scope. nodes.insert(0, WeakDependencyNode(dep_syms=["exception_state"])) return SequenceNode(nodes) def make_steps_of_put_forwards(cg_context): assert isinstance(cg_context, CodeGenContext) T = TextNode return SequenceNode([ T("// [PutForwards]"), T("v8::Local<v8::Value> target;"), T("if (!${v8_receiver}->Get(${current_context}, " "V8AtomicString(${isolate}, ${property_name}))" ".ToLocal(&target)) {\n" " return;\n" "}"), CxxUnlikelyIfNode( cond="!target->IsObject()", body=[ T("${exception_state}.ThrowTypeError(" "\"The attribute value is not an object\");"), T("return;"), ]), T("bool did_set;"), T("if (!target.As<v8::Object>()->Set(${current_context}, " "V8AtomicString(${isolate}, " "\"${attribute.extended_attributes.value_of(\"PutForwards\")}\"" "), ${v8_property_value}).To(&did_set)) {{\n" " return;\n" "}}"), ]) def make_steps_of_replaceable(cg_context): assert isinstance(cg_context, CodeGenContext) T = TextNode return SequenceNode([ T("// [Replaceable]"), T("bool did_create;"), T("if (!${v8_receiver}->CreateDataProperty(${current_context}, " "V8AtomicString(${isolate}, ${property_name}), " "${v8_property_value}).To(&did_create)) {\n" " return;\n" "}"), ]) def make_v8_set_return_value(cg_context): assert isinstance(cg_context, CodeGenContext) T = TextNode F = FormatNode if cg_context.does_override_idl_return_type: return T("bindings::V8SetReturnValue(${info}, ${return_value});") if not cg_context.return_type or cg_context.return_type.unwrap().is_void: # Request a SymbolNode |return_value| to define itself without # rendering any text. return T("<% return_value.request_symbol_definition() %>") operation = cg_context.operation if operation and (operation.is_setter or operation.is_deleter): # Blink implementation returns in a type different from the IDL type. # Namely, IndexedPropertySetterResult, NamedPropertySetterResult, and # NamedPropertyDeleterResult are returned ignoring the operation's # return type. return T("bindings::V8SetReturnValue(${info}, ${return_value});") return_type = cg_context.return_type if return_type.is_typedef: if return_type.identifier in ("EventHandler", "OnBeforeUnloadEventHandler", "OnErrorEventHandler"): return T("bindings::V8SetReturnValue(${info}, ${return_value}, " "${isolate}, ${blink_receiver});") # [CheckSecurity=ReturnValue] # # The returned object must be wrapped in its own realm instead of the # receiver object's relevant realm or the current realm. 
# # [CheckSecurity=ReturnValue] is used only for 'contentDocument' attribute # and 'getSVGDocument' operation of HTML{IFrame,Frame,Object,Embed}Element # interfaces, and Window.frameElement attribute, so far. # # All the interfaces above except for Window support 'contentWindow' # attribute and that's the global object of the creation context of the # returned V8 wrapper. Window.frameElement is implemented with [Custom] # for now and there is no need to support it. # # Note that the global object has its own context and there is no need to # pass the creation context to ToV8. if (cg_context.member_like.extended_attributes.value_of("CheckSecurity") == "ReturnValue"): condition = F( "!ToV8Traits<{}>::ToV8(ToScriptState(To<LocalFrame>(" "${blink_receiver}->contentWindow()->GetFrame()), " "${script_state}->World()), ${return_value})" ".ToLocal(&v8_value)", native_value_tag(return_type)) node = CxxBlockNode([ T("// [CheckSecurity=ReturnValue]"), T("DCHECK(IsA<LocalFrame>(" "${blink_receiver}->contentWindow()->GetFrame()));"), T("v8::Local<v8::Value> v8_value;"), CxxUnlikelyIfNode(cond=condition, body=T("return;")), T("bindings::V8SetReturnValue(${info}, v8_value);"), ]) node.accumulate( CodeGenAccumulator.require_include_headers([ "third_party/blink/renderer/core/frame/local_frame.h", ])) return node return_type = return_type.unwrap(typedef=True) return_type_body = return_type.unwrap() PRIMITIVE_TYPE_TO_CXX_TYPE = { "boolean": "bool", "byte": "int8_t", "octet": "uint8_t", "short": "int16_t", "unsigned short": "uint16_t", "long": "int32_t", "unsigned long": "uint32_t", "long long": "int64_t", "unsigned long long": "uint64_t", "float": "float", "unrestricted float": "float", "double": "double", "unrestricted double": "double", } cxx_type = PRIMITIVE_TYPE_TO_CXX_TYPE.get( return_type_body.keyword_typename) if cxx_type: return F( "bindings::V8SetReturnValue(${info}, ${return_value}, " "bindings::V8ReturnValue::PrimitiveType<{cxx_type}>());", cxx_type=cxx_type) if return_type_body.is_string or return_type_body.is_enumeration: args = ["${info}", "${return_value}", "${isolate}"] if return_type.is_nullable: args.append("bindings::V8ReturnValue::kNullable") else: args.append("bindings::V8ReturnValue::kNonNullable") return T("bindings::V8SetReturnValue({});".format(", ".join(args))) if return_type_body.is_interface: args = ["${info}", "${return_value}"] if return_type_body.identifier == "Window": args.append("${blink_receiver}") args.append("bindings::V8ReturnValue::kMaybeCrossOriginWindow") elif cg_context.constructor or cg_context.member_like.is_static: args.append("${creation_context}") elif cg_context.for_world == cg_context.MAIN_WORLD: args.append("bindings::V8ReturnValue::kMainWorld") else: args.append("${blink_receiver}") return T("bindings::V8SetReturnValue({});".format(", ".join(args))) if return_type.is_any or return_type_body.is_object: return T("bindings::V8SetReturnValue(${info}, ${return_value});") if return_type.is_promise: return T("bindings::V8SetReturnValue" "(${info}, ${return_value}.V8Value());") return T("bindings::V8SetReturnValue(${info}, ${v8_return_value});") def _make_empty_callback_def(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) if cg_context.v8_callback_type == CodeGenContext.V8_FUNCTION_CALLBACK: arg_decls = ["const v8::FunctionCallbackInfo<v8::Value>& info"] arg_names = ["info"] elif (cg_context.v8_callback_type == CodeGenContext. 
V8_ACCESSOR_NAME_GETTER_CALLBACK): arg_decls = [ "v8::Local<v8::Name> v8_property_name", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["v8_property_name", "info"] elif (cg_context.v8_callback_type == CodeGenContext. V8_ACCESSOR_NAME_SETTER_CALLBACK): arg_decls = [ "v8::Local<v8::Name> v8_property_name", "v8::Local<v8::Value> v8_property_value", "const v8::PropertyCallbackInfo<void>& info", ] arg_names = ["v8_property_name", "v8_property_value", "info"] elif (cg_context.v8_callback_type == CodeGenContext. V8_GENERIC_NAMED_PROPERTY_SETTER_CALLBACK): arg_decls = [ "v8::Local<v8::Name> v8_property_name", "v8::Local<v8::Value> v8_property_value", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["v8_property_name", "v8_property_value", "info"] if cg_context.no_alloc_direct_call_for_testing: arg_decls.append( "v8::FastApiCallbackOptions& v8_fast_api_callback_options") arg_names.append("v8_fast_api_callback_options") func_def = CxxFuncDefNode( name=function_name, arg_decls=arg_decls, return_type="void") func_def.set_base_template_vars(cg_context.template_bindings()) body = func_def.body for arg_name in arg_names: body.add_template_var(arg_name, arg_name) bind_callback_local_vars(body, cg_context) if cg_context.attribute or cg_context.function_like: bind_blink_api_arguments(body, cg_context) bind_return_value(body, cg_context) return func_def def make_attribute_get_callback_def(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) func_def = _make_empty_callback_def(cg_context, function_name) body = func_def.body body.extend([ make_check_receiver(cg_context), EmptyNode(), make_runtime_call_timer_scope(cg_context), make_bindings_trace_event(cg_context), make_report_coop_access(cg_context), make_report_deprecate_as(cg_context), make_report_measure_as(cg_context), make_log_activity(cg_context), EmptyNode(), ]) if "Getter" in cg_context.property_.extended_attributes.values_of( "Custom"): text = _format("${class_name}::{}(${info});", custom_function_name(cg_context)) body.append(TextNode(text)) return func_def body.extend([ make_return_value_cache_return_early(cg_context), EmptyNode(), make_check_security_of_return_value(cg_context), make_v8_set_return_value(cg_context), make_report_high_entropy(cg_context), make_return_value_cache_update_value(cg_context), ]) return func_def def make_attribute_set_callback_def(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) ext_attrs = cg_context.attribute.extended_attributes if cg_context.attribute.is_readonly and not any( ext_attr in ext_attrs for ext_attr in ("LegacyLenientSetter", "PutForwards", "Replaceable")): return None func_def = _make_empty_callback_def(cg_context, function_name) body = func_def.body if "LegacyLenientSetter" in ext_attrs: body.append(TextNode("// [LegacyLenientSetter]")) return func_def body.extend([ make_check_receiver(cg_context), EmptyNode(), make_runtime_call_timer_scope(cg_context), make_bindings_trace_event(cg_context), make_report_deprecate_as(cg_context), make_report_measure_as(cg_context), make_log_activity(cg_context), EmptyNode(), ]) if "Setter" in cg_context.property_.extended_attributes.values_of( "Custom"): text = _format("${class_name}::{}(${v8_property_value}, ${info});", custom_function_name(cg_context)) body.append(TextNode(text)) return func_def # Binary size reduction hack # 1. Drop the check of argument length although this is a violation of # Web IDL. # 2. 
Leverage the nature of [LegacyTreatNonObjectAsNull] (ES to IDL # conversion never fails). if (cg_context.attribute.idl_type.is_typedef and (cg_context.attribute.idl_type.identifier in ( "EventHandler", "OnBeforeUnloadEventHandler", "OnErrorEventHandler"))): body.extend([ TextNode("""\ EventListener* event_handler = JSEventHandler::CreateOrNull( ${v8_property_value}, JSEventHandler::HandlerType::k${attribute.idl_type.identifier});\ """), ]) code_generator_info = cg_context.attribute.code_generator_info func_name = name_style.api_func("set", cg_context.attribute.identifier) if code_generator_info.defined_in_partial: class_name = (code_generator_info.receiver_implemented_as or name_style.class_( cg_context.attribute.owner_mixin.identifier)) text = _format( "{class_name}::{func_name}" "(*${blink_receiver}, event_handler);", class_name=class_name, func_name=func_name) else: text = _format("${blink_receiver}->{func_name}(event_handler);", func_name=func_name) body.append(TextNode(text)) return func_def # Binary size reduction hack # When the following conditions are met, the implementation is shared. # 1. The attribute is annotated with [CEReactions, Reflect] and not # annotated with other extended attributes having side effect. # 2. The interface is implementing Element. def optimize_element_cereactions_reflect(): has_cereactions = False has_reflect = False for key in ext_attrs.keys(): if key == "CEReactions": has_cereactions = True elif key == "Reflect": has_reflect = True elif key in ("Affects", "CrossOriginIsolated", "DeprecateAs", "DirectSocketEnabled", "Exposed", "LogActivity", "LogAllWorlds", "Measure", "MeasureAs", "ReflectEmpty", "ReflectInvalid", "ReflectMissing", "ReflectOnly", "RuntimeCallStatsCounter", "RuntimeEnabled", "SecureContext", "URL", "Unscopable"): pass else: return None if not (has_cereactions and has_reflect): return None if not cg_context.interface.does_implement("Element"): return None content_attribute = _make_reflect_content_attribute_key( body, cg_context) idl_type = cg_context.attribute.idl_type.unwrap(typedef=True) if idl_type.is_boolean: func_name = "PerformAttributeSetCEReactionsReflectTypeBoolean" elif idl_type.type_name == "String": func_name = "PerformAttributeSetCEReactionsReflectTypeString" elif idl_type.type_name == "StringTreatNullAs": func_name = ("PerformAttributeSetCEReactionsReflect" "TypeStringLegacyNullToEmptyString") elif idl_type.type_name == "StringOrNull": func_name = "PerformAttributeSetCEReactionsReflectTypeStringOrNull" else: return None text = _format( "bindings::{func_name}" "(${info}, {content_attribute}, " "${class_like_name}, ${property_name});", func_name=func_name, content_attribute=content_attribute) return TextNode(text) node = optimize_element_cereactions_reflect() if node: body.append(node) return func_def body.extend([ make_check_argument_length(cg_context), EmptyNode(), ]) if "PutForwards" in ext_attrs: body.append(make_steps_of_put_forwards(cg_context)) return func_def if "Replaceable" in ext_attrs: body.append(make_steps_of_replaceable(cg_context)) return func_def body.extend([ make_steps_of_ce_reactions(cg_context), EmptyNode(), make_v8_set_return_value(cg_context), ]) return func_def def make_constant_callback_def(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) logging_nodes = SequenceNode([ make_report_deprecate_as(cg_context), make_report_measure_as(cg_context), make_log_activity(cg_context), ]) if not logging_nodes: return None func_def = 
_make_empty_callback_def(cg_context, function_name) body = func_def.body v8_set_return_value = _format( "bindings::V8SetReturnValue(${info}, ${class_name}::Constant::{});", constant_name(cg_context)) body.extend([ make_runtime_call_timer_scope(cg_context), make_bindings_trace_event(cg_context), logging_nodes, EmptyNode(), TextNode(v8_set_return_value), make_report_high_entropy(cg_context), ]) return func_def def make_constant_constant_def(cg_context, constant_name): # IDL constant's C++ constant definition assert isinstance(cg_context, CodeGenContext) assert isinstance(constant_name, str) constant_type = blink_type_info(cg_context.constant.idl_type).value_t return TextNode("static constexpr {type} {name} = {value};".format( type=constant_type, name=constant_name, value=cg_context.constant.value.literal)) def make_overload_dispatcher_function_def(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) func_def = _make_empty_callback_def(cg_context, function_name) body = func_def.body if cg_context.operation_group: body.append(make_operation_entry(cg_context)) body.append(EmptyNode()) body.append(make_cooperative_scheduling_safepoint(cg_context)) body.append(EmptyNode()) if cg_context.constructor_group: body.append(make_constructor_entry(cg_context)) body.append(EmptyNode()) body.append(make_overload_dispatcher(cg_context)) return func_def def make_constructor_entry(cg_context): assert isinstance(cg_context, CodeGenContext) return SequenceNode([ make_runtime_call_timer_scope(cg_context), make_bindings_trace_event(cg_context), EmptyNode(), make_check_constructor_call(cg_context), ]) def make_constructor_function_def(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) T = TextNode func_def = _make_empty_callback_def(cg_context, function_name) body = func_def.body if len(cg_context.constructor_group) == 1: body.append(make_constructor_entry(cg_context)) body.append(EmptyNode()) body.extend([ make_report_deprecate_as(cg_context), make_report_measure_as(cg_context), make_log_activity(cg_context), EmptyNode(), make_check_argument_length(cg_context), EmptyNode(), ]) if "HTMLConstructor" in cg_context.constructor.extended_attributes: body.append(T("// [HTMLConstructor]")) text = _format( "V8HTMLConstructor::HtmlConstructor(" "${info}, *${class_name}::GetWrapperTypeInfo(), " "HTMLElementType::{});", name_style.constant(cg_context.class_like.identifier)) body.append(T(text)) body.accumulate( CodeGenAccumulator.require_include_headers([ "third_party/blink/renderer/bindings/core/v8/v8_html_constructor.h" ])) else: body.append( T("v8::Local<v8::Object> v8_wrapper = " "${return_value}->AssociateWithWrapper(${isolate}, " "${class_name}::GetWrapperTypeInfo(), ${v8_receiver});")) body.append(T("bindings::V8SetReturnValue(${info}, v8_wrapper);")) return func_def def make_constructor_callback_def(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) constructor_group = cg_context.constructor_group if len(constructor_group) == 1: return make_constructor_function_def( cg_context.make_copy(constructor=constructor_group[0]), function_name) node = SequenceNode() for constructor in constructor_group: cgc = cg_context.make_copy(constructor=constructor) node.extend([ make_constructor_function_def( cgc, callback_function_name( cgc, overload_index=constructor.overload_index)), EmptyNode(), ]) node.append( 
make_overload_dispatcher_function_def(cg_context, function_name)) return node def make_exposed_construct_callback_def(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) func_def = _make_empty_callback_def(cg_context, function_name) body = func_def.body if (cg_context.exposed_construct.is_interface or cg_context.exposed_construct.is_callback_interface): tag = "bindings::V8ReturnValue::kInterfaceObject" elif cg_context.exposed_construct.is_namespace: tag = "bindings::V8ReturnValue::kNamespaceObject" else: assert False v8_set_return_value = _format( "bindings::V8SetReturnValue" "(${info}, {bridge}::GetWrapperTypeInfo(), {tag});", bridge=v8_bridge_class_name(cg_context.exposed_construct), tag=tag) body.extend([ make_runtime_call_timer_scope(cg_context), make_bindings_trace_event(cg_context), make_report_deprecate_as(cg_context), make_report_measure_as(cg_context), make_log_activity(cg_context), EmptyNode(), TextNode(v8_set_return_value), ]) return func_def def make_named_constructor_property_callback_def(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) func_def = _make_empty_callback_def(cg_context, function_name) body = func_def.body body.extend([ make_runtime_call_timer_scope(cg_context), make_bindings_trace_event(cg_context), make_report_deprecate_as(cg_context), make_report_measure_as(cg_context), make_log_activity(cg_context), EmptyNode(), ]) constructor_group = cg_context.exposed_construct assert isinstance(constructor_group, web_idl.ConstructorGroup) assert isinstance(constructor_group.owner, web_idl.Interface) named_ctor_v8_bridge = v8_bridge_class_name(constructor_group.owner) cgc = CodeGenContext( interface=constructor_group.owner, constructor_group=constructor_group, is_named_constructor=True, class_name=named_ctor_v8_bridge) named_ctor_name = callback_function_name(cgc) named_ctor_def = make_constructor_callback_def(cgc, named_ctor_name) return_value_cache_return_early = """\ static const V8PrivateProperty::SymbolKey kPrivatePropertyNamedConstructor; auto&& v8_private_named_constructor = V8PrivateProperty::GetSymbol(${isolate}, kPrivatePropertyNamedConstructor); v8::Local<v8::Value> v8_named_constructor; if (!v8_private_named_constructor.GetOrUndefined(${v8_receiver}) .ToLocal(&v8_named_constructor)) { return; } if (!v8_named_constructor->IsUndefined()) { bindings::V8SetReturnValue(${info}, v8_named_constructor); return; } """ pattern = """\ v8::Local<v8::Value> v8_value; if (!bindings::CreateNamedConstructorFunction( ${script_state}, {callback}, "{func_name}", {func_length}, {v8_bridge}::GetWrapperTypeInfo()) .ToLocal(&v8_value)) { return; } bindings::V8SetReturnValue(${info}, v8_value); """ create_named_constructor_function = _format( pattern, callback=named_ctor_name, func_name=constructor_group.identifier, func_length=constructor_group.min_num_of_required_arguments, v8_bridge=named_ctor_v8_bridge) return_value_cache_update_value = """\ v8_private_named_constructor.Set(${v8_receiver}, v8_value); """ body.extend([ TextNode(return_value_cache_return_early), TextNode(create_named_constructor_function), TextNode(return_value_cache_update_value), ]) return SequenceNode([named_ctor_def, EmptyNode(), func_def]) def list_no_alloc_direct_call_callbacks(cg_context): """ Returns a list of [NoAllocDirectCall] callback functions to be registered at V8, including all overloaded operations annotated with [NoAllocDirectCall] and their variants of optional arguments. 
Example: Given the following Web IDL fragments, void f(DOMString); // (a) [NoAllocDirectCall] void f(Node node); // (b) [NoAllocDirectCall] void f(optional long a, optional long b); // (c) the following callback functions should be generated, void F(v8::Local<v8::Value> node); // (b) void F(); // (c) void F(int32_t a); // (c) void F(int32_t a, int32_t b); // (c) thus the following entries are returned. [ Entry(operation=(b), argument_count=1), # overload_index=2 Entry(operation=(c), argument_count=2), # overload_index=3 Entry(operation=(c), argument_count=1), # overload_index=3 Entry(operation=(c), argument_count=0), # overload_index=3 ] """ assert isinstance(cg_context, CodeGenContext) class Entry(object): def __init__(self, operation, argument_count): self.operation = operation self.argument_count = argument_count self.callback_name = callback_function_name( cg_context, overload_index=self.operation.overload_index, argument_count=self.argument_count) entries = [] for operation in cg_context.operation_group: if "NoAllocDirectCall" not in operation.extended_attributes: continue for argument in reversed(operation.arguments): entries.append(Entry(operation, argument.index + 1)) if not argument.is_optional: break else: entries.append(Entry(operation, 0)) return entries def make_no_alloc_direct_call_callback_def(cg_context, function_name, argument_count): """ Args: cg_context: A CodeGenContext of the target IDL construct. function_name: The function name to be produced. argument_count: The number of arguments that the produced function takes, which may be different from the number of arguments of the target cg_context.function_like due to optional arguments. """ assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) assert isinstance(argument_count, int) S = SymbolNode T = TextNode F = FormatNode function_like = cg_context.function_like class ArgumentInfo(object): def __init__(self, v8_type, v8_arg_name, blink_arg_name, symbol_node): self.v8_type = v8_type self.v8_arg_name = v8_arg_name self.blink_arg_name = blink_arg_name self.symbol_node = symbol_node def v8_type_and_symbol_node(argument, v8_arg_name, blink_arg_name): if argument.idl_type.unwrap().is_interface: return ("v8::Local<v8::Value>", make_v8_to_blink_value(blink_arg_name, "${{{}}}".format(v8_arg_name), argument.idl_type, argument=argument, cg_context=cg_context)) else: return (blink_type_info(argument.idl_type).value_t, S(blink_arg_name, "auto&& {} = {};".format(blink_arg_name, v8_arg_name))) arg_list = [] for argument in function_like.arguments: if not (argument.index < argument_count): break blink_arg_name = name_style.arg_f("arg{}_{}", argument.index + 1, argument.identifier) v8_arg_name = name_style.arg_f("v8_arg{}_{}", argument.index + 1, argument.identifier) v8_type, symbol_node = v8_type_and_symbol_node(argument, v8_arg_name, blink_arg_name) arg_list.append( ArgumentInfo(v8_type, v8_arg_name, blink_arg_name, symbol_node)) arg_decls = (["v8::Local<v8::Object> v8_arg0_receiver"] + list( map(lambda arg: "{} {}".format(arg.v8_type, arg.v8_arg_name), arg_list)) + ["v8::FastApiCallbackOptions& v8_arg_callback_options"]) return_type = ("void" if function_like.return_type.is_void else blink_type_info(function_like.return_type).value_t) func_def = CxxFuncDefNode(name=function_name, arg_decls=arg_decls, return_type=return_type) func_def.set_base_template_vars(cg_context.template_bindings()) body = func_def.body for arg in arg_list: body.add_template_var(arg.v8_arg_name, arg.v8_arg_name) 
body.register_code_symbol(arg.symbol_node) body.add_template_vars({ "v8_arg0_receiver": "v8_arg0_receiver", "v8_arg_callback_options": "v8_arg_callback_options" }) body.register_code_symbols([ S("blink_receiver", (_format( "{}* ${blink_receiver} = " "${class_name}::ToWrappableUnsafe(${v8_receiver});", blink_class_name(cg_context.interface)))), S("isolate", "v8::Isolate* ${isolate} = ${v8_receiver}->GetIsolate();"), S("v8_receiver", ("v8::Local<v8::Object> ${v8_receiver} = " "${v8_arg0_receiver};")), ]) bind_callback_local_vars(body, cg_context) body.extend([ T("ThreadState::NoAllocationScope " "thread_no_alloc_scope(ThreadState::Current());"), T("v8::Isolate::DisallowJavascriptExecutionScope no_js_exec_scope(" "${isolate}, " "v8::Isolate::DisallowJavascriptExecutionScope::CRASH_ON_FAILURE);"), T("blink::NoAllocDirectCallScope no_alloc_direct_call_scope(" "${blink_receiver}, &${v8_arg_callback_options});"), EmptyNode(), ]) blink_arguments = list( map(lambda arg: "${{{}}}".format(arg.blink_arg_name), arg_list)) # If there are following optional arguments with default values, append # them filled with the default values. for argument in function_like.arguments[argument_count:]: if not argument.default_value: break blink_arg_name = name_style.arg_f("arg{}_{}", argument.index + 1, argument.identifier) default_expr = make_default_value_expr(argument.idl_type, argument.default_value) body.register_code_symbol( S((blink_arg_name), "auto&& {}{{{}}};".format(blink_arg_name, default_expr.initializer_expr))) blink_arguments.append("${{{}}}".format(blink_arg_name)) if cg_context.may_throw_exception: blink_arguments.append("${exception_state}") body.append( F("${blink_receiver}->{member_func}({blink_arguments});", member_func=backward_compatible_api_func(cg_context), blink_arguments=", ".join(blink_arguments))) if cg_context.may_throw_exception: body.append( CxxUnlikelyIfNode( cond="UNLIKELY(${exception_state}.HadException())", body=T("return;"))) return func_def def make_no_alloc_direct_call_for_testing_callback_def(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) func_def = _make_empty_callback_def(cg_context, function_name) body = func_def.body body.extend([ make_v8_set_return_value(cg_context), ]) node = ListNode([ TextNode("#if DCHECK_IS_ON()"), func_def, TextNode("#endif // DCHECK_IS_ON()"), ]) node.accumulate( CodeGenAccumulator.require_include_headers(["base/dcheck_is_on.h"])) return node def make_no_alloc_direct_call_for_testing_call(cg_context): assert isinstance(cg_context, CodeGenContext) T = TextNode F = FormatNode if "NoAllocDirectCall" not in cg_context.operation.extended_attributes: return None scope = SymbolScopeNode() scope.register_code_symbol( SymbolNode( "v8_fast_api_callback_options", "v8::FastApiCallbackOptions ${v8_fast_api_callback_options}" " = v8::FastApiCallbackOptions::CreateForTesting(${isolate});")) scope.extend([ F(("{}(${info}, ${v8_fast_api_callback_options});"), callback_function_name( cg_context.make_copy(no_alloc_direct_call_for_testing=True), overload_index=cg_context.operation.overload_index)), CxxUnlikelyIfNode(cond="${blink_receiver}->HasDeferredActions()", body=[ T("${blink_receiver}->FlushDeferredActions();"), T("return;"), ]), CxxLikelyIfNode(cond="!${v8_fast_api_callback_options}.fallback", body=T("return;")), ]) return ListNode([ T("#if DCHECK_IS_ON()"), T("// [NoAllocDirectCall]"), CxxUnlikelyIfNode(cond=("RuntimeEnabledFeatures::" "FakeNoAllocDirectCallForTestingEnabled()"), body=scope), 
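        # Testing-only path (DCHECK builds, behind
        # FakeNoAllocDirectCallForTesting): run the [NoAllocDirectCall]
        # callback with fake callback options, flush any deferred actions,
        # and fall through to the regular call only if a fallback was
        # requested.
        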
T("#endif // DCHECK_IS_ON()"), ]) def make_no_alloc_direct_call_flush_deferred_actions(cg_context): assert isinstance(cg_context, CodeGenContext) if "NoAllocDirectCall" not in cg_context.operation.extended_attributes: return None return SequenceNode([ TextNode("// [NoAllocDirectCall]"), CxxUnlikelyIfNode( cond="UNLIKELY(${blink_receiver}->HasDeferredActions())", body=[ TextNode("${blink_receiver}->FlushDeferredActions();"), TextNode("return;"), ]), ]) def make_operation_entry(cg_context): assert isinstance(cg_context, CodeGenContext) return SequenceNode([ make_runtime_call_timer_scope(cg_context), make_bindings_trace_event(cg_context), ]) def make_operation_function_def(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) func_def = _make_empty_callback_def(cg_context, function_name) body = func_def.body if not cg_context.operation_group or len(cg_context.operation_group) == 1: body.append(make_operation_entry(cg_context)) body.append(EmptyNode()) body.extend([ make_check_receiver(cg_context), EmptyNode(), make_report_coop_access(cg_context), make_report_deprecate_as(cg_context), make_report_measure_as(cg_context), make_log_activity(cg_context), EmptyNode(), ]) if "Custom" in cg_context.property_.extended_attributes: text = _format("${class_name}::{}(${info});", custom_function_name(cg_context)) body.append(TextNode(text)) return func_def body.extend([ make_no_alloc_direct_call_flush_deferred_actions(cg_context), EmptyNode(), make_check_argument_length(cg_context), EmptyNode(), make_steps_of_ce_reactions(cg_context), EmptyNode(), make_no_alloc_direct_call_for_testing_call(cg_context), EmptyNode(), make_check_security_of_return_value(cg_context), make_v8_set_return_value(cg_context), make_report_high_entropy(cg_context), ]) return func_def def make_operation_callback_def(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) operation_group = cg_context.operation_group assert (not ("Custom" in operation_group.extended_attributes) or len(operation_group) == 1) nodes = SequenceNode() if "NoAllocDirectCall" in operation_group.extended_attributes: for entry in list_no_alloc_direct_call_callbacks(cg_context): cgc = cg_context.make_copy(operation=entry.operation, no_alloc_direct_call=True) nodes.extend([ make_no_alloc_direct_call_callback_def( cgc, callback_function_name( cgc, overload_index=entry.operation.overload_index, argument_count=entry.argument_count), argument_count=entry.argument_count), EmptyNode(), ]) for operation in operation_group: if "NoAllocDirectCall" not in operation.extended_attributes: continue cgc = cg_context.make_copy(operation=operation, no_alloc_direct_call_for_testing=True) nodes.extend([ make_no_alloc_direct_call_for_testing_callback_def( cgc, callback_function_name( cgc, overload_index=operation.overload_index)), EmptyNode(), ]) if len(operation_group) == 1: nodes.append( make_operation_function_def( cg_context.make_copy(operation=operation_group[0]), function_name)) return nodes for operation in operation_group: cgc = cg_context.make_copy(operation=operation) nodes.extend([ make_operation_function_def( cgc, callback_function_name( cgc, overload_index=operation.overload_index)), EmptyNode(), ]) nodes.append( make_overload_dispatcher_function_def(cg_context, function_name)) return nodes def make_stringifier_callback_def(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) if 
cg_context.stringifier.attribute: return make_attribute_get_callback_def( cg_context.make_copy( attribute=cg_context.stringifier.attribute, attribute_get=True), function_name) elif cg_context.stringifier.operation: return make_operation_function_def( cg_context.make_copy(operation=cg_context.stringifier.operation), function_name) assert False # ---------------------------------------------------------------------------- # Callback functions of indexed and named interceptors # ---------------------------------------------------------------------------- def _make_interceptor_callback(cg_context, function_name, arg_decls, arg_names, class_name, runtime_call_timer_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) assert isinstance(arg_decls, (list, tuple)) assert all(isinstance(arg_decl, str) for arg_decl in arg_decls) assert isinstance(arg_names, (list, tuple)) assert all(isinstance(arg_name, str) for arg_name in arg_names) assert _is_none_or_str(class_name) assert isinstance(runtime_call_timer_name, str) func_decl = CxxFuncDeclNode( name=function_name, arg_decls=arg_decls, return_type="void", static=True) func_def = _make_interceptor_callback_def(cg_context, function_name, arg_decls, arg_names, class_name, runtime_call_timer_name) return func_decl, func_def def _make_interceptor_callback_def(cg_context, function_name, arg_decls, arg_names, class_name, runtime_call_timer_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) assert isinstance(arg_decls, (list, tuple)) assert all(isinstance(arg_decl, str) for arg_decl in arg_decls) assert isinstance(arg_names, (list, tuple)) assert all(isinstance(arg_name, str) for arg_name in arg_names) assert _is_none_or_str(class_name) assert isinstance(runtime_call_timer_name, str) func_def = CxxFuncDefNode( name=function_name, arg_decls=arg_decls, return_type="void", class_name=class_name) func_def.set_base_template_vars(cg_context.template_bindings()) body = func_def.body for arg_name in arg_names: body.add_template_var(arg_name, arg_name) bind_callback_local_vars(body, cg_context) body.extend([ make_runtime_call_timer_scope(cg_context, runtime_call_timer_name), EmptyNode(), ]) return func_def def make_indexed_property_getter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "uint32_t index", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["index", "info"] func_decl, func_def = _make_interceptor_callback( cg_context, function_name, arg_decls, arg_names, cg_context.class_name, "IndexedPropertyGetter") body = func_def.body if not cg_context.interface.indexed_and_named_properties.indexed_getter: body.append( TextNode("""\ v8::Local<v8::String> property_name = V8AtomicString(${isolate}, AtomicString::Number(${index})); ${class_name}::NamedPropertyGetterCallback(property_name, ${info}); """)) return func_decl, func_def bind_return_value(body, cg_context, overriding_args=["${index}"]) body.extend([ TextNode("""\ // LegacyPlatformObjectGetOwnProperty // https://heycam.github.io/webidl/#LegacyPlatformObjectGetOwnProperty // step 1.2. If index is a supported property index, then: // step 3. Return OrdinaryGetOwnProperty(O, P). if (${index} >= ${blink_receiver}->length()) return; // Do not intercept. Fallback to OrdinaryGetOwnProperty. 
"""), make_v8_set_return_value(cg_context), ]) return func_decl, func_def def make_indexed_property_setter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "uint32_t index", "v8::Local<v8::Value> v8_property_value", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["index", "v8_property_value", "info"] func_decl, func_def = _make_interceptor_callback( cg_context, function_name, arg_decls, arg_names, cg_context.class_name, "IndexedPropertySetter") body = func_def.body if not cg_context.interface.indexed_and_named_properties.indexed_getter: body.append( TextNode("""\ v8::Local<v8::String> property_name = V8AtomicString(${isolate}, AtomicString::Number(${index})); ${class_name}::NamedPropertySetterCallback( property_name, ${v8_property_value}, ${info}); """)) return func_decl, func_def if not cg_context.indexed_property_setter: body.append( TextNode("""\ // 3.9.2. [[Set]] // https://heycam.github.io/webidl/#legacy-platform-object-set // OrdinarySetWithOwnDescriptor will end up calling DefineOwnProperty, // which will fail when the receiver object is this legacy platform // object. bindings::V8SetReturnValue(${info}, nullptr); if (${info}.ShouldThrowOnError()) { ExceptionState exception_state(${info}.GetIsolate(), ExceptionState::kIndexedSetterContext, "${interface.identifier}"); exception_state.ThrowTypeError( "Indexed property setter is not supported."); } """)) return func_decl, func_def bind_return_value( body, cg_context, overriding_args=["${index}", "${blink_property_value}"]) body.register_code_symbol( make_v8_to_blink_value( "blink_property_value", "${v8_property_value}", cg_context.indexed_property_setter.arguments[1].idl_type, argument=cg_context.indexed_property_setter.arguments[1])) body.extend([ TextNode("""\ // 3.9.2. [[Set]] // https://heycam.github.io/webidl/#legacy-platform-object-set // step 1. If O and Receiver are the same object, then:\ """), CxxLikelyIfNode(cond="${info}.Holder() == ${info}.This()", body=[ TextNode("""\ // step 1.1.1. Invoke the indexed property setter with P and V.\ """), make_steps_of_ce_reactions(cg_context), EmptyNode(), make_v8_set_return_value(cg_context), TextNode("""\ bindings::V8SetReturnValue(${info}, nullptr); return;"""), ]), EmptyNode(), TextNode("""\ // Do not intercept. Fallback to OrdinarySetWithOwnDescriptor. """), ]) return func_decl, func_def def make_indexed_property_deleter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "uint32_t index", "const v8::PropertyCallbackInfo<v8::Boolean>& info", ] arg_names = ["index", "info"] func_decl, func_def = _make_interceptor_callback( cg_context, function_name, arg_decls, arg_names, cg_context.class_name, "IndexedPropertyDeleter") body = func_def.body if not cg_context.interface.indexed_and_named_properties.indexed_getter: body.append( TextNode("""\ v8::Local<v8::String> property_name = V8AtomicString(${isolate}, AtomicString::Number(${index})); ${class_name}::NamedPropertyDeleterCallback(property_name, ${info}); """)) return func_decl, func_def body.append( TextNode("""\ // 3.9.4. [[Delete]] // https://heycam.github.io/webidl/#legacy-platform-object-delete // step 1.2. If index is not a supported property index, then return true. // step 1.3. Return false. 

const bool is_supported = ${index} < ${blink_receiver}->length();
bindings::V8SetReturnValue(${info}, !is_supported);
if (is_supported && ${info}.ShouldThrowOnError()) {
  ExceptionState exception_state(${info}.GetIsolate(),
                                 ExceptionState::kIndexedDeletionContext,
                                 "${interface.identifier}");
  exception_state.ThrowTypeError("Index property deleter is not supported.");
}
"""))

    return func_decl, func_def


def make_indexed_property_definer_callback(cg_context, function_name):
    assert isinstance(cg_context, CodeGenContext)
    assert isinstance(function_name, str)

    arg_decls = [
        "uint32_t index",
        "const v8::PropertyDescriptor& v8_property_desc",
        "const v8::PropertyCallbackInfo<v8::Value>& info",
    ]
    arg_names = ["index", "v8_property_desc", "info"]

    func_decl, func_def = _make_interceptor_callback(
        cg_context, function_name, arg_decls, arg_names, cg_context.class_name,
        "IndexedPropertyDefiner")
    body = func_def.body

    if not cg_context.interface.indexed_and_named_properties.indexed_getter:
        body.append(
            TextNode("""\
v8::Local<v8::String> property_name =
    V8AtomicString(${isolate}, AtomicString::Number(${index}));
${class_name}::NamedPropertyDefinerCallback(
    property_name, ${v8_property_desc}, ${info});
"""))
        return func_decl, func_def

    body.append(
        TextNode("""\
// 3.9.3. [[DefineOwnProperty]]
// https://heycam.github.io/webidl/#legacy-platform-object-defineownproperty
// step 1.1. If the result of calling IsDataDescriptor(Desc) is false, then
// return false.
if (v8_property_desc.has_get() || v8_property_desc.has_set()) {
  bindings::V8SetReturnValue(${info}, nullptr);
  if (${info}.ShouldThrowOnError()) {
    ExceptionState exception_state(${info}.GetIsolate(),
                                   ExceptionState::kIndexedSetterContext,
                                   "${interface.identifier}");
    exception_state.ThrowTypeError("Accessor properties are not allowed.");
  }
  return;
}
"""))

    if not cg_context.interface.indexed_and_named_properties.indexed_setter:
        body.append(
            TextNode("""\
// step 1.2. If O does not implement an interface with an indexed property
// setter, then return false.
bindings::V8SetReturnValue(${info}, nullptr);
if (${info}.ShouldThrowOnError()) {
  ExceptionState exception_state(${info}.GetIsolate(),
                                 ExceptionState::kIndexedSetterContext,
                                 "${interface.identifier}");
  exception_state.ThrowTypeError("Index property setter is not supported.");
}
"""))
    else:
        body.append(
            TextNode("""\
// step 1.3. Invoke the indexed property setter with P and Desc.[[Value]].
${class_name}::IndexedPropertySetterCallback(
    ${index}, ${v8_property_desc}.value(), ${info});
"""))

    return func_decl, func_def


def make_indexed_property_descriptor_callback(cg_context, function_name):
    assert isinstance(cg_context, CodeGenContext)
    assert isinstance(function_name, str)

    arg_decls = [
        "uint32_t index",
        "const v8::PropertyCallbackInfo<v8::Value>& info",
    ]
    arg_names = ["index", "info"]

    func_decl, func_def = _make_interceptor_callback(
        cg_context, function_name, arg_decls, arg_names, cg_context.class_name,
        "IndexedPropertyDescriptor")
    body = func_def.body

    if not cg_context.interface.indexed_and_named_properties.indexed_getter:
        body.append(
            TextNode("""\
v8::Local<v8::String> property_name =
    V8AtomicString(${isolate}, AtomicString::Number(${index}));
${class_name}::NamedPropertyDescriptorCallback(property_name, ${info});
"""))
        return func_decl, func_def

    pattern = """\
// LegacyPlatformObjectGetOwnProperty
// https://heycam.github.io/webidl/#LegacyPlatformObjectGetOwnProperty
// step 1.2.3. 
If operation was defined without an identifier, then set // value to the result of performing the steps listed in the interface // description to determine the value of an indexed property with index // as the index. // step 1.2.4. Otherwise, operation was defined with an identifier. Set // value to the result of performing the steps listed in the description // of operation with index as the only argument value. ${class_name}::IndexedPropertyGetterCallback(${index}, ${info}); v8::Local<v8::Value> v8_value = ${info}.GetReturnValue().Get(); // step 1.2. If index is a supported property index, then: // step 3. Return OrdinaryGetOwnProperty(O, P). if (v8_value->IsUndefined()) return; // Do not intercept. Fallback to OrdinaryGetOwnProperty. // step 1.2.6. Set desc.[[Value]] to the result of converting value to an // ECMAScript value. // step 1.2.7. If O implements an interface with an indexed property setter, // then set desc.[[Writable]] to true, otherwise set it to false. // step 1.2.8. Set desc.[[Enumerable]] and desc.[[Configurable]] to true. v8::PropertyDescriptor desc(v8_value, /*writable=*/{cxx_writable}); desc.set_enumerable(true); desc.set_configurable(true); bindings::V8SetReturnValue(${info}, desc);""" writable = bool( cg_context.interface.indexed_and_named_properties.indexed_setter) cxx_writable = "true" if writable else "false" body.append(TextNode(_format(pattern, cxx_writable=cxx_writable))) return func_decl, func_def def make_indexed_property_enumerator_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) if not cg_context.interface.indexed_and_named_properties.indexed_getter: return None, None arg_decls = ["const v8::PropertyCallbackInfo<v8::Array>& info"] arg_names = ["info"] func_decl, func_def = _make_interceptor_callback( cg_context, function_name, arg_decls, arg_names, cg_context.class_name, "IndexedPropertyEnumerator") body = func_def.body body.append( TextNode("""\ // 3.9.6. [[OwnPropertyKeys]] // https://heycam.github.io/webidl/#legacy-platform-object-ownpropertykeys // step 2. If O supports indexed properties, then for each index of O's // supported property indices, in ascending numerical order, append // ! ToString(index) to keys. 
uint32_t length = ${blink_receiver}->length(); v8::Local<v8::Array> array = bindings::EnumerateIndexedProperties(${isolate}, length); bindings::V8SetReturnValue(${info}, array); """)) body.accumulate( CodeGenAccumulator.require_include_headers([ "third_party/blink/renderer/bindings/core/v8/generated_code_helper.h" ])) return func_decl, func_def def make_named_property_getter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "v8::Local<v8::Name> v8_property_name", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["v8_property_name", "info"] func_decl, func_def = _make_interceptor_callback( cg_context, function_name, arg_decls, arg_names, cg_context.class_name, "NamedPropertyGetter") body = func_def.body bind_return_value( body, cg_context, overriding_args=["${blink_property_name}"]) if "Custom" in cg_context.named_property_getter.extended_attributes: text = _format("${class_name}::{}(${blink_property_name}, ${info});", custom_function_name(cg_context)) body.append(TextNode(text)) return func_decl, func_def # The named property getter's implementation of Blink is not designed to # represent the property existence, and we have to determine the property # existence by heuristics. type = cg_context.return_type.unwrap() if type.is_any or type.is_object: not_found_expr = "${return_value}.IsEmpty()" elif type.is_string: not_found_expr = "${return_value}.IsNull()" elif type.is_interface: not_found_expr = "!${return_value}" elif type.is_union: not_found_expr = "!${return_value}" else: assert False body.extend([ TextNode("""\ // LegacyPlatformObjectGetOwnProperty // https://heycam.github.io/webidl/#LegacyPlatformObjectGetOwnProperty // step 2.1. If the result of running the named property visibility // algorithm with property name P and object O is true, then:\ """), CxxUnlikelyIfNode( cond=not_found_expr, body=[ TextNode("// step 3. Return OrdinaryGetOwnProperty(O, P)."), TextNode("return; // Do not intercept."), ]), TextNode("""\ // step 2.1.3. If operation was defined without an identifier, then set // value to the result of performing the steps listed in the interface // description to determine the value of a named property with P as the // name. // step 2.1.4. Otherwise, operation was defined with an identifier. Set // value to the result of performing the steps listed in the description // of operation with P as the only argument value.\ """), make_v8_set_return_value(cg_context), ]) return func_decl, func_def def make_named_property_setter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "v8::Local<v8::Name> v8_property_name", "v8::Local<v8::Value> v8_property_value", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["v8_property_name", "v8_property_value", "info"] func_decl, func_def = _make_interceptor_callback( cg_context, function_name, arg_decls, arg_names, cg_context.class_name, "NamedPropertySetter") body = func_def.body if not cg_context.named_property_setter: body.append( TextNode("""\ // 3.9.2. [[Set]] // https://heycam.github.io/webidl/#legacy-platform-object-set // step 3. Perform ? 
OrdinarySetWithOwnDescriptor(O, P, V, Receiver, ownDesc).\ """)) if ("LegacyOverrideBuiltIns" in cg_context.interface.extended_attributes): body.append( TextNode("""\ // [LegacyOverrideBuiltIns] if (${info}.Holder()->GetRealNamedPropertyAttributesInPrototypeChain( ${current_context}, ${v8_property_name}).IsJust()) { return; // Fallback to the existing property. } """)) body.append( TextNode("""\ ${class_name}::NamedPropertyGetterCallback(${v8_property_name}, ${info}); const bool is_creating = ${info}.GetReturnValue().Get()->IsUndefined(); if (!is_creating) { bindings::V8SetReturnValue(${info}, nullptr); if (${info}.ShouldThrowOnError()) { ExceptionState exception_state(${info}.GetIsolate(), ExceptionState::kNamedSetterContext, "${interface.identifier}"); exception_state.ThrowTypeError( "Named property setter is not supported."); } return; } // Do not intercept. Fallback and let it define a new own property. """)) return func_decl, func_def bind_return_value( body, cg_context, overriding_args=["${blink_property_name}", "${blink_property_value}"]) body.register_code_symbol( make_v8_to_blink_value( "blink_property_value", "${v8_property_value}", cg_context.named_property_setter.arguments[1].idl_type, argument=cg_context.named_property_setter.arguments[1])) if "Custom" in cg_context.named_property_setter.extended_attributes: text = _format( "${class_name}::{}" "(${blink_property_name}, ${v8_property_value}, ${info});", custom_function_name(cg_context)) body.append(TextNode(text)) return func_decl, func_def body.extend([ TextNode("""\ // 3.9.2. [[Set]] // https://heycam.github.io/webidl/#legacy-platform-object-set // step 1. If O and Receiver are the same object, then:\ """), CxxLikelyIfNode(cond="${info}.Holder() == ${info}.This()", body=[ TextNode("""\ // step 1.2.1. Invoke the named property setter with P and V.\ """), make_steps_of_ce_reactions(cg_context), EmptyNode(), make_v8_set_return_value(cg_context), TextNode("""\ % if interface.identifier == "CSSStyleDeclaration": // CSSStyleDeclaration is abusing named properties. // Do not intercept if the property is not found. % else: bindings::V8SetReturnValue(${info}, nullptr); % endif return;"""), ]), EmptyNode(), TextNode("""\ // Do not intercept. Fallback to OrdinarySetWithOwnDescriptor. """), ]) return func_decl, func_def def make_named_property_deleter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "v8::Local<v8::Name> v8_property_name", "const v8::PropertyCallbackInfo<v8::Boolean>& info", ] arg_names = ["v8_property_name", "info"] func_decl, func_def = _make_interceptor_callback( cg_context, function_name, arg_decls, arg_names, cg_context.class_name, "NamedPropertyDeleter") body = func_def.body props = cg_context.interface.indexed_and_named_properties if (not cg_context.named_property_deleter and "NotEnumerable" in props.named_getter.extended_attributes): body.append( TextNode("""\ // 3.9.4. [[Delete]] // https://heycam.github.io/webidl/#legacy-platform-object-delete // step 2. If O supports named properties, O does not implement an interface // with the [Global] extended attribute and the result of calling the // named property visibility algorithm with property name P and object O // is true, then: // // There is no easy way to determine whether the named property is visible // or not. Just do not intercept and fallback to the default behavior. 
""")) return func_decl, func_def if not cg_context.named_property_deleter: body.append( TextNode("""\ // 3.9.4. [[Delete]] // https://heycam.github.io/webidl/#legacy-platform-object-delete // step 2. If O supports named properties, O does not implement an interface // with the [Global] extended attribute and the result of calling the // named property visibility algorithm with property name P and object O // is true, then: // step 2.1. If O does not implement an interface with a named property // deleter, then return false. ExceptionState exception_state(${info}.GetIsolate(), ExceptionState::kNamedDeletionContext, "${interface.identifier}"); bool does_exist = ${blink_receiver}->NamedPropertyQuery( ${blink_property_name}, exception_state); if (exception_state.HadException()) return; if (does_exist) { bindings::V8SetReturnValue(${info}, false); if (${info}.ShouldThrowOnError()) { exception_state.ThrowTypeError("Named property deleter is not supported."); } return; } // Do not intercept. """)) return func_decl, func_def bind_return_value( body, cg_context, overriding_args=["${blink_property_name}"]) if "Custom" in cg_context.named_property_deleter.extended_attributes: text = _format("${class_name}::{}(${blink_property_name}, ${info});", custom_function_name(cg_context)) body.append(TextNode(text)) return func_decl, func_def body.extend([ TextNode("""\ // 3.9.4. [[Delete]] // https://heycam.github.io/webidl/#legacy-platform-object-delete\ """), make_steps_of_ce_reactions(cg_context), EmptyNode(), make_v8_set_return_value(cg_context), TextNode("""\ if (${return_value} == NamedPropertyDeleterResult::kDidNotDelete) { if (${info}.ShouldThrowOnError()) { ExceptionState exception_state(${info}.GetIsolate(), ExceptionState::kNamedDeletionContext, "${interface.identifier}"); exception_state.ThrowTypeError("Failed to delete a property."); } return; }"""), ]) return func_decl, func_def def make_named_property_definer_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "v8::Local<v8::Name> v8_property_name", "const v8::PropertyDescriptor& v8_property_desc", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["v8_property_name", "v8_property_desc", "info"] func_decl, func_def = _make_interceptor_callback( cg_context, function_name, arg_decls, arg_names, cg_context.class_name, "NamedPropertyDefiner") body = func_def.body if cg_context.interface.identifier == "CSSStyleDeclaration": body.append( TextNode("""\ // CSSStyleDeclaration is abusing named properties. // Do not intercept. Fallback to OrdinaryDefineOwnProperty. """)) elif cg_context.interface.identifier in ("HTMLEmbedElement", "HTMLObjectElement"): body.append( TextNode("""\ // HTMLEmbedElement and HTMLObjectElement are abusing named properties. // Do not intercept. Fallback to OrdinaryDefineOwnProperty. """)) elif not cg_context.interface.indexed_and_named_properties.named_setter: body.append( TextNode("""\ // 3.9.3. [[DefineOwnProperty]] // https://heycam.github.io/webidl/#legacy-platform-object-defineownproperty // step 2.1. Let creating be true if P is not a supported property name, and // false otherwise. // step 2.2.1. If creating is false and O does not implement an interface // with a named property setter, then return false. 
${class_name}::NamedPropertyGetterCallback(${v8_property_name}, ${info}); const bool is_creating = ${info}.GetReturnValue().Get()->IsUndefined(); if (!is_creating) { bindings::V8SetReturnValue(${info}, nullptr); if (${info}.ShouldThrowOnError()) { ExceptionState exception_state(${info}.GetIsolate(), ExceptionState::kNamedSetterContext, "${interface.identifier}"); exception_state.ThrowTypeError("Named property setter is not supported."); } return; } // Do not intercept. Fallback to OrdinaryDefineOwnProperty. """)) else: body.append( TextNode("""\ // 3.9.3. [[DefineOwnProperty]] // https://heycam.github.io/webidl/#legacy-platform-object-defineownproperty // step 2.2.2. If O implements an interface with a named property setter, // then: // step 2.2.2.1. If the result of calling IsDataDescriptor(Desc) is false, // then return false. if (v8_property_desc.has_get() || v8_property_desc.has_set()) { bindings::V8SetReturnValue(${info}, nullptr); if (${info}.ShouldThrowOnError()) { ExceptionState exception_state(${info}.GetIsolate(), ExceptionState::kNamedSetterContext, "${interface.identifier}"); exception_state.ThrowTypeError("Accessor properties are not allowed."); } return; } // step 2.2.2.2. Invoke the named property setter with P and Desc.[[Value]]. ${class_name}::NamedPropertySetterCallback( ${v8_property_name}, ${v8_property_desc}.value(), ${info}); bindings::V8SetReturnValue(${info}, nullptr); """)) return func_decl, func_def def make_named_property_descriptor_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "v8::Local<v8::Name> v8_property_name", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["v8_property_name", "info"] func_decl, func_def = _make_interceptor_callback( cg_context, function_name, arg_decls, arg_names, cg_context.class_name, "NamedPropertyDescriptor") body = func_def.body body.append( TextNode("""\ // LegacyPlatformObjectGetOwnProperty // https://heycam.github.io/webidl/#LegacyPlatformObjectGetOwnProperty\ """)) if ("LegacyOverrideBuiltIns" not in cg_context.interface.extended_attributes): body.append( TextNode("""\ // step 2.1. If the result of running the named property visibility algorithm // with property name P and object O is true, then: if (${v8_receiver}->GetRealNamedPropertyAttributesInPrototypeChain( ${current_context}, ${v8_property_name}).IsJust()) { return; // Do not intercept. Fallback to OrdinaryGetOwnProperty. } """)) pattern = """\ // step 2.1.3. If operation was defined without an identifier, then set // value to the result of performing the steps listed in the interface // description to determine the value of a named property with P as the // name. // step 2.1.4. Otherwise, operation was defined with an identifier. Set // value to the result of performing the steps listed in the description // of operation with P as the only argument value. ${class_name}::NamedPropertyGetterCallback(${v8_property_name}, ${info}); v8::Local<v8::Value> v8_value = ${info}.GetReturnValue().Get(); // step 2.1. If the result of running the named property visibility // algorithm with property name P and object O is true, then: // step 3. Return OrdinaryGetOwnProperty(O, P). if (v8_value->IsUndefined()) return; // Do not intercept. Fallback to OrdinaryGetOwnProperty. // step 2.1.6. Set desc.[[Value]] to the result of converting value to an // ECMAScript value. // step 2.1.7. 
If O implements an interface with a named property setter, // then set desc.[[Writable]] to true, otherwise set it to false. // step 2.1.8. If O implements an interface with the // [LegacyUnenumerableNamedProperties] extended attribute, then set // desc.[[Enumerable]] to false, otherwise set it to true. // step 2.1.9. Set desc.[[Configurable]] to true. v8::PropertyDescriptor desc(v8_value, /*writable=*/{cxx_writable}); desc.set_enumerable({cxx_enumerable}); desc.set_configurable(true); bindings::V8SetReturnValue(${info}, desc); """ props = cg_context.interface.indexed_and_named_properties writable = bool(props.named_setter) cxx_writable = "true" if writable else "false" enumerable = props.is_named_property_enumerable cxx_enumerable = "true" if enumerable else "false" body.append( TextNode( _format( pattern, cxx_writable=cxx_writable, cxx_enumerable=cxx_enumerable))) return func_decl, func_def def make_named_property_query_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) props = cg_context.interface.indexed_and_named_properties if "NotEnumerable" in props.named_getter.extended_attributes: return None, None arg_decls = [ "v8::Local<v8::Name> v8_property_name", "const v8::PropertyCallbackInfo<v8::Integer>& info", ] arg_names = ["v8_property_name", "info"] func_decl, func_def = _make_interceptor_callback( cg_context, function_name, arg_decls, arg_names, cg_context.class_name, "NamedPropertyQuery") body = func_def.body flags = [] if not props.named_setter: flags.append("v8::ReadOnly") if not props.is_named_property_enumerable: flags.append("v8::DontEnum") if not flags: flags.append("v8::None") if len(flags) == 1: property_attribute = flags[0] else: property_attribute = " | ".join(flags) body.extend([ TextNode("""\ ExceptionState exception_state(${isolate}, ExceptionState::kNamedGetterContext, "${interface.identifier}"); bool does_exist = ${blink_receiver}->NamedPropertyQuery( ${blink_property_name}, exception_state); if (!does_exist) return; // Do not intercept. """), TextNode( _format( "bindings::V8SetReturnValue" "(${info}, uint32_t({property_attribute}));", property_attribute=property_attribute)), ]) return func_decl, func_def def make_named_property_enumerator_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) props = cg_context.interface.indexed_and_named_properties if "NotEnumerable" in props.named_getter.extended_attributes: return None, None arg_decls = ["const v8::PropertyCallbackInfo<v8::Array>& info"] arg_names = ["info"] func_decl, func_def = _make_interceptor_callback( cg_context, function_name, arg_decls, arg_names, cg_context.class_name, "NamedPropertyEnumerator") body = func_def.body body.append( TextNode("""\ // 3.9.6. [[OwnPropertyKeys]] // https://heycam.github.io/webidl/#legacy-platform-object-ownpropertykeys // step 3. If O supports named properties, then for each P of O's supported // property names that is visible according to the named property // visibility algorithm, append P to keys. 
Vector<String> blink_property_names; ExceptionState exception_state(${info}.GetIsolate(), ExceptionState::kEnumerationContext, "${interface.identifier}"); ${blink_receiver}->NamedPropertyEnumerator( blink_property_names, exception_state); if (exception_state.HadException()) return; bindings::V8SetReturnValue( ${info}, ToV8(blink_property_names, ${creation_context_object}, ${isolate})); """)) return func_decl, func_def # ---------------------------------------------------------------------------- # Callback functions of interceptors on named properties object # ---------------------------------------------------------------------------- def make_named_props_obj_indexed_getter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "uint32_t index", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["index", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "NamedPropertiesObject_IndexedPropertyGetter") body = func_def.body body.append( TextNode("""\ v8::Local<v8::String> property_name = V8AtomicString(${isolate}, AtomicString::Number(${index})); NamedPropsObjNamedGetterCallback(property_name, ${info}); """)) return func_def def make_named_props_obj_indexed_setter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "uint32_t index", "v8::Local<v8::Value> v8_property_value", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["index", "v8_property_value", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "NamedPropertiesObject_IndexedPropertySetter") body = func_def.body body.append( TextNode("""\ // 3.6.4.2. 
[[DefineOwnProperty]] // https://heycam.github.io/webidl/#named-properties-object-defineownproperty bindings::V8SetReturnValue(${info}, nullptr); if (${info}.ShouldThrowOnError()) { ExceptionState exception_state(${info}.GetIsolate(), ExceptionState::kIndexedSetterContext, "${interface.identifier}"); exception_state.ThrowTypeError("Named property setter is not supported."); } """)) return func_def def make_named_props_obj_indexed_deleter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "uint32_t index", "const v8::PropertyCallbackInfo<v8::Boolean>& info", ] arg_names = ["index", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "NamedPropertiesObject_IndexedPropertyDeleter") body = func_def.body body.append( TextNode("""\ bindings::V8SetReturnValue(${info}, false); if (${info}.ShouldThrowOnError()) { ExceptionState exception_state(${info}.GetIsolate(), ExceptionState::kIndexedDeletionContext, "${interface.identifier}"); exception_state.ThrowTypeError("Named property deleter is not supported."); } """)) return func_def def make_named_props_obj_indexed_definer_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "uint32_t index", "const v8::PropertyDescriptor& v8_property_desc", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["index", "v8_property_desc", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "NamedPropertiesObject_IndexedPropertyDefiner") body = func_def.body body.append( TextNode("""\ // 3.6.4.2. [[DefineOwnProperty]] // https://heycam.github.io/webidl/#named-properties-object-defineownproperty bindings::V8SetReturnValue(${info}, nullptr); if (${info}.ShouldThrowOnError()) { ExceptionState exception_state(${info}.GetIsolate(), ExceptionState::kIndexedSetterContext, "${interface.identifier}"); exception_state.ThrowTypeError("Named property setter is not supported."); } """)) return func_def def make_named_props_obj_indexed_descriptor_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "uint32_t index", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["index", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "NamedPropertiesObject_IndexedPropertyDescriptor") body = func_def.body body.append( TextNode("""\ v8::Local<v8::String> property_name = V8AtomicString(${isolate}, AtomicString::Number(${index})); NamedPropsObjNamedDescriptorCallback(property_name, ${info}); """)) return func_def def make_named_props_obj_named_getter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "v8::Local<v8::Name> v8_property_name", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["v8_property_name", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "NamedPropertiesObject_NamedPropertyGetter") body = func_def.body body.append( TextNode("""\ // 3.6.4.1. [[GetOwnProperty]] // https://heycam.github.io/webidl/#named-properties-object-getownproperty // // TODO(yukishiino): Update the following hard-coded call to an appropriate // one. 
V8Window::NamedPropertyGetterCustom(${blink_property_name}, ${info}); """)) return func_def def make_named_props_obj_named_setter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "v8::Local<v8::Name> v8_property_name", "v8::Local<v8::Value> v8_property_value", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["v8_property_name", "v8_property_value", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "NamedPropertiesObject_NamedPropertySetter") body = func_def.body body.append( TextNode("""\ // 3.6.4.2. [[DefineOwnProperty]] // https://heycam.github.io/webidl/#named-properties-object-defineownproperty bindings::V8SetReturnValue(${info}, nullptr); if (${info}.ShouldThrowOnError()) { ExceptionState exception_state(${info}.GetIsolate(), ExceptionState::kNamedSetterContext, "${interface.identifier}"); exception_state.ThrowTypeError("Named property setter is not supported."); } """)) return func_def def make_named_props_obj_named_deleter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "v8::Local<v8::Name> v8_property_name", "const v8::PropertyCallbackInfo<v8::Boolean>& info", ] arg_names = ["v8_property_name", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "NamedPropertiesObject_NamedPropertyDeleter") body = func_def.body body.append( TextNode("""\ // 3.6.4.3. [[Delete]] // https://heycam.github.io/webidl/#named-properties-object-delete bindings::V8SetReturnValue(${info}, false); if (${info}.ShouldThrowOnError()) { ExceptionState exception_state(${info}.GetIsolate(), ExceptionState::kNamedDeletionContext, "${interface.identifier}"); exception_state.ThrowTypeError("Named property deleter is not supported."); } """)) return func_def def make_named_props_obj_named_definer_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "v8::Local<v8::Name> v8_property_name", "const v8::PropertyDescriptor& v8_property_desc", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["v8_property_name", "v8_property_desc", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "NamedPropertiesObject_NamedPropertyDefiner") body = func_def.body body.append( TextNode("""\ // 3.6.4.2. [[DefineOwnProperty]] // https://heycam.github.io/webidl/#named-properties-object-defineownproperty bindings::V8SetReturnValue(${info}, nullptr); if (${info}.ShouldThrowOnError()) { ExceptionState exception_state(${info}.GetIsolate(), ExceptionState::kNamedSetterContext, "${interface.identifier}"); exception_state.ThrowTypeError("Named property setter is not supported."); } """)) return func_def def make_named_props_obj_named_descriptor_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "v8::Local<v8::Name> v8_property_name", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["v8_property_name", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "NamedPropertiesObject_NamedPropertyDescriptor") body = func_def.body body.append( TextNode("""\ // 3.6.4.1. 
[[GetOwnProperty]] // https://heycam.github.io/webidl/#named-properties-object-getownproperty // step 4. If the result of running the named property visibility algorithm // with property name P and object object is true, then: if (${v8_receiver}->GetRealNamedPropertyAttributesInPrototypeChain( ${current_context}, ${v8_property_name}).IsJust()) { return; // Do not intercept. Fallback to OrdinaryGetOwnProperty. } // TODO(yukishiino): Update the following hard-coded call to an appropriate // one. V8Window::NamedPropertyGetterCustom(${blink_property_name}, ${info}); v8::Local<v8::Value> v8_value = ${info}.GetReturnValue().Get(); if (v8_value->IsUndefined()) return; // Do not intercept. Fallback to OrdinaryGetOwnProperty. // step 4.7. If A implements an interface with the // [LegacyUnenumerableNamedProperties] extended attribute, then set // desc.[[Enumerable]] to false, otherwise set it to true. // step 4.8. Set desc.[[Writable]] to true and desc.[[Configurable]] to // true. v8::PropertyDescriptor desc(v8_value, /*writable=*/true); desc.set_enumerable(false); desc.set_configurable(true); bindings::V8SetReturnValue(${info}, desc); """)) return func_def # ---------------------------------------------------------------------------- # Callback functions of cross origin interceptors # ---------------------------------------------------------------------------- def make_cross_origin_access_check_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) func_def = CxxFuncDefNode( name=function_name, arg_decls=[ "v8::Local<v8::Context> accessing_context", "v8::Local<v8::Object> accessed_object", "v8::Local<v8::Value> unused_data", ], return_type="bool") func_def.set_base_template_vars(cg_context.template_bindings()) body = func_def.body body.add_template_var("accessing_context", "accessing_context") body.add_template_var("accessed_object", "accessed_object") bind_callback_local_vars(body, cg_context) if cg_context.interface.identifier == "Window": blink_class = "DOMWindow" else: blink_class = blink_class_name(cg_context.interface) body.extend([ TextNode( _format( "{blink_class}* blink_accessed_object = " "${class_name}::ToWrappableUnsafe(${accessed_object});", blink_class=blink_class)), TextNode("return BindingSecurity::ShouldAllowAccessTo(" "ToLocalDOMWindow(${accessing_context}), " "blink_accessed_object, " "BindingSecurity::ErrorReportOption::kDoNotReport);"), ]) return func_def def make_cross_origin_indexed_getter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "uint32_t index", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["index", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "CrossOriginProperty_IndexedPropertyGetter") body = func_def.body if cg_context.interface.identifier != "Window": body.append(TextNode("${throw_security_error}")) return func_def bind_return_value(body, cg_context, overriding_args=["${index}"]) body.extend([ TextNode("""\ if (${index} >= ${blink_receiver}->length()) { ${throw_security_error} return; } """), make_v8_set_return_value(cg_context), ]) return func_def def make_cross_origin_indexed_setter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "uint32_t index", "v8::Local<v8::Value> v8_property_value", "const v8::PropertyCallbackInfo<v8::Value>& info", ] 
arg_names = ["index", "v8_property_value", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "CrossOriginProperty_IndexedPropertySetter") body = func_def.body body.append(TextNode("${throw_security_error}")) return func_def def make_cross_origin_indexed_deleter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "uint32_t index", "const v8::PropertyCallbackInfo<v8::Boolean>& info", ] arg_names = ["index", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "CrossOriginProperty_IndexedPropertyDeleter") body = func_def.body body.append(TextNode("${throw_security_error}")) return func_def def make_cross_origin_indexed_definer_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "uint32_t index", "const v8::PropertyDescriptor& v8_property_desc", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["index", "v8_property_desc", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "CrossOriginProperty_IndexedPropertyDefiner") body = func_def.body body.append(TextNode("${throw_security_error}")) return func_def def make_cross_origin_indexed_descriptor_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "uint32_t index", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["index", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "CrossOriginProperty_IndexedPropertyDescriptor") body = func_def.body if cg_context.interface.identifier != "Window": body.append(TextNode("${throw_security_error}")) return func_def body.append( TextNode("""\ CrossOriginIndexedGetterCallback(${index}, ${info}); v8::Local<v8::Value> v8_value = ${info}.GetReturnValue().Get(); if (v8_value->IsUndefined()) { // Must have already thrown a SecurityError. 
return; } v8::PropertyDescriptor desc(v8_value, /*writable=*/false); desc.set_enumerable(true); desc.set_configurable(true); bindings::V8SetReturnValue(${info}, desc); """)) return func_def def make_cross_origin_indexed_enumerator_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = ["const v8::PropertyCallbackInfo<v8::Array>& info"] arg_names = ["info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "CrossOriginProperty_IndexedPropertyEnumerator") body = func_def.body if cg_context.interface.identifier != "Window": return func_def body.append( TextNode("""\ uint32_t length = ${blink_receiver}->length(); v8::Local<v8::Array> array = bindings::EnumerateIndexedProperties(${isolate}, length); bindings::V8SetReturnValue(${info}, array); """)) return func_def def make_cross_origin_named_getter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "v8::Local<v8::Name> v8_property_name", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["v8_property_name", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "CrossOriginProperty_NamedPropertyGetter") body = func_def.body string_case_body = [] string_case_body.append( TextNode("""\ for (const auto& attribute : kCrossOriginAttributeTable) { if (${blink_property_name} != attribute.name) continue; if (UNLIKELY(!attribute.get_value)) { ${throw_security_error} return; } attribute.get_value(${v8_property_name}, ${info}); return; } for (const auto& operation : kCrossOriginOperationTable) { if (${blink_property_name} != operation.name) continue; v8::Local<v8::Function> function; if (bindings::GetCrossOriginFunction( ${info}.GetIsolate(), operation.callback, operation.func_length, ${class_name}::GetWrapperTypeInfo()) .ToLocal(&function)) { bindings::V8SetReturnValue(${info}, function); } return; } """)) if cg_context.interface.identifier == "Window": string_case_body.append( TextNode("""\ // Window object's document-tree child browsing context name property set // // TODO(yukishiino): Update the following hard-coded call to an appropriate // one. 
V8Window::NamedPropertyGetterCustom(${blink_property_name}, ${info}); if (!${info}.GetReturnValue().Get()->IsUndefined()) return; """)) body.extend([ CxxLikelyIfNode( cond="${v8_property_name}->IsString()", body=string_case_body), EmptyNode(), TextNode("""\ // 7.2.3.2 CrossOriginPropertyFallback ( P ) // https://html.spec.whatwg.org/C/#crossoriginpropertyfallback-(-p-) if (bindings::IsSupportedInCrossOriginPropertyFallback( ${info}.GetIsolate(), ${v8_property_name})) { return ${info}.GetReturnValue().SetUndefined(); } ${throw_security_error} """), ]) return func_def def make_cross_origin_named_setter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "v8::Local<v8::Name> v8_property_name", "v8::Local<v8::Value> v8_property_value", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["v8_property_name", "v8_property_value", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "CrossOriginProperty_NamedPropertySetter") body = func_def.body string_case_body = [] string_case_body.append( TextNode("""\ for (const auto& attribute : kCrossOriginAttributeTable) { if (${blink_property_name} == attribute.name && attribute.set_value) { attribute.set_value(${v8_property_name}, ${v8_property_value}, ${info}); return; } } """)) body.extend([ CxxLikelyIfNode( cond="${v8_property_name}->IsString()", body=string_case_body), EmptyNode(), TextNode("${throw_security_error}"), ]) return func_def def make_cross_origin_named_deleter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "v8::Local<v8::Name> v8_property_name", "const v8::PropertyCallbackInfo<v8::Boolean>& info", ] arg_names = ["v8_property_name", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "CrossOriginProperty_NamedPropertyDeleter") body = func_def.body body.append(TextNode("${throw_security_error}")) return func_def def make_cross_origin_named_definer_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "v8::Local<v8::Name> v8_property_name", "const v8::PropertyDescriptor& v8_property_desc", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["v8_property_name", "v8_property_desc", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "CrossOriginProperty_NamedPropertyDefiner") body = func_def.body body.append(TextNode("${throw_security_error}")) return func_def def make_cross_origin_named_descriptor_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "v8::Local<v8::Name> v8_property_name", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["v8_property_name", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "CrossOriginProperty_NamedPropertyDescriptor") body = func_def.body string_case_body = [] string_case_body.append( TextNode("""\ // 7.2.3.4 CrossOriginGetOwnPropertyHelper ( O, P ) // https://html.spec.whatwg.org/C/#crossorigingetownpropertyhelper-(-o,-p-) for (const auto& attribute : kCrossOriginAttributeTable) { if (${blink_property_name} != attribute.name) continue; v8::Local<v8::Value> get; v8::Local<v8::Value> set; if 
(!bindings::GetCrossOriginFunctionOrUndefined( ${info}.GetIsolate(), attribute.get_callback, 0, ${class_name}::GetWrapperTypeInfo()) .ToLocal(&get) || !bindings::GetCrossOriginFunctionOrUndefined( ${info}.GetIsolate(), attribute.set_callback, 1, ${class_name}::GetWrapperTypeInfo()) .ToLocal(&set)) { return; } v8::PropertyDescriptor desc(get, set); desc.set_enumerable(false); desc.set_configurable(true); bindings::V8SetReturnValue(${info}, desc); return; } for (const auto& operation : kCrossOriginOperationTable) { if (${blink_property_name} != operation.name) continue; v8::Local<v8::Function> function; if (!bindings::GetCrossOriginFunction( ${info}.GetIsolate(), operation.callback, operation.func_length, ${class_name}::GetWrapperTypeInfo()) .ToLocal(&function)) { return; } v8::PropertyDescriptor desc(function, /*writable=*/false); desc.set_enumerable(false); desc.set_configurable(true); bindings::V8SetReturnValue(${info}, desc); return; } """)) if cg_context.interface.identifier == "Window": string_case_body.append( TextNode("""\ // Window object's document-tree child browsing context name property set // // TODO(yukishiino): Update the following hard-coded call to an appropriate // one. V8Window::NamedPropertyGetterCustom(${blink_property_name}, ${info}); if (!${info}.GetReturnValue().Get()->IsUndefined()) { v8::PropertyDescriptor desc(${info}.GetReturnValue().Get(), /*writable=*/false); desc.set_enumerable(false); desc.set_configurable(true); bindings::V8SetReturnValue(${info}, desc); return; } """)) body.extend([ CxxLikelyIfNode( cond="${v8_property_name}->IsString()", body=string_case_body), EmptyNode(), TextNode("""\ // 7.2.3.2 CrossOriginPropertyFallback ( P ) // https://html.spec.whatwg.org/C/#crossoriginpropertyfallback-(-p-) if (bindings::IsSupportedInCrossOriginPropertyFallback( ${info}.GetIsolate(), ${v8_property_name})) { v8::PropertyDescriptor desc(v8::Undefined(${info}.GetIsolate()), /*writable=*/false); desc.set_enumerable(false); desc.set_configurable(true); bindings::V8SetReturnValue(${info}, desc); return; } ${throw_security_error} """), ]) return func_def def make_cross_origin_named_query_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "v8::Local<v8::Name> v8_property_name", "const v8::PropertyCallbackInfo<v8::Integer>& info", ] arg_names = ["v8_property_name", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "CrossOriginProperty_NamedPropertyQuery") body = func_def.body string_case_body = [] string_case_body.append( TextNode("""\ // 7.2.3.4 CrossOriginGetOwnPropertyHelper ( O, P ) // https://html.spec.whatwg.org/C/#crossorigingetownpropertyhelper-(-o,-p-) for (const auto& attribute : kCrossOriginAttributeTable) { if (${blink_property_name} != attribute.name) continue; int32_t v8_property_attribute = v8::DontEnum; if (!attribute.set_callback) v8_property_attribute |= v8::ReadOnly; bindings::V8SetReturnValue(${info}, v8_property_attribute); return; } for (const auto& operation : kCrossOriginOperationTable) { if (${blink_property_name} != operation.name) continue; int32_t v8_property_attribute = v8::DontEnum | v8::ReadOnly; bindings::V8SetReturnValue(${info}, v8_property_attribute); return; } """)) body.extend([ CxxLikelyIfNode( cond="${v8_property_name}->IsString()", body=string_case_body), EmptyNode(), TextNode("""\ // 7.2.3.2 CrossOriginPropertyFallback ( P ) // https://html.spec.whatwg.org/C/#crossoriginpropertyfallback-(-p-) if 
(bindings::IsSupportedInCrossOriginPropertyFallback( ${info}.GetIsolate(), ${v8_property_name})) { int32_t v8_property_attribute = v8::DontEnum | v8::ReadOnly; bindings::V8SetReturnValue(${info}, v8_property_attribute); return; } """), ]) return func_def def make_cross_origin_named_enumerator_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = ["const v8::PropertyCallbackInfo<v8::Array>& info"] arg_names = ["info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "CrossOriginProperty_NamedPropertyEnumerator") body = func_def.body body.append( TextNode("""\ bindings::V8SetReturnValue( ${info}, bindings::EnumerateCrossOriginProperties( ${isolate}, kCrossOriginAttributeTable, kCrossOriginOperationTable)); """)) return func_def # ---------------------------------------------------------------------------- # Callback functions of same origin interceptors # ---------------------------------------------------------------------------- def make_same_origin_indexed_getter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "uint32_t index", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["index", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "SameOriginProperty_IndexedPropertyGetter") body = func_def.body bind_return_value(body, cg_context, overriding_args=["${index}"]) body.extend([ TextNode("""\ if (${index} >= ${blink_receiver}->length()) { return; } """), make_v8_set_return_value(cg_context), ]) return func_def def make_same_origin_indexed_setter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "uint32_t index", "v8::Local<v8::Value> v8_property_value", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["index", "v8_property_value", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "SameOriginProperty_IndexedPropertySetter") body = func_def.body body.append( TextNode("""\ bindings::V8SetReturnValue(${info}, nullptr); if (${info}.ShouldThrowOnError()) { ExceptionState exception_state(${info}.GetIsolate(), ExceptionState::kIndexedSetterContext, "${interface.identifier}"); exception_state.ThrowTypeError( "Indexed property setter is not supported."); } """)) return func_def def make_same_origin_indexed_deleter_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "uint32_t index", "const v8::PropertyCallbackInfo<v8::Boolean>& info", ] arg_names = ["index", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "SameOriginProperty_IndexedPropertyDeleter") body = func_def.body body.append( TextNode("""\ // 7.4.9 [[Delete]] ( P ) // https://html.spec.whatwg.org/C/#windowproxy-delete const bool is_supported = ${index} < ${blink_receiver}->length(); bindings::V8SetReturnValue(${info}, !is_supported); if (is_supported and ${info}.ShouldThrowOnError()) { ExceptionState exception_state(${info}.GetIsolate(), ExceptionState::kIndexedDeletionContext, "${interface.identifier}"); exception_state.ThrowTypeError("Index property deleter is not supported."); } """)) return func_def def make_same_origin_indexed_definer_callback(cg_context, 
function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "uint32_t index", "const v8::PropertyDescriptor& v8_property_desc", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["index", "v8_property_desc", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "SameOriginProperty_IndexedPropertyDefiner") body = func_def.body body.append( TextNode("""\ // 7.4.6 [[DefineOwnProperty]] ( P, Desc ) // https://html.spec.whatwg.org/C/#windowproxy-defineownproperty bindings::V8SetReturnValue(${info}, nullptr); if (${info}.ShouldThrowOnError()) { ExceptionState exception_state(${info}.GetIsolate(), ExceptionState::kIndexedSetterContext, "${interface.identifier}"); exception_state.ThrowTypeError("Index property setter is not supported."); } """)) return func_def def make_same_origin_indexed_descriptor_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = [ "uint32_t index", "const v8::PropertyCallbackInfo<v8::Value>& info", ] arg_names = ["index", "info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "SameOriginProperty_IndexedPropertyDescriptor") body = func_def.body body.append( TextNode("""\ // 7.4.5 [[GetOwnProperty]] ( P ) // https://html.spec.whatwg.org/C/#windowproxy-getownproperty SameOriginIndexedGetterCallback(${index}, ${info}); v8::Local<v8::Value> v8_value = ${info}.GetReturnValue().Get(); if (v8_value->IsUndefined()) { return; // Do not intercept. } v8::PropertyDescriptor desc(v8_value, /*writable=*/false); desc.set_enumerable(true); desc.set_configurable(true); bindings::V8SetReturnValue(${info}, desc); """)) return func_def def make_same_origin_indexed_enumerator_callback(cg_context, function_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) arg_decls = ["const v8::PropertyCallbackInfo<v8::Array>& info"] arg_names = ["info"] func_def = _make_interceptor_callback_def( cg_context, function_name, arg_decls, arg_names, None, "SameOriginProperty_IndexedPropertyEnumerator") body = func_def.body body.append( TextNode("""\ uint32_t length = ${blink_receiver}->length(); v8::Local<v8::Array> array = bindings::EnumerateIndexedProperties(${isolate}, length); bindings::V8SetReturnValue(${info}, array); """)) return func_def # ---------------------------------------------------------------------------- # Installer functions # ---------------------------------------------------------------------------- # FN = function name FN_INSTALL_INTERFACE_TEMPLATE = name_style.func("InstallInterfaceTemplate") FN_INSTALL_UNCONDITIONAL_PROPS = name_style.func( "InstallUnconditionalProperties") FN_INSTALL_CONTEXT_INDEPENDENT_PROPS = name_style.func( "InstallContextIndependentProperties") FN_INSTALL_CONTEXT_DEPENDENT_PROPS = name_style.func( "InstallContextDependentProperties") # TP = trampoline name TP_INSTALL_INTERFACE_TEMPLATE = name_style.member_var( "install_interface_template_func") TP_INSTALL_UNCONDITIONAL_PROPS = name_style.member_var( "install_unconditional_props_func") TP_INSTALL_CONTEXT_INDEPENDENT_PROPS = name_style.member_var( "install_context_independent_props_func") TP_INSTALL_CONTEXT_DEPENDENT_PROPS = name_style.member_var( "install_context_dependent_props_func") def bind_installer_local_vars(code_node, cg_context): assert isinstance(code_node, SymbolScopeNode) assert isinstance(cg_context, CodeGenContext) S = 
SymbolNode local_vars = [] local_vars.extend([ S("is_cross_origin_isolated", ("const bool ${is_cross_origin_isolated} = " "${execution_context}->CrossOriginIsolatedCapability();")), S("is_direct_socket_enabled", ("const bool ${is_direct_socket_enabled} = " "${execution_context}->DirectSocketCapability();")), S("is_in_secure_context", ("const bool ${is_in_secure_context} = " "${execution_context}->IsSecureContext();")), S("isolate", "v8::Isolate* ${isolate} = ${v8_context}->GetIsolate();"), S("script_state", "ScriptState* ${script_state} = ScriptState::From(${v8_context});"), S("wrapper_type_info", ("const WrapperTypeInfo* const ${wrapper_type_info} = " "${class_name}::GetWrapperTypeInfo();")), ]) if cg_context.interface: local_vars.extend([ S("interface_function_template", ("v8::Local<v8::FunctionTemplate> " "${interface_function_template} = " "${interface_template}.As<v8::FunctionTemplate>();")), S("instance_object_template", ("v8::Local<v8::ObjectTemplate> ${instance_object_template} = " "${interface_function_template}->InstanceTemplate();")), S("instance_template", ("v8::Local<v8::Template> ${instance_template} = " "${instance_object_template};")), S("prototype_object_template", ("v8::Local<v8::ObjectTemplate> ${prototype_object_template} = " "${interface_function_template}->PrototypeTemplate();")), S("prototype_template", ("v8::Local<v8::Template> ${prototype_template} = " "${prototype_object_template};")), S("signature", ("v8::Local<v8::Signature> ${signature} = " "v8::Signature::New(${isolate}, " "${interface_function_template});")), ]) elif cg_context.namespace: local_vars.extend([ S("namespace_object_template", ("v8::Local<v8::ObjectTemplate> " "${namespace_object_template} = " "${interface_template}.As<v8::ObjectTemplate>();")), S("instance_template", "v8::Local<v8::Template> ${instance_template};"), S("prototype_template", "v8::Local<v8::Template> ${prototype_template};"), S("signature", "v8::Local<v8::Signature> ${signature};"), ]) elif cg_context.callback_interface: local_vars.extend([ S("interface_function_template", ("v8::Local<v8::FunctionTemplate> " "${interface_function_template} = " "${interface_template}.As<v8::FunctionTemplate>();")), S("instance_template", "v8::Local<v8::Template> ${instance_template};"), S("prototype_template", "v8::Local<v8::Template> ${prototype_template};"), S("signature", "v8::Local<v8::Signature> ${signature};"), ]) # context_feature_settings node = S("context_feature_settings", ("const ContextFeatureSettings* ${context_feature_settings} = " "ContextFeatureSettings::From(" "${execution_context}, " "ContextFeatureSettings::CreationMode::kDontCreateIfNotExists" ");")) node.accumulate( CodeGenAccumulator.require_include_headers([ "third_party/blink/renderer/core/context_features/context_feature_settings.h" ])) local_vars.append(node) # execution_context node = S("execution_context", ("ExecutionContext* ${execution_context} = " "ExecutionContext::From(${script_state});")) node.accumulate( CodeGenAccumulator.require_include_headers([ "third_party/blink/renderer/core/execution_context/execution_context.h" ])) local_vars.append(node) # parent_interface_template pattern = ( "v8::Local<v8::FunctionTemplate> ${parent_interface_template}{_1};") interface = cg_context.interface if not interface: _1 = "" elif (interface and "Global" in interface.extended_attributes and interface.indexed_and_named_properties and interface.indexed_and_named_properties.has_named_properties): # https://heycam.github.io/webidl/#named-properties-object _1 = " = 
${npo_interface_template}" # npo = named properties object elif interface.inherited: _1 = (" = ${wrapper_type_info}->parent_class" "->GetV8ClassTemplate(${isolate}, ${world})" ".As<v8::FunctionTemplate>()") else: _1 = "" local_vars.append(S("parent_interface_template", _format(pattern, _1=_1))) # npo_interface_template # npo = named properties object text = """\ // Named properties object v8::Local<v8::FunctionTemplate> ${npo_interface_template} = v8::FunctionTemplate::New(${isolate}); v8::Local<v8::ObjectTemplate> ${npo_prototype_template} = ${npo_interface_template}->PrototypeTemplate(); ${npo_interface_template}->Inherit( ${wrapper_type_info}->parent_class ->GetV8ClassTemplate(${isolate}, ${world}).As<v8::FunctionTemplate>()); ${npo_prototype_template}->SetImmutableProto(); ${npo_prototype_template}->Set( v8::Symbol::GetToStringTag(${isolate}), V8AtomicString(${isolate}, "${interface.identifier}Properties"), static_cast<v8::PropertyAttribute>(v8::ReadOnly | v8::DontEnum)); // Make the named properties object look like the global object. Note that // the named properties object is _not_ a prototype object, plus, we'd like // the named properties object to behave just like the global object (= the // wrapper object of the global object) from the point of view of named // properties. // https://heycam.github.io/webidl/#named-properties-object ${npo_prototype_template}->SetInternalFieldCount( kV8DefaultWrapperInternalFieldCount); """ local_vars.append(S("npo_interface_template", text)) local_vars.append( S("npo_prototype_template", "<% npo_interface_template.request_symbol_definition() %>")) # Arguments have priority over local vars. for symbol_node in local_vars: if symbol_node.name not in code_node.own_template_vars: code_node.register_code_symbol(symbol_node) def _make_property_entry_cross_origin_check(property_, is_get=False, is_set=False): constants = { False: "unsigned(IDLMemberInstaller::FlagCrossOriginCheck::kCheck)", True: "unsigned(IDLMemberInstaller::FlagCrossOriginCheck::kDoNotCheck)", } if property_.is_static: return constants[True] if "CrossOrigin" not in property_.extended_attributes: return constants[False] values = property_.extended_attributes.values_of("CrossOrigin") if is_get: return constants[not values or "Getter" in values] elif is_set: return constants["Setter" in values] else: return constants[True] def _make_property_entry_location(property_): if hasattr(property_, "is_static") and property_.is_static: return "unsigned(IDLMemberInstaller::FlagLocation::kInterface)" if "Global" in property_.owner.extended_attributes: return "unsigned(IDLMemberInstaller::FlagLocation::kInstance)" if "LegacyUnforgeable" in property_.extended_attributes: return "unsigned(IDLMemberInstaller::FlagLocation::kInstance)" return "unsigned(IDLMemberInstaller::FlagLocation::kPrototype)" def _make_property_entry_receiver_check(property_): if ("LegacyLenientThis" in property_.extended_attributes or property_.is_static or (isinstance(property_, web_idl.Attribute) and property_.idl_type.unwrap().is_promise) or (isinstance(property_, web_idl.OverloadGroup) and property_[0].return_type.unwrap().is_promise)): return "unsigned(IDLMemberInstaller::FlagReceiverCheck::kDoNotCheck)" else: return "unsigned(IDLMemberInstaller::FlagReceiverCheck::kCheck)" def _make_property_entry_v8_cached_accessor(property_): return "unsigned(V8PrivateProperty::CachedAccessor::{})".format( property_.extended_attributes.value_of("CachedAccessor") or "kNone") def _make_property_entry_v8_property_attribute(property_): values = 
[] if "NotEnumerable" in property_.extended_attributes: values.append("v8::DontEnum") if "LegacyUnforgeable" in property_.extended_attributes: if not isinstance(property_, web_idl.Attribute): values.append("v8::ReadOnly") values.append("v8::DontDelete") if not values: values.append("v8::None") if len(values) == 1: return "unsigned({})".format(values[0]) else: return "unsigned({})".format(" | ".join(values)) def _make_property_entry_v8_side_effect(property_): if property_.extended_attributes.value_of("Affects") == "Nothing": return "unsigned(v8::SideEffectType::kHasNoSideEffect)" else: return "unsigned(v8::SideEffectType::kHasSideEffect)" def _make_property_entry_world(world): if world == CodeGenContext.MAIN_WORLD: return "unsigned(IDLMemberInstaller::FlagWorld::kMainWorld)" if world == CodeGenContext.NON_MAIN_WORLDS: return "unsigned(IDLMemberInstaller::FlagWorld::kNonMainWorlds)" if world == CodeGenContext.ALL_WORLDS: return "unsigned(IDLMemberInstaller::FlagWorld::kAllWorlds)" assert False def _make_attribute_registration_table(table_name, attribute_entries): assert isinstance(table_name, str) assert isinstance(attribute_entries, (list, tuple)) assert all( isinstance(entry, _PropEntryAttribute) for entry in attribute_entries) T = TextNode entry_nodes = [] pattern = ("{{" "\"{property_name}\", " "{attribute_get_callback}, " "{attribute_set_callback}, " "{v8_property_attribute}, " "{location}, " "{world}, " "{receiver_check}, " "{cross_origin_check_for_get}, " "{cross_origin_check_for_set}, " "{v8_side_effect}, " "{v8_cached_accessor}" "}},") for entry in attribute_entries: text = _format( pattern, property_name=entry.property_.identifier, attribute_get_callback=entry.attr_get_callback_name, attribute_set_callback=(entry.attr_set_callback_name or "nullptr"), v8_property_attribute=_make_property_entry_v8_property_attribute( entry.property_), location=_make_property_entry_location(entry.property_), world=_make_property_entry_world(entry.world), receiver_check=_make_property_entry_receiver_check( entry.property_), cross_origin_check_for_get=( _make_property_entry_cross_origin_check(entry.property_, is_get=True)), cross_origin_check_for_set=( _make_property_entry_cross_origin_check(entry.property_, is_set=True)), v8_side_effect=_make_property_entry_v8_side_effect( entry.property_), v8_cached_accessor=_make_property_entry_v8_cached_accessor( entry.property_)) entry_nodes.append(T(text)) return ListNode([ T("static const IDLMemberInstaller::AttributeConfig " + table_name + "[] = {"), ListNode(entry_nodes), T("};"), ]) def _make_constant_callback_registration_table(table_name, constant_entries): assert isinstance(table_name, str) assert isinstance(constant_entries, (list, tuple)) assert all( isinstance(entry, _PropEntryConstant) and isinstance(entry.const_callback_name, str) for entry in constant_entries) T = TextNode entry_nodes = [] pattern = ( "{{" # "\"{property_name}\", " "{constant_callback}" "}},") for entry in constant_entries: text = _format( pattern, property_name=entry.property_.identifier, constant_callback=entry.const_callback_name) entry_nodes.append(T(text)) return ListNode([ T("static const IDLMemberInstaller::ConstantCallbackConfig " + table_name + "[] = {"), ListNode(entry_nodes), T("};"), ]) def _make_constant_value_registration_table(table_name, constant_entries): assert isinstance(table_name, str) assert isinstance(constant_entries, (list, tuple)) assert all( isinstance(entry, _PropEntryConstant) and entry.const_callback_name is None for entry in constant_entries) T = 
TextNode entry_nodes = [] pattern = ( "{{" # "\"{property_name}\", " "{constant_value}" "}},") for entry in constant_entries: text = _format(pattern, property_name=entry.property_.identifier, constant_value=entry.const_constant_name) entry_nodes.append(T(text)) return ListNode([ T("static const IDLMemberInstaller::ConstantValueConfig " + table_name + "[] = {"), ListNode(entry_nodes), T("};"), ]) def _make_exposed_construct_registration_table(table_name, exposed_construct_entries): assert isinstance(table_name, str) assert isinstance(exposed_construct_entries, (list, tuple)) assert all( isinstance(entry, _PropEntryExposedConstruct) for entry in exposed_construct_entries) T = TextNode entry_nodes = [] for entry in exposed_construct_entries: pattern = ("{{" "\"{property_name}\", " "{exposed_construct_callback}" "}}, ") text = _format(pattern, property_name=entry.property_.identifier, exposed_construct_callback=entry.prop_callback_name) entry_nodes.append(T(text)) return ListNode([ T("static const IDLMemberInstaller::ExposedConstructConfig " + table_name + "[] = {"), ListNode(entry_nodes), T("};"), ]) def _make_operation_registration_table(table_name, operation_entries): assert isinstance(table_name, str) assert isinstance(operation_entries, (list, tuple)) assert all( isinstance(entry, _PropEntryOperationGroup) for entry in operation_entries) T = TextNode F = FormatNode no_alloc_direct_call_count = len( list( filter(lambda entry: entry.no_alloc_direct_call_callbacks, operation_entries))) assert (no_alloc_direct_call_count == 0 or no_alloc_direct_call_count == len(operation_entries)) no_alloc_direct_call_enabled = bool(no_alloc_direct_call_count) entry_nodes = [] nadc_overload_nodes = ListNode() pattern = ("{{" "\"{property_name}\", " "{operation_callback}, " "{function_length}, " "{v8_property_attribute}, " "{location}, " "{world}, " "{receiver_check}, " "{cross_origin_check}, " "{v8_side_effect}" "}}, ") if no_alloc_direct_call_enabled: pattern = ("{{" + pattern + "{v8_cfunction_table}, " "base::size({v8_cfunction_table})}}, ") for entry in operation_entries: if no_alloc_direct_call_enabled: nadc_overload_table_name = name_style.constant( "no_alloc_direct_call_overloads_of_", entry.property_.identifier) nadc_overload_nodes.append( ListNode([ T("static const v8::CFunction " + nadc_overload_table_name + "[] = {"), ListNode([ F("v8::CFunctionBuilder().Fn({}).Build(),", nadc_entry.callback_name) for nadc_entry in entry.no_alloc_direct_call_callbacks ]), T("};"), ])) else: nadc_overload_table_name = None text = _format( pattern, property_name=entry.property_.identifier, operation_callback=entry.op_callback_name, function_length=entry.op_func_length, v8_property_attribute=_make_property_entry_v8_property_attribute( entry.property_), location=_make_property_entry_location(entry.property_), world=_make_property_entry_world(entry.world), receiver_check=_make_property_entry_receiver_check( entry.property_), cross_origin_check=_make_property_entry_cross_origin_check( entry.property_), v8_side_effect=_make_property_entry_v8_side_effect( entry.property_), v8_cfunction_table=nadc_overload_table_name) entry_nodes.append(T(text)) table_decl_before_name = ( "static const IDLMemberInstaller::OperationConfig") if no_alloc_direct_call_enabled: table_decl_before_name = ( "static const " "IDLMemberInstaller::NoAllocDirectCallOperationConfig") node = ListNode() if nadc_overload_nodes: node.extend([ nadc_overload_nodes, EmptyNode(), ]) node.extend([ T(table_decl_before_name + " " + table_name + "[] = {"), 
ListNode(entry_nodes), T("};"), ]) return node class _PropEntryBase(object): def __init__(self, is_context_dependent, exposure_conditional, world, property_): assert isinstance(is_context_dependent, bool) assert isinstance(exposure_conditional, CodeGenExpr) self.is_context_dependent = is_context_dependent self.exposure_conditional = exposure_conditional self.world = world self.property_ = property_ class _PropEntryAttribute(_PropEntryBase): def __init__(self, is_context_dependent, exposure_conditional, world, attribute, attr_get_callback_name, attr_set_callback_name): assert isinstance(attr_get_callback_name, str) assert _is_none_or_str(attr_set_callback_name) _PropEntryBase.__init__(self, is_context_dependent, exposure_conditional, world, attribute) self.attr_get_callback_name = attr_get_callback_name self.attr_set_callback_name = attr_set_callback_name class _PropEntryConstant(_PropEntryBase): def __init__(self, is_context_dependent, exposure_conditional, world, constant, const_callback_name, const_constant_name): assert _is_none_or_str(const_callback_name) assert isinstance(const_constant_name, str) _PropEntryBase.__init__(self, is_context_dependent, exposure_conditional, world, constant) self.const_callback_name = const_callback_name self.const_constant_name = const_constant_name class _PropEntryConstructorGroup(_PropEntryBase): def __init__(self, is_context_dependent, exposure_conditional, world, constructor_group, ctor_callback_name, ctor_func_length): assert isinstance(ctor_callback_name, str) assert isinstance(ctor_func_length, int) _PropEntryBase.__init__(self, is_context_dependent, exposure_conditional, world, constructor_group) self.ctor_callback_name = ctor_callback_name self.ctor_func_length = ctor_func_length class _PropEntryExposedConstruct(_PropEntryBase): def __init__(self, is_context_dependent, exposure_conditional, world, exposed_construct, prop_callback_name): assert isinstance(prop_callback_name, str) _PropEntryBase.__init__(self, is_context_dependent, exposure_conditional, world, exposed_construct) self.prop_callback_name = prop_callback_name class _PropEntryOperationGroup(_PropEntryBase): def __init__(self, is_context_dependent, exposure_conditional, world, operation_group, op_callback_name, op_func_length, no_alloc_direct_call_callbacks=None): assert isinstance(op_callback_name, str) assert isinstance(op_func_length, int) _PropEntryBase.__init__(self, is_context_dependent, exposure_conditional, world, operation_group) self.op_callback_name = op_callback_name self.op_func_length = op_func_length self.no_alloc_direct_call_callbacks = no_alloc_direct_call_callbacks def make_property_entries_and_callback_defs(cg_context, attribute_entries, constant_entries, constructor_entries, exposed_construct_entries, operation_entries): """ Creates intermediate objects to help property installation and also makes code nodes of callback functions. Args: attribute_entries: constructor_entries: exposed_construct_entries: operation_entries: Output parameters to store the intermediate objects. 
""" assert isinstance(cg_context, CodeGenContext) assert isinstance(attribute_entries, list) assert isinstance(constant_entries, list) assert isinstance(constructor_entries, list) assert isinstance(exposed_construct_entries, list) assert isinstance(operation_entries, list) class_like = cg_context.class_like interface = cg_context.interface global_names = class_like.extended_attributes.values_of("Global") callback_def_nodes = ListNode() def iterate(members, callback): for member in members: is_context_dependent = member.exposure.is_context_dependent( global_names) exposure_conditional = expr_from_exposure( member.exposure, global_names=global_names, may_use_feature_selector=True) if "PerWorldBindings" in member.extended_attributes: worlds = (CodeGenContext.MAIN_WORLD, CodeGenContext.NON_MAIN_WORLDS) else: worlds = (CodeGenContext.ALL_WORLDS, ) for world in worlds: callback(member, is_context_dependent, exposure_conditional, world) def process_attribute(attribute, is_context_dependent, exposure_conditional, world): if "CSSProperty" in attribute.extended_attributes: return # [CSSProperty] will be installed in a special manner. cgc_attr = cg_context.make_copy(attribute=attribute, for_world=world) cgc = cgc_attr.make_copy(attribute_get=True) attr_get_callback_name = callback_function_name(cgc) attr_get_callback_node = make_attribute_get_callback_def( cgc, attr_get_callback_name) cgc = cgc_attr.make_copy(attribute_set=True) attr_set_callback_name = callback_function_name(cgc) attr_set_callback_node = make_attribute_set_callback_def( cgc, attr_set_callback_name) if attr_set_callback_node is None: attr_set_callback_name = None callback_def_nodes.extend([ attr_get_callback_node, EmptyNode(), attr_set_callback_node, EmptyNode(), ]) attribute_entries.append( _PropEntryAttribute( is_context_dependent=is_context_dependent, exposure_conditional=exposure_conditional, world=world, attribute=attribute, attr_get_callback_name=attr_get_callback_name, attr_set_callback_name=attr_set_callback_name)) def process_constant(constant, is_context_dependent, exposure_conditional, world): cgc = cg_context.make_copy( constant=constant, for_world=world, v8_callback_type=CodeGenContext.V8_ACCESSOR_NAME_GETTER_CALLBACK) const_callback_name = callback_function_name(cgc) const_callback_node = make_constant_callback_def( cgc, const_callback_name) if const_callback_node is None: const_callback_name = None # IDL constant's C++ constant name const_constant_name = _format("${class_name}::Constant::{}", constant_name(cgc)) callback_def_nodes.extend([ const_callback_node, EmptyNode(), ]) constant_entries.append( _PropEntryConstant( is_context_dependent=is_context_dependent, exposure_conditional=exposure_conditional, world=world, constant=constant, const_callback_name=const_callback_name, const_constant_name=const_constant_name)) def process_constructor_group(constructor_group, is_context_dependent, exposure_conditional, world): cgc = cg_context.make_copy( constructor_group=constructor_group, for_world=world) ctor_callback_name = callback_function_name(cgc) ctor_callback_node = make_constructor_callback_def( cgc, ctor_callback_name) callback_def_nodes.extend([ ctor_callback_node, EmptyNode(), ]) constructor_entries.append( _PropEntryConstructorGroup( is_context_dependent=is_context_dependent, exposure_conditional=exposure_conditional, world=world, constructor_group=constructor_group, ctor_callback_name=ctor_callback_name, ctor_func_length=( constructor_group.min_num_of_required_arguments))) def 
process_exposed_construct(exposed_construct, is_context_dependent, exposure_conditional, world): if isinstance(exposed_construct, web_idl.LegacyWindowAlias): cgc = cg_context.make_copy( exposed_construct=exposed_construct.original, legacy_window_alias=exposed_construct, for_world=world, v8_callback_type=CodeGenContext. V8_ACCESSOR_NAME_GETTER_CALLBACK) elif ("LegacyNoInterfaceObject" in exposed_construct.extended_attributes): return # Skip due to [LegacyNoInterfaceObject]. else: cgc = cg_context.make_copy( exposed_construct=exposed_construct, for_world=world, v8_callback_type=CodeGenContext. V8_ACCESSOR_NAME_GETTER_CALLBACK) prop_callback_name = callback_function_name(cgc) prop_callback_node = make_exposed_construct_callback_def( cgc, prop_callback_name) callback_def_nodes.extend([ prop_callback_node, EmptyNode(), ]) exposed_construct_entries.append( _PropEntryExposedConstruct( is_context_dependent=is_context_dependent, exposure_conditional=exposure_conditional, world=world, exposed_construct=exposed_construct, prop_callback_name=prop_callback_name)) def process_named_constructor_group(named_constructor_group, is_context_dependent, exposure_conditional, world): cgc = cg_context.make_copy( exposed_construct=named_constructor_group, is_named_constructor=True, for_world=world, v8_callback_type=CodeGenContext.V8_ACCESSOR_NAME_GETTER_CALLBACK) prop_callback_name = callback_function_name(cgc) prop_callback_node = make_named_constructor_property_callback_def( cgc, prop_callback_name) callback_def_nodes.extend([ prop_callback_node, EmptyNode(), ]) exposed_construct_entries.append( _PropEntryExposedConstruct( is_context_dependent=is_context_dependent, exposure_conditional=exposure_conditional, world=world, exposed_construct=named_constructor_group, prop_callback_name=prop_callback_name)) def process_operation_group(operation_group, is_context_dependent, exposure_conditional, world): cgc = cg_context.make_copy( operation_group=operation_group, for_world=world) op_callback_name = callback_function_name(cgc) op_callback_node = make_operation_callback_def(cgc, op_callback_name) no_alloc_direct_call_callbacks = ( list_no_alloc_direct_call_callbacks( cgc.make_copy(no_alloc_direct_call=True)) if "NoAllocDirectCall" in operation_group.extended_attributes else None) callback_def_nodes.extend([ op_callback_node, EmptyNode(), ]) operation_entries.append( _PropEntryOperationGroup( is_context_dependent=is_context_dependent, exposure_conditional=exposure_conditional, world=world, operation_group=operation_group, op_callback_name=op_callback_name, op_func_length=operation_group.min_num_of_required_arguments, no_alloc_direct_call_callbacks=no_alloc_direct_call_callbacks)) def process_stringifier(_, is_context_dependent, exposure_conditional, world): cgc = cg_context.make_copy( stringifier=interface.stringifier, for_world=world) op_callback_name = callback_function_name(cgc) op_callback_node = make_stringifier_callback_def(cgc, op_callback_name) callback_def_nodes.extend([ op_callback_node, EmptyNode(), ]) operation_entries.append( _PropEntryOperationGroup( is_context_dependent=is_context_dependent, exposure_conditional=exposure_conditional, world=world, operation_group=cgc.property_, op_callback_name=op_callback_name, op_func_length=0)) iterate(class_like.attributes, process_attribute) iterate(class_like.constants, process_constant) if interface: iterate(interface.constructor_groups, process_constructor_group) iterate(interface.exposed_constructs, process_exposed_construct) iterate(interface.legacy_window_aliases, 
process_exposed_construct) named_constructor_groups = [ group for construct in interface.exposed_constructs for group in construct.named_constructor_groups if construct.named_constructor_groups ] iterate(named_constructor_groups, process_named_constructor_group) if not class_like.is_callback_interface: iterate(class_like.operation_groups, process_operation_group) if interface and interface.stringifier: iterate([interface.stringifier.operation], process_stringifier) collectionlike = interface and (interface.iterable or interface.maplike or interface.setlike) if collectionlike: def should_define(target): if not target[0].is_optionally_defined: return True return all(target.identifier != member.identifier for member in itertools.chain( interface.attributes, interface.constants, interface.operation_groups)) iterate(collectionlike.attributes, process_attribute) iterate( filter(should_define, collectionlike.operation_groups), process_operation_group) return callback_def_nodes def _make_install_prototype_object(cg_context): assert isinstance(cg_context, CodeGenContext) nodes = [] class_like = cg_context.class_like interface = cg_context.interface unscopables = [] is_unscopable = lambda member: "Unscopable" in member.extended_attributes unscopables.extend(filter(is_unscopable, class_like.attributes)) unscopables.extend(filter(is_unscopable, class_like.operations)) if unscopables: nodes.extend([ TextNode("""\ // [Unscopable] // 3.7.3. Interface prototype object // https://heycam.github.io/webidl/#interface-prototype-object // step 10. If interface has any member declared with the [Unscopable] // extended attribute, then:\ """), ListNode([ TextNode("static constexpr const char* " "kUnscopablePropertyNames[] = {"), ListNode([ TextNode("\"{}\", ".format(member.identifier)) for member in unscopables ]), TextNode("};"), ]), TextNode("""\ bindings::InstallUnscopablePropertyNames( ${isolate}, ${v8_context}, ${prototype_object}, kUnscopablePropertyNames); """), ]) if "LegacyNoInterfaceObject" in class_like.extended_attributes: nodes.append( TextNode("""\ // [LegacyNoInterfaceObject] // 3.7.3. Interface prototype object // https://heycam.github.io/webidl/#interface-prototype-object // step 13. If the [LegacyNoInterfaceObject] extended attribute was not // specified on interface, then: // // V8 defines "constructor" property on the prototype object by default. ${prototype_object}->Delete( ${v8_context}, V8AtomicString(${isolate}, "constructor")).ToChecked(); """)) collectionlike = interface and (interface.iterable or interface.maplike or interface.setlike) if collectionlike: property_name = None for operation_group in collectionlike.operation_groups: if operation_group[0].is_iterator: property_name = operation_group.identifier break if property_name: pattern = """\ // @@iterator == "{property_name}" {{ v8::Local<v8::Value> v8_value = ${prototype_object}->Get( ${v8_context}, V8AtomicString(${isolate}, "{property_name}")) .ToLocalChecked(); ${prototype_object}->DefineOwnProperty( ${v8_context}, v8::Symbol::GetIterator(${isolate}), v8_value, v8::DontEnum).ToChecked(); }} """ nodes.append( TextNode(_format(pattern, property_name=property_name))) if class_like.identifier == "FileSystemDirectoryHandle": pattern = """\ // Temporary @@asyncIterator support for FileSystemDirectoryHandle // TODO(https://crbug.com/1087157): Replace with proper bindings support. 
// @@asyncIterator == "{property_name}" {{ v8::Local<v8::Value> v8_value = ${prototype_object}->Get( ${v8_context}, V8AtomicString(${isolate}, "{property_name}")) .ToLocalChecked(); ${prototype_object}->DefineOwnProperty( ${v8_context}, v8::Symbol::GetAsyncIterator(${isolate}), v8_value, v8::DontEnum).ToChecked(); }} """ nodes.append(TextNode(_format(pattern, property_name="entries"))) if ("Global" in class_like.extended_attributes and class_like.indexed_and_named_properties and class_like.indexed_and_named_properties.has_named_properties): nodes.append( TextNode("""\ // https://heycam.github.io/webidl/#named-properties-object // V8 defines "constructor" property on the prototype object by default. // Named properties object is currently implemented as a prototype object // (implemented with v8::FunctionTemplate::PrototypeTemplate()). ${prototype_object}->GetPrototype().As<v8::Object>()->Delete( ${v8_context}, V8AtomicString(${isolate}, "constructor")).ToChecked(); """)) return SequenceNode(nodes) if nodes else None def make_install_interface_template(cg_context, function_name, class_name, trampoline_var_name, constructor_entries, supplemental_install_node, install_unconditional_func_name, install_context_independent_func_name): """ Returns: A triplet of CodeNode of: - function declaration - function definition - trampoline function definition (from the API class to the implementation class), which is supposed to be defined inline """ assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) assert _is_none_or_str(class_name) assert _is_none_or_str(trampoline_var_name) assert isinstance(constructor_entries, (list, tuple)) assert all( isinstance(entry, _PropEntryConstructorGroup) for entry in constructor_entries) assert isinstance(supplemental_install_node, SequenceNode) assert _is_none_or_str(install_unconditional_func_name) assert _is_none_or_str(install_context_independent_func_name) T = TextNode class_like = cg_context.class_like interface = cg_context.interface arg_decls = [ "v8::Isolate* isolate", "const DOMWrapperWorld& world", "v8::Local<v8::Template> interface_template", ] return_type = "void" if trampoline_var_name is None: trampoline_def = None else: trampoline_def = CxxFuncDefNode( name=function_name, arg_decls=arg_decls, return_type=return_type, static=True) trampoline_def.body.append( TextNode( _format("return {}(isolate, world, interface_template);", trampoline_var_name))) func_decl = CxxFuncDeclNode( name=function_name, arg_decls=arg_decls, return_type=return_type, static=True) func_def = CxxFuncDefNode( name=function_name, arg_decls=arg_decls, return_type=return_type, class_name=class_name) func_def.set_base_template_vars(cg_context.template_bindings()) body = func_def.body body.add_template_vars({ "isolate": "isolate", "world": "world", "interface_template": "interface_template", }) bind_installer_local_vars(body, cg_context) if cg_context.interface: body.extend([ T("bindings::SetupIDLInterfaceTemplate(" "${isolate}, ${wrapper_type_info}, " "${instance_object_template}, " "${prototype_object_template}, " "${interface_function_template}, " "${parent_interface_template});"), EmptyNode(), ]) elif cg_context.namespace: body.extend([ T("bindings::SetupIDLNamespaceTemplate(" "${isolate}, ${wrapper_type_info}, " "${namespace_object_template});"), EmptyNode(), ]) elif cg_context.callback_interface: body.extend([ T("bindings::SetupIDLCallbackInterfaceTemplate(" "${isolate}, ${wrapper_type_info}, " "${interface_function_template});"), EmptyNode(), ]) else: 
assert False for entry in constructor_entries: set_callback = _format( "${interface_function_template}->SetCallHandler({});", entry.ctor_callback_name) set_length = _format("${interface_function_template}->SetLength({});", entry.ctor_func_length) if entry.world == CodeGenContext.MAIN_WORLD: body.append( CxxLikelyIfNode( cond="${world}.IsMainWorld()", body=[T(set_callback), T(set_length)])) elif entry.world == CodeGenContext.NON_MAIN_WORLDS: body.append( CxxLikelyIfNode( cond="!${world}.IsMainWorld()", body=[T(set_callback), T(set_length)])) elif entry.world == CodeGenContext.ALL_WORLDS: body.extend([T(set_callback), T(set_length)]) else: assert False body.append(EmptyNode()) body.extend([ supplemental_install_node, EmptyNode(), ]) if class_like.identifier == "CSSStyleDeclaration": css_properties = list( filter(lambda attr: "CSSProperty" in attr.extended_attributes, class_like.attributes)) if css_properties: prop_name_list = "".join( map(lambda attr: "\"{}\", ".format(attr.identifier), css_properties)) body.append( T("""\ // CSSStyleDeclaration-specific settings // [CSSProperty] { static constexpr const char* kCssProperties[] = { """ + prop_name_list + """ }; bindings::InstallCSSPropertyAttributes( ${isolate}, ${world}, ${instance_template}, ${prototype_template}, ${interface_template}, ${signature}, kCssProperties); } """)) if class_like.identifier == "DOMException": body.append( T("""\ // DOMException-specific settings // https://heycam.github.io/webidl/#es-DOMException-specialness { v8::Local<v8::FunctionTemplate> intrinsic_error_prototype_interface_template = v8::FunctionTemplate::New(${isolate}, nullptr, v8::Local<v8::Value>(), v8::Local<v8::Signature>(), 0, v8::ConstructorBehavior::kThrow); intrinsic_error_prototype_interface_template->SetIntrinsicDataProperty( V8AtomicString(${isolate}, "prototype"), v8::kErrorPrototype); ${interface_function_template}->Inherit( intrinsic_error_prototype_interface_template); } """)) if class_like.identifier == "FileSystemDirectoryIterator": body.append( T("""\ // Temporary @@asyncIterator support for FileSystemDirectoryHandle // TODO(https://crbug.com/1087157): Replace with proper bindings support. 
{ v8::Local<v8::FunctionTemplate> intrinsic_iterator_prototype_interface_template = v8::FunctionTemplate::New(${isolate}, nullptr, v8::Local<v8::Value>(), v8::Local<v8::Signature>(), 0, v8::ConstructorBehavior::kThrow); intrinsic_iterator_prototype_interface_template->SetIntrinsicDataProperty( V8AtomicString(${isolate}, "prototype"), v8::kAsyncIteratorPrototype); ${interface_function_template}->Inherit( intrinsic_iterator_prototype_interface_template); } """)) if class_like.identifier == "HTMLAllCollection": body.append( T("""\ // HTMLAllCollection-specific settings // https://html.spec.whatwg.org/C/#the-htmlallcollection-interface ${instance_object_template}->SetCallAsFunctionHandler( ${class_name}::LegacyCallCustom); ${instance_object_template}->MarkAsUndetectable(); """)) if class_like.identifier == "Iterator": body.append( T("""\ // Iterator-specific settings // https://heycam.github.io/webidl/#es-iterator-prototype-object { v8::Local<v8::FunctionTemplate> intrinsic_iterator_prototype_interface_template = v8::FunctionTemplate::New(${isolate}, nullptr, v8::Local<v8::Value>(), v8::Local<v8::Signature>(), 0, v8::ConstructorBehavior::kThrow); intrinsic_iterator_prototype_interface_template->SetIntrinsicDataProperty( V8AtomicString(${isolate}, "prototype"), v8::kIteratorPrototype); ${interface_function_template}->Inherit( intrinsic_iterator_prototype_interface_template); } """)) if class_like.identifier == "Location": body.append( T("""\ // Location-specific settings // https://html.spec.whatwg.org/C/#the-location-interface // To create a Location object, run these steps: // step 3. Let valueOf be location's relevant // Realm.[[Intrinsics]].[[%ObjProto_valueOf%]]. // step 3. Perform ! location.[[DefineOwnProperty]]("valueOf", // { [[Value]]: valueOf, [[Writable]]: false, [[Enumerable]]: false, // [[Configurable]]: false }). ${instance_template}->SetIntrinsicDataProperty( V8AtomicString(${isolate}, "valueOf"), v8::kObjProto_valueOf, static_cast<v8::PropertyAttribute>( v8::ReadOnly | v8::DontEnum | v8::DontDelete)); // step 4. Perform ! location.[[DefineOwnProperty]](@@toPrimitive, // { [[Value]]: undefined, [[Writable]]: false, [[Enumerable]]: false, // [[Configurable]]: false }). 
${instance_template}->Set( v8::Symbol::GetToPrimitive(${isolate}), v8::Undefined(${isolate}), static_cast<v8::PropertyAttribute>( v8::ReadOnly | v8::DontEnum | v8::DontDelete)); // 7.7.4.2 [[SetPrototypeOf]] ( V ) // https://html.spec.whatwg.org/C/#location-setprototypeof ${instance_object_template}->SetImmutableProto(); ${prototype_object_template}->SetImmutableProto(); """)) if (interface and interface.indexed_and_named_properties and interface.indexed_and_named_properties.indexed_getter and "Global" not in interface.extended_attributes): body.append( T("""\ // @@iterator for indexed properties // https://heycam.github.io/webidl/#define-the-iteration-methods ${prototype_template}->SetIntrinsicDataProperty( v8::Symbol::GetIterator(${isolate}), v8::kArrayProto_values, v8::DontEnum); """)) if interface and interface.iterable and not interface.iterable.key_type: body.append( T("""\ // Value iterator's properties // https://heycam.github.io/webidl/#define-the-iteration-methods ${prototype_template}->SetIntrinsicDataProperty( V8AtomicString(${isolate}, "entries"), v8::kArrayProto_entries, v8::None); ${prototype_template}->SetIntrinsicDataProperty( V8AtomicString(${isolate}, "keys"), v8::kArrayProto_keys, v8::None); ${prototype_template}->SetIntrinsicDataProperty( V8AtomicString(${isolate}, "values"), v8::kArrayProto_values, v8::None); ${prototype_template}->SetIntrinsicDataProperty( V8AtomicString(${isolate}, "forEach"), v8::kArrayProto_forEach, v8::None); """)) if interface and "IsCodeLike" in interface.extended_attributes: body.append( CxxUnlikelyIfNode( cond="RuntimeEnabledFeatures::TrustedTypesUseCodeLikeEnabled()", body=[ TextNode("// [IsCodeLike]"), TextNode("${instance_object_template}->SetCodeLike();"), ])) if "Global" in class_like.extended_attributes: body.append( TextNode("""\ // [Global] // 3.7.1. [[SetPrototypeOf]] // https://heycam.github.io/webidl/#platform-object-setprototypeof ${instance_object_template}->SetImmutableProto(); ${prototype_object_template}->SetImmutableProto(); """)) elif interface and any("Global" in derived.extended_attributes for derived in interface.deriveds): body.append( TextNode("""\ // [Global] - prototype object in the prototype chain of global objects // 3.7.1. 
[[SetPrototypeOf]] // https://heycam.github.io/webidl/#platform-object-setprototypeof ${prototype_object_template}->SetImmutableProto(); """)) func_call_pattern = ("{}(${isolate}, ${world}, ${instance_template}, " "${prototype_template}, ${interface_template});") if install_unconditional_func_name: func_call = _format(func_call_pattern, install_unconditional_func_name) body.append(T(func_call)) if install_context_independent_func_name: func_call = _format(func_call_pattern, install_context_independent_func_name) body.append(T(func_call)) return func_decl, func_def, trampoline_def class PropInstallMode(object): class Mode(int): pass UNCONDITIONAL = Mode(0) CONTEXT_INDEPENDENT = Mode(1) CONTEXT_DEPENDENT = Mode(2) V8_CONTEXT_SNAPSHOT = Mode(3) def make_install_properties(cg_context, function_name, class_name, prop_install_mode, trampoline_var_name, attribute_entries, constant_entries, exposed_construct_entries, operation_entries): """ Returns: A triplet of CodeNode of: - function declaration - function definition - trampoline function definition (from the API class to the implementation class), which is supposed to be defined inline """ assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) assert _is_none_or_str(class_name) assert isinstance(prop_install_mode, PropInstallMode.Mode) assert _is_none_or_str(trampoline_var_name) assert isinstance(attribute_entries, (list, tuple)) assert all( isinstance(entry, _PropEntryAttribute) for entry in attribute_entries) assert isinstance(constant_entries, (list, tuple)) assert all( isinstance(entry, _PropEntryConstant) for entry in constant_entries) assert isinstance(exposed_construct_entries, (list, tuple)) assert all( isinstance(entry, _PropEntryExposedConstruct) for entry in exposed_construct_entries) assert isinstance(operation_entries, (list, tuple)) assert all( isinstance(entry, _PropEntryOperationGroup) for entry in operation_entries) if prop_install_mode == PropInstallMode.CONTEXT_DEPENDENT: install_prototype_object_node = _make_install_prototype_object( cg_context) else: install_prototype_object_node = None if not (attribute_entries or constant_entries or exposed_construct_entries or operation_entries or install_prototype_object_node): if prop_install_mode != PropInstallMode.V8_CONTEXT_SNAPSHOT: return None, None, None if prop_install_mode in (PropInstallMode.UNCONDITIONAL, PropInstallMode.CONTEXT_INDEPENDENT): arg_decls = [ "v8::Isolate* isolate", "const DOMWrapperWorld& world", "v8::Local<v8::Template> instance_template", "v8::Local<v8::Template> prototype_template", "v8::Local<v8::Template> interface_template", ] arg_names = [ "isolate", "world", "instance_template", "prototype_template", "interface_template", ] elif prop_install_mode == PropInstallMode.CONTEXT_DEPENDENT: arg_decls = [ "v8::Local<v8::Context> context", "const DOMWrapperWorld& world", "v8::Local<v8::Object> instance_object", "v8::Local<v8::Object> prototype_object", "v8::Local<v8::Object> interface_object", "v8::Local<v8::Template> interface_template", "FeatureSelector feature_selector", ] arg_names = [ "context", "world", "instance_object", "prototype_object", "interface_object", "interface_template", "feature_selector", ] elif prop_install_mode == PropInstallMode.V8_CONTEXT_SNAPSHOT: arg_decls = [ "v8::Local<v8::Context> context", "const DOMWrapperWorld& world", "v8::Local<v8::Object> instance_object", "v8::Local<v8::Object> prototype_object", "v8::Local<v8::Object> interface_object", "v8::Local<v8::Template> interface_template", ] arg_names = [ 
"context", "world", "instance_object", "prototype_object", "interface_object", "interface_template", ] return_type = "void" is_per_context_install = ( prop_install_mode in (PropInstallMode.CONTEXT_DEPENDENT, PropInstallMode.V8_CONTEXT_SNAPSHOT)) if trampoline_var_name is None: trampoline_def = None else: trampoline_def = CxxFuncDefNode( name=function_name, arg_decls=arg_decls, return_type=return_type, static=True) text = _format( "return {func}({args});", func=trampoline_var_name, args=", ".join(arg_names)) trampoline_def.body.append(TextNode(text)) func_decl = CxxFuncDeclNode(name=function_name, arg_decls=arg_decls, return_type=return_type, static=bool(class_name)) func_def = CxxFuncDefNode( name=function_name, arg_decls=arg_decls, return_type=return_type, class_name=class_name) func_def.set_base_template_vars(cg_context.template_bindings()) body = func_def.body for arg_name in arg_names: if arg_name == "context": # 'context' is reserved by Mako. body.add_template_var("v8_context", "context") else: body.add_template_var(arg_name, arg_name) bind_installer_local_vars(body, cg_context) body.extend([ TextNode("using bindings::IDLMemberInstaller;"), EmptyNode(), ]) if (is_per_context_install and "Global" in cg_context.class_like.extended_attributes): body.extend([ CxxLikelyIfNode(cond="${instance_object}.IsEmpty()", body=[ TextNode("""\ ${instance_object} = ${v8_context}->Global()->GetPrototype().As<v8::Object>();\ """), ]), EmptyNode(), ]) if install_prototype_object_node: body.extend([ CxxLikelyIfNode(cond="${feature_selector}.IsAll()", body=[install_prototype_object_node]), EmptyNode(), ]) def group_by_condition(entries): unconditional_entries = [] conditional_to_entries = {} for entry in entries: if entry.exposure_conditional.is_always_true: unconditional_entries.append(entry) else: conditional_to_entries.setdefault(entry.exposure_conditional, []).append(entry) return unconditional_entries, conditional_to_entries def install_properties(table_name, target_entries, make_table_func, installer_call_text): unconditional_entries, conditional_to_entries = group_by_condition( target_entries) if unconditional_entries: body.append( CxxBlockNode([ make_table_func(table_name, unconditional_entries), TextNode(installer_call_text), ])) body.append(EmptyNode()) for conditional, entries in conditional_to_entries.items(): body.append( CxxUnlikelyIfNode( cond=conditional, body=[ make_table_func(table_name, entries), TextNode(installer_call_text), ])) body.append(EmptyNode()) if is_per_context_install: pattern = ("{install_func}(" "${isolate}, ${world}, " "${instance_object}, " "${prototype_object}, " "${interface_object}, " "${signature}, {table_name});") else: pattern = ("{install_func}(" "${isolate}, ${world}, " "${instance_template}, " "${prototype_template}, " "${interface_template}, " "${signature}, {table_name});") table_name = "kAttributeTable" installer_call_text = _format( pattern, install_func="IDLMemberInstaller::InstallAttributes", table_name=table_name) install_properties(table_name, attribute_entries, _make_attribute_registration_table, installer_call_text) table_name = "kConstantCallbackTable" installer_call_text = _format( pattern, install_func="IDLMemberInstaller::InstallConstants", table_name=table_name) constant_callback_entries = list( filter(lambda entry: entry.const_callback_name, constant_entries)) install_properties(table_name, constant_callback_entries, _make_constant_callback_registration_table, installer_call_text) table_name = "kConstantValueTable" installer_call_text = _format( 
pattern, install_func="IDLMemberInstaller::InstallConstants", table_name=table_name) constant_value_entries = list( filter(lambda entry: not entry.const_callback_name, constant_entries)) install_properties(table_name, constant_value_entries, _make_constant_value_registration_table, installer_call_text) table_name = "kExposedConstructTable" installer_call_text = _format( pattern, install_func="IDLMemberInstaller::InstallExposedConstructs", table_name=table_name) install_properties(table_name, exposed_construct_entries, _make_exposed_construct_registration_table, installer_call_text) table_name = "kOperationTable" installer_call_text = _format( pattern, install_func="IDLMemberInstaller::InstallOperations", table_name=table_name) entries = list( filter(lambda entry: not entry.no_alloc_direct_call_callbacks, operation_entries)) install_properties(table_name, entries, _make_operation_registration_table, installer_call_text) entries = list( filter(lambda entry: entry.no_alloc_direct_call_callbacks, operation_entries)) install_properties(table_name, entries, _make_operation_registration_table, installer_call_text) return func_decl, func_def, trampoline_def def make_indexed_and_named_property_callbacks_and_install_node(cg_context): """ Implements non-ordinary internal methods of legacy platform objects. https://heycam.github.io/webidl/#es-legacy-platform-objects Also implements the same origin case of indexed access to WindowProxy objects just same as indexed properties of legacy platform objects. https://html.spec.whatwg.org/C/#the-windowproxy-exotic-object """ assert isinstance(cg_context, CodeGenContext) F = FormatNode func_decls = ListNode() func_defs = ListNode() install_node = SequenceNode() interface = cg_context.interface if not (interface and interface.indexed_and_named_properties and "Global" not in interface.extended_attributes): return func_decls, func_defs, install_node props = interface.indexed_and_named_properties def add_callback(func_decl, func_def): func_decls.append(func_decl) if func_def: func_defs.append(func_def) func_defs.append(EmptyNode()) def most_derived_interface(*interfaces): key = lambda interface: len(interface.inclusive_inherited_interfaces) return sorted(filter(None, interfaces), key=key)[-1] cg_context = cg_context.make_copy( v8_callback_type=CodeGenContext.V8_OTHER_CALLBACK) if props.own_named_getter: add_callback(*make_named_property_getter_callback( cg_context.make_copy(named_property_getter=props.named_getter), "NamedPropertyGetterCallback")) add_callback(*make_named_property_setter_callback( cg_context.make_copy(named_property_setter=props.named_setter), "NamedPropertySetterCallback")) add_callback(*make_named_property_deleter_callback( cg_context.make_copy(named_property_deleter=props.named_deleter), "NamedPropertyDeleterCallback")) add_callback(*make_named_property_definer_callback( cg_context, "NamedPropertyDefinerCallback")) add_callback(*make_named_property_descriptor_callback( cg_context, "NamedPropertyDescriptorCallback")) add_callback(*make_named_property_query_callback( cg_context, "NamedPropertyQueryCallback")) add_callback(*make_named_property_enumerator_callback( cg_context, "NamedPropertyEnumeratorCallback")) if props.named_getter: impl_bridge = v8_bridge_class_name( most_derived_interface( props.named_getter.owner, props.named_setter and props.named_setter.owner, props.named_deleter and props.named_deleter.owner)) flags = ["v8::PropertyHandlerFlags::kOnlyInterceptStrings"] if "LegacyOverrideBuiltIns" not in interface.extended_attributes: 
flags.append("v8::PropertyHandlerFlags::kNonMasking") if (props.named_getter.extended_attributes.value_of("Affects") == "Nothing"): flags.append("v8::PropertyHandlerFlags::kHasNoSideEffect") property_handler_flags = ( "static_cast<v8::PropertyHandlerFlags>({})".format(" | ".join( map(lambda flag: "int32_t({})".format(flag), flags)))) pattern = """\ // Named interceptors ${instance_object_template}->SetHandler( v8::NamedPropertyHandlerConfiguration( {impl_bridge}::NamedPropertyGetterCallback, {impl_bridge}::NamedPropertySetterCallback, % if "NotEnumerable" not in \ interface.indexed_and_named_properties.named_getter.extended_attributes: {impl_bridge}::NamedPropertyQueryCallback, % else: nullptr, // query % endif {impl_bridge}::NamedPropertyDeleterCallback, % if "NotEnumerable" not in \ interface.indexed_and_named_properties.named_getter.extended_attributes: {impl_bridge}::NamedPropertyEnumeratorCallback, % else: nullptr, // enumerator % endif {impl_bridge}::NamedPropertyDefinerCallback, {impl_bridge}::NamedPropertyDescriptorCallback, v8::Local<v8::Value>(), {property_handler_flags}));""" install_node.append( F(pattern, impl_bridge=impl_bridge, property_handler_flags=property_handler_flags)) if props.own_indexed_getter or props.own_named_getter: add_callback(*make_indexed_property_getter_callback( cg_context.make_copy(indexed_property_getter=props.indexed_getter), "IndexedPropertyGetterCallback")) add_callback(*make_indexed_property_setter_callback( cg_context.make_copy(indexed_property_setter=props.indexed_setter), "IndexedPropertySetterCallback")) add_callback(*make_indexed_property_deleter_callback( cg_context, "IndexedPropertyDeleterCallback")) add_callback(*make_indexed_property_definer_callback( cg_context, "IndexedPropertyDefinerCallback")) add_callback(*make_indexed_property_descriptor_callback( cg_context, "IndexedPropertyDescriptorCallback")) add_callback(*make_indexed_property_enumerator_callback( cg_context, "IndexedPropertyEnumeratorCallback")) if props.indexed_getter or props.named_getter: impl_bridge = v8_bridge_class_name( most_derived_interface( props.indexed_getter and props.indexed_getter.owner, props.indexed_setter and props.indexed_setter.owner, props.named_getter and props.named_getter.owner, props.named_setter and props.named_setter.owner, props.named_deleter and props.named_deleter.owner)) flags = [] if (props.indexed_getter and props.indexed_getter.extended_attributes. 
value_of("Affects") == "Nothing"): flags.append("v8::PropertyHandlerFlags::kHasNoSideEffect") else: flags.append("v8::PropertyHandlerFlags::kNone") property_handler_flags = flags[0] pattern = """\ // Indexed interceptors ${instance_object_template}->SetHandler( v8::IndexedPropertyHandlerConfiguration( {impl_bridge}::IndexedPropertyGetterCallback, {impl_bridge}::IndexedPropertySetterCallback, nullptr, // query {impl_bridge}::IndexedPropertyDeleterCallback, % if interface.indexed_and_named_properties.indexed_getter: {impl_bridge}::IndexedPropertyEnumeratorCallback, % else: nullptr, // enumerator % endif {impl_bridge}::IndexedPropertyDefinerCallback, {impl_bridge}::IndexedPropertyDescriptorCallback, v8::Local<v8::Value>(), {property_handler_flags}));""" install_node.append( F(pattern, impl_bridge=impl_bridge, property_handler_flags=property_handler_flags)) func_defs.accumulate( CodeGenAccumulator.require_include_headers([ "third_party/blink/renderer/bindings/core/v8/v8_set_return_value_for_core.h" ])) return func_decls, func_defs, install_node def make_named_properties_object_callbacks_and_install_node(cg_context): """ Implements non-ordinary internal methods of named properties objects. https://heycam.github.io/webidl/#named-properties-object """ assert isinstance(cg_context, CodeGenContext) callback_defs = [] install_node = SequenceNode() interface = cg_context.interface if not (interface and interface.indexed_and_named_properties and interface.indexed_and_named_properties.named_getter and "Global" in interface.extended_attributes): return callback_defs, install_node cg_context = cg_context.make_copy( v8_callback_type=CodeGenContext.V8_OTHER_CALLBACK) func_defs = [ make_named_props_obj_named_getter_callback( cg_context, "NamedPropsObjNamedGetterCallback"), make_named_props_obj_named_setter_callback( cg_context, "NamedPropsObjNamedSetterCallback"), make_named_props_obj_named_deleter_callback( cg_context, "NamedPropsObjNamedDeleterCallback"), make_named_props_obj_named_definer_callback( cg_context, "NamedPropsObjNamedDefinerCallback"), make_named_props_obj_named_descriptor_callback( cg_context, "NamedPropsObjNamedDescriptorCallback"), make_named_props_obj_indexed_getter_callback( cg_context, "NamedPropsObjIndexedGetterCallback"), make_named_props_obj_indexed_setter_callback( cg_context, "NamedPropsObjIndexedSetterCallback"), make_named_props_obj_indexed_deleter_callback( cg_context, "NamedPropsObjIndexedDeleterCallback"), make_named_props_obj_indexed_definer_callback( cg_context, "NamedPropsObjIndexedDefinerCallback"), make_named_props_obj_indexed_descriptor_callback( cg_context, "NamedPropsObjIndexedDescriptorCallback"), ] for func_def in func_defs: callback_defs.append(func_def) callback_defs.append(EmptyNode()) text = """\ // Named interceptors ${npo_prototype_template}->SetHandler( v8::NamedPropertyHandlerConfiguration( NamedPropsObjNamedGetterCallback, NamedPropsObjNamedSetterCallback, nullptr, // query NamedPropsObjNamedDeleterCallback, nullptr, // enumerator NamedPropsObjNamedDefinerCallback, NamedPropsObjNamedDescriptorCallback, v8::Local<v8::Value>(), static_cast<v8::PropertyHandlerFlags>( int32_t(v8::PropertyHandlerFlags::kNonMasking) | int32_t(v8::PropertyHandlerFlags::kOnlyInterceptStrings)))); // Indexed interceptors ${npo_prototype_template}->SetHandler( v8::IndexedPropertyHandlerConfiguration( NamedPropsObjIndexedGetterCallback, NamedPropsObjIndexedSetterCallback, nullptr, // query NamedPropsObjIndexedDeleterCallback, nullptr, // enumerator NamedPropsObjIndexedDefinerCallback, 
NamedPropsObjIndexedDescriptorCallback, v8::Local<v8::Value>(), v8::PropertyHandlerFlags::kNone));""" install_node.append(TextNode(text)) return callback_defs, install_node def make_cross_origin_property_callbacks_and_install_node( cg_context, attribute_entries, operation_entries): """ Implements non-ordinary internal methods of WindowProxy and Location objects. https://html.spec.whatwg.org/C/#the-windowproxy-exotic-object https://html.spec.whatwg.org/C/#the-location-interface """ assert isinstance(cg_context, CodeGenContext) callback_defs = [] install_node = SequenceNode() CROSS_ORIGIN_INTERFACES = ("Window", "Location") if cg_context.class_like.identifier not in CROSS_ORIGIN_INTERFACES: return callback_defs, install_node props = cg_context.interface.indexed_and_named_properties entry_nodes = [] for entry in attribute_entries: attribute = entry.property_ if "CrossOrigin" not in attribute.extended_attributes: continue assert entry.world == CodeGenContext.ALL_WORLDS values = attribute.extended_attributes.values_of("CrossOrigin") get_func = "nullptr" set_func = "nullptr" get_value = "nullptr" set_value = "nullptr" if not values or "Getter" in values: get_func = entry.attr_get_callback_name cgc = cg_context.make_copy( attribute=attribute, attribute_get=True, v8_callback_type=( CodeGenContext.V8_ACCESSOR_NAME_GETTER_CALLBACK)) get_value = callback_function_name(cgc, for_cross_origin=True) func_def = make_attribute_get_callback_def(cgc, get_value) callback_defs.extend([func_def, EmptyNode()]) if values and "Setter" in values: set_func = entry.attr_set_callback_name cgc = cg_context.make_copy( attribute=attribute, attribute_set=True, v8_callback_type=( CodeGenContext.V8_GENERIC_NAMED_PROPERTY_SETTER_CALLBACK)) set_value = callback_function_name(cgc, for_cross_origin=True) func_def = make_attribute_set_callback_def(cgc, set_value) callback_defs.extend([func_def, EmptyNode()]) pattern = ("{{\"{property_name}\", " "{get_func}, {set_func}, {get_value}, {set_value}}},") entry_nodes.append( TextNode( _format( pattern, property_name=attribute.identifier, get_func=get_func, set_func=set_func, get_value=get_value, set_value=set_value))) callback_defs.append( ListNode([ TextNode("constexpr bindings::CrossOriginAttributeTableEntry " "kCrossOriginAttributeTable[] = {"), ListNode(entry_nodes), TextNode("};"), EmptyNode(), ])) entry_nodes = [] for entry in operation_entries: operation_group = entry.property_ if "CrossOrigin" not in operation_group.extended_attributes: continue assert entry.world == CodeGenContext.ALL_WORLDS entry_nodes.append( TextNode( _format( "{{\"{property_name}\", {op_callback}, {op_func_length}}},", property_name=operation_group.identifier, op_callback=entry.op_callback_name, op_func_length=entry.op_func_length))) callback_defs.append( ListNode([ TextNode("constexpr bindings::CrossOriginOperationTableEntry " "kCrossOriginOperationTable[] = {"), ListNode(entry_nodes), TextNode("};"), EmptyNode(), ])) cg_context = cg_context.make_copy( v8_callback_type=CodeGenContext.V8_OTHER_CALLBACK) func_defs = [ make_cross_origin_access_check_callback( cg_context, "CrossOriginAccessCheckCallback"), make_cross_origin_named_getter_callback( cg_context, "CrossOriginNamedGetterCallback"), make_cross_origin_named_setter_callback( cg_context, "CrossOriginNamedSetterCallback"), make_cross_origin_named_deleter_callback( cg_context, "CrossOriginNamedDeleterCallback"), make_cross_origin_named_definer_callback( cg_context, "CrossOriginNamedDefinerCallback"), make_cross_origin_named_descriptor_callback( 
cg_context, "CrossOriginNamedDescriptorCallback"), make_cross_origin_named_query_callback( cg_context, "CrossOriginNamedQueryCallback"), make_cross_origin_named_enumerator_callback( cg_context, "CrossOriginNamedEnumeratorCallback"), make_cross_origin_indexed_getter_callback( cg_context.make_copy( indexed_property_getter=(props and props.indexed_getter)), "CrossOriginIndexedGetterCallback"), make_cross_origin_indexed_setter_callback( cg_context, "CrossOriginIndexedSetterCallback"), make_cross_origin_indexed_deleter_callback( cg_context, "CrossOriginIndexedDeleterCallback"), make_cross_origin_indexed_definer_callback( cg_context, "CrossOriginIndexedDefinerCallback"), make_cross_origin_indexed_descriptor_callback( cg_context, "CrossOriginIndexedDescriptorCallback"), make_cross_origin_indexed_enumerator_callback( cg_context, "CrossOriginIndexedEnumeratorCallback"), ] for func_def in func_defs: callback_defs.append(func_def) callback_defs.append(EmptyNode()) text = """\ // Cross origin properties ${instance_object_template}->SetAccessCheckCallbackAndHandler( CrossOriginAccessCheckCallback, v8::NamedPropertyHandlerConfiguration( CrossOriginNamedGetterCallback, CrossOriginNamedSetterCallback, CrossOriginNamedQueryCallback, CrossOriginNamedDeleterCallback, CrossOriginNamedEnumeratorCallback, CrossOriginNamedDefinerCallback, CrossOriginNamedDescriptorCallback, v8::Local<v8::Value>(), v8::PropertyHandlerFlags::kNone), v8::IndexedPropertyHandlerConfiguration( CrossOriginIndexedGetterCallback, CrossOriginIndexedSetterCallback, nullptr, // query CrossOriginIndexedDeleterCallback, CrossOriginIndexedEnumeratorCallback, CrossOriginIndexedDefinerCallback, CrossOriginIndexedDescriptorCallback, v8::Local<v8::Value>(), v8::PropertyHandlerFlags::kNone), v8::External::New( ${isolate}, const_cast<WrapperTypeInfo*>(${class_name}::GetWrapperTypeInfo()))); """ install_node.append(TextNode(text)) install_node.accumulate( CodeGenAccumulator.require_include_headers([ "third_party/blink/renderer/bindings/core/v8/binding_security.h", "third_party/blink/renderer/platform/bindings/v8_cross_origin_property_support.h", ])) if cg_context.interface.identifier != "Window": return callback_defs, install_node func_defs = [ make_same_origin_indexed_getter_callback( cg_context.make_copy( indexed_property_getter=(props and props.indexed_getter)), "SameOriginIndexedGetterCallback"), make_same_origin_indexed_setter_callback( cg_context, "SameOriginIndexedSetterCallback"), make_same_origin_indexed_deleter_callback( cg_context, "SameOriginIndexedDeleterCallback"), make_same_origin_indexed_definer_callback( cg_context, "SameOriginIndexedDefinerCallback"), make_same_origin_indexed_descriptor_callback( cg_context, "SameOriginIndexedDescriptorCallback"), make_same_origin_indexed_enumerator_callback( cg_context, "SameOriginIndexedEnumeratorCallback"), ] for func_def in func_defs: callback_defs.append(func_def) callback_defs.append(EmptyNode()) text = """\ // Same origin interceptors ${instance_object_template}->SetHandler( v8::IndexedPropertyHandlerConfiguration( SameOriginIndexedGetterCallback, SameOriginIndexedSetterCallback, nullptr, // query SameOriginIndexedDeleterCallback, SameOriginIndexedEnumeratorCallback, SameOriginIndexedDefinerCallback, SameOriginIndexedDescriptorCallback, v8::Local<v8::Value>(), v8::PropertyHandlerFlags::kNone)); """ install_node.append(TextNode(text)) return callback_defs, install_node def make_cross_component_init( cg_context, function_name, class_name, has_unconditional_props, has_context_independent_props, 
has_context_dependent_props): """ Returns: A triplet of CodeNode of: - function declaration - function definition - trampoline member variable definitions """ assert isinstance(cg_context, CodeGenContext) assert isinstance(function_name, str) assert isinstance(class_name, str) assert isinstance(has_unconditional_props, bool) assert isinstance(has_context_independent_props, bool) assert isinstance(has_context_dependent_props, bool) F = FormatNode def filter_four_trampolines(nodes): assert len(nodes) == 4 flags = (True, has_unconditional_props, has_context_independent_props, has_context_dependent_props) return [node for node, flag in zip(nodes, flags) if flag] trampoline_var_decls = ListNode( filter_four_trampolines([ F("static InstallInterfaceTemplateFuncType {};", TP_INSTALL_INTERFACE_TEMPLATE), F("static InstallUnconditionalPropertiesFuncType {};", TP_INSTALL_UNCONDITIONAL_PROPS), F("static InstallContextIndependentPropertiesFuncType {};", TP_INSTALL_CONTEXT_INDEPENDENT_PROPS), F("static InstallContextDependentPropertiesFuncType {};", TP_INSTALL_CONTEXT_DEPENDENT_PROPS), ])) trampoline_var_defs = ListNode( filter_four_trampolines([ F(("${class_name}::InstallInterfaceTemplateFuncType " "${class_name}::{} = nullptr;"), TP_INSTALL_INTERFACE_TEMPLATE), F(("${class_name}::InstallUnconditionalPropertiesFuncType " "${class_name}::{} = nullptr;"), TP_INSTALL_UNCONDITIONAL_PROPS), F(("${class_name}::InstallContextIndependentPropertiesFuncType " "${class_name}::{} = nullptr;"), TP_INSTALL_CONTEXT_INDEPENDENT_PROPS), F(("${class_name}::InstallContextDependentPropertiesFuncType " "${class_name}::{} = nullptr;"), TP_INSTALL_CONTEXT_DEPENDENT_PROPS), ])) trampoline_var_defs.set_base_template_vars(cg_context.template_bindings()) func_decl = CxxFuncDeclNode( name=function_name, arg_decls=[], return_type="void", static=True) func_def = CxxFuncDefNode( name=function_name, arg_decls=[], return_type="void", class_name=class_name) func_def.set_base_template_vars(cg_context.template_bindings()) body = func_def.body body.extend( filter_four_trampolines([ F("${class_name}::{} = {};", TP_INSTALL_INTERFACE_TEMPLATE, FN_INSTALL_INTERFACE_TEMPLATE), F("${class_name}::{} = {};", TP_INSTALL_UNCONDITIONAL_PROPS, FN_INSTALL_UNCONDITIONAL_PROPS), F("${class_name}::{} = {};", TP_INSTALL_CONTEXT_INDEPENDENT_PROPS, FN_INSTALL_CONTEXT_INDEPENDENT_PROPS), F("${class_name}::{} = {};", TP_INSTALL_CONTEXT_DEPENDENT_PROPS, FN_INSTALL_CONTEXT_DEPENDENT_PROPS), ])) return func_decl, func_def, trampoline_var_decls, trampoline_var_defs # ---------------------------------------------------------------------------- # WrapperTypeInfo # ---------------------------------------------------------------------------- def make_wrapper_type_info(cg_context, function_name, has_context_dependent_props): assert isinstance(cg_context, CodeGenContext) assert function_name == "GetWrapperTypeInfo" assert isinstance(has_context_dependent_props, bool) F = FormatNode func_def = CxxFuncDefNode( name=function_name, arg_decls=[], return_type="constexpr const WrapperTypeInfo*", static=True) func_def.set_base_template_vars(cg_context.template_bindings()) func_def.body.append(TextNode("return &wrapper_type_info_;")) member_var_def = TextNode( "static const WrapperTypeInfo wrapper_type_info_;") wrapper_type_info_def = ListNode() wrapper_type_info_def.set_base_template_vars( cg_context.template_bindings()) pattern = """\ // Construction of WrapperTypeInfo may require non-trivial initialization due // to cross-component address resolution in order to load the 
pointer to the // parent interface's WrapperTypeInfo. We ignore this issue because the issue // happens only on component builds and the official release builds // (statically-linked builds) are never affected by this issue. #if defined(COMPONENT_BUILD) && defined(WIN32) && defined(__clang__) #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wglobal-constructors" #endif const WrapperTypeInfo ${class_name}::wrapper_type_info_{{ gin::kEmbedderBlink, ${class_name}::{install_interface_template_func}, {install_context_dependent_func}, "${{class_like.identifier}}", {wrapper_type_info_of_inherited}, {wrapper_type_prototype}, {wrapper_class_id}, {active_script_wrappable_inheritance}, {idl_definition_kind}, }}; #if defined(COMPONENT_BUILD) && defined(WIN32) && defined(__clang__) #pragma clang diagnostic pop #endif """ class_like = cg_context.class_like if has_context_dependent_props: install_context_dependent_func = _format( "${class_name}::{}", FN_INSTALL_CONTEXT_DEPENDENT_PROPS) else: install_context_dependent_func = "nullptr" if class_like.is_interface and class_like.inherited: wrapper_type_info_of_inherited = "{}::GetWrapperTypeInfo()".format( v8_bridge_class_name(class_like.inherited)) else: wrapper_type_info_of_inherited = "nullptr" if class_like.is_interface: wrapper_type_prototype = "WrapperTypeInfo::kWrapperTypeObjectPrototype" else: wrapper_type_prototype = "WrapperTypeInfo::kWrapperTypeNoPrototype" if class_like.is_interface and class_like.does_implement("Node"): wrapper_class_id = "WrapperTypeInfo::kNodeClassId" else: wrapper_class_id = "WrapperTypeInfo::kObjectClassId" if class_like.code_generator_info.is_active_script_wrappable: active_script_wrappable_inheritance = ( "WrapperTypeInfo::kInheritFromActiveScriptWrappable") else: active_script_wrappable_inheritance = ( "WrapperTypeInfo::kNotInheritFromActiveScriptWrappable") if class_like.is_interface: idl_definition_kind = "WrapperTypeInfo::kIdlInterface" elif class_like.is_namespace: idl_definition_kind = "WrapperTypeInfo::kIdlNamespace" elif class_like.is_callback_interface: idl_definition_kind = "WrapperTypeInfo::kIdlCallbackInterface" wrapper_type_info_def.append( F(pattern, install_interface_template_func=FN_INSTALL_INTERFACE_TEMPLATE, install_context_dependent_func=install_context_dependent_func, wrapper_type_info_of_inherited=wrapper_type_info_of_inherited, wrapper_type_prototype=wrapper_type_prototype, wrapper_class_id=wrapper_class_id, active_script_wrappable_inheritance=( active_script_wrappable_inheritance), idl_definition_kind=idl_definition_kind)) if class_like.is_interface: blink_class = blink_class_name(class_like) pattern = """\ const WrapperTypeInfo& {blink_class}::wrapper_type_info_ = ${class_name}::wrapper_type_info_; """ wrapper_type_info_def.append(F(pattern, blink_class=blink_class)) if class_like.code_generator_info.is_active_script_wrappable: pattern = """\ // [ActiveScriptWrappable] static_assert( std::is_base_of<ActiveScriptWrappableBase, {blink_class}>::value, "{blink_class} does not inherit from ActiveScriptWrappable<> despite " "the IDL has [ActiveScriptWrappable] extended attribute."); static_assert( !std::is_same<decltype(&{blink_class}::HasPendingActivity), decltype(&ScriptWrappable::HasPendingActivity)>::value, "{blink_class} is not overriding hasPendingActivity() despite " "the IDL has [ActiveScriptWrappable] extended attribute.");""" else: pattern = """\ // non-[ActiveScriptWrappable] static_assert( !std::is_base_of<ActiveScriptWrappableBase, {blink_class}>::value, "{blink_class} 
inherits from ActiveScriptWrappable<> without " "[ActiveScriptWrappable] extended attribute."); static_assert( std::is_same<decltype(&{blink_class}::HasPendingActivity), decltype(&ScriptWrappable::HasPendingActivity)>::value, "{blink_class} is overriding hasPendingActivity() without " "[ActiveScriptWrappable] extended attribute.");""" if class_like.is_interface: wrapper_type_info_def.append(F(pattern, blink_class=blink_class)) return func_def, member_var_def, wrapper_type_info_def # ---------------------------------------------------------------------------- # V8 Context Snapshot # ---------------------------------------------------------------------------- def make_v8_context_snapshot_api(cg_context, component, attribute_entries, constant_entries, constructor_entries, exposed_construct_entries, operation_entries, named_properties_object_callback_defs, cross_origin_property_callback_defs, install_context_independent_func_name): assert isinstance(cg_context, CodeGenContext) assert isinstance(component, web_idl.Component) if not cg_context.interface: return None, None derived_interfaces = cg_context.interface.deriveds derived_names = list( map(lambda interface: interface.identifier, derived_interfaces)) derived_names.append(cg_context.interface.identifier) if not ("Window" in derived_names or "HTMLDocument" in derived_names): return None, None header_ns = CxxNamespaceNode(name_style.namespace("v8_context_snapshot")) source_ns = CxxNamespaceNode(name_style.namespace("v8_context_snapshot")) export_text = component_export(component, False) def add_func(func_decl, func_def): header_ns.body.extend([ TextNode(export_text), func_decl, EmptyNode(), ]) source_ns.body.extend([ func_def, EmptyNode(), ]) add_func(*_make_v8_context_snapshot_get_reference_table_function( cg_context, name_style.func("GetRefTableOf", cg_context.class_name), attribute_entries, constant_entries, constructor_entries, exposed_construct_entries, operation_entries, named_properties_object_callback_defs, cross_origin_property_callback_defs)) add_func(*_make_v8_context_snapshot_install_props_per_context_function( cg_context, name_style.func("InstallPropsOf", cg_context.class_name), attribute_entries, constant_entries, exposed_construct_entries, operation_entries)) add_func(*_make_v8_context_snapshot_install_props_per_isolate_function( cg_context, name_style.func("InstallPropsOf", cg_context.class_name), install_context_independent_func_name)) return header_ns, source_ns def _make_v8_context_snapshot_get_reference_table_function( cg_context, function_name, attribute_entries, constant_entries, constructor_entries, exposed_construct_entries, operation_entries, named_properties_object_callback_defs, cross_origin_property_callback_defs): callback_names = ["${class_name}::GetWrapperTypeInfo()"] for entry in attribute_entries: if entry.exposure_conditional.is_always_true: callback_names.append(entry.attr_get_callback_name) callback_names.append(entry.attr_set_callback_name) for entry in constant_entries: if entry.exposure_conditional.is_always_true: callback_names.append(entry.const_callback_name) for entry in constructor_entries: if entry.exposure_conditional.is_always_true: callback_names.append(entry.ctor_callback_name) for entry in exposed_construct_entries: if entry.exposure_conditional.is_always_true: callback_names.append(entry.prop_callback_name) for entry in operation_entries: if entry.exposure_conditional.is_always_true: callback_names.append(entry.op_callback_name) def collect_callbacks(node): if isinstance(node, 
CxxFuncDefNode): callback_names.append(node.function_name) elif hasattr(node, "__iter__"): for child_node in node: collect_callbacks(child_node) collect_callbacks(named_properties_object_callback_defs) collect_callbacks(cross_origin_property_callback_defs) entry_nodes = list( map( lambda name: TextNode("reinterpret_cast<intptr_t>({}),".format(name )), filter(None, callback_names))) table_node = ListNode([ TextNode("static const intptr_t kReferenceTable[] = {"), ListNode(entry_nodes), TextNode("};"), ]) func_decl = CxxFuncDeclNode(name=function_name, arg_decls=[], return_type="base::span<const intptr_t>") func_def = CxxFuncDefNode(name=function_name, arg_decls=[], return_type="base::span<const intptr_t>") func_def.set_base_template_vars(cg_context.template_bindings()) body = func_def.body body.extend([table_node, TextNode("return kReferenceTable;")]) return func_decl, func_def def _make_v8_context_snapshot_install_props_per_context_function( cg_context, function_name, attribute_entries, constant_entries, exposed_construct_entries, operation_entries): def selector(entry): if entry.exposure_conditional.is_always_true: return False if entry.is_context_dependent: return False return True func_decl, func_def, _ = make_install_properties( cg_context, function_name, class_name=None, prop_install_mode=PropInstallMode.V8_CONTEXT_SNAPSHOT, trampoline_var_name=None, attribute_entries=list(filter(selector, attribute_entries)), constant_entries=list(filter(selector, constant_entries)), exposed_construct_entries=list( filter(selector, exposed_construct_entries)), operation_entries=list(filter(selector, operation_entries))) return func_decl, func_def def _make_v8_context_snapshot_install_props_per_isolate_function( cg_context, function_name, install_context_independent_func_name): arg_decls = [ "v8::Isolate* isolate", "const DOMWrapperWorld& world", "v8::Local<v8::Template> instance_template", "v8::Local<v8::Template> prototype_template", "v8::Local<v8::Template> interface_template", ] arg_names = [ "isolate", "world", "instance_template", "prototype_template", "interface_template", ] return_type = "void" func_decl = CxxFuncDeclNode(name=function_name, arg_decls=arg_decls, return_type=return_type) func_def = CxxFuncDefNode(name=function_name, arg_decls=arg_decls, return_type=return_type) if not install_context_independent_func_name: return func_decl, func_def func_def.set_base_template_vars(cg_context.template_bindings()) body = func_def.body for arg_name in arg_names: body.add_template_var(arg_name, arg_name) pattern = """\ return ${class_name}::{func}( ${isolate}, ${world}, ${instance_template}, ${prototype_template}, ${interface_template});\ """ body.append( TextNode(_format(pattern, func=install_context_independent_func_name))) return func_decl, func_def # ---------------------------------------------------------------------------- # Main functions # ---------------------------------------------------------------------------- def _collect_include_headers(class_like): assert isinstance(class_like, (web_idl.Interface, web_idl.Namespace)) headers = set(class_like.code_generator_info.blink_headers) def collect_from_idl_type(idl_type): idl_type.apply_to_all_composing_elements(add_include_headers) def add_include_headers(idl_type): # ScriptPromise doesn't require any header for the result type. 
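        # (StopIteration is raised below purely as an early-exit signal for
        # apply_to_all_composing_elements, so that it does not descend further
        # into this type; it is not an error condition.)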
if idl_type.is_promise: raise StopIteration(idl_type.syntactic_form) type_def_obj = idl_type.type_definition_object if type_def_obj is not None: if (type_def_obj.identifier in ( "OnErrorEventHandlerNonNull", "OnBeforeUnloadEventHandlerNonNull")): raise StopIteration(idl_type.syntactic_form) headers.add(PathManager(type_def_obj).api_path(ext="h")) if type_def_obj.is_interface or type_def_obj.is_namespace: headers.add(PathManager(type_def_obj).blink_path(ext="h")) raise StopIteration(idl_type.syntactic_form) union_def_obj = idl_type.union_definition_object if union_def_obj is not None: headers.add(PathManager(union_def_obj).api_path(ext="h")) for attribute in class_like.attributes: collect_from_idl_type(attribute.idl_type) operations = [] operations.extend(class_like.constructors) operations.extend(class_like.operations) if class_like.is_interface: for x in [class_like.iterable, class_like.maplike, class_like.setlike]: if x: operations.extend(x.operations) for operation in operations: collect_from_idl_type(operation.return_type) for argument in operation.arguments: collect_from_idl_type(argument.idl_type) if class_like.is_interface: for exposed_construct in class_like.exposed_constructs: headers.add(PathManager(exposed_construct).api_path(ext="h")) for legacy_window_alias in class_like.legacy_window_aliases: headers.add( PathManager(legacy_window_alias.original).api_path(ext="h")) path_manager = PathManager(class_like) headers.discard(path_manager.api_path(ext="h")) headers.discard(path_manager.impl_path(ext="h")) # TODO(yukishiino): Window interface should be # [ImplementedAs=LocalDOMWindow] instead of [ImplementedAs=DOMWindow], and # [CrossOrigin] properties should be implemented specifically with # DOMWindow class. Then, we'll have less hacks. if class_like.identifier == "Window": headers.add("third_party/blink/renderer/core/frame/local_dom_window.h") return headers def generate_class_like(class_like): assert isinstance(class_like, (web_idl.Interface, web_idl.Namespace)) path_manager = PathManager(class_like) api_component = path_manager.api_component impl_component = path_manager.impl_component is_cross_components = path_manager.is_cross_components for_testing = class_like.code_generator_info.for_testing # Class names api_class_name = v8_bridge_class_name(class_like) if is_cross_components: impl_class_name = "{}::Impl".format(api_class_name) else: impl_class_name = api_class_name interface = None namespace = None if class_like.is_interface: interface = class_like cg_context = CodeGenContext(interface=interface, class_name=api_class_name) elif class_like.is_namespace: namespace = class_like cg_context = CodeGenContext(namespace=namespace, class_name=api_class_name) # Filepaths api_header_path = path_manager.api_path(ext="h") api_source_path = path_manager.api_path(ext="cc") if is_cross_components: impl_header_path = path_manager.impl_path(ext="h") impl_source_path = path_manager.impl_path(ext="cc") # Root nodes api_header_node = ListNode(tail="\n") api_header_node.set_accumulator(CodeGenAccumulator()) api_header_node.set_renderer(MakoRenderer()) api_source_node = ListNode(tail="\n") api_source_node.set_accumulator(CodeGenAccumulator()) api_source_node.set_renderer(MakoRenderer()) if is_cross_components: impl_header_node = ListNode(tail="\n") impl_header_node.set_accumulator(CodeGenAccumulator()) impl_header_node.set_renderer(MakoRenderer()) impl_source_node = ListNode(tail="\n") impl_source_node.set_accumulator(CodeGenAccumulator()) impl_source_node.set_renderer(MakoRenderer()) else: 
impl_header_node = api_header_node impl_source_node = api_source_node # Namespaces api_header_blink_ns = CxxNamespaceNode(name_style.namespace("blink")) api_source_blink_ns = CxxNamespaceNode(name_style.namespace("blink")) if is_cross_components: impl_header_blink_ns = CxxNamespaceNode(name_style.namespace("blink")) impl_source_blink_ns = CxxNamespaceNode(name_style.namespace("blink")) else: impl_header_blink_ns = api_header_blink_ns impl_source_blink_ns = api_source_blink_ns # Class definitions api_class_def = CxxClassDefNode( cg_context.class_name, base_class_names=[ _format("bindings::V8InterfaceBridge<${class_name}, {}>", blink_class_name(class_like)), ], final=True, export=component_export(api_component, for_testing)) api_class_def.set_base_template_vars(cg_context.template_bindings()) api_class_def.bottom_section.append( TextNode("friend class {};".format(blink_class_name(class_like)))) if is_cross_components: impl_class_def = CxxClassDefNode(impl_class_name, final=True, export=component_export( impl_component, for_testing)) impl_class_def.set_base_template_vars(cg_context.template_bindings()) api_class_def.public_section.extend([ TextNode("// Cross-component implementation class"), TextNode("class Impl;"), EmptyNode(), ]) else: impl_class_def = api_class_def # Constants constants_def = None if class_like.constants: constants_def = CxxClassDefNode(name="Constant", final=True) constants_def.top_section.append(TextNode("STATIC_ONLY(Constant);")) for constant in class_like.constants: cgc = cg_context.make_copy(constant=constant) constants_def.public_section.append( make_constant_constant_def(cgc, constant_name(cgc))) # Custom callback implementations custom_callback_impl_decls = ListNode() def add_custom_callback_impl_decl(**params): arg_decls = params.pop("arg_decls") name = params.pop("name", None) if name is None: name = custom_function_name(cg_context.make_copy(**params)) custom_callback_impl_decls.append( CxxFuncDeclNode( name=name, arg_decls=arg_decls, return_type="void", static=True)) if class_like.identifier == "HTMLAllCollection": add_custom_callback_impl_decl( name=name_style.func("LegacyCallCustom"), arg_decls=["const v8::FunctionCallbackInfo<v8::Value>&"]) for attribute in class_like.attributes: custom_values = attribute.extended_attributes.values_of("Custom") is_cross_origin = "CrossOrigin" in attribute.extended_attributes cross_origin_values = attribute.extended_attributes.values_of( "CrossOrigin") if "Getter" in custom_values: add_custom_callback_impl_decl( attribute=attribute, attribute_get=True, arg_decls=["const v8::FunctionCallbackInfo<v8::Value>&"]) if is_cross_origin and (not cross_origin_values or "Getter" in cross_origin_values): add_custom_callback_impl_decl( attribute=attribute, attribute_get=True, arg_decls=["const v8::PropertyCallbackInfo<v8::Value>&"]) if "Setter" in custom_values: add_custom_callback_impl_decl( attribute=attribute, attribute_set=True, arg_decls=[ "v8::Local<v8::Value>", "const v8::FunctionCallbackInfo<v8::Value>&", ]) if is_cross_origin and (not cross_origin_values or "Setter" in cross_origin_values): add_custom_callback_impl_decl( attribute=attribute, attribute_set=True, arg_decls=[ "v8::Local<v8::Value>", "const v8::PropertyCallbackInfo<void>&", ]) for operation_group in class_like.operation_groups: if "Custom" in operation_group.extended_attributes: add_custom_callback_impl_decl( operation_group=operation_group, arg_decls=["const v8::FunctionCallbackInfo<v8::Value>&"]) if interface and interface.indexed_and_named_properties: props = 
interface.indexed_and_named_properties operation = props.own_named_getter if operation and "Custom" in operation.extended_attributes: add_custom_callback_impl_decl( named_property_getter=operation, arg_decls=[ "const AtomicString& property_name", "const v8::PropertyCallbackInfo<v8::Value>&", ]) operation = props.own_named_setter if operation and "Custom" in operation.extended_attributes: add_custom_callback_impl_decl( named_property_setter=operation, arg_decls=[ "const AtomicString& property_name", "v8::Local<v8::Value> v8_property_value", "const v8::PropertyCallbackInfo<v8::Value>&", ]) operation = props.own_named_deleter if operation and "Custom" in operation.extended_attributes: add_custom_callback_impl_decl( named_property_deleter=operation, arg_decls=[ "const AtomicString& property_name", "const v8::PropertyCallbackInfo<v8::Value>&", ]) # Cross-component trampolines if is_cross_components: # tp_ = trampoline name tp_install_interface_template = TP_INSTALL_INTERFACE_TEMPLATE tp_install_unconditional_props = TP_INSTALL_UNCONDITIONAL_PROPS tp_install_context_independent_props = ( TP_INSTALL_CONTEXT_INDEPENDENT_PROPS) tp_install_context_dependent_props = TP_INSTALL_CONTEXT_DEPENDENT_PROPS else: tp_install_interface_template = None tp_install_unconditional_props = None tp_install_context_independent_props = None tp_install_context_dependent_props = None # Callback functions attribute_entries = [] constant_entries = [] constructor_entries = [] exposed_construct_entries = [] operation_entries = [] callback_defs = make_property_entries_and_callback_defs( cg_context, attribute_entries=attribute_entries, constant_entries=constant_entries, constructor_entries=constructor_entries, exposed_construct_entries=exposed_construct_entries, operation_entries=operation_entries) supplemental_install_node = SequenceNode() # Indexed and named properties # Shorten a function name to mitigate a style check error. 
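    # The helper returns the interceptor callback declarations/definitions for
    # indexed and named properties plus the node that installs the property
    # handlers on the instance template; the install node is appended to
    # supplemental_install_node right below.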
f = make_indexed_and_named_property_callbacks_and_install_node (indexed_and_named_property_decls, indexed_and_named_property_defs, indexed_and_named_property_install_node) = f(cg_context) supplemental_install_node.append(indexed_and_named_property_install_node) supplemental_install_node.append(EmptyNode()) # Named properties object (named_properties_object_callback_defs, named_properties_object_install_node) = ( make_named_properties_object_callbacks_and_install_node(cg_context)) callback_defs.extend(named_properties_object_callback_defs) supplemental_install_node.append(named_properties_object_install_node) supplemental_install_node.append(EmptyNode()) # Cross origin properties (cross_origin_property_callback_defs, cross_origin_property_install_node) = ( make_cross_origin_property_callbacks_and_install_node( cg_context, attribute_entries, operation_entries)) callback_defs.extend(cross_origin_property_callback_defs) supplemental_install_node.append(cross_origin_property_install_node) supplemental_install_node.append(EmptyNode()) # Installer functions is_unconditional = lambda entry: entry.exposure_conditional.is_always_true is_context_dependent = lambda entry: entry.is_context_dependent is_context_independent = ( lambda e: not is_context_dependent(e) and not is_unconditional(e)) (install_unconditional_props_decl, install_unconditional_props_def, install_unconditional_props_trampoline) = make_install_properties( cg_context, FN_INSTALL_UNCONDITIONAL_PROPS, class_name=impl_class_name, prop_install_mode=PropInstallMode.UNCONDITIONAL, trampoline_var_name=tp_install_unconditional_props, attribute_entries=list(filter(is_unconditional, attribute_entries)), constant_entries=list(filter(is_unconditional, constant_entries)), exposed_construct_entries=list( filter(is_unconditional, exposed_construct_entries)), operation_entries=list(filter(is_unconditional, operation_entries))) (install_context_independent_props_decl, install_context_independent_props_def, install_context_independent_props_trampoline) = make_install_properties( cg_context, FN_INSTALL_CONTEXT_INDEPENDENT_PROPS, class_name=impl_class_name, prop_install_mode=PropInstallMode.CONTEXT_INDEPENDENT, trampoline_var_name=tp_install_context_independent_props, attribute_entries=list( filter(is_context_independent, attribute_entries)), constant_entries=list(filter(is_context_independent, constant_entries)), exposed_construct_entries=list( filter(is_context_independent, exposed_construct_entries)), operation_entries=list( filter(is_context_independent, operation_entries))) (install_context_dependent_props_decl, install_context_dependent_props_def, install_context_dependent_props_trampoline) = make_install_properties( cg_context, FN_INSTALL_CONTEXT_DEPENDENT_PROPS, class_name=impl_class_name, prop_install_mode=PropInstallMode.CONTEXT_DEPENDENT, trampoline_var_name=tp_install_context_dependent_props, attribute_entries=list(filter(is_context_dependent, attribute_entries)), constant_entries=list(filter(is_context_dependent, constant_entries)), exposed_construct_entries=list( filter(is_context_dependent, exposed_construct_entries)), operation_entries=list(filter(is_context_dependent, operation_entries))) (install_interface_template_decl, install_interface_template_def, install_interface_template_trampoline) = make_install_interface_template( cg_context, FN_INSTALL_INTERFACE_TEMPLATE, class_name=impl_class_name, trampoline_var_name=tp_install_interface_template, constructor_entries=constructor_entries, supplemental_install_node=supplemental_install_node, 
install_unconditional_func_name=(install_unconditional_props_def and FN_INSTALL_UNCONDITIONAL_PROPS), install_context_independent_func_name=( install_context_independent_props_def and FN_INSTALL_CONTEXT_INDEPENDENT_PROPS)) installer_function_decls = ListNode([ install_interface_template_decl, install_unconditional_props_decl, install_context_independent_props_decl, install_context_dependent_props_decl, ]) installer_function_defs = ListNode([ install_interface_template_def, EmptyNode(), install_unconditional_props_def, EmptyNode(), install_context_independent_props_def, EmptyNode(), install_context_dependent_props_def, ]) installer_function_trampolines = ListNode([ install_interface_template_trampoline, install_unconditional_props_trampoline, install_context_independent_props_trampoline, install_context_dependent_props_trampoline, ]) # WrapperTypeInfo (get_wrapper_type_info_def, wrapper_type_info_var_def, wrapper_type_info_init) = make_wrapper_type_info( cg_context, "GetWrapperTypeInfo", has_context_dependent_props=bool( install_context_dependent_props_decl)) # Cross-component trampolines if is_cross_components: (cross_component_init_decl, cross_component_init_def, trampoline_var_decls, trampoline_var_defs) = make_cross_component_init( cg_context, "Init", class_name=impl_class_name, has_unconditional_props=bool(install_unconditional_props_decl), has_context_independent_props=bool( install_context_independent_props_decl), has_context_dependent_props=bool( install_context_dependent_props_decl)) # V8 Context Snapshot (header_v8_context_snapshot_ns, source_v8_context_snapshot_ns) = make_v8_context_snapshot_api( cg_context, impl_component, attribute_entries, constant_entries, constructor_entries, exposed_construct_entries, operation_entries, named_properties_object_callback_defs, cross_origin_property_callback_defs, (install_context_independent_props_def and FN_INSTALL_CONTEXT_INDEPENDENT_PROPS)) # Header part (copyright, include directives, and forward declarations) api_header_node.extend([ make_copyright_header(), EmptyNode(), enclose_with_header_guard( ListNode([ make_header_include_directives(api_header_node.accumulator), EmptyNode(), api_header_blink_ns, ]), name_style.header_guard(api_header_path)), ]) api_header_blink_ns.body.extend([ make_forward_declarations(api_header_node.accumulator), EmptyNode(), ]) api_source_node.extend([ make_copyright_header(), EmptyNode(), TextNode("#include \"{}\"".format(api_header_path)), EmptyNode(), make_header_include_directives(api_source_node.accumulator), EmptyNode(), api_source_blink_ns, ]) api_source_blink_ns.body.extend([ make_forward_declarations(api_source_node.accumulator), EmptyNode(), ]) if is_cross_components: impl_header_node.extend([ make_copyright_header(), EmptyNode(), enclose_with_header_guard( ListNode([ make_header_include_directives( impl_header_node.accumulator), EmptyNode(), impl_header_blink_ns, ]), name_style.header_guard(impl_header_path)), ]) impl_header_blink_ns.body.extend([ make_forward_declarations(impl_header_node.accumulator), EmptyNode(), ]) impl_source_node.extend([ make_copyright_header(), EmptyNode(), TextNode("#include \"{}\"".format(impl_header_path)), EmptyNode(), make_header_include_directives(impl_source_node.accumulator), EmptyNode(), impl_source_blink_ns, ]) impl_source_blink_ns.body.extend([ make_forward_declarations(impl_source_node.accumulator), EmptyNode(), ]) api_header_node.accumulator.add_include_headers([ class_like.code_generator_info.blink_headers[0], component_export_header(api_component, for_testing), 
"third_party/blink/renderer/platform/bindings/v8_interface_bridge.h", ]) if interface and interface.inherited: api_source_node.accumulator.add_include_headers( [PathManager(interface.inherited).api_path(ext="h")]) if is_cross_components: impl_header_node.accumulator.add_include_headers([ api_header_path, component_export_header(impl_component, for_testing), ]) impl_source_node.accumulator.add_include_headers([ "third_party/blink/renderer/bindings/core/v8/generated_code_helper.h", "third_party/blink/renderer/bindings/core/v8/native_value_traits_impl.h", "third_party/blink/renderer/bindings/core/v8/to_v8_traits.h", "third_party/blink/renderer/bindings/core/v8/v8_set_return_value_for_core.h", "third_party/blink/renderer/platform/bindings/exception_messages.h", "third_party/blink/renderer/platform/bindings/idl_member_installer.h", "third_party/blink/renderer/platform/bindings/runtime_call_stats.h", "third_party/blink/renderer/platform/bindings/v8_binding.h", ]) impl_source_node.accumulator.add_include_headers( _collect_include_headers(class_like)) # Assemble the parts. api_header_blink_ns.body.extend([ api_class_def, EmptyNode(), ]) if is_cross_components: impl_header_blink_ns.body.extend([ impl_class_def, EmptyNode(), ]) if constants_def: api_class_def.public_section.extend([ TextNode("// Constants"), constants_def, EmptyNode(), ]) api_class_def.public_section.append(get_wrapper_type_info_def) api_class_def.public_section.append(EmptyNode()) api_class_def.private_section.append(wrapper_type_info_var_def) api_class_def.private_section.append(EmptyNode()) api_source_blink_ns.body.extend([ wrapper_type_info_init, EmptyNode(), ]) if is_cross_components: api_class_def.public_section.append(installer_function_trampolines) api_class_def.public_section.append(EmptyNode()) api_class_def.private_section.extend([ TextNode("// Cross-component trampolines"), trampoline_var_decls, EmptyNode(), ]) api_source_blink_ns.body.extend([ TextNode("// Cross-component trampolines"), trampoline_var_defs, EmptyNode(), ]) impl_class_def.public_section.append(cross_component_init_decl) impl_class_def.private_section.append(installer_function_decls) impl_source_blink_ns.body.extend([ cross_component_init_def, EmptyNode(), ]) else: api_class_def.public_section.append(installer_function_decls) api_class_def.public_section.append(EmptyNode()) if custom_callback_impl_decls: api_class_def.public_section.extend([ TextNode("// Custom callback implementations"), custom_callback_impl_decls, EmptyNode(), ]) if indexed_and_named_property_decls: api_class_def.public_section.extend([ TextNode("// Indexed properties and named properties"), indexed_and_named_property_decls, EmptyNode(), ]) api_source_blink_ns.body.extend([ indexed_and_named_property_defs, EmptyNode(), ]) debugging_namespace_name = name_style.namespace("v8", class_like.identifier) impl_source_blink_ns.body.extend([ CxxNamespaceNode( name="", body=[ # Enclose the implementations with a namespace just in order to # include the class_like name in a stacktrace, such as # # blink::(anonymous namespace)::v8_class_like::XxxCallback # # Note that XxxCallback doesn't include the class_like name. 
CxxNamespaceNode(name=debugging_namespace_name, body=callback_defs), EmptyNode(), TextNode( "using namespace {};".format(debugging_namespace_name)), ]), EmptyNode(), installer_function_defs, EmptyNode(), ]) if header_v8_context_snapshot_ns: impl_header_blink_ns.body.extend([ CxxNamespaceNode(name=name_style.namespace("bindings"), body=header_v8_context_snapshot_ns), EmptyNode(), ]) impl_source_blink_ns.body.extend([ CxxNamespaceNode(name=name_style.namespace("bindings"), body=source_v8_context_snapshot_ns), EmptyNode(), ]) # Write down to the files. write_code_node_to_file(api_header_node, path_manager.gen_path_to(api_header_path)) write_code_node_to_file(api_source_node, path_manager.gen_path_to(api_source_path)) if path_manager.is_cross_components: write_code_node_to_file(impl_header_node, path_manager.gen_path_to(impl_header_path)) write_code_node_to_file(impl_source_node, path_manager.gen_path_to(impl_source_path)) def generate_interface(interface_identifier): assert isinstance(interface_identifier, web_idl.Identifier) web_idl_database = package_initializer().web_idl_database() interface = web_idl_database.find(interface_identifier) generate_class_like(interface) def generate_install_properties_per_feature(function_name, filepath_basename, for_testing=False): assert isinstance(function_name, str) assert isinstance(filepath_basename, str) assert isinstance(for_testing, bool) web_idl_database = package_initializer().web_idl_database() # Filepaths header_path = PathManager.component_path("modules", "{}.h".format(filepath_basename)) source_path = PathManager.component_path("modules", "{}.cc".format(filepath_basename)) # Root nodes header_node = ListNode(tail="\n") header_node.set_accumulator(CodeGenAccumulator()) header_node.set_renderer(MakoRenderer()) source_node = ListNode(tail="\n") source_node.set_accumulator(CodeGenAccumulator()) source_node.set_renderer(MakoRenderer()) # Namespaces header_blink_ns = CxxNamespaceNode(name_style.namespace("blink")) source_blink_ns = CxxNamespaceNode(name_style.namespace("blink")) header_bindings_ns = CxxNamespaceNode(name_style.namespace("bindings")) source_bindings_ns = CxxNamespaceNode(name_style.namespace("bindings")) header_blink_ns.body.extend([ make_forward_declarations(header_node.accumulator), EmptyNode(), header_bindings_ns, ]) source_blink_ns.body.append(source_bindings_ns) # Function nodes arg_decls = [ "ScriptState* script_state", "OriginTrialFeature feature", ] func_decl = CxxFuncDeclNode( name=function_name, arg_decls=arg_decls, return_type="void") func_def = CxxFuncDefNode( name=function_name, arg_decls=arg_decls, return_type="void") func_def.body.add_template_vars({ "script_state": "script_state", "feature": "feature", }) helper_func_def = CxxFuncDefNode( name="InstallPropertiesPerFeatureInternal", arg_decls=[ "ScriptState* script_state", "OriginTrialFeature feature", "base::span<const WrapperTypeInfo* const> wrapper_type_info_list", ], return_type="void") # Assemble the parts. 
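    # For orientation: once assembled, the emitted public function has roughly
    # this shape (illustrative only: "kSomeFeature" and "V8SomeInterface" are
    # placeholder names, and the exact formatting and brace placement are
    # decided by the CxxSwitchNode/MakoRenderer machinery):
    #
    #   void InstallPropertiesPerFeature(ScriptState* script_state,
    #                                    OriginTrialFeature feature) {
    #     base::span<const WrapperTypeInfo* const> selected_wti_list;
    #     switch (feature) {
    #       case OriginTrialFeature::kSomeFeature: {
    #         static const WrapperTypeInfo* const wti_list[] = {
    #             V8SomeInterface::GetWrapperTypeInfo(),
    #         };
    #         selected_wti_list = wti_list;
    #         break;
    #       }
    #       default:
    #         // Ignore unknown, deprecated, and unused features.
    #         return;
    #     }
    #     InstallPropertiesPerFeatureInternal(script_state, feature,
    #                                         selected_wti_list);
    #   }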
header_node.accumulator.add_class_decls(["ScriptState"]) header_node.accumulator.add_include_headers([ "third_party/blink/renderer/platform/runtime_enabled_features.h", ]) header_node.extend([ make_copyright_header(), EmptyNode(), enclose_with_header_guard( ListNode([ make_header_include_directives(header_node.accumulator), EmptyNode(), header_blink_ns, ]), name_style.header_guard(header_path)), ]) source_node.accumulator.add_include_headers([ "base/containers/span.h", "third_party/blink/renderer/platform/bindings/script_state.h", "third_party/blink/renderer/platform/bindings/v8_per_context_data.h", ]) source_node.extend([ make_copyright_header(), EmptyNode(), TextNode("#include \"{}\"".format(header_path)), EmptyNode(), make_header_include_directives(source_node.accumulator), EmptyNode(), source_blink_ns, ]) header_bindings_ns.body.extend([ TextNode("""\ // Install ES properties associated with the given origin trial feature.\ """), func_decl, ]) source_bindings_ns.body.extend([ CxxNamespaceNode(name="", body=helper_func_def), EmptyNode(), func_def, ]) # The public function feature_to_class_likes = {} set_of_class_likes = set() for class_like in itertools.chain(web_idl_database.interfaces, web_idl_database.namespaces): if class_like.code_generator_info.for_testing != for_testing: continue for member in itertools.chain(class_like.attributes, class_like.constants, class_like.operation_groups, class_like.exposed_constructs): features = list( member.exposure.context_dependent_runtime_enabled_features) for entry in member.exposure.global_names_and_features: if entry.feature and entry.feature.is_context_dependent: features.append(entry.feature) for feature in features: feature_to_class_likes.setdefault(feature, set()).add(class_like) if features: set_of_class_likes.add(class_like) switch_node = CxxSwitchNode(cond="${feature}") switch_node.append( case=None, body=[ TextNode("// Ignore unknown, deprecated, and unused features."), TextNode("return;"), ], should_add_break=False) for feature, class_likes in sorted(feature_to_class_likes.items()): entries = [ TextNode("{}::GetWrapperTypeInfo(), ".format( v8_bridge_class_name(class_like))) for class_like in sorted(class_likes, key=lambda x: x.identifier) ] table_def = ListNode([ TextNode("static const WrapperTypeInfo* const wti_list[] = {"), ListNode(entries), TextNode("};"), ]) switch_node.append( case="OriginTrialFeature::k{}".format(feature), body=[ table_def, TextNode("selected_wti_list = wti_list;"), ]) func_def.body.extend([ TextNode( "base::span<const WrapperTypeInfo* const> selected_wti_list;"), EmptyNode(), switch_node, EmptyNode(), TextNode("InstallPropertiesPerFeatureInternal" "(${script_state}, ${feature}, selected_wti_list);"), ]) for class_like in set_of_class_likes: path_manager = PathManager(class_like) source_node.accumulator.add_include_headers( [path_manager.api_path(ext="h")]) # The helper function helper_func_def.body.append( TextNode("""\ V8PerContextData* per_context_data = script_state->PerContextData(); v8::Isolate* isolate = script_state->GetIsolate(); v8::Local<v8::Context> context = script_state->GetContext(); const DOMWrapperWorld& world = script_state->World(); V8InterfaceBridgeBase::FeatureSelector feature_selector(feature); for (const auto* wrapper_type_info : wrapper_type_info_list) { v8::Local<v8::Object> instance_object; v8::Local<v8::Object> prototype_object; v8::Local<v8::Function> interface_object; v8::Local<v8::Template> interface_template = wrapper_type_info->GetV8ClassTemplate(isolate, world); switch 
(wrapper_type_info->idl_definition_kind) { case WrapperTypeInfo::kIdlInterface: if (!per_context_data->GetExistingConstructorAndPrototypeForType( wrapper_type_info, &prototype_object, &interface_object)) { continue; } break; case WrapperTypeInfo::kIdlNamespace: NOTIMPLEMENTED(); break; default: NOTREACHED(); } wrapper_type_info->install_context_dependent_props_func( context, world, instance_object, prototype_object, interface_object, interface_template, feature_selector); }\ """)) # Write down to the files. write_code_node_to_file(header_node, path_manager.gen_path_to(header_path)) write_code_node_to_file(source_node, path_manager.gen_path_to(source_path)) def generate_init_idl_interfaces(function_name, filepath_basename, for_testing=False): assert isinstance(function_name, str) assert isinstance(filepath_basename, str) assert isinstance(for_testing, bool) web_idl_database = package_initializer().web_idl_database() # Filepaths header_path = PathManager.component_path("modules", "{}.h".format(filepath_basename)) source_path = PathManager.component_path("modules", "{}.cc".format(filepath_basename)) # Root nodes header_node = ListNode(tail="\n") header_node.set_accumulator(CodeGenAccumulator()) header_node.set_renderer(MakoRenderer()) source_node = ListNode(tail="\n") source_node.set_accumulator(CodeGenAccumulator()) source_node.set_renderer(MakoRenderer()) # Namespaces header_blink_ns = CxxNamespaceNode(name_style.namespace("blink")) source_blink_ns = CxxNamespaceNode(name_style.namespace("blink")) header_bindings_ns = CxxNamespaceNode(name_style.namespace("bindings")) source_bindings_ns = CxxNamespaceNode(name_style.namespace("bindings")) header_blink_ns.body.append(header_bindings_ns) source_blink_ns.body.append(source_bindings_ns) # Function nodes func_decl = CxxFuncDeclNode( name=function_name, arg_decls=[], return_type="void") func_def = CxxFuncDefNode( name=function_name, arg_decls=[], return_type="void") header_bindings_ns.body.extend([ TextNode("""\ // Initializes cross-component trampolines of IDL interface / namespace.\ """), func_decl, ]) source_bindings_ns.body.append(func_def) # Assemble the parts. header_node.extend([ make_copyright_header(), EmptyNode(), enclose_with_header_guard( ListNode([ make_header_include_directives(header_node.accumulator), EmptyNode(), header_blink_ns, ]), name_style.header_guard(header_path)), ]) source_node.extend([ make_copyright_header(), EmptyNode(), TextNode("#include \"{}\"".format(header_path)), EmptyNode(), make_header_include_directives(source_node.accumulator), EmptyNode(), source_blink_ns, ]) init_calls = [] for class_like in itertools.chain(web_idl_database.interfaces, web_idl_database.namespaces): if class_like.code_generator_info.for_testing != for_testing: continue path_manager = PathManager(class_like) if path_manager.is_cross_components: source_node.accumulator.add_include_headers( [path_manager.impl_path(ext="h")]) class_name = v8_bridge_class_name(class_like) init_calls.append(_format("{}::Impl::Init();", class_name)) for init_call in sorted(init_calls): func_def.body.append(TextNode(init_call)) # Write down to the files. 
write_code_node_to_file(header_node, path_manager.gen_path_to(header_path)) write_code_node_to_file(source_node, path_manager.gen_path_to(source_path)) def generate_interfaces(task_queue): assert isinstance(task_queue, TaskQueue) web_idl_database = package_initializer().web_idl_database() for interface in web_idl_database.interfaces: task_queue.post_task(generate_interface, interface.identifier) task_queue.post_task(generate_install_properties_per_feature, "InstallPropertiesPerFeature", "properties_per_feature_installer") task_queue.post_task(generate_install_properties_per_feature, "InstallPropertiesPerFeatureForTesting", "properties_per_feature_installer_for_testing", for_testing=True) task_queue.post_task(generate_init_idl_interfaces, "InitIDLInterfaces", "init_idl_interfaces") task_queue.post_task(generate_init_idl_interfaces, "InitIDLInterfacesForTesting", "init_idl_interfaces_for_testing", for_testing=True)
bsd-3-clause
-3,042,133,025,695,879,000
37.178735
105
0.618404
false
3.894837
false
false
false
brian-cleary/LatentStrainAnalysis
LSA/create_hash.py
3
1150
#!/usr/bin/env python import sys,getopt,os from fastq_reader import Fastq_Reader help_message = "usage example: python create_hash.py -i /project/home/original_reads/ -o /project/home/hashed_reads/ -k kmer_size -s hash_size" if __name__ == "__main__": try: opts, args = getopt.getopt(sys.argv[1:],'hi:o:k:s:',["inputdir=","outputdir=","kmersize=","hashsize="]) except: print help_message sys.exit(2) for opt, arg in opts: if opt in ('-h','--help'): print help_message sys.exit() elif opt in ('-i','--inputdir'): inputdir = arg if inputdir[-1] != '/': inputdir += '/' elif opt in ('-o','--outputdir'): outputdir = arg if outputdir[-1] != '/': outputdir += '/' elif opt in ('-k','--kmersize'): k_size = int(arg) elif opt in ('-s','--hashsize'): h_size = int(arg) hashobject = Fastq_Reader(inputdir,outputdir,new_hash=(h_size,k_size)) total_rand_kmers = k_size*h_size*2 hashobject.rand_kmers_for_wheel(total_rand_kmers) hashobject.set_wheels(wheels=1) os.system('rm %s/random_kmers.fastq' % inputdir) f = open(outputdir + 'hashParts.txt','w') f.write('%d\n' % (2**h_size/10**6 + 1)) f.close()
mit
-3,431,116,481,094,081,500
30.972222
143
0.626957
false
2.595937
false
false
false
jbalogh/zamboni
apps/compat/views.py
1
5983
import json import re from django import http from django.conf import settings from django.db.models import Count from django.shortcuts import redirect from django.views.decorators.csrf import csrf_exempt import jingo import redisutils from tower import ugettext as _ import amo.utils from amo.decorators import post_required from amo.utils import urlparams from amo.urlresolvers import reverse from addons.models import Addon from versions.compare import version_int as vint from .models import CompatReport, AppCompat from .forms import CompatForm def index(request, version=None): COMPAT = [v for v in settings.COMPAT if v['app'] == request.APP.id] compat_dict = dict((v['main'], v) for v in COMPAT) if not COMPAT: raise http.Http404() if version not in compat_dict: return redirect('compat.index', COMPAT[0]['main']) qs = AppCompat.search() binary = None initial = {'appver': '%s-%s' % (request.APP.id, version), 'type': 'all'} initial.update(request.GET.items()) form = CompatForm(initial) if request.GET and form.is_valid(): if form.cleaned_data['appver']: app, ver = form.cleaned_data['appver'].split('-') if int(app) != request.APP.id or ver != version: new = reverse('compat.index', args=[ver], add_prefix=False) url = '/%s%s' % (amo.APP_IDS[int(app)].short, new) type_ = form.cleaned_data['type'] or None return redirect(urlparams(url, type=type_)) if form.cleaned_data['type'] != 'all': binary = form.cleaned_data['type'] == 'binary' compat, app = compat_dict[version], str(request.APP.id) compat_queries = ( ('prev', qs.query(**{ 'top_95.%s.%s' % (app, vint(compat['previous'])): True, 'support.%s.max__gte' % app: vint(compat['previous'])})), ('top_95', qs.query(**{'top_95_all.%s' % app: True})), ('all', qs), ) compat_levels = [(key, version_compat(qs, compat, app, binary)) for key, qs in compat_queries] usage_addons, usage_total = usage_stats(request, compat, app, binary) return jingo.render(request, 'compat/index.html', {'version': version, 'usage_addons': usage_addons, 'usage_total': usage_total, 'compat_levels': compat_levels, 'form': form}) def version_compat(qs, compat, app, binary): facets = [] for v, prev in zip(compat['versions'], (None,) + compat['versions']): d = {'from': vint(v)} if prev: d['to'] = vint(prev) facets.append(d) # Pick up everything else for an Other count. facets.append({'to': vint(compat['versions'][-1])}) facet = {'range': {'support.%s.max' % app: facets}} if binary is not None: facet['facet_filter'] = {'term': {'binary': binary}} qs = qs.facet(by_status=facet) result = qs[:0].raw() total_addons = result['hits']['total'] ranges = result['facets']['by_status']['ranges'] titles = compat['versions'] + (_('Other'),) faceted = [(v, r['count']) for v, r in zip(titles, ranges)] return total_addons, faceted def usage_stats(request, compat, app, binary=None): # Get the list of add-ons for usage stats. redis = redisutils.connections['master'] qs = (AppCompat.search().order_by('-usage.%s' % app).values_dict() .filter(**{'support.%s.max__gte' % app: vint(compat['previous'])})) if binary is not None: qs = qs.filter(binary=binary) addons = amo.utils.paginate(request, qs) for obj in addons.object_list: obj['usage'] = obj['usage'][app] obj['max_version'] = obj['max_version'][app] total = int(redis.hget('compat:%s' % app, 'total')) return addons, total @csrf_exempt @post_required def incoming(request): # Turn camelCase into snake_case. 
snake_case = lambda s: re.sub('[A-Z]+', '_\g<0>', s).lower() try: data = [(snake_case(k), v) for k, v in json.loads(request.raw_post_data).items()] except Exception: return http.HttpResponseBadRequest() # Build up a new report. report = CompatReport(client_ip=request.META.get('REMOTE_ADDR', '')) fields = CompatReport._meta.get_all_field_names() for key, value in data: if key in fields: setattr(report, key, value) else: return http.HttpResponseBadRequest() report.save() return http.HttpResponse(status=204) def reporter(request): query = request.GET.get('guid') if query: qs = None if query.isdigit(): qs = Addon.objects.filter(id=query) if not qs: qs = Addon.objects.filter(slug=query) if not qs: qs = Addon.objects.filter(guid=query) if not qs and len(query) > 4: qs = CompatReport.objects.filter(guid__startswith=query) if qs: return redirect('compat.reporter_detail', qs[0].guid) addons = (request.amo_user.addons.all() if request.user.is_authenticated() else []) return jingo.render(request, 'compat/reporter.html', dict(query=query, addons=addons)) def reporter_detail(request, guid): qs = CompatReport.objects.filter(guid=guid) works_ = dict(qs.values_list('works_properly').annotate(Count('id'))) works = {'success': works_.get(True, 0), 'failure': works_.get(False, 0)} if 'works_properly' in request.GET: qs = qs.filter(works_properly=request.GET['works_properly']) reports = amo.utils.paginate(request, qs.order_by('-created'), 100) addon = Addon.objects.filter(guid=guid) name = addon[0].name if addon else guid return jingo.render(request, 'compat/reporter_detail.html', dict(reports=reports, works=works, name=name, guid=guid))
bsd-3-clause
-8,089,806,695,348,152,000
35.705521
77
0.598362
false
3.615106
false
false
false
KixPanganiban/shisetsu
shisetsu/middlewares.py
1
2794
""" shisetsu.middlewares <github.com/kixpanganiban> Contains `Middlewares`. """ from .contract import Contract from .exceptions import MiddlewareError class Middlewares(object): """Middlewares facilitate registering and executing middlwares wherever they are defined. To add a middleware handler, a function called `handler` may be passed to `Middlewares.register_before` or `Middlewares.register_after`. In a Server: All handlers registered via the former are called before the contract is sent to the `func` it calls, that is, after it is unpacked and created. Handlers registered via the latter are called after the `func` returns a response and an outgoing contract is created. In a Client: All handlers registered via the former are called before the request is packed and sent to the channel. Handlers registered via the latter are called after the response is received, and before the response body is returned. A `handler` MUST return a Contract after execution, or a `MiddlewareError` will be raised. To interrupt processing of a contract, you may raise a custom exception inside a handler. Example usage: from shisetsu.server import Server def dictify(contract): contract.body = { 'status': 'OK', 'content': contract.body } return contract s = Server('db_reader') server.middlewares.register_after(dictify) """ def __init__(self): self._middlewares = { 'before': [], 'after': [] } @classmethod def check_return(cls, return_value): """Check if the middleware handler's response is a valid Contract. """ if not return_value: raise MiddlewareError('Middleware handler returned None') if not isinstance(return_value, Contract): raise MiddlewareError('Middleware must return a contract') return return_value def register_before(self, handler): """Register a before `handler`. """ self._middlewares['before'].append(handler) def register_after(self, handler): """Register an after `handler`. """ self._middlewares['after'].append(handler) def execute_before(self, contract): """Execute all before `handler`s on the contract. """ for handler in self._middlewares['before']: contract = self.check_return(handler(contract)) return contract def execute_after(self, contract): """Execute all after `handler`s on the contract. """ for handler in self._middlewares['after']: contract = self.check_return(handler(contract)) return contract
mit
7,818,230,602,946,721,000
32.261905
78
0.646385
false
4.484751
false
false
false
SageBerg/St.GeorgeGame
Python_version/actions.py
1
211741
from __future__ import print_function # for printing to stderr # for exmaple: print(state.character.get_attack(), file=sys.stderr) import sys import random import abc import places from raffle import Raffle import money import items import persons from outcome import Outcome class Action(object): """ abstract class """ __metaclass__ = abc.ABCMeta @abc.abstractproperty def slot(self): pass @abc.abstractmethod def __init__(self, state): self.name = "" self.combat_action = False self.outcomes = Raffle() @abc.abstractmethod def execute(self, state): """ returns nothing, edits game state """ def get_outcome(self, state): self.execute(state) outcome = self.outcomes.get() # outcome may be function or instance return outcome def __str__(self): return self.name # A slot actions class KissYourFrog(Action): slot = "a" def __init__(self, state): super(KissYourFrog, self).__init__(state) self.name = random.choice(["Kiss your frog."]*9 + ["Snog your frog."]) def execute(self, state): self.outcomes.add(Outcome(state, "The frog turns into an assassin. He assassinates you.", clover=True, die=True, ), weight=1) self.outcomes.add(Outcome(state, "The frog turns into a cat.", remove_item=items.frog, add_item=items.cat, ), weight=1) self.outcomes.add(Outcome(state, "The frog turns into a prince. The prince rewards you with a bag " "of jewels.", remove_item=items.frog, add_item=items.jewels, ), weight=1) self.outcomes.add(Outcome(state, "The frog turns into a prince. The prince is disgusted to be " "kissing a man and has you put to death.", clover=True, die=True, ), weight=1) self.outcomes.add(Outcome(state, "The frog turns into a peasant woman. \"Oh blessed be Lord " "Bartholomew!\" she exclaims.", remove_item=items.frog, ), weight=1) self.outcomes.add(Outcome(state, "The frog turns into an ugly fat man. He starts shaking you " "violently. \"I liked being a frog!\" he yells before storming " "off.", remove_item=items.frog, ), weight=1) self.outcomes.add(Outcome(state, "The frog seems to enjoy it.", ), weight=1) self.outcomes.add(Outcome(state, "You feel stupid kissing a frog.", ), weight=1) self.outcomes.add(Outcome(state, "The frog turns into a guard. He says you must be a lunatic for " "kissing a frog, but he lets this one slide.", remove_item=items.frog, ), weight=1) self.outcomes.add(Outcome(state, "The frog turns into an old woman. She thanks you and gives you " "some mushrooms.", succeed=True, funcs_with_args=[(state.character.add_item, items.white_mushroom), (state.character.add_item, items.black_mushroom), (state.character.add_item, items.yellow_mushroom), (state.character.add_item, items.many_colored_mushroom), ], remove_item=items.frog, ), weight=1) class Anne(Action): """ used when guessing Eve's name """ slot = "a" def __init__(self, state): super(Anne, self).__init__(state) self.name = "\"Anne.\"" def execute(self, state): self.outcomes.add(Outcome(state, "She grimaces. \"That's my mother, you idiot!\"", die=True, ), weight=1) class AskForAnAudienceWithLordBartholomew(Action): slot = "a" def __init__(self, state): super(AskForAnAudienceWithLordBartholomew, self).__init__(state) self.name = "Ask for an audience with Lord Bartholomew." 
def execute(self, state): self.outcomes.add(Outcome(state, "The first person you meet is Lord Bartholomew.", new_person=state.persons.persons_dict["lord_bartholomew"], ), weight=1) self.outcomes.add(Outcome(state, "The line to meet Lord Bartholomew is very long, " "so you lose patience and wander off.", move_to=state.places.places_dict["countryside"], ), weight=1) self.outcomes.add(Outcome(state, "You are granted one." , new_person=state.persons.persons_dict["lord_bartholomew"], ), weight=1) class AskForAnAudienceWithLordDaniel(Action): slot = "a" def __init__(self, state): super(AskForAnAudienceWithLordDaniel, self).__init__(state) self.name = "Ask for an audience with Lord Daniel." def execute(self, state): self.outcomes.add(Outcome(state, "The guards laugh. \"{0},\" " "one of the guards says.".format(random.choice([ "He has no time for peasants", "Such audacity", ])), new_person=state.persons.persons_dict["guards"], clover=True, fail=True, ), weight=1) self.outcomes.add(Outcome(state, "The guards mistake you for someone important and take you " "to Lord Daniel.", new_person=state.persons.persons_dict["lord_daniel"], ), weight=1) class A3(Action): slot = "a" def __init__(self, state): super(A3, self).__init__(state) self.name = "a3." self.combat_action = True def execute(self, state): self.outcomes.add(Outcome(state, "After seeing your pitifully stupid move, Lord Carlos is no " "longer concerned that you might beat him and has his " "servants assassinate you.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You eventually win the game. Lord Carlos says no one " "can know of his defeat. He assassinates you.", die=True, ), weight=1) class LaughAboutWarden(Action): slot = "a" def __init__(self, state): super(LaughAboutWarden, self).__init__(state) self.name = "Laugh about the warden doing it alone on holidays." def execute(self, state): self.outcomes.add(Outcome(state, "One of the prison guards pokes you with an eleven-foot pole. " "\"No laughing!\" he says.", ), weight=1) self.outcomes.add(Outcome(state, "You feel good for a second, then you remember you're " "in prison.", ), weight=1) class GiveHimTheYellowMushroom(Action): slot = "a" def __init__(self, state): super(GiveHimTheYellowMushroom, self).__init__(state) self.name = "Give him the yellow mushroom." def execute(self, state): self.outcomes.add(Outcome(state, "The wizard chows down on the yellow mushroom.", remove_item=items.yellow_mushroom, ), weight=1) self.outcomes.add(Outcome(state, "The wizard gives you a potion in return.", add_item=random.choice([items.love_potion, items.tail_potion, items.strength_potion]), ), weight=1) self.outcomes.add(Outcome(state, "Having no further use for you, the wizard turns you into a frog.", funcs=[state.character.frogify], ), weight=1) self.outcomes.add(Outcome(state, "The wizard swallows the mushroom whole and chokes to death.", kill=state.persons.persons_dict["wizard"], ), weight=1) class EnactYourElaborateScheme(Action): slot = "a" def __init__(self, state): super(EnactYourElaborateScheme, self).__init__(state) self.name = "Enact your elaborate scheme." def execute(self, state): if state.persons.persons_dict["lord_carlos"].alive: self.outcomes.add(Outcome(state, "You are just about to dump a cauldron of hot soup on Lord " "Carlos when he looks up and notices you. 
You then dump the " "hot soup on him and he dies.", new_person=state.persons.persons_dict["lord_carlos"], kill=state.persons.persons_dict["lord_carlos"], move_to=state.places.places_dict["lord_carlos_manor"], ), weight=1) self.outcomes.add(Outcome(state, "Everything goes as planned until you ask a dragon to do your " "bidding.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "Your plan goes swimmingly.", add_item=items.jeweled_cutlass, move_to=state.places.places_dict["ocean"], ), weight=1) self.outcomes.add(Outcome(state, "After several months, you realize you don't have what it takes " "to be a clown.", fail=True, move_to=state.places.places_dict["market"], ), weight=1) self.outcomes.add(Outcome(state, "After several years, you realize you don't have what it takes " "to be a priest.", fail=True, move_to=state.places.places_dict["church"], ), weight=1) class AskHerToTakeYouBackToLand(Action): slot = "a" def __init__(self, state): super(AskHerToTakeYouBackToLand, self).__init__(state) self.name = "Ask her to take you back to land." def execute(self, state): if state.character.place == state.places.places_dict["mermaid_rock"]: self.outcomes.add(Outcome(state, "She doesn't know where land is, but " "she gives you a fish.", add_item=items.fish, ), weight=1) self.outcomes.add(Outcome(state, "\"You're on land, silly!\" she says.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "She takes you out to sea, but gets bored and leaves you " "there.", move_to=state.places.places_dict["ocean"], fail=True, ), weight=1) self.outcomes.add(Outcome(state, "She does.", move_to=state.places.places_dict["docks"], ), weight=1) class AskHerToBrew(Action): slot = "a" def __init__(self, state, potion): super(AskHerToBrew, self).__init__(state) self.potion = potion self.name = "Ask her to brew a {0}.".format(self.potion.name) def execute(self, state): # potential errors here if none of the if statements execute # we'll have an empyt raffle if self.potion.name == "love potion": self.outcomes.add(Outcome(state, "The witch puts your ingredients in her cauldron and brews " "a large batch.", add_item=self.potion, funcs=[lambda: state.character.remove_item(items.bottle_of_sap), lambda: state.character.remove_item(items.bouquet_of_flowers), lambda: state.character.remove_item(items.many_colored_mushroom)] ), weight=1) if self.potion.name == "potion of tail growth": self.outcomes.add(Outcome(state, "The witch puts your ingredients in her cauldron and brews " "a large batch.", add_item=self.potion, funcs=[lambda: state.character.remove_item(items.cat), lambda: state.character.remove_item(items.pearl)], ), weight=1) if self.potion.name == "potion of strength": self.outcomes.add(Outcome(state, "The witch puts your ingredients in her cauldron and brews " "a large batch.", add_item=self.potion, funcs=[lambda: state.character.remove_item(items.white_mushroom), lambda: state.character.remove_item(items.deep_cave_newt)], ), weight=1) class Think(Action): slot = "a" def __init__(self, state): super(Think, self).__init__(state) self.name = "Think." def execute(self, state): self.outcomes.add(Outcome(state, "You come up with four brilliant ideas.", actions=[(LickTheGround(state, state.character.place), 10)], ), weight=1) self.outcomes.add(Outcome(state, "You concoct an elaborate scheme.", actions=[(EnactYourElaborateScheme(state), 10000)], ), weight=1) self.outcomes.add(Outcome(state, "All you can think is \"Think. Think. 
Think.\".", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "You come up with a plan B in case things go south.", ), weight=1) self.outcomes.add(Outcome(state, "Since you're a man, you think about sex." ), weight=1) self.outcomes.add(Outcome(state, "You spend some time reevaluating your life and conclude " "that you need to stay the course.", ), weight=1) self.outcomes.add(Outcome(state, "You get lost in your thoughts.", ), weight=1) if state.character.place != state.places.places_dict["tavern"] and \ state.persons.persons_dict["olga"].name != "Olga": self.outcomes.add(Outcome(state, "You think about a pretty lady you saw in the tavern.", topic="marriage", ), weight=1) elif state.persons.persons_dict["olga"].name == "Olga": self.outcomes.add(Outcome(state, "You think about Olga.", topic="marriage", ), weight=1) if state.character.place == state.places.places_dict["wizards_lab"] or \ state.character.place == state.places.places_dict["lord_carlos_manor"]: self.outcomes.add(Outcome(state, "You think you probably shouldn't be here.", ), weight=8) if state.character.place == state.places.places_dict["tavern"] or \ state.character.place == state.places.places_dict["dark_alley"] or \ state.character.place == state.places.places_dict["lord_carlos_manor"]: self.outcomes.add(Outcome(state, "You think about how painful it would be to get stabbed. " "You soon find out.", die=True, ), weight=4) if state.character.place == state.places.places_dict["docks"]: self.outcomes.add(Outcome(state, "Some pirates laugh at you for thinking.", new_person=state.persons.persons_dict["pirates"], ), weight=8) if state.persons.persons_dict["lord_arthur"].alive: self.outcomes.add(Outcome(state, "You think it would be a bad idea to join Lord Arthur's " "crew. Lord Arthur gives you no choice.", add_employer=state.persons.persons_dict["lord_arthur"], move_to=state.places.places_dict["pirate_ship"], ), weight=4) if state.character.place == state.places.places_dict["docks"] or \ state.character.place == state.places.places_dict["ocean"] or \ state.character.place == state.places.places_dict["pirate_ship"] or \ state.character.place == state.places.places_dict["mermaid_rock"]: self.outcomes.add(Outcome(state, "You think the ocean is really big.", ), weight=8) self.outcomes.add(Outcome(state, "You think the bad smell might be coming from you.", ), weight=2) if state.character.place == state.places.places_dict["tower"]: self.outcomes.add(Outcome(state, "You think you can survive the jump from the top of the " "tower.", die=True, ), weight=5) self.outcomes.add(Outcome(state, "While you're thinking, a guard hands you an ax and tells " "you to chop firewood for the cooks.", add_item=items.ax, ), weight=5) if state.character.place == state.places.places_dict["countryside"]: self.outcomes.add(Outcome(state, "You think about Lord Bartholomew.", topic="Lord Bartholomew", ), weight=3) self.outcomes.add(Outcome(state, "You wonder if any peasant women would " "go for a man like you.", topic="peasants", ), weight=2) if state.character.place == state.places.places_dict["woods"]: self.outcomes.add(Outcome(state, "You think about fire.", topic="fire", ), weight=3) if state.character.place == state.places.places_dict["church"]: self.outcomes.add(Outcome(state, "You wonder what life is all about and feel smug " "for being so philosophical.", topic="yourself", ), weight=3) if state.character.place == state.places.places_dict["arctic"]: self.outcomes.add(Outcome(state, "You think about ice.", topic="ice", ), weight=3) 
self.outcomes.add(Outcome(state, "You can't think about much besides how cold you are.", topic="misery", ), weight=4) if state.character.place == state.places.places_dict["cave"]: self.outcomes.add(Outcome(state, "You think about the darkness that is crushing in on you from " "all sides.", ), weight=9) self.outcomes.add(Outcome(state, "You think you hear bats, but you also think you might be crazy.", ), weight=3) self.outcomes.add(Outcome(state, "You think about death.", ), weight=3) self.outcomes.add(Outcome(state, "You think about suffocation.", ), weight=3) if state.character.person: self.outcomes.add(Outcome(state, "You zone out while " + state.character.person.name + " talk" + persons.get_tense(state.character.person) + ".", ), weight=3) self.outcomes.add(Outcome(state, "You space out.", ), weight=2) class Yell(Action): slot = "a" def __init__(self, state, exclamation): super(Yell, self).__init__(state) self.exclamation = exclamation self.name = "Yell \"{0}!\"".format(exclamation) def execute(self, state): if self.exclamation == "I lost my leg": self.outcomes.add(Outcome(state, "No one cares.", fail=True, ), weight=1) if state.character.person == state.persons.persons_dict["lord_arthur"]: self.outcomes.add(Outcome(state, "Lord Arthur says he knows a town where you can " "find a wooden leg.", ), weight=10000) if self.exclamation == "There aren't penguins in the arctic": self.outcomes.add(Outcome(state, "The penguins don't care.", fail=True, ), weight=1) if self.exclamation == "Don't leave without me": self.outcomes.add(Outcome(state, "The wizard ignores you and sails away before you can " "get to his boat.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "The wizard leaves without you.", fail=True, ), weight=1) class ReadASpellBook(Action): slot = "a" def __init__(self, state): super(ReadASpellBook, self).__init__(state) self.name = "Read a spellbook." def execute(self, state): self.outcomes.add(Outcome(state, "You open a book of curses. It's cursed.", clover=True, die=True, ), weight=1) self.outcomes.add(Outcome(state, "You learn that it takes sap, flowers, and a many-colored " "mushroom to make a love potion.", ), weight=1) self.outcomes.add(Outcome(state, "You learn that it takes a cat and a pearl " "to brew a potion of tail growth.", ), weight=1) self.outcomes.add(Outcome(state, "You learn that it takes a white mushroom and a deep-cave " "newt to brew a potion of strength.", ), weight=1) self.outcomes.add(Outcome(state, "You find the book arcane and boring.", ), weight=1) self.outcomes.add(Outcome(state, "You learn a spell to set things on fire, but it requires a " "focused mind.", ), weight=1) self.outcomes.add(Outcome(state, "The wizard's handwriting is terrible.", ), weight=1) self.outcomes.add(Outcome(state, "You find a four-leaf clover in the pages of the " "spellbook.", add_item=items.four_leaf_clover, ), weight=1) self.outcomes.add(Outcome(state, "The first book you open appears to be the wizard's diary. 
" "{0}.".format(random.choice([ "It is full of details about how he is too chicken to " "ask out a woman he often sees in the market", "He appears to be obsessed with void dust, but can't " "figure out how to get any.", "It's mostly math proofs."])), ), weight=1) class SuckUpTo(Action): slot = "a" def __init__(self, state, person): super(SuckUpTo, self).__init__(state) self.person = person self.name = "Suck up to {0}.".format(person) self.combat_action = True def execute(self, state): if self.person == state.persons.persons_dict["lord_arthur"]: self.outcomes.add(Outcome(state, "Lord Arthur sends you on a mission to find him a pet sea " "turtle.", move_to=state.places.places_dict["ocean"], ), weight=1) if self.person == state.persons.persons_dict["lord_bartholomew"]: self.outcomes.add(Outcome(state, "Lord Bartholomew wishes you well and sends you on your way.", move_to=state.places.places_dict["countryside"], ), weight=1) self.outcomes.add(Outcome(state, "Lord Bartholomew takes a liking to you and gives you a long " "pitchfork.", add_item=items.long_pitchfork, ), weight=1) self.outcomes.add(Outcome(state, "Lord Bartholomew tells you to take more pride in yourself.", succeed=True, ), weight=1) if self.person == state.persons.persons_dict["lord_carlos"]: self.outcomes.add(Outcome(state, "He tells you that your are forgiven, but his men never fail.", ), weight=1) self.outcomes.add(Outcome(state, "He has you thrown out the window.", move_to=state.places.places_dict["woods"], ), weight=1) self.outcomes.add(Outcome(state, "Lord Carlos is having none of it. He kills you.", die=True, ), weight=1) if self.person == state.persons.persons_dict["lord_daniel"]: self.outcomes.add(Outcome(state, "Lord Daniel sends you away.", move_to=state.places.places_dict["streets"], ), weight=1) self.outcomes.add(Outcome(state, "Lord Daniel questions your sanity.", fail=True, ), weight=1) class TellThemYouAreALunatic(Action): slot = "a" def __init__(self, state): super(TellThemYouAreALunatic, self).__init__(state) self.name = "Tell them you are a lunatic." def execute(self, state): self.outcomes.add(Outcome(state, "\"A rich lunatic,\" they say before moving along.", new_person=None, ), weight=1) class Swashbuckle(Action): """ Note: only use when attacking merchant ship """ slot = "a" def __init__(self, state): super(Swashbuckle, self).__init__(state) self.name = "Swashbuckle." def execute(self, state): if state.character.has_item(items.cutlass) or \ state.character.has_item(items.jeweled_cutlass): if state.persons.persons_dict["lord_arthur"].alive: self.outcomes.add(Outcome(state, "You kill several innocent merchants. Lord Arthur is " "pleased and gives you a large share of the plunder.", get_money=money.large_fortune, ), weight=1) else: self.outcomes.add(Outcome(state, "You kill several innocent merchants. The captain is " "pleased and gives you a large share of the plunder.", get_money=money.large_fortune, ), weight=1) else: self.outcomes.add(Outcome(state, "You find it difficult to swashbuckle without a cutlass. " "You are soon killed.", die=True, ), weight=1) if state.persons.persons_dict["lord_arthur"].alive: self.outcomes.add(Outcome(state, "You manage to hold your own. Afterwards Lord Arthur divvies " "up the booty.", add_item=items.jewels, ), weight=1) else: self.outcomes.add(Outcome(state, "You manage to hold your own. 
Afterwards you divide " "the booty.", add_item=items.jewels, ), weight=1) self.outcomes.add(Outcome(state, "A cabin boy stabs you in the back during the fight.", die=True, ), weight=1) if not state.character.has_item(items.sailor_peg) and \ state.persons.persons_dict["lord_arthur"].alive: self.outcomes.add(Outcome(state, "You lose your leg in the battle, but Lord Arthur gives you a " "sailor peg as a replacement.", add_item=items.sailor_peg, ), weight=1) class LookForAssassins(Action): """ Note: only use in dark alley """ slot = "a" def __init__(self, state): super(LookForAssassins, self).__init__(state) self.name = "Look for assassins." def execute(self, state): self.outcomes.add(Outcome(state, "You don't see any.", die=True, ), weight=2) self.outcomes.add(Outcome(state, "The dark alley appears to be safe.", ), weight=1) class PickSomeFlowers(Action): slot = "a" def __init__(self, state): super(PickSomeFlowers, self).__init__(state) self.name = "Pick some flowers." def execute(self, state): self.outcomes.add(Outcome(state, "{0}".format(random.choice([ "You find many pretty flowers.", "A peasant girl picks flowers with you. She tells you she " "wants to be like Lord Bartholomew when she grows up.", "You spend all day looking for flowers, but it was worth it.", "You get stung by a bee, but you still find many pretty flowers.", ])), add_item=items.bouquet_of_flowers, succeed=True, ), weight=4) self.outcomes.add(Outcome(state, "You don't find any flowers, but you find a four-leaf clover.", add_item=items.four_leaf_clover, ), weight=1) self.outcomes.add(Outcome(state, "You can't find any flowers. Only grass.", fail=True, ), weight=1) class GoFishing(Action): slot = "a" def __init__(self, state): super(GoFishing, self).__init__(state) self.name = "Go fishing." def execute(self, state): if state.character.place == state.places.places_dict["docks"]: self.outcomes.add(Outcome(state, "Some pirates laugh at you. \"You'll never make a large " "fortune that way,\" one of them says.", new_person=state.persons.persons_dict["pirates"], ), weight=10) self.outcomes.add(Outcome(state, "You don't catch any fish.", fail=True, ), weight=10) self.outcomes.add(Outcome(state, "You fish up an ax.", add_item=items.ax, ), weight=1) self.outcomes.add(Outcome(state, "You fish up a pitchfork.", add_item=items.pitchfork, ), weight=1) self.outcomes.add(Outcome(state, "You catch a fish.", add_item=items.fish, succeed=True, ), weight=10) if state.character.place == state.places.places_dict["docks"]: self.outcomes.add(Outcome(state, "You don't catch any fish, but the assassins catch you.", clover=True, die=True, ), weight=1) class TakeIt(Action): slot = "a" def __init__(self, state, wronged_party, item): super(TakeIt, self).__init__(state) self.wronged_party = wronged_party self.item = item self.name = "Take it." 
def execute(self, state): self.outcomes.add(Outcome(state, None, add_item=self.item, ), weight=3) if self.wronged_party.alive: self.outcomes.add(Outcome(state, self.wronged_party.name[0].upper() + self.wronged_party.name[1:] + " notice" + persons.get_tense(self.wronged_party) + " you taking it and kill" + persons.get_tense(self.wronged_party) + " you.", clover=True, die=True, ), weight=1) self.outcomes.add(Outcome(state, self.wronged_party.name[0].upper() + self.wronged_party.name[1:] + " notice" + persons.get_tense(self.wronged_party) + " you taking it and " + "become" + persons.get_tense(self.wronged_party) + " wroth with you.", new_person=self.wronged_party, threat=True, ), weight=1) class AskAboutAssassins(Action): slot = "a" def __init__(self, state): super(AskAboutAssassins, self).__init__(state) self.name = "Ask about assassins." def execute(self, state): self.outcomes.add(Outcome(state, "The first person you ask about assassins turns " "out to be an assassin. She assassinates you.", clover=True, die=True, ), weight=3) self.outcomes.add(Outcome(state, "You ask around, but nobody has heard anything " "about assassins.", fail=True, ), weight=1) if state.character.place == state.places.places_dict["tavern"] and \ state.persons.persons_dict["olga"].alive: self.outcomes.add(Outcome(state, "During your search, you strike up a conversation " "with a pretty lady.", new_person=state.persons.persons_dict["olga"], ), weight=1) if state.character.place == state.places.places_dict["lord_carlos_manor"]: self.outcomes.add(Outcome(state, "You ask a servant about assassins. She asks you to wait where " "you are.", ), weight=10) class AskDirections(Action): slot = "a" def __init__(self, state): super(AskDirections, self).__init__(state) self.name = "Ask directions." def execute(self, state): if state.character.person == state.persons.persons_dict["simple_peasant"]: self.outcomes.add(Outcome(state, "He tells you there are four directions, north, south, " "east, and west.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "He tells you the only direction worth going is to Lord " "Bartholomew's house.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "He says the town is yonder.", ), weight=1) if state.character.person == state.persons.persons_dict["peasant_lass"]: self.outcomes.add(Outcome(state, "She says Lord Carlos' manor is in the woods.", actions=[(GoTo(state, state.character.place, specific_dest=state.places.places_dict["lord_carlos_manor"]), 10000)], ), weight=1) self.outcomes.add(Outcome(state, "She says Lord Bartholomew's manor is nearby.", actions=[(GoTo(state, state.character.place, specific_dest=state.places.places_dict["lord_bartholomews_manor"]), 10000)], ), weight=1) self.outcomes.add(Outcome(state, "She says there's good mushroom picking in the woods.", actions=[(GoTo(state, state.character.place, specific_dest=state.places.places_dict["woods"]), 10000)], ), weight=1) self.outcomes.add(Outcome(state, "She babbles incoherently while eating a many-colored " "mushroom.", ), weight=1) class AdmireYourJewels(Action): slot = "a" def __init__(self, state): super(AdmireYourJewels, self).__init__(state) self.name = "Admire your jewels." 
def execute(self, state): self.outcomes.add(Outcome(state, "You decide that your jewels outclass everything else you have.", ), weight=1) self.outcomes.add(Outcome(state, "You decide to store your jewels in your stomach for safe " "keeping.", remove_item=items.jewels, topic="mules", ), weight=1) self.outcomes.add(Outcome(state, "You find a pearl in your bag of jewels", add_item=items.pearl, topic="pearls", ), weight=1) if state.character.place in state.places.populated: self.outcomes.add(Outcome(state, "You notice the reflection of a dagger in a particularly " "large ruby.", clover=True, die=True, ), weight=1) if state.character.place in state.places.town: self.outcomes.add(Outcome(state, "The guards catch you with your pants down. They conclude you " "must be a lunatic", new_person=state.persons.persons_dict["guards"], threat=True, topic='curious', ), weight=2) class Apologize(Action): slot = "a" def __init__(self, state): super(Apologize, self).__init__(state) self.name = "Tell him you're sorry." self.combat_action = True def execute(self, state): self.outcomes.add(Outcome(state, "\"I'm afraid 'sorry' won't cut it.\" His knife does.", die=True, ), weight=3) self.outcomes.add(Outcome(state, "\"Oh, you're not sorry yet,\" he says as he steps toward you.", threat=True, ), weight=2) self.outcomes.add(Outcome(state, "A bystander notices the assassin threatening you. " "\"The man said he was sorry, isn't that enough?\" " "he says. \"No,\" the assassin replies.", threat=True, ), weight=1) class Attack(Action): slot = "a" def __init__(self, state, person): super(Attack, self).__init__(state) self.name = "Attack " + person.name + "." self.combat_action = True def execute(self, state): if state.character.person.attack >= state.character.get_attack(): self.outcomes.add(state.character.person.preferred_attack(state)) else: self.outcomes.add(Outcome(state, "You kill " + state.character.person.name + ".", unthreat=True, kill=True, ), weight=1) class GoDivingForPearls(Action): slot = "a" def __init__(self, state): super(GoDivingForPearls, self).__init__(state) self.name = "Go diving for pearls." def execute(self, state): self.outcomes.add(Outcome(state, "Lord Arthur's pet shark eats you.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You soon find a pearl in an oyster.", add_item=items.pearl, succeed=True, ), weight=1) self.outcomes.add(Outcome(state, "You drown on a fool's errand", die=True, ), weight=2) self.outcomes.add(Outcome(state, "You soon pry open an oyster and find a beautiful pearl. " "It's so dazzling you drown while gazing at it.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You exhaust yourself trying to find pearls and start to drown. 
" "A beautiful mermaid grabs you and hoists you to safety.", move_to=state.places.places_dict["mermaid_rock"], new_person=state.persons.persons_dict["mermaid"], ), weight=1) class LickTheGround(Action): slot = "a" def __init__(self, state, place): super(LickTheGround, self).__init__(state) self.place = place if place in state.places.inside: self.ground = "floor" elif place == state.places.places_dict["pirate_ship"]: self.ground = "deck" else: self.ground = "ground" self.name = "Lick the {0}.".format(self.ground) def execute(self, state): self.outcomes.add(Outcome(state, "You catch an infection and spend {0} weeks fighting " "it.".format(random.choice(["two", "three", "four", "five", "six"])), clover=True, die=True, ), weight=1) self.outcomes.add(Outcome(state, "You find the flavor of the {0} distasteful.".format(self.ground), fail=True, ), weight=3) if state.character.place in state.places.populated: self.outcomes.add(Outcome(state, "The local guards see you licking the {0} and accuse you of " "being a lunatic.".format(self.ground), new_person=state.persons.persons_dict["guards"], threat=True, ), weight=3) if state.character.place == state.places.places_dict["wizards_lab"]: self.outcomes.add(Outcome(state, "You lick some spilled potion off the floor and start " "growing at a monstrous rate.", funcs=[state.character.monstrosify], ), weight=20) if state.character.place == state.places.places_dict["ocean"]: self.outcomes.add(Outcome(state, "You drown while swimming toward the ocean floor with your " "tongue extended.", die=True, ), weight=10000) if state.character.place == state.places.places_dict["woods"]: self.outcomes.add(Outcome(state, "As you lick the ground, you notice it smells oddly familiar.", ), weight=3) if state.character.place == state.places.places_dict["arctic"]: self.outcomes.add(Outcome(state, "The ice tastes really cold.", ), weight=10) class LookForAWeapon(Action): slot = "a" def __init__(self, state): super(LookForAWeapon, self).__init__(state) self.name = "Look for a weapon." def execute(self, state): if state.persons.persons_dict["wealthy_merchant"].alive: self.outcomes.add(Outcome(state, "You find yourself talking to a wealthy war merchant.", new_person=state.persons.persons_dict["wealthy_merchant"], ), weight=9) self.outcomes.add(Outcome(state, "You find one... in your back as an assassin walks away smoothly.", clover=True, die=True, ), weight=1) class LookForVoidDust(Action): slot = "a" def __init__(self, state): super(LookForVoidDust, self).__init__(state) self.name = "Look for void dust." def execute(self, state): self.outcomes.add(Outcome(state, "The void is very clean. You can't find any.", ), weight=1) self.outcomes.add(Outcome(state, "The void is very dirty. You soon find some.", add_item=items.bottle_of_void_dust, succeed=True, ), weight=1) class GoMushroomPicking(Action): slot = "a" def __init__(self, state): super(GoMushroomPicking, self).__init__(state) self.name = "Go mushroom picking." 
def execute(self, state): self.outcomes.add(Outcome(state, "You find a yellow mushroom.", add_item=items.yellow_mushroom, ), weight=1) self.outcomes.add(Outcome(state, "You find a white mushroom.", add_item=items.white_mushroom, ), weight=1) self.outcomes.add(Outcome(state, "You find a black mushroom.", add_item=items.black_mushroom, ), weight=1) self.outcomes.add(Outcome(state, "You find a many-colored mushroom.", add_item=items.many_colored_mushroom, succeed=True, ), weight=1) class LookForStGeorge(Action): slot = "a" def __init__(self, state): super(LookForStGeorge, self).__init__(state) self.name = "Look for St. George." def execute(self, state): self.outcomes.add(Outcome(state, "You forget what you were doing.", move=1, ), weight=3) self.outcomes.add(Outcome(state, "You trip over a cat and break your neck.", clover=True, die=True, ), weight=1) if state.persons.persons_dict["st_george"].alive: self.outcomes.add(Outcome(state, "You find St. George at the church.", move_to=state.places.places_dict["church"], new_person=state.persons.persons_dict["st_george"], ), weight=10) self.outcomes.add(Outcome(state, "You find St. George in the streets.", move_to=state.places.places_dict["streets"], new_person=state.persons.persons_dict["st_george"], ), weight=5) self.outcomes.add(Outcome(state, "You find St. George in the market.", move_to=state.places.places_dict["market"], new_person=state.persons.persons_dict["st_george"], ), weight=3) class KillYourselfInFrustration(Action): slot = "a" def __init__(self, state): super(KillYourselfInFrustration, self).__init__(state) self.name = "Kill yourself in frustration." def execute(self, state): if state.character.place in [ state.places.places_dict["docks"], state.places.places_dict["mermaid_rock"], state.places.places_dict["arctic"]]: self.outcomes.add(Outcome(state, "You walk into the ocean and are suddenly inspired to write " "a novel. You drown.", die=True, ), weight=5) if state.character.place in [state.places.places_dict["streets"], state.places.places_dict["market"], state.places.places_dict["church"]] \ and state.persons.persons_dict["st_george"].alive: self.outcomes.add(Outcome(state, "You throw yourself off a rooftop, but St. George catches " "you and gives you a large fortune.", get_money=money.large_fortune, new_person=state.persons.persons_dict["st_george"], ), weight=2) if state.character.place in [state.places.places_dict["docks"]]: self.outcomes.add(Outcome(state, "You find Lord Arthur and ask him to kill you with his " "jeweled cutlass. He gladly obliges.", die=True, ), weight=5) self.outcomes.add(Outcome(state, "You perform the ritual of seppuku.", die=True, ), weight=3) if state.character.place != state.places.places_dict["ocean"]: if not state.character.has_item(items.fire_proof_cloak): self.outcomes.add(Outcome(state, "You set yourself on fire and burn to a crisp.", die=True, ), weight=3) else: self.outcomes.add(Outcome(state, "You try to set yourself on fire, but your fancy red " "cloak is fireproof.", fail=True, ), weight=3) else: self.outcomes.add(Outcome(state, "You drown trying to set yourself on fire.", die=True, ), weight=3) if state.character.place == state.places.places_dict["countryside"] or \ state.character.place == state.places.places_dict["lord_bartholomews_manor"] or \ state.character.place == state.places.places_dict["streets"]: self.outcomes.add(Outcome(state, "You are about to impale yourself on a fence post when a " "small boy walks by. 
By the time he leaves, your stupidity " "is no longer compelling you to kill yourself.", ), weight=3) class KillEverybodyInAFitOfRage(Action): slot = "a" def __init__(self, state): super(KillEverybodyInAFitOfRage, self).__init__(state) self.name = "Kill everybody in a fit of rage." def execute(self, state): self.outcomes.add(Outcome(state, "You start with yourself.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You make no exceptions.", die=True, ), weight=1) if state.character.person == state.persons.persons_dict["pirates"] and \ state.character.place == state.places.places_dict["docks"] and \ state.persons.persons_dict["lord_arthur"].alive: self.outcomes.add(Outcome(state, "You kill all the pirates. Lord Arthur says he is impressed " "with your skills and also happens to be in the market for " "a new crew. He forces you into his service.", move_to=state.places.places_dict["pirate_ship"], ), weight=10) class SayYouLoveHer(Action): """ NOTE: right now this is only for Felicity """ slot = "a" def __init__(self, state, person): super(SayYouLoveHer, self).__init__(state) self.name = "Say you love her too." self.person = person def execute(self, state): if self.person == state.persons.persons_dict["felicity"]: self.outcomes.add(Outcome(state, "\"What a shame,\" an assassin says as he steps into the room. " "He shoots you with a crossbow.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "Felicity is overjoyed and secretly lets you out of prison " "that night. \"Let's get married!\" she says.", move_to=state.places.places_dict["streets"], new_person=state.persons.persons_dict["felicity"], actions=[ (MarryFelicity(state), 777), (RunLikeTheDevil(state), 666)], ), weight=9) class MarryOlga(Action): slot = "a" def __init__(self, state): super(MarryOlga, self).__init__(state) self.name = "Marry Olga." def execute(self, state): self.outcomes.add(Outcome(state, "A bleary-eyed priestess performs a wedding for you and Olga in " "an alley behind the church. Olga asks the priestess if she would " "like to come along for the honeymoon, but the priestess " "declines.", win=True, ), weight=1) if state.persons.persons_dict["lord_arthur"].alive: self.outcomes.add(Outcome(state, "Lord Arthur performs a wedding for you and Olga on the deck " "of his pirate ship. By the time the ceremony is over the " "ship has sailed. You are now both members of the crew.", win=True, ), weight=1) if state.persons.persons_dict["wizard"].alive: self.outcomes.add(Outcome(state, "The wizard performs a wedding for you and Olga in the market. " "He turns you both into sheep after the vows, but it is much " "safer being sheep.", win=True, ), weight=1) if state.persons.persons_dict["lord_bartholomew"].alive: self.outcomes.add(Outcome(state, "Lord Bartholomew performs a wedding for you and Olga in the " "countryside. 20,000 people attend your wedding, but you " "suspect they just wanted to see Lord Bartholomew.", win=True, ), weight=1) class MarryFelicity(Action): slot = "a" def __init__(self, state): super(MarryFelicity, self).__init__(state) self.name = "Marry Felicity." def execute(self, state): self.outcomes.add(Outcome(state, "St. George secretly performs a wedding for you and Felicity.", win=True, ), weight=9) class ThumpYourselfOnTheChest(Action): slot = "a" def __init__(self, state): super(ThumpYourselfOnTheChest, self).__init__(state) self.name = "Thump yourself on the chest." 
def execute(self, state): self.outcomes.add(Outcome(state, "You feel quite manly.", ), weight=9) self.outcomes.add(Outcome(state, "You thump yourself a bit too hard.", die=True, ), weight=1) if state.character.place in state.places.populated or \ state.character.place == state.places.places_dict["countryside"]: self.outcomes.add(Outcome(state, "A peasant woman sees you thump your chest and seems " "impressed. Unfortunately, her husband is not. He ushers her " "away.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "Some peasants laugh at you for acting like a gorilla.", fail=True, ), weight=9) if state.character.person == state.persons.persons_dict["wizard"]: self.outcomes.add(Outcome(state, "The wizard says, \"If you like behaving like a gorilla so " "much why not be a gorilla?\" He tries to turn you into a " "gorilla, but his spell only makes you walk like a gorilla.", grow_stronger=2, ), weight=20) # B slot actions class TrainWithTheGuards(Action): slot = "b" def __init__(self, state): super(TrainWithTheGuards, self).__init__(state) self.name = "Train with the guards." def execute(self, state): self.outcomes.add(Outcome(state, "The guards throw you out for not filling out the proper " "paperwork.", move_to=state.places.places_dict["streets"], fail=True, ), weight=1) self.outcomes.add(Outcome(state, "You accidentally break your neck during training.", clover=True, die=True, ), weight=1) self.outcomes.add(Outcome(state, "You get the badly beaten in wooden swordplay.", grow_stronger=1, ), weight=1) if state.character.get_attack() > 4: self.outcomes.add(Outcome(state, "You defeat the captain of the guards at wooden " "swordplay. \"Not bad for a {0},\" he says" ".".format(random.choice([ "peasant", "lunatic", "simpleton", ])), succeed=True, ), weight=1) class Beth(Action): """ Used when guessing Eve's name """ slot = "b" def __init__(self, state): super(Beth, self).__init__(state) self.name = "\"Beth.\"" def execute(self, state): self.outcomes.add(Outcome(state, "Wrong answer. Lord Carlos' daughter assassinates you.", die=True, ), weight=1) class HowlWithPain(Action): slot = "b" def __init__(self, state): super(HowlWithPain, self).__init__(state) self.name = "Howl with pain." def execute(self, state): self.outcomes.add(Outcome(state, "The manor's servants rush to your aid and carry you to " "Lord Bartholomew's priest " "to be healed. The priest informs them that it will require " "a true master to save you, so the servants rush you to " "town to be healed by St. George. He informs them " "that nothing is wrong with you. The servants are relieved and " "head back to the manor.", new_person=state.persons.persons_dict["st_george"], move_to=state.places.places_dict["church"], ), weight=1) self.outcomes.add(Outcome(state, "A maid shushes you. She says Lord Bartholomew's children are " "napping.", ), weight=1) class RepayYourDebts(Action): """ Assumes state.character.person == state.state.persons.persons_dict["lord_carlos"] """ slot = "b" def __init__(self, state): super(RepayYourDebts, self).__init__(state) self.name = "Repay your debts." self.combat_action = True def execute(self, state): self.outcomes.add(Outcome(state, "He takes your money but says, \"No amount of money can make up " "for what you've done.\"", lose_all_money=True, ), weight=1) self.outcomes.add(Outcome(state, "He informs you that your death is the only form of repayment he " "will accept. 
Your debts are soon settled.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "He takes your money and kills you.", die=True, ), weight=1) class Nf3(Action): slot = "b" def __init__(self, state): super(Nf3, self).__init__(state) self.name = "Nf3." self.combat_action = True def execute(self, state): self.outcomes.add(Outcome(state, "You eventually checkmate Lord Carlos. He tosses the chessboard " "on the floor and pulls out a dagger.", ), weight=1) self.outcomes.add(Outcome(state, "Lord Carlos soon has you backed into a corner. Checkmate.", die=True, ), weight=1) class TryToTakeKeys(Action): slot = "b" def __init__(self, state): super(TryToTakeKeys, self).__init__(state) self.name = "Try to take the keys the next chance you get." def execute(self, state): self.outcomes.add(Outcome(state, "When you try to take the warden's keys, the guards notice and " "beat the life out of you.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "When you try to take the warden's keys, the guards notice and " "beat the tar out of you.", ), weight=4) self.outcomes.add(Outcome(state, "It's surprisingly easy to steal keys and get out of prison.", move_to=state.places.places_dict["streets"], succeed=True, ), weight=1) class Grovel(Action): """ Only use when state.character.person == state.persons.persons_dict["lord_carlos"] """ slot = "b" def __init__(self, state): super(Grovel, self).__init__(state) self.name = "Grovel." self.combat_action = True def execute(self, state): self.outcomes.add(Outcome(state, "Lord Carlos is having none of it. He kills you.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "Lord Carlos kills you for being obsequious.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "He is not interested in your tired excuses. He kills you.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "He asks a servant to get you out of his sight. You are " "unceremoniously thrown out of the manor.", move_to=state.places.places_dict["woods"], ), weight=1) class ArmWrestle(Action): slot = "b" def __init__(self, state): super(ArmWrestle, self).__init__(state) self.name = "Arm wrestle with them to reclaim your dignity." def execute(self, state): self.outcomes.add(Outcome(state, "Even the lady pirates can easily beat you. They toss you " "in the ocean when they're done humiliating you.", move_to=state.places.places_dict["ocean"], ), weight=1) self.outcomes.add(Outcome(state, "You lose what little dignity you had left.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "You manage to hold out long enough for Lord Arthur to " "bark orders at his men to press-gang hands for the voyage.", move_to=state.places.places_dict["pirate_ship"], ), weight=1) class SlurpDown(Action): slot = "b" def __init__(self, state, potion): super(SlurpDown, self).__init__(state) self.potion = potion self.name = "{0}".format(random.choice([ "Slurp down your ", "Take a swig of your ", "Down your ", "Chug your ",])) + self.potion.name + "." 
def execute(self, state): self.outcomes.add(Outcome(state, "Like homeopathy, the potion does nothing.", ), weight=1) if self.potion.name == "love potion": self.outcomes.add(Outcome(state, "You fall in love with yourself and give yourself a hug.", remove_item=items.love_potion, ), weight=10000) if self.potion.name == "potion of strength": self.outcomes.add(Outcome(state, None, remove_item=items.strength_potion, grow_stronger=4, ), weight=10000) if self.potion.name == "potion of tail growth": self.outcomes.add(Outcome(state, "You now have a tail.", remove_item=items.tail_potion, ), weight=10000) class LookForWitches(Action): slot = "b" def __init__(self, state): super(LookForWitches, self).__init__(state) self.name = "Look for witches" def execute(self, state): self.outcomes.add(Outcome(state, "You find a witch deep in the woods.", new_person=state.persons.persons_dict["witch"], ), weight=1) if state.character.place in state.places.burnable: self.outcomes.add(Outcome(state, "You can't find any witches. Only trees.", fail=True, ), weight=1) else: self.outcomes.add(Outcome(state, "You can't find any witches. Only burnt trees.", fail=True, ), weight=1) class GawkAtWomen(Action): slot = "b" def __init__(self, state): super(GawkAtWomen, self).__init__(state) self.name = "{0} at women.".format(random.choice( ["Gawk", "Leer", "Stare"])) def execute(self, state): self.outcomes.add(Outcome(state, "A fair woman notices you and hastens away.", ), weight=1) self.outcomes.add(Outcome(state, "A woman becomes annoyed with you and throws salt in your eyes.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "You are too distracted by all the women to notice the " "assassins closing in on you.", die=True, ), weight=1) if self.name == "Gawk at women.": self.outcomes.add(Outcome(state, "You stop gawking when you realize it wasn't a woman.", fail=True, topic="androgyny", ), weight=1) if self.name == "Stare at women.": self.outcomes.add(Outcome(state, "An equally creepy woman stares back at you before " "disappearing into a dark alley.", actions=[(GoTo(state, state.places.places_dict["dark_alley"]), 5)], ), weight=1) if self.name == "Leer at women.": self.outcomes.add(Outcome(state, "You don't notice any women worth leering at, but you see a " "cat worth leering at.", add_item=items.cat, ), weight=1) class SwingOnARope(Action): """ Note: only use when attacking merchant ship """ slot = "b" def __init__(self, state): super(SwingOnARope, self).__init__(state) self.name = "Swing on a Rope." def execute(self, state): self.outcomes.add(Outcome(state, "You fall into the ocean and no one bothers to save you.", move_to=state.places.places_dict["ocean"], ), weight=1) if state.persons.persons_dict["lord_arthur"].alive: self.outcomes.add(Outcome(state, "You manage to knock a merchant off a rope. Lord Arthur " "rewards your bravery after the battle is over.", succeed=True, add_item=items.fish, ), weight=1) else: self.outcomes.add(Outcome(state, "You manage to knock a merchant off a rope. The captain " "rewards your bravery after the battle is over.", succeed=True, add_item=items.fish, ), weight=1) self.outcomes.add(Outcome(state, "A merchant cuts you down.", die=True, ), weight=1) class Tithe(Action): slot = "b" def __init__(self, state): super(Tithe, self).__init__(state) self.name = "Tithe." 
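    # Donating at the church mostly yields flavor outcomes; a weak character may be rewarded by St. George, and Lord Carlos may strike while you pray.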
def execute(self, state): if state.character.money == money.pittance: state.character.lose_all_money() self.outcomes.add(Outcome(state, "You feel {0}.".format(random.choice( ["like your sins will be forgiven", "holier", "holy", "like a good person"])), succeed=True, ), weight=4) self.outcomes.add(Outcome(state, "You feel {0}.".format(random.choice( ["like you've been cheated", "like you wasted your money", "like the church will waste the money", "unfulfilled"])), fail=True, ), weight=4) self.outcomes.add(Outcome(state, "A priestess blesses you.", ), weight=1) if state.persons.persons_dict["lord_carlos"].alive: self.outcomes.add(Outcome(state, "It was a good time to make peace with God. Lord Carlos steps " "out from behind a pillar and assassinates you.", die=True, ), weight=1) if state.character.get_attack() < 7 and state.persons.persons_dict["st_george"].alive: self.outcomes.add(Outcome(state, "St. George sees that you are a righteous man and gives you an " "iron hammer to help you do God's work.", add_item=items.iron_hammer, new_person=state.persons.persons_dict["st_george"], ), weight=1) class BarterWithInuits(Action): slot = "b" def __init__(self, state): super(BarterWithInuits, self).__init__(state) self.name = "Barter with the Inuits." def execute(self, state): if not state.character.has_any_items: self.outcomes.add(Outcome(state, "You have nothing they want.", fail=True, ), weight=10000) if state.character.has_item(items.seal_carcass): self.outcomes.add(Outcome(state, "You trade your seal for passage back to land.", remove_item=items.seal_carcass, move_to=state.places.places_dict["woods"], topic="the Inuits", ), weight=9) self.outcomes.add(Outcome(state, "The Inuits drive a hard bargain, but take you to land in " "one of their kayaks.", funcs=[state.character.remove_all_items], move_to=state.places.places_dict["woods"], topic="the Inuits", ), weight=1) class BuildAnIgloo(Action): slot = "b" def __init__(self, state): super(BuildAnIgloo, self).__init__(state) self.name = "Build an igloo." def execute(self, state): self.outcomes.add(Outcome(state, "While building your igloo, you slip on some ice.", clover=True, die=True, ), weight=1) self.outcomes.add(Outcome(state, "You can't figure out how to build an igloo.", fail=True, ), weight=1) if not state.character.has_item(items.seal_carcass): self.outcomes.add(Outcome(state, "Your igloo protects you from the elements, " "but not from your hunger.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You get bored and hungry inside your igloo.", ), weight=1) else: self.outcomes.add(Outcome(state, "You survive in your igloo until winter by eating your seal. " "The winter ice sheet allows you to get back to land.", move_to=state.places.places_dict["woods"], remove_item=items.seal_carcass, succeed=True, ), weight=50) class Disguise(Action): slot = "b" def __init__(self, state): super(Disguise, self).__init__(state) self.fake_name = random.choice(["St. George.", "Lord Arthur.", "Lord Daniel."]) self.name = "Tell the next person you meet that you are " + \ "{0}".format(self.fake_name) def execute(self, state): if state.character.place == state.places.places_dict["lord_bartholomews_manor"]: self.outcomes.add(Outcome(state, "No one is buying it.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "You soon have an audience with Lord Bartholomew. 
When he "
                "realizes he's been tricked, he has his servants escort you "
                "out of the manor.",
                move_to=state.places.places_dict["countryside"],
                ), weight=1)

            if self.fake_name == "Lord Arthur.":
                self.outcomes.add(Outcome(state,
                    "When you tell a gardener that you are Lord Arthur, "
                    "he laughs and says, \"Lord Arthur? This far inland? I "
                    "really doubt it.\"",
                    fail=True,
                    ), weight=3)

            if self.fake_name == "Lord Daniel.":
                self.outcomes.add(Outcome(state,
                    "When you tell a servant woman you are Lord Daniel, she beats "
                    "you to death with a broom.",
                    die=True,
                    ), weight=3)

        if state.character.place == state.places.places_dict["lord_carlos_manor"]:
            if state.persons.persons_dict["lord_carlos"].alive:
                self.outcomes.add(Outcome(state,
                    "You soon have an audience with Lord Carlos. He recognizes you "
                    "when you are admitted to his study.",
                    new_person=state.persons.persons_dict["lord_carlos"],
                    threat=True,
                    ), weight=1)

            self.outcomes.add(Outcome(state,
                "No one is buying it. You are soon assassinated.",
                die=True,
                ), weight=1)


class BurnThePlaceToTheGround(Action):
    slot = "b"
    def __init__(self, state, place):
        super(BurnThePlaceToTheGround, self).__init__(state)
        self.place = place
        self.name = "Burn {0} to the ground.".format(place.name)

    def execute(self, state):
        if not state.character.has_item(items.fire_proof_cloak):
            self.outcomes.add(Outcome(state,
                "You accidentally set yourself on fire and promptly burn to "
                "the ground.",
                die=True,
                ), weight=1)
        else:
            if self.place in state.places.burnable:
                self.outcomes.add(Outcome(state,
                    "You almost perish in the blaze, but your "
                    "fancy red cloak is fireproof.",
                    burn_place=self.place,
                    succeed=True,
                    move_to=self.place,
                    ), weight=1)

        if self.place in state.places.burnable:
            self.outcomes.add(Outcome(state,
                None,
                burn_place=self.place,
                succeed=True,
                move_to=self.place,
                ), weight=4)

        if state.character.place == state.places.places_dict["lord_carlos_manor"]:
            self.outcomes.add(Outcome(state,
                "You get assassinated while looking for kindling.",
                die=True,
                ), weight=40)

        if state.character.person == state.persons.persons_dict["st_george"]:
            self.outcomes.add(Outcome(state,
                "St. George sees you attempting arson and smites you.",
                die=True,
                ), weight=30)

        if state.character.person == state.persons.persons_dict["wizard"]:
            self.outcomes.add(Outcome(state,
                "The wizard sees you attempting arson and turns you into a "
                "frog. He steps on you.",
                die=True,
                ), weight=20)


class SetThePlaceOnFire(BurnThePlaceToTheGround):
    slot = "a"
    def __init__(self, state, place):
        super(SetThePlaceOnFire, self).__init__(state, place)
        self.place = place
        self.name = "Set {0} ablaze.".format(place.name)


class BurnThePlaceToACrisp(BurnThePlaceToTheGround):
    slot = "c"
    def __init__(self, state, place):
        super(BurnThePlaceToACrisp, self).__init__(state, place)
        self.place = place
        self.name = "Burn {0} to a crisp.".format(place.name)


class LightUpThePlace(BurnThePlaceToTheGround):
    slot = "d"
    def __init__(self, state, place):
        super(LightUpThePlace, self).__init__(state, place)
        self.place = place
        self.name = "Light up {0}.".format(place.name)


class ClimbIntoTheCrowsNest(Action):
    slot = "b"
    def __init__(self, state):
        super(ClimbIntoTheCrowsNest, self).__init__(state)
        self.name = "Climb into the crow's nest."

    def execute(self, state):
        self.outcomes.add(Outcome(state,
            "You spot a merchant ship. 
A raid ensues.", actions=[ (Swashbuckle(state), 1000), (SwingOnARope(state), 1000), (FireACanon(state), 1000), (HideUnderTheDeck(state), 1000)], ), weight=1) self.outcomes.add(Outcome(state, "You are able to help guide the ship to land.", succeed=True, move_to=state.places.places_dict["woods"], ), weight=1) self.outcomes.add(Outcome(state, "You are able to help guide the ship to the docks.", succeed=True, move_to=state.places.places_dict["docks"], ), weight=1) self.outcomes.add(Outcome(state, "You drop your bag on your way up the mast. A pirate takes it.", remove_all_items=True, fail=True, topic="treachery", ), weight=1) self.outcomes.add(Outcome(state, "You fall off the mast on the way up mast.", clover=True, die=True, ), weight=1) self.outcomes.add(Outcome(state, "A crow in the crow's nest caws in your face, startling " "you. You fall off the mast and land on the deck.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "A crow in the crow's nest caws in your face, startling " "you. You fall off the mast and land in the water.", fail=True, move_to=state.places.places_dict["ocean"], ), weight=1) class RaiseASail(Action): slot = "b" def __init__(self, state): super(RaiseASail, self).__init__(state) self.name = "Raise a sail." def execute(self, state): if state.persons.persons_dict["lord_arthur"].alive: self.outcomes.add(Outcome(state, "Lord Arthur has you killed for raising the wrong sail.", clover=True, die=True, ), weight=1) self.outcomes.add(Outcome(state, "Lord Arthur yells at you to scrub the deck.", ), weight=1) self.outcomes.add(Outcome(state, "As you are raising a sail you see a merchant ship. Lord Arthur " "calls all hands to raid the ship.", actions=[ (Swashbuckle(state), 1000), (SwingOnARope(state), 1000), (FireACanon(state), 1000), (HideUnderTheDeck(state), 1000)], ), weight=1) if not state.character.is_employed_by(state.persons.persons_dict["lord_arthur"]): self.outcomes.add(Outcome(state, "Lord Arthur is impressed by your initiative and makes you a " "member of the crew.", add_employer=state.persons.persons_dict["lord_arthur"], ), weight=1) else: self.outcomes.add(Outcome(state, "As you are raising a sail you see a merchant ship. The " "captain calls all hands to raid the ship.", actions=[ (Swashbuckle(state), 1000), (SwingOnARope(state), 1000), (FireACanon(state), 1000), (HideUnderTheDeck(state), 1000)], ), weight=1) self.outcomes.add(Outcome(state, "You help the ship return to the docks quicker.", succeed=True, move_to=state.places.places_dict["docks"], ), weight=1) class ScrubTheDeck(Action): slot = "b" def __init__(self, state): super(ScrubTheDeck, self).__init__(state) self.name = "Scrub the deck." def execute(self, state): self.outcomes.add(Outcome(state, "You scrub the deck until it sparkles, then you scrub it some " "more.", ), weight=1) if state.persons.persons_dict["lord_arthur"].alive: self.outcomes.add(Outcome(state, "As you are scrubbing the deck, you hear Lord Arthur calling " "all hands to raid an approaching merchant ship.", actions=[ (Swashbuckle(state), 1000), (SwingOnARope(state), 1000), (FireACanon(state), 1000), (HideUnderTheDeck(state), 1000)], ), weight=1) self.outcomes.add(Outcome(state, "Lord Arthur yells at you to raise a sail.", ), weight=1) self.outcomes.add(Outcome(state, "You dislocate your shoulder scrubbing and Lord Arthur has no " "further use for you. 
He has you thrown off the ship.", clover=True, die=True, ), weight=1) if state.character.is_employed_by(state.persons.persons_dict["lord_arthur"]): self.outcomes.add(Outcome(state, "Lord Arthur yells at you to scrub harder.", new_person=state.persons.persons_dict["lord_arthur"], ), weight=1) else: self.outcomes.add(Outcome(state, "Lord Arthur is impressed by your initiative and makes you a " "member of the crew.", new_person=state.persons.persons_dict["lord_arthur"], add_employer=state.persons.persons_dict["lord_arthur"], ), weight=1) class PlayDead(Action): slot = "b" def __init__(self, state): super(PlayDead, self).__init__(state) self.name = "Play dead." self.combat_action = True def execute(self, state): self.outcomes.add(Outcome(state, "You soon are.", die=True, ), weight=2) if state.character.person != state.persons.persons_dict["lord_carlos"]: self.outcomes.add(Outcome(state, "You are too pathetic for {0} to kill.".format( state.character.person.name), unthreat=True, new_person=None, fail=True, ), weight=1) else: self.outcomes.add(Outcome(state, "Your charade does not soften Lord Carlos' {0} " "heart.".format(random.choice(["stony", "icy", "cold", "evil", "bitter", "cruel",])), die=True, ), weight=1) self.outcomes.add(Outcome(state, "You go the extra mile to make it realistic.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "Just to be sure, {0} kill{1} you.".format( state.character.person.name, persons.get_tense(state.character.person)), die=True, ), weight=1) class PrayToAHigherPower(Action): slot = "b" def __init__(self, state): super(PrayToAHigherPower, self).__init__(state) self.name = "Pray to a higher power." def execute(self, state): self.outcomes.add(Outcome(state, "Your prayers go unanswered.", fail=True, ), weight=2) if state.character.has_any_items(): self.outcomes.add(Outcome(state, "God decides to test you.", remove_all_items=True, ), weight=2) self.outcomes.add(Outcome(state, "God speaks to you and shows you the way.", topic="arson", ), weight=1) self.outcomes.add(Outcome(state, "God tells you to marry the nymph queen.", topic="nymphs", ), weight=1) self.outcomes.add(Outcome(state, "Your prayers are answered.", get_money=money.small_fortune, ), weight=1) self.outcomes.add(Outcome(state, "Your prayers for a beautiful wife are answered but she soon " "leaves you.", fail=True, topic="divorce", ), weight=1) self.outcomes.add(Outcome(state, "Your prayers aren't answered, but the assassins' are.", clover=True, die=True, ), weight=1) if state.character.place in state.places.burnable: self.outcomes.add(Outcome(state, "Your prayers are answered.", burn_place=state.character.place, ), weight=1) if state.character.place == state.places.places_dict["tavern"]: self.outcomes.add(Outcome(state, "God does nothing for you, but you do find a small sack of " "jewels someone left on a counter.", add_item=items.jewels, topic='jewels', ), weight=1) if state.character.place in state.places.town and \ state.persons.persons_dict["st_george"].alive: self.outcomes.add(Outcome(state, "St. George joins you in prayer.", new_person=state.persons.persons_dict["st_george"], ), weight=1) class BegForMoney(Action): slot = "b" def __init__(self, state): super(BegForMoney, self).__init__(state) self.name = "Beg for money." def execute(self, state): if state.character.place != state.places.places_dict["church"] and \ state.character.person == state.persons.persons_dict["st_george"]: self.outcomes.add(Outcome(state, "St. 
George tells you he has lost his wallet in the church.", ), weight=1) if state.character.person == state.persons.persons_dict["st_george"]: if state.persons.persons_dict["st_george"].state.get("given money", False): self.outcomes.add(Outcome(state, "St. George becomes irritated by your begging " "and crushes you with his iron hammer.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "St. George smites you with his saintly wraith " "for being ungrateful.", die=True, ), weight=1) else: self.outcomes.add(Outcome(state, state.character.person.name + " give" + persons.get_tense(state.character.person) + " you a pittance.", beg=True, get_money=money.pittance, ), weight=3) self.outcomes.add(Outcome(state, state.character.person.name + " give" + persons.get_tense(state.character.person) + " you a small fortune.", beg=True, get_money=money.small_fortune, ), weight=2) self.outcomes.add(Outcome(state, state.character.person.name + " give" + persons.get_tense(state.character.person) + " you a large fortune.", beg=True, get_money=money.large_fortune, ), weight=1) else: self.outcomes.add(Outcome(state, "Your begging falls on deaf ears.", fail=True, beg=True, topic="money", ), weight=1) class BideYourTime(Action): slot = "b" def __init__(self, state): super(BideYourTime, self).__init__(state) self.name = "Bide your time." def execute(self, state): self.outcomes.add(Outcome(state, "You die of old age.", clover=True, die=True, ), weight=1) self.outcomes.add(Outcome(state, "As the days drag on, you go insane.", ), weight=1) self.outcomes.add(Outcome(state, "The days turn to weeks and the weeks turn to months.", ), weight=1) self.outcomes.add(Outcome(state, "You eventually manage to dig a secret passage from your cell " "into a cave network.", move_to=state.places.places_dict["cave"], ), weight=1) self.outcomes.add(Outcome(state, "You notice the warden carries the keys when he " "inspects the cells. He inspects the cells with " "an entourage of guards most weekends, but he " "does it alone on holidays.", actions=[(LaughAboutWarden(state), 100), (TryToTakeKeys(state), 100), (WaitForAHoliday(state), 100), ], ), weight=2) if state.persons.persons_dict["felicity"].attracted > -1 and \ state.persons.persons_dict["felicity"].attracted < 3: self.outcomes.add(Outcome(state, "As the days pass, you find yourself more and more " "attracted to the fat woman who feeds you.", ), weight=2) class BuyBlackMarketItem(Action): slot = "b" def __init__(self, state): super(BuyBlackMarketItem, self).__init__(state) self.item = random.choice( state.persons.persons_dict["black_market_merchant"].get_sells()) self.price = state.persons.persons_dict["black_market_merchant"].get_sell_price(self.item) self.name = "Make a shady deal." 
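    # The purchase only succeeds if the character can cover the price; the peddler occasionally turns out to be an assassin.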
def execute(self, state): if state.character.money >= self.price: self.outcomes.add(Outcome(state, "You cut a deal with a {0}.".format(random.choice([ "black market peddler", "merchant witch", "monger of rare items",])), add_item=self.item, lose_money=self.price, ), weight=3) else: self.outcomes.add(Outcome(state, "You try to buy {0} {1}, but you don't have the money.".format( items.a_or_an(self.item), str(self.item)), fail=True, topic="poverty", ), weight=3) self.outcomes.add(Outcome(state, "You find an assassin posing as a black market peddler.", die=True, ), weight=1) class BuyItem(Action): slot = "b" def __init__(self, state): super(BuyItem, self).__init__(state) self.item = random.choice(state.persons.persons_dict["local_merchant"].get_sells()) self.price = state.persons.persons_dict["local_merchant"].get_sell_price(self.item) self.name = "Buy {0} {1}.".format( items.a_or_an(self.item), str(self.item)) def execute(self, state): if state.character.money != money.none: self.outcomes.add(Outcome(state, None, add_item=self.item, new_person=None, lose_money=self.price, ), weight=3) else: self.outcomes.add(Outcome(state, "You can't afford {0} {1}.".format( items.a_or_an(self.item), str(self.item)), new_person=None, fail=True, topic="poverty", ), weight=3) class BuyWeapon(Action): slot = "b" def __init__(self, state): super(BuyWeapon, self).__init__(state) self.weapon = random.choice(state.persons.persons_dict["wealthy_merchant"].get_sells()) self.price = state.persons.persons_dict["wealthy_merchant"].get_sell_price(self.weapon) self.name = "Buy a " + str(self.weapon) + "." def execute(self, state): if state.character.money >= self.price: self.outcomes.add(Outcome(state, None, add_item=self.weapon, lose_money=self.price, ), weight=3) else: self.outcomes.add(Outcome(state, "You can't afford it.", fail=True, topic="poverty", ), weight=3) class BuyADrink(Action): slot = "b" def __init__(self, state): super(BuyADrink, self).__init__(state) self.name = "Buy a drink." def execute(self, state): if state.persons.persons_dict["blind_bartender"].alive: self.outcomes.add(Outcome(state, "The blind bartender grumbles as he passes you a drink.", new_person=state.persons.persons_dict["blind_bartender"], ), weight=4) self.outcomes.add(Outcome(state, "The drink is poisoned.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "An assassin walks up and starts hitting on you... very hard.", die=True, ), weight=2) self.outcomes.add(Outcome(state, "As you drink, you hear a peasant talking about how great " "Lord Bartholomew is.", topic="Lord Bartholomew", ), weight=2) self.outcomes.add(Outcome(state, "A man in a black cloak sits next to you and orders a drink.", new_person=state.persons.persons_dict["assassin"], topic="assassins", ), weight=2) else: self.outcomes.add(Outcome(state, "No one is selling.", fail=True, ), weight=1) class BoastOfYourBravery(Action): slot = "b" def __init__(self, state): super(BoastOfYourBravery, self).__init__(state) self.name = "Boast of your bravery." 
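    # Reactions depend on who is listening; boasting to the wrong audience can be fatal.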
def execute(self, state): if not state.character.person: self.outcomes.add(Outcome(state, "You impress yourself.", succeed=True, ), weight=1) else: self.outcomes.add(Outcome(state, state.character.person.name[0].upper() + state.character.person.name[1:] + " is not impressed.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "As you boast of your exploits, {0} walks away.".format( state.character.person.name), new_person=None, fail=True, ), weight=1) if state.character.person == state.persons.persons_dict["blind_bartender"]: self.outcomes.add(Outcome(state, "The blind bartender starts pretending to be deaf.", fail=True, ), weight=3) if state.character.person == state.persons.persons_dict["st_george"]: self.outcomes.add(Outcome(state, "St. George warns you of the dangers of hubris.", ), weight=1) self.outcomes.add(Outcome(state, "You tell St. George about the time you burnt a house " "down and he slays you for your wicked ways.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "St. George lauds your noble deeds and rewards you.", get_money=money.large_fortune, ), weight=1) self.outcomes.add(Outcome(state, "St. George becomes irate when you claim to have slain a " "dragon. He obliterates you.", die=True, ), weight=1) if state.character.person == state.persons.persons_dict["olga"]: self.outcomes.add(Outcome(state, "Her eyes glaze over as you struggle to remember times " "you were brave.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "She sees through your lies.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "She seems interested in your stories.", ), weight=1) if state.character.person == state.persons.persons_dict["felicity"]: self.outcomes.add(Outcome(state, "She points out several inconsistencies in your story.", flirt=-1, fail=True, ), weight=1) self.outcomes.add(Outcome(state, "She seems to buy it.", flirt=1, ), weight=1) if state.character.person == state.persons.persons_dict["guards"]: self.outcomes.add(Outcome(state, "You tell the guards that you are brave.\n 'A brave " "lunatic,' they say and they throw you in prison.", new_person=state.persons.persons_dict["other_lunatics"], move_to=state.places.places_dict["prison"], ), weight=1) class LookForACat(Action): slot = "b" def __init__(self, state): super(LookForACat, self).__init__(state) self.name = "Look for a cat." def execute(self, state): if state.character.has_item(items.fish): self.outcomes.add(Outcome(state, "A cat smells your fish and approaches you.", succeed=True, add_item=items.cat, ), weight=20) self.outcomes.add(Outcome(state, "After days of searching, you manage to find a cat.", succeed=True, add_item=items.cat, ), weight=14) self.outcomes.add(Outcome(state, "Your efforts to find a cat are fruitless.", fail=True, ), weight=6) self.outcomes.add(Outcome(state, "You see something out of the corner of your eye that looks like " "a cat. You chase it to no avail.", fail=True, topic="cats", ), weight=6) self.outcomes.add(Outcome(state, "You find a ferocious cat. 
It kills you.", clover=True, die=True, ), weight=1) if state.character.place in state.places.burnable and \ state.character.place in state.places.town: self.outcomes.add(Outcome(state, "You knock a lantern over as you chase a cat.", burn_place=state.character.place, ), weight=4) if state.character.place in state.places.populated and not \ state.character.place in state.places.locked: self.outcomes.add(Outcome(state, "You follow a cat through the streets but " "eventually lose track of it.", move_to=state.places.places_dict["dark_alley"], ), weight=6) self.outcomes.add(Outcome(state, "The local guards notice you searching for a cat " "and conclude that you must be a lunatic.", new_person=state.persons.persons_dict["guards"], threat=True, topic="lonely", ), weight=6) if state.character.place == state.places.places_dict["pirate_ship"]: self.outcomes.add(Outcome(state, "You find Lord Arthur's freakish cat. The cat has " "eight more tails than a normal cat.", ), weight=20) class TellThemYouAreNotALunatic(Action): slot = "b" def __init__(self, state, topic): super(TellThemYouAreNotALunatic, self).__init__(state) self.topic = topic self.name = "Tell them you are not a lunatic, " + \ "you're just {0}.".format(topic) def execute(self, state): if self.topic[0] in "aeiou": self.outcomes.add(Outcome(state, "\"An {0} lunatic,\" they say.".format(self.topic), fail=True, move_to=state.places.places_dict["prison"], new_person=state.persons.persons_dict["other_lunatics"], ), weight=1) else: self.outcomes.add(Outcome(state, "\"A {0} lunatic,\" they say.".format(self.topic), fail=True, move_to=state.places.places_dict["prison"], new_person=state.persons.persons_dict["other_lunatics"], ), weight=1) class TipACow(Action): slot = "b" def __init__(self, state): super(TipACow, self).__init__(state) self.name = "Tip a cow." def execute(self, state): self.outcomes.add(Outcome(state, "You are disappointed to find out that cows can get back up " "easily.", new_person=None, fail=True, ), weight=1) self.outcomes.add(Outcome(state, "Some peasants see you trying to tip a cow and laugh at you.", new_person=None, fail=True, ), weight=1) self.outcomes.add(Outcome(state, "You can't find any cows. Only sheep.", new_person=None, fail=True, ), weight=1) self.outcomes.add(Outcome(state, "You pull a cow on top of yourself and it crushes you.", new_person=None, clover=True, die=True, ), weight=1) self.outcomes.add(Outcome(state, "You're not strong enough to push the cow over.", new_person=None, topic="cows", ), weight=1) self.outcomes.add(Outcome(state, "Some peasants mistake you for a cow thief and form a lynch mob.", threat=True, new_person=state.persons.persons_dict["mob"], ), weight=1) class LookForSeaTurtles(Action): slot = "b" def __init__(self, state): super(LookForSeaTurtles, self).__init__(state) self.name = "Look for sea turtles." def execute(self, state): self.outcomes.add(Outcome(state, "You see one. You also drown because you are in the ocean.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "Your efforts to find a sea turtle are fruitless.", fail=True, ), weight=2) self.outcomes.add(Outcome(state, "You find a sea turtle and follow it to shore.", move_to=state.places.places_dict["woods"], topic="sea turtles", succeed=True, ), weight=1) self.outcomes.add(Outcome(state, "You can't find a sea turtle. Everywhere looks the same.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "You find a shark instead. 
It minds its own business.",
            ), weight=1)


class LookForMermaids(Action):
    slot = "b"
    def __init__(self, state):
        super(LookForMermaids, self).__init__(state)
        self.name = "Look for mermaids."

    def execute(self, state):
        self.outcomes.add(Outcome(state,
            "You find a wooden mermaid figurehead on the front of Lord "
            "Arthur's ship. The crew hoists you aboard.",
            move_to=state.places.places_dict["pirate_ship"],
            ), weight=1)

        self.outcomes.add(Outcome(state,
            "You are taken out by a storm during your search.",
            die=True,
            ), weight=1)

        self.outcomes.add(Outcome(state,
            "You find a sea turtle instead.",
            fail=True,
            ), weight=1)

        if state.character.place == state.places.places_dict["mermaid_rock"]:
            self.outcomes.add(Outcome(state,
                "{0} {1}".format(random.choice([
                    "You almost step on one",
                    "You find one putting seashells in her hair.",
                    "There are mermaids everywhere, there's one next to you.",
                    "After hours of climbing around on the rocks you find "
                    "one.",
                    ]), random.choice([
                    "She spits water in your face and laughs.",
                    "She trips you with her fish tail.",
                    "She gives you some nasty tasting seaweed.",
                    "She's beautiful, but smells terrible.",
                    "She sings a song about Lord Arthur.",
                    ])),
                new_person=state.persons.persons_dict["mermaid"],
                ), weight=3)

            self.outcomes.add(Outcome(state,
                "You don't find any mermaids, but you find a shiny foreign "
                "coin.",
                add_item=items.foreign_coin,
                ), weight=1)

            self.outcomes.add(Outcome(state,
                "You don't find any mermaids, but you find a small fortune "
                "in lost treasure.",
                get_money=money.small_fortune,
                ), weight=1)

            self.outcomes.add(Outcome(state,
                "You slip on a rock.",
                clover=True,
                die=True,
                ), weight=1)

        if state.character.place == state.places.places_dict["ocean"]:
            self.outcomes.add(Outcome(state,
                "You find a mermaid. She leads you back to her rock.",
                move_to=state.places.places_dict["mermaid_rock"],
                new_person=state.persons.persons_dict["mermaid"],
                ), weight=1)

            self.outcomes.add(Outcome(state,
                "You are not sure where to look.",
                fail=True,
                ), weight=1)

            self.outcomes.add(Outcome(state,
                "After days of searching, you are not sure mermaids exist.",
                fail=True,
                ), weight=2)


# C slot actions

class ChatWithLordBartholomew(Action):
    slot = "c"
    def __init__(self, state):
        super(ChatWithLordBartholomew, self).__init__(state)
        self.name = "Chat with Lord Bartholomew."

    def execute(self, state):
        self.outcomes.add(Outcome(state,
            "Lord Bartholomew talks about the injustices in the world "
            "and how action is needed to set them right.",
            ), weight=1)

        self.outcomes.add(Outcome(state,
            "Lord Bartholomew is genuinely interested in your life story.",
            ), weight=1)

        self.outcomes.add(Outcome(state,
            "Lord Bartholomew says that a cause is the only "
            "thing worth dying for.",
            ), weight=1)

        self.outcomes.add(Outcome(state,
            "Lord Bartholomew stresses the value of hard work and the "
            "importance of the peasant class.",
            ), weight=1)

        self.outcomes.add(Outcome(state,
            "Lord Bartholomew takes you on a walk and shows you the "
            "sights around the countryside. You don't get much of a chance "
            "to talk to him because too many peasants are clamoring to get "
            "his autograph.",
            ), weight=1)

        self.outcomes.add(Outcome(state,
            "Lord Bartholomew says the only man of any value in the town is "
            "St. 
George.", ), weight=1) self.outcomes.add(Outcome(state, "Lord Bartholomew says Lord Arthur is a rascal who will be " "dealt with when the time comes.", ), weight=1) self.outcomes.add(Outcome(state, "Lord Bartholomew says Lord Daniel is a tyrant who will be " "dealt with when the time comes.", ), weight=1) self.outcomes.add(Outcome(state, "Lord Bartholomew says Lord Carlos is a thug who will be " "dealt with when the time comes. You couldn't agree more.", ), weight=1) self.outcomes.add(Outcome(state, "Lord Bartholomew says the wizard is a dangerous man who will be " "dealt with when the time comes.", ), weight=1) self.outcomes.add(Outcome(state, "Lord Bartholomew says family is the only thing worth living " "for.", ), weight=1) class Eve(Action): """ Used when guessing Eve's name """ slot = "c" def __init__(self, state): super(Eve, self).__init__(state) self.name = "\"Eve.\"" def execute(self, state): self.outcomes.add(Outcome(state, "She gives you the evil eye.", flirt=(state.persons.persons_dict["eve"], 2), ), weight=1) class WaitForAHoliday(Action): slot = "c" def __init__(self, state): super(WaitForAHoliday, self).__init__(state) self.name = "Wait for a holiday to make your move." def execute(self, state): self.outcomes.add(Outcome(state, "You manage to swipe the keys off the warden during his " "inspection. You make your escape that night.", move_to=state.places.places_dict["streets"], ), weight=2) self.outcomes.add(Outcome(state, "You almost get the keys off the warden.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "You manage to snatch the keys off the warden, but he notices and " "has you thrown in a deep dark dungeon. However, you end up in " "a cell with some of Lord Bartholomew's men. They are soon rescued " "and so are you.", move_to=state.places.places_dict["lord_bartholomews_manor"], ), weight=1) class ChallengeThemToAGameOfChess(Action): slot = "c" def __init__(self, state): super(ChallengeThemToAGameOfChess, self).__init__(state) self.name = "Challenge them to a game of chess." def execute(self, state): self.outcomes.add(Outcome(state, "Their opening move is smashing a bottle of rum over your head " ". You aren't thinking too straight during the game and " "quickly lose.", ), weight=1) self.outcomes.add(Outcome(state, "The pirates slash the chessboard in half with a cutlass and " "leave.", add_item=items.cutlass, new_person=None, ), weight=1) self.outcomes.add(Outcome(state, "You beat all the pirates easily. Lord Arthur says your " "wits could be invaluable on the high seas. They soon are.", move_to=state.places.places_dict["pirate_ship"], actions=[(LickTheGround(state, state.places.places_dict["pirate_ship"]), 1000)], ), weight=1) class SunYourselfOnARock(Action): slot = "c" def __init__(self, state): super(SunYourselfOnARock, self).__init__(state) self.name = "Sun yourself on a rock." 
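    # Mostly harmless sunbathing outcomes, with small chances of a roc attack or of meeting a mermaid.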
    def execute(self, state):
        self.outcomes.add(Outcome(state,
            "You get sunburnt.",
            fail=True,
            ), weight=1)

        self.outcomes.add(Outcome(state,
            "You get bronzed.",
            succeed=True,
            ), weight=1)

        self.outcomes.add(Outcome(state,
            "A roc snatches you and carries you 2000 miles before feeding "
            "you to its hatchlings.",
            clover=True,
            die=True,
            ), weight=1)

        if state.character.person != state.persons.persons_dict["mermaid"]:
            self.outcomes.add(Outcome(state,
                "When you open your eyes you see a mermaid sunbathing next to "
                "you.",
                new_person=state.persons.persons_dict["mermaid"],
                ), weight=1)


class ComplainAboutUnfairImprisonment(Action):
    slot = "c"
    def __init__(self, state):
        super(ComplainAboutUnfairImprisonment, self).__init__(state)
        self.name = "Complain about unfair imprisonment."

    def execute(self, state):
        if state.character.person != state.persons.persons_dict["lord_daniel"]:
            self.outcomes.add(Outcome(state,
                "The guards say it's fair if Lord Daniel says it's fair.",
                new_person=state.persons.persons_dict["guards"],
                ), weight=1)

            self.outcomes.add(Outcome(state,
                "The guards argue with you about the finer points "
                "of the justice system.",
                ), weight=1)

            self.outcomes.add(Outcome(state,
                "The guards arrest you on charges of lunacy and throw you in "
                "prison with the other lunatics.",
                new_person=state.persons.persons_dict["other_lunatics"],
                move_to=state.places.places_dict["prison"],
                ), weight=1)

            self.outcomes.add(Outcome(state,
                "A bureaucrat says she'll let Lord Daniel know of your "
                "concerns.",
                succeed=True,
                ), weight=1)

            self.outcomes.add(Outcome(state,
                "A cook assures you that Lord Bartholomew will set things right.",
                ), weight=1)

        else:  # when talking with Lord Daniel
            self.outcomes.add(Outcome(state,
                "Lord Daniel has his guards carry you out of the tower and dump "
                "you in a pile of manure.",
                move_to=state.places.places_dict["streets"],
                ), weight=1)

            self.outcomes.add(Outcome(state,
                "Lord Daniel explains to you that your lack of mental "
                "capacity would never allow you to understand his complex "
                "policies.",
                ), weight=1)

            self.outcomes.add(Outcome(state,
                "Lord Daniel gives you a lengthy lecture about how life "
                "isn't fair.",
                topic="boredom",
                ), weight=1)


class Hide(Action):
    slot = "c"
    def __init__(self, state):
        super(Hide, self).__init__(state)
        self.name = "Hide."

    def execute(self, state):
        self.outcomes.add(Outcome(state,
            "You hide from the assassins, but not from your own "
            "dark thoughts.",
            fail=True,
            ), weight=1)

        self.outcomes.add(Outcome(state,
            "You hide for a couple of days, long enough "
            "that you think the whole assassin thing has probably "
            "blown over.",
            ), weight=1)

        self.outcomes.add(Outcome(state,
            "You try to hide in the sewer, but you end up "
            "drowning in filth.",
            die=True,
            ), weight=1)

        self.outcomes.add(Outcome(state,
            "You try to hide in the sewer, but you are killed "
            "by a rat.",
            die=True,
            ), weight=1)

        self.outcomes.add(Outcome(state,
            "You trip in the darkness and break your neck.",
            clover=True,
            die=True,
            ), weight=1)


class E4(Action):
    slot = "c"
    def __init__(self, state):
        super(E4, self).__init__(state)
        self.name = "e4."
        self.combat_action = True

    def execute(self, state):
        self.outcomes.add(Outcome(state,
            "The game ends when Lord Carlos pins you with three queens.",
            fail=True,
            ), weight=1)

        self.outcomes.add(Outcome(state,
            "You beat Lord Carlos in chess. He beats you in life.",
            die=True,
            ), weight=1)


class LookForAWayOut(Action):
    slot = "c"
    def __init__(self, state):
        super(LookForAWayOut, self).__init__(state)
        self.name = "Look for a way out."
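    # Groping through the dark cave: mostly failure, a slim chance of death, and one route out to the woods.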
def execute(self, state): self.outcomes.add(Outcome(state, "You fumble around in the darkness.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "You think you're going around in circles.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "You can't see anything, so you only manage to bump your head " "on a rock.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "You slip on a slippery slope and fall to your death.", clover=True, die=True, ), weight=1) self.outcomes.add(Outcome(state, "You don't find a way out, but you find a deep-cave newt.", add_item=items.deep_cave_newt, ), weight=1) self.outcomes.add(Outcome(state, "You find your way out of the cave.", move_to=state.places.places_dict["woods"], succeed=True, ), weight=1) class ClimbUpTheTopSails(Action): slot = "c" def __init__(self, state): super(ClimbUpTheTopSails, self).__init__(state) self.name = "Climb up the top sails." def execute(self, state): self.outcomes.add(Outcome(state, "Your sailor peg falls into the ocean while you work on the sails.", remove_item=items.sailor_peg, funcs=[state.character.depegify], ), weight=1) class TellAPriest(Action): slot = "c" def __init__(self, state): super(TellAPriest, self).__init__(state) self.idea = random.choice([ "that God doesn't exist", "that he's fat", "that you are the chosen one"]) self.name = "Tell a priest " + self.idea + "." def execute(self, state): if self.idea == "that God doesn't exist": self.outcomes.add(Outcome(state, "The priest thinks for a moment and realizes you're " "right. \"What a fool I've been,\" he says. \"I'll go and " "become a peasant.\"", ), weight=1) self.outcomes.add(Outcome(state, "The priest thinks for a moment and realizes you're " "right. \"What a fool I've been,\" he says. \"I'm going to " "go and find a wife.\"", ), weight=1) self.outcomes.add(Outcome(state, "God smites you for your {0}.".format(random.choice([ "arrogance", "foolishness", "rudeness", "heresy", "tactlessness", "faithlessness"])), clover=True, die=True, ), weight=2) if self.idea == "that he's fat": self.outcomes.add(Outcome(state, "He runs off crying.", succeed=True, ), weight=1) self.outcomes.add(Outcome(state, "\"Only God can judge me,\" he says.", ), weight=1) self.outcomes.add(Outcome(state, "\"Food is my only indulgence,\" he says proudly.", ), weight=1) self.outcomes.add(Outcome(state, "St. George overhears your comment and agrees with you, " "but throws you out of the church for rudeness.", move_to=state.places.places_dict["streets"], ), weight=1) if self.idea == "that you are the chosen one": self.outcomes.add(Outcome(state, "The priest finds your arguments so pitiful that he gives " "you a pittance and sends you on your way.", get_money=money.pittance, move_to=state.places.places_dict["streets"], ), weight=1) self.outcomes.add(Outcome(state, "He says he has his doubts.", ), weight=1) self.outcomes.add(Outcome(state, "\"I would know it when I see it,\" he says.", ), weight=1) if state.persons.persons_dict["st_george"].alive: self.outcomes.add(Outcome(state, "St. George overhears your comment and turns you, " "over to the guards on charges of lunacy.", move_to=state.places.places_dict["prison"], new_person=state.persons.persons_dict["other_lunatics"], ), weight=1) class FireACanon(Action): """ Note: only use when attacking merchant ship """ slot = "c" def __init__(self, state): super(FireACanon, self).__init__(state) self.name = "Fire a cannon." def execute(self, state): self.outcomes.add(Outcome(state, "You manage to knock the merchant ship's mast down. 
" "It falls on you.", clover=True, die=True, ), weight=1) if state.persons.persons_dict["lord_arthur"].alive: self.outcomes.add(Outcome(state, "You sink the merchant ship, plunder and all. Lord Arthur " "is not pleased, so he flogs you with his cat. The cat seems " "more traumatized than you, but you get fairly scratched up.", new_person=state.persons.persons_dict["lord_arthur"], ), weight=1) self.outcomes.add(Outcome(state, "You fumble around with the cannon, but Lord Arthur is " "convinced you contributed to his victory and gives you a bag " "of jewels.", add_item=items.jewels, ), weight=1) class ClubASeal(Action): slot = "c" def __init__(self, state): super(ClubASeal, self).__init__(state) self.name = "Club a seal." def execute(self, state): self.outcomes.add(Outcome(state, "After a few days of waiting at a hole in the ice, you freeze " "do death.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "The local polar bears aren't happy with you on their turf. " "You are soon mauled.", clover=True, die=True, ), weight=1) self.outcomes.add(Outcome(state, "After a few days of waiting at a hole in the ice, you manage " "to club a seal.", add_item=items.seal_carcass, succeed=True, ), weight=2) self.outcomes.add(Outcome(state, "You manage " "to club a seal, but it swims away.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "While waiting for a seal, you are very cold.", ), weight=1) class CelebrateYourSuccess(Action): slot = "c" def __init__(self, state): super(CelebrateYourSuccess, self).__init__(state) self.name = "Celebrate your success." def execute(self, state): self.outcomes.add(Outcome(state, "You can't think of a better way to celebrate than twiddling " "your thumbs.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "You dance a jig.", ), weight=1) self.outcomes.add(Outcome(state, "You sing a song.", ), weight=1) if state.character.place in state.places.burnable: self.outcomes.add(Outcome(state, None, burn_place=state.character.place, succeed=True, move_to=state.character.place, ), weight=1) if state.character.place in state.places.town: self.outcomes.add(Outcome(state, "You go see a play in the market.", move_to=state.places.places_dict["market"], ), weight=1) self.outcomes.add(Outcome(state, "You go to a brothel and admire the decorations.", move_to=state.places.places_dict["streets"], ), weight=1) if state.character.place in state.places.populated and \ state.character.money != money.none: self.outcomes.add(Outcome(state, "You wander around throwing all of your money in the air.", funcs=[state.character.lose_all_money], ), weight=1) if state.character.place == state.places.places_dict["arctic"]: self.outcomes.add(Outcome(state, "You make a snow woman.", ), weight=1) self.outcomes.add(Outcome(state, "You make a snow angel.", ), weight=1) if state.character.place == state.places.places_dict["tavern"]: self.outcomes.add(Outcome(state, "You drink until you black out.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You drink until you black out. You wake up weary and " "penniless.", move_to=state.places.places_dict["dark_alley"], funcs=[state.character.lose_all_money], ), weight=1) self.outcomes.add(Outcome(state, "You drink until you black out. " "Lord Arthur wakes you by yelling that you need to get on " "with your duties.", move_to=state.places.places_dict["pirate_ship"], ), weight=1) self.outcomes.add(Outcome(state, "You drink until you black out. " "You wake up in bed next to a peasant woman. 
" "Once the hangover wears off, you " "both live happily ever after.", win=True, ), weight=1) class ChopDownATree(Action): slot = "c" def __init__(self, state): super(ChopDownATree, self).__init__(state) self.name = "Chop down a tree." def execute(self, state): self.outcomes.add(Outcome(state, "The tree falls on you.", clover=True, die=True, ), weight=1) self.outcomes.add(Outcome(state, "A nymph hexes you. " "Throwing yourself in a pond suddenly seems like a good idea.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "The tree makes a loud noise as it falls.", ), weight=1) self.outcomes.add(Outcome(state, "A tree falls in the forest. You hear it.", ), weight=1) self.outcomes.add(Outcome(state, "The tree starts to bleed and you collect its blood.", add_item=items.bottle_of_sap, succeed=True, ), weight=1) self.outcomes.add(Outcome(state, "You get your ax stuck in the tree and can't get it out.", remove_item=items.ax, fail=True, ), weight=1) self.outcomes.add(Outcome(state, "You enjoy chopping down the tree so much that you chop down " "many more trees and build a cabin.", succeed=True, ), weight=1) class ChowDown(Action): slot = "c" def __init__(self, state, food): super(ChowDown, self).__init__(state) self.food = food self.name = "Chow down on the " + str(food) + "." def execute(self, state): if self.food == items.many_colored_mushroom: if not state.character.trip: self.outcomes.add(Outcome(state, "Your perception of the world begins to change.", remove_item=items.many_colored_mushroom, funcs=[state.character.start_tripping], ), weight=1) else: self.outcomes.add(Outcome(state, "You feel normal again.", remove_item=items.many_colored_mushroom, funcs=[state.character.stop_tripping], ), weight=1) if self.food == items.yellow_mushroom: self.outcomes.add(Outcome(state, "You find the mushroom distasteful.", remove_item=items.yellow_mushroom, ), weight=1) if self.food == items.black_mushroom: self.outcomes.add(Outcome(state, "The mushroom tastes bittersweet.", remove_item=items.black_mushroom, die=True, ), weight=1) if self.food == items.white_mushroom: self.outcomes.add(Outcome(state, "You grow larger.", remove_item=items.white_mushroom, grow_stronger=1, ), weight=2) if state.character.place == state.places.places_dict["woods"] or \ state.character.place == state.places.places_dict["countryside"]: self.outcomes.add(Outcome(state, "You shrink to the size of a peanut. 
A weasel " "soon comes along and eats you.", remove_item=items.white_mushroom, die=True, ), weight=1) class FlirtWith(Action): slot = "c" def __init__(self, state, person): super(FlirtWith, self).__init__(state) self.person = person self.name = "Flirt with {0}.".format(person.name) def execute(self, state): if self.person == state.persons.persons_dict["mermaid"]: self.outcomes.add(Outcome(state, "You run into the mermaid problem.", fail=True, ), weight=1000) if self.person == state.persons.persons_dict["felicity"] and \ state.persons.persons_dict["felicity"].name != "Felicity": self.outcomes.add(Outcome(state, "She ignores your hoots.", flirt=(state.persons.persons_dict["felicity"], -1), ), weight=1) self.outcomes.add(Outcome(state, "She ignores your whistling.", ), weight=1) self.outcomes.add(Outcome(state, "She ignores you when you say \"Hello,\" but " "you catch her glancing at you throughout the day.", flirt=(state.persons.persons_dict["felicity"], 2), ), weight=1) self.outcomes.add(Outcome(state, "She smiles, but doesn't reply to the love " "poem you recite to her.", flirt=(state.persons.persons_dict["felicity"], 2), ), weight=1) self.outcomes.add(Outcome(state, "She ignores you, but wears a low-cut blouse the next day.", flirt=(state.persons.persons_dict["felicity"], 2), ), weight=1) self.outcomes.add(Outcome(state, "She ignores you, but gives you more food the next day.", flirt=(state.persons.persons_dict["felicity"], 2), ), weight=1) if state.persons.persons_dict["felicity"].attracted > 3: def change_name(): state.persons.persons_dict["felicity"].name = "Felicity" self.outcomes.add(Outcome(state, "You strike up a conversation and learn that her name is " "Felicity.", flirt=(state.persons.persons_dict["felicity"], 2), funcs=[change_name], ), weight=1000) elif self.person == state.persons.persons_dict["felicity"]: # We know her name self.outcomes.add(Outcome(state, "Felicity blows you kisses.", flirt=(state.persons.persons_dict["felicity"], 2), ), weight=1) self.outcomes.add(Outcome(state, "Felicity leans in close and kisses your cheek.", flirt=(state.persons.persons_dict["felicity"], 2), ), weight=1) self.outcomes.add(Outcome(state, "Felicity talks with you for hours. She only " "stops when the warden barks at her to get " "back to work.", flirt=(state.persons.persons_dict["felicity"], 2), ), weight=1) self.outcomes.add(Outcome(state, "Felicity tells you she asked the warden to " "let you out, but he has a strict \"No lunatics " "on the streets\" policy.", flirt=(state.persons.persons_dict["felicity"], 2), ), weight=1) self.outcomes.add(Outcome(state, "Felicity says she thinks about you a lot.", flirt=(state.persons.persons_dict["felicity"], 2), ), weight=1) self.outcomes.add(Outcome(state, "Felicity laughs at all your jests, even the bad ones.", flirt=(state.persons.persons_dict["felicity"], 2), ), weight=1) self.outcomes.add(Outcome(state, "Felicity asks if she looks fat in her new dress. 
" "You say \"Yes.\" She doesn't speak to you for several days.", flirt=(state.persons.persons_dict["felicity"], -1), ), weight=1) if state.persons.persons_dict["felicity"].attracted > 10: self.outcomes.add(Outcome(state, "Felicity whispers that she loves you.", love_confessor=state.persons.persons_dict["felicity"], ), weight=100) if self.person == state.persons.persons_dict["olga"] and \ state.persons.persons_dict["olga"].name != "Olga": self.outcomes.add(Outcome(state, "When you squeeze her butt, she stabs you in the heart with a " "poisoned dagger.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You play a game of darts together, but you get upset when " "you lose and ruin the mood.", flirt=(state.persons.persons_dict["olga"], -1), fail=True, ), weight=1) self.outcomes.add(Outcome(state, "You find out that you both like " "cats. She says her cat loves being petted.", flirt=(state.persons.persons_dict["olga"], 2), ), weight=1) self.outcomes.add(Outcome(state, "You amuse her with realistic impreesions of bird " "songs. She says she likes a man who's good with his tongue.", flirt=(state.persons.persons_dict["olga"], 2), ), weight=1) self.outcomes.add(Outcome(state, "She is impressed with your juggling and says she likes a man " "with skilled hands.", flirt=(state.persons.persons_dict["olga"], 2), ), weight=1) self.outcomes.add(Outcome(state, "You say the flower in her hair goes well with " "her eyes. She says you can smell her flower if you like.", flirt=(state.persons.persons_dict["olga"], 2), ), weight=1) self.outcomes.add(Outcome(state, "She sits on your lap when you buy her a drink.", flirt=(state.persons.persons_dict["olga"], 2), ), weight=1) if state.persons.persons_dict["blind_bartender"].alive: self.outcomes.add(Outcome(state, "You both laugh about how bad the ale is. The blind bartender " "is not pleased.", flirt=(state.persons.persons_dict["olga"], 2), ), weight=1) self.outcomes.add(Outcome(state, "You have a meal together.", flirt=(state.persons.persons_dict["olga"], 2), ), weight=1) self.outcomes.add(Outcome(state, "She plays with your hair while you talk of your exploits.", flirt=(state.persons.persons_dict["olga"], 2), ), weight=1) if state.persons.persons_dict["olga"].attracted > 3: def change_name(): state.persons.persons_dict["olga"].name = "Olga" self.outcomes.add(Outcome(state, "She says her name is Olga. You also tell your name.", flirt=(state.persons.persons_dict["olga"], 2), funcs=[change_name], ), weight=10000) elif self.person == state.persons.persons_dict["olga"] and \ state.character.place == state.places.places_dict["tavern"]: self.outcomes.add(Outcome(state, "You follow Olga to her room, " "where she shows you some paintings she's borrowing " "from Lord Carlos.", new_person=state.persons.persons_dict["olga"], move_to=state.places.places_dict["upstairs"], flirt=(state.persons.persons_dict["olga"], 2), ), weight=3) self.outcomes.add(Outcome(state, "You follow her to her room upstairs. 
Lots of passionate " "stabbing ensues.", clover=True, die=True, ), weight=1) elif self.person == state.persons.persons_dict["olga"] and \ state.character.place == state.places.places_dict["upstairs"]: self.outcomes.add(Outcome(state, "{0}".format(random.choice([ "You make passionate love together.", "You sleep together.", "Olga does lots of nice things to you.", ])), succeed=True, flirt=(state.persons.persons_dict["olga"], 2), ), weight=4) self.outcomes.add(Outcome(state, "Olga whispers that she's been stalking you.", flirt=(state.persons.persons_dict["olga"], 2), ), weight=1) self.outcomes.add(Outcome(state, "You both stay up late talking by candlelight.", flirt=(state.persons.persons_dict["olga"], 3), ), weight=1) self.outcomes.add(Outcome(state, "Olga tells you her life story. Half of it seems made up.", flirt=(state.persons.persons_dict["olga"], 2), ), weight=1) self.outcomes.add(Outcome(state, "You compliment her on her borrowed paintings. She is pleased.", flirt=(state.persons.persons_dict["olga"], 2), ), weight=1) self.outcomes.add(Outcome(state, "Olga turns out to be an assassin. She assassinates you.", clover=True, die=True, ), weight=1) if state.persons.persons_dict["olga"].attracted > 10: self.outcomes.add(Outcome(state, "Olga grabs your hand. \"Life's too short, " "let's get married!\"", love_confessor=state.persons.persons_dict["olga"], ), weight=1000) if self.person == state.persons.persons_dict["eve"]: self.outcomes.add(Outcome(state, "She asks if you even remember her name. " "You say, \"Of course I remember your name. It's...\"", actions=[(Anne(state), 10000), (Beth(state), 10000), (Eve(state), 10000), (Donna(state), 10000)], ), weight=1) self.outcomes.add(Outcome(state, "You look at her bookshelf and compliment her on her choice " "of books. She casts doubt on your ability to read.", flirt=(state.persons.persons_dict["eve"], 1), ), weight=1) self.outcomes.add(Outcome(state, "When you try to get close to her, she trips you and laughs.", flirt=(state.persons.persons_dict["eve"], 1), ), weight=1) self.outcomes.add(Outcome(state, "She lets you live with her for a few months under the " "condition that she gets to treat you poorly.", flirt=(state.persons.persons_dict["eve"], 1), ), weight=1) self.outcomes.add(Outcome(state, "You say she has pretty lips. She says your lips are only " "pretty when they're shut.", flirt=(state.persons.persons_dict["eve"], 1), ), weight=1) self.outcomes.add(Outcome(state, "She ignores your innuendos, but lets you come to the river " "with her so she can drown a bag of kittens.", flirt=(state.persons.persons_dict["eve"], 1), ), weight=1) self.outcomes.add(Outcome(state, "She tells you to hide in a chest, because she thinks her " "father is coming. She locks you in the chest and doesn't " "let you out for a week.", flirt=(state.persons.persons_dict["eve"], 1), ), weight=1) self.outcomes.add(Outcome(state, "She says she wants to make love to you in the woods, " "but you lose track of her in the darkness. She doesn't " "come back for you.", move_to=state.places.places_dict["woods"], fail=True, flirt=(state.persons.persons_dict["eve"], 1), ), weight=1) self.outcomes.add(Outcome(state, "She asks you to prove your devotion to her by cleaning her " "room. She seems pleased with your work.", flirt=(state.persons.persons_dict["eve"], 1), ), weight=1) if state.persons.persons_dict["eve"].attracted > 3: self.outcomes.add(Outcome(state, "Your suave advances lead to several rounds of passionate " "sex with Lord Carlos' daughter that night. 
Unfortunately, " "you don't wake up at " "dawn. You wake up in the middle of the night when two " "hooded assassins kidnap you take you to a dungeon full " "of torture devices. They are about to put you in an " "iron maiden when they take off their hoods and reveal " "that they are Lord Carlos' daughter and a priest. The " "priest officiates your wedding.", win=True, ), weight=10000) class GoToSleep(Action): slot = "c" def __init__(self, state): super(GoToSleep, self).__init__(state) self.name = "Go to sleep." def execute(self, state): self.outcomes.add(Outcome(state, "You wake up dead.", clover=True, die=True, ), weight=1) self.outcomes.add(Outcome(state, "You have a nightmare about weasels.", topic="weasels", new_person=None, ), weight=2) self.outcomes.add(Outcome(state, "You dream of fire.", topic="fire", new_person=None ), weight=1) self.outcomes.add(Outcome(state, "You have a wonderful dream that you married a nymph and took her " "to bed in Lord Carlos' manor.", new_person=None, ), weight=2) self.outcomes.add(Outcome(state, "You wake up well-rested some hours later.", new_person=None, ), weight=2) if state.character.place == state.places.places_dict["prison"]: self.outcomes.add(Outcome(state, "You wake up just in time to see an assassin slip a weasel " "between the bars of your cell. The weasel kills you.", clover=True, die=True, ), weight=3) if state.character.place == state.places.places_dict["lord_carlos_manor"]: self.outcomes.add(Outcome(state, "You wake up in Lord Carlos' dungeon. You never leave.", die=True, ), weight=100) if state.character.place == state.places.places_dict["ocean"]: self.outcomes.add(Outcome(state, "You drown in your sleep.", die=True, ), weight=100) if not state.character.place in state.places.locked: self.outcomes.add(Outcome(state, "You wake up some hours later.", move=2, new_person=None, ), weight=3) if state.character.place in state.places.populated and not \ state.character.place in state.places.locked: self.outcomes.add(Outcome(state, "You are pleasantly awakened by a cat rubbing itself against " "you.", add_item=items.cat, new_person=None, ), weight=2) self.outcomes.add(Outcome(state, "You wake up robbed of all your worldly possessions.", remove_all_items=True, funcs=[state.character.lose_all_money], new_person=None, ), weight=2) self.outcomes.add(Outcome(state, "You are rudely awakened by an assassin's dagger.", clover=True, die=True, ), weight=2) self.outcomes.add(Outcome(state, "You wake up with some coins on your cloak.", get_money=money.pittance, topic="money", new_person=None, ), weight=2) class LookForTheWizard(Action): slot = "c" def __init__(self, state): super(LookForTheWizard, self).__init__(state) self.name = "Look for the wizard." def execute(self, state): if state.persons.persons_dict["wizard"].alive: if state.character.has_item(items.yellow_mushroom): self.outcomes.add(Outcome(state, "When you find him, he can smell that you have a yellow " "mushroom. He asks if he can have it.", move_to=state.places.places_dict["market"], new_person=state.persons.persons_dict["wizard"], actions=[(GiveHimTheYellowMushroom(state), 100)], ), weight=100) self.outcomes.add(Outcome(state, "You find him. He turns you into a frog and steps on you.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You find him. He turns you into a frog and tries to step on you " "but you manage to hop away.", funcs=[state.character.frogify], ), weight=3) self.outcomes.add(Outcome(state, "When you find him. 
He gives you a frog.", add_item=items.frog, move_to=state.places.places_dict["market"], new_person=state.persons.persons_dict["wizard"], ), weight=1) self.outcomes.add(Outcome(state, "You find the wizard. He is telling a woman how he " "cursed the icicles in the arctic.", move_to=state.places.places_dict["market"], new_person=state.persons.persons_dict["wizard"], ), weight=2) self.outcomes.add(Outcome(state, "You find the wizard. He is telling a woman about " "a mesmerizing pearl.", move_to=state.places.places_dict["market"], new_person=state.persons.persons_dict["wizard"], ), weight=2) self.outcomes.add(Outcome(state, "You see the wizard emptying a flask into a well.", move_to=state.places.places_dict["market"], new_person=state.persons.persons_dict["wizard"], ), weight=1) self.outcomes.add(Outcome(state, "You look for the wizard, but the assassins are looking for you.", die=True, ), weight=1) if state.persons.persons_dict["st_george"].alive: if state.persons.persons_dict["wizard"].alive: self.outcomes.add(Outcome(state, "You can't find the wizard, but you find St. George. " "He says the wizard is a little testy.", new_person=state.persons.persons_dict["st_george"], ), weight=1) else: self.outcomes.add(Outcome(state, "You can't find the wizard since the wizard is dead, " "but you find St. George. He says the wizard was a " "complicated man.", new_person=state.persons.persons_dict["st_george"], ), weight=1) class LeaveInAHuff(Action): slot = "c" def __init__(self, state): super(LeaveInAHuff, self).__init__(state) self.name = "Leave in a huff." def execute(self, state): self.outcomes.add(Outcome(state, None, move=1, ), weight=49) if state.character.place in state.places.populated: self.outcomes.add(Outcome(state, "The huffy manner in which you left causes some assassins to " "notice you. They assassinate you.", die=True, ), weight=1) class LeaveInAPuff(Action): slot = "c" def __init__(self, state): super(LeaveInAPuff, self).__init__(state) self.name = "Leave in a puff." self.combat_action = True def execute(self, state): place = state.character.place while place == state.character.place: place = state.places.places_dict[random.choice( list(state.places.places_dict.keys()))] self.outcomes.add(Outcome(state, None, move_to=place, ), weight=3) class FleeTheScene(Action): slot = "c" def __init__(self, state): super(FleeTheScene, self).__init__(state) self.name = "Flee the scene." def execute(self, state): self.outcomes.add(Outcome(state, None, move=2, new_person=None, unthreat=True, ), weight=3) class GoTo(Action): slot = "c" def __init__(self, state, place, specific_dest=None): super(GoTo, self).__init__(state) if specific_dest: self.dest = specific_dest else: self.dest = random.sample(place.connections, 1)[0] self.name = "Go to " + str(self.dest) + "." def execute(self, state): self.outcomes.add(Outcome(state, None, move_to=self.dest, new_person=None, ), weight=3) if self.dest == state.places.places_dict["dark_alley"]: self.outcomes.add(Outcome(state, "You go into a dark alley. You do not come out.", die=True, ), weight=3) if state.character.place in state.places.populated: self.outcomes.add(Outcome(state, "On your way out of {0} you run headlong into some guards. 
" "They say you must be a lunatic.".format(state.character.place), new_person=state.persons.persons_dict["guards"], threat=True, topic="oblivious", ), weight=3) self.outcomes.add(Outcome(state, "As you are entering {0}, you notice an assassin following " "you.".format(self.dest), move_to=self.dest, threat=True, new_person=state.persons.persons_dict["assassin"], ), weight=2) if state.character.has_item(items.cat): self.outcomes.add(Outcome(state, "Your cat notices an assassin approaching. You do not.", clover=True, die=True, ), weight=1) overhear_template = "As you leave " + state.character.place.name + \ " you overhear {0}." self.outcomes.add(Outcome(state, overhear_template.format("someone say that the town's well " "has been poisoned"), move_to=self.dest, ), weight=1) self.outcomes.add(Outcome(state, overhear_template.format("someone talking about how nice St. " "George was to them"), move_to=self.dest, ), weight=1) self.outcomes.add(Outcome(state, overhear_template.format("a man talking about being a pirate on " "Lord Arthur's ship"), move_to=self.dest, ), weight=1) self.outcomes.add(Outcome(state, overhear_template.format("a woman asking around about " "assassins"), move_to=self.dest, ), weight=1) self.outcomes.add(Outcome(state, overhear_template.format("some men are planning a trip to " "the woods to look for nymphs"), move_to=self.dest, ), weight=1) class RunLikeTheDevil(Action): slot = "c" def __init__(self, state): super(RunLikeTheDevil, self).__init__(state) self.name = "Run like the Devil." self.combat_action = True def execute(self, state): self.outcomes.add(Outcome(state, "The Devil is very fast, so you manage to get away.", move=2, succeed=True, ), weight=9) self.outcomes.add(Outcome(state, "You run like the Devil, but " + state.character.person.name + " also run" + persons.get_tense(state.character.person) + " like the Devil " "and overtake" + persons.get_tense(state.character.person) + " you.", die=True, ), weight=1) if state.character.person == state.persons.persons_dict["felicity"] and \ state.persons.persons_dict["felicity"].attracted > 9: self.outcomes.add(Outcome(state, "The Devil is very fast and not very fat, so you manage to get " "away unmarried.", new_person=None, move=2, succeed=True, flirt=(state.persons.persons_dict["felicity"], -666), ), weight=666) if state.character.person == \ state.persons.persons_dict["olga"] and \ state.persons.persons_dict["olga"].attracted > 9: self.outcomes.add(Outcome(state, "The Devil is pretty fast but Olga is prettier and faster. " "She strangles you to death.", die=True, flirt=(state.persons.persons_dict["olga"], -666), ), weight=666) self.outcomes.add(Outcome(state, "The Devil is very fast, so you manage to get away unmarried.", new_person=None, move=2, succeed=True, flirt=(state.persons.persons_dict["olga"], -666) ), weight=666) class WaddleLikeGod(Action): slot = "c" def __init__(self, state): super(WaddleLikeGod, self).__init__(state) self.name = "Waddle like God." self.combat_action = True def execute(self, state): self.outcomes.add(Outcome(state, "God is very slow, so you don't manage to get away.", die=True, ), weight=9) self.outcomes.add(Outcome(state, "You waddle like God, but " + state.character.person.name + " also waddle" + persons.get_tense(state.character.person) + " like God and " "fail to overtake" + persons.get_tense(state.character.person) + " you. 
You " "slowly get away.", move=1, ), weight=1) class WanderTheCountryside(Action): slot = "c" def __init__(self, state): super(WanderTheCountryside, self).__init__(state) self.name = "Wander the countryside." def execute(self, state): self.outcomes.add(Outcome(state, "Not all those who wander are lost, but you are.", fail=True, new_person=None, ), weight=1) self.outcomes.add(Outcome(state, "You find a mob of peasants about to perform a witch burning.", actions=[(SaveTheWitch(state), 30)], new_person=None, ), weight=1) self.outcomes.add(Outcome(state, "You find a mob of peasant children about to perform a cat " "burning.", actions=[(SaveTheCat(state), 30)], new_person=None, ), weight=1) self.outcomes.add(Outcome(state, "All of the peasants you meet talk about Lord Bartholomew like " "he's God's gift to the world.", new_person=None, ), weight=1) self.outcomes.add(Outcome(state, "You find a mob of peasants burning Lord Daniel in effigy.", new_person=None, ), weight=1) if state.persons.persons_dict["simple_peasant"].alive: self.outcomes.add(Outcome(state, "You find a simple peasant.", new_person=state.persons.persons_dict["simple_peasant"], ), weight=1) if state.persons.persons_dict["peasant_lass"].alive: self.outcomes.add(Outcome(state, "You find a peasant lass.", new_person=state.persons.persons_dict["peasant_lass"], ), weight=1) class Swim(Action): slot = "c" def __init__(self, state): super(Swim, self).__init__(state) self.name = "Swim." def execute(self, state): if type(self) == JustKeepSwimming or type(self) == KeepSwimming: next_action = JustKeepSwimming(state) else: next_action = KeepSwimming(state) self.outcomes.add(Outcome(state, "You manage to stay afloat.", actions=[(next_action, 10000)], ), weight=1) self.outcomes.add(Outcome(state, "You keep your head up.", actions=[(next_action, 10000)], ), weight=1) self.outcomes.add(Outcome(state, "You see a ship in the distance. You are unable to reach it.", actions=[(next_action, 10000)], fail=True, ), weight=1) self.outcomes.add(Outcome(state, "You make very little progress.", actions=[(next_action, 10000)], fail=True, ), weight=1) self.outcomes.add(Outcome(state, "You die of dehydration.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You are in way over your head.", die=True, ), weight=1) class KeepSwimming(Swim): slot = "c" def __init__(self, state): super(KeepSwimming, self).__init__(state) self.name = "Keep swimming." def execute(self, state): super(KeepSwimming, self).execute(state) self.outcomes.add(Outcome(state, "You die of exhaustion.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You are picked up by Lord Arthur's pirate ship.", move_to=state.places.places_dict["pirate_ship"], ), weight=1) self.outcomes.add(Outcome(state, "You find a mermaid sitting on a rock.", move_to=state.places.places_dict["mermaid_rock"], new_person=state.persons.persons_dict["mermaid"], ), weight=1) if state.character.has_item(items.cat): self.outcomes.add(Outcome(state, "Your cat dies.", actions=[(JustKeepSwimming(state), 10000)], remove_item=items.cat, fail=True, ), weight=1) class JustKeepSwimming(KeepSwimming): slot = "c" def __init__(self, state): super(JustKeepSwimming, self).__init__(state) self.name = "Just keep swimming." 
def execute(self, state): super(JustKeepSwimming, self).execute(state) self.outcomes.add(Outcome(state, "You die of exhaustion.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You finally find land.", move_to=state.places.places_dict["docks"], ), weight=1) self.outcomes.add(Outcome(state, "As you swim, you notice the water getting colder. You eventually " "find ice.", move_to=state.places.places_dict["arctic"], fail=True ), weight=1) class E4(Action): slot = "c" def __init__(self, state): super(E4, self).__init__(state) self.name = "E4." self.combat_action = True def execute(self, state): self.outcomes.add(Outcome(state, "You lose the game. Lord Carlos celebrates his victory by " "assassinating you.", die=True, ), weight=1) class ChallengeHimToAGameOfChess(Action): """ Used with Lord Carlos and Lord Bartholomew """ slot = "c" def __init__(self, state): super(ChallengeHimToAGameOfChess, self).__init__(state) self.name = "Challenge him to a game of chess." self.combat_action = True def execute(self, state): if state.character.person == state.persons.persons_dict["lord_carlos"]: self.outcomes.add(Outcome(state, "Lord Carlos says he has no time to waste on fools, but " "when you imply that he's afraid he'll lose, " "he has his servants set up a chessboard.", actions=[(A3(state), 10000), (Nf3(state), 10000), (E4(state), 10000), (AskForADraw(state), 10000)], ), weight=1) if state.character.person == state.persons.persons_dict["lord_bartholomew"]: self.outcomes.add(Outcome(state, "Lord Bartholomew says there's always time for a little fun " "in his life. He takes you to his chess parlor and sets up " "a board.", #actions=[(A3(state), 10000), # (Nf3(state), 10000), # (E4(state), 10000), # (TurnBoard(state), 10000)], ), weight=1) class WalkThePlank(Action): slot = "c" def __init__(self, state): super(WalkThePlank, self).__init__(state) self.name = "Walk the plank." def execute(self, state): self.outcomes.add(Outcome(state, "You walk across one of the planks on the deck.", topic="walking the plank", ), weight=2) self.outcomes.add(Outcome(state, "You fall into the ocean.", move_to=state.places.places_dict["ocean"], ), weight=3) self.outcomes.add(Outcome(state, "Lord Arthur's pet shark emerges from the depths and snatches you " "as you fall.", clover=True, die=True, ), weight=1) class TrashThePlace(Action): slot = "c" def __init__(self, state): super(TrashThePlace, self).__init__(state) self.name = "Trash the place." 
def execute(self, state): self.outcomes.add(Outcome(state, None, trash_place=state.character.place, succeed=True, move_to=state.character.place, ), weight=4) self.outcomes.add(Outcome(state, "You find a fancy red cloak in the wreckage.", add_item=items.fire_proof_cloak, trash_place=state.character.place, succeed=True, move_to=state.character.place, ), weight=1) if state.character.place == state.places.places_dict["market"]: self.outcomes.add(Outcome(state, "You get trampled to death by a spooked horse.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You are arrested on charges of lunacy and get " "thrown in prison with the other lunatics.", move_to=state.places.places_dict["prison"], new_person=state.persons.persons_dict["other_lunatics"], ), weight=1) if state.character.place == state.places.places_dict["wizards_lab"]: if not state.character.has_item(items.fire_proof_cloak): self.outcomes.add(Outcome(state, "One of the potions you break blows up the lab.", die=True, ), weight=2) else: self.outcomes.add(Outcome(state, "One of the potions you break blows up the lab, but " "your fancy red cloak protects you from annihilation.", ), weight=2) self.outcomes.add(Outcome(state, "You snap a staff in half and a dark spirit escapes from the " "staff.", die=True, ), weight=2) if state.character.person == state.persons.persons_dict["wizard"]: self.outcomes.add(Outcome(state, "The wizard incinerates you.", die=True, ), weight=20) if state.character.place == state.places.places_dict["wizards_lab"] and \ state.character.person != state.persons.persons_dict["wizard"] and \ state.persons.persons_dict["wizard"].alive: self.outcomes.add(Outcome(state, "The wizard walks in and starts yelling obscenities.", new_person=state.persons.persons_dict["wizard"], threat=True, ), weight=1) # D slot actions class TurnBoard(Action): """ Used for chess games against Lord Bartholomew """ slot = "d" def __init__(self, state): super(TurnBoard, self).__init__(state) self.name = "Play poorly and turn the board around once you're losing." def execute(self, state): self.outcomes.add(Outcome(state, "Lord Bartholomew laughs and concedes.", succeed=True, ), weight=1) class Donna(Action): """ Used when guessing Eve's name """ slot = "d" def __init__(self, state): super(Donna, self).__init__(state) self.name = "\"Donna.\"" def execute(self, state): self.outcomes.add(Outcome(state, "She shakes her head. \"What a shame. I was starting to like " "you.\" She throws a dagger into your guts.", die=True, ), weight=1) class AskForAsylum(Action): slot = "d" def __init__(self, state): super(AskForAsylum, self).__init__(state) self.name = "Ask for asylum." def execute(self, state): self.outcomes.add(Outcome(state, "Lord Bartholomew grants you asylum and gives you work shoveling " "coal into ovens. After a few years, you fall in love with a cook " "who also works in the kitchens. You eventually win her heart and " "live happily ever after.", win=True, ), weight=1) if state.character.place in state.places.burnable: self.outcomes.add(Outcome(state, "Lord Bartholomew grants you asylum, but his manor is soon " "stormed by Lord Daniel's guards. You are arrested for " "treason.", burn_place=state.character.place, kill=state.persons.persons_dict["lord_bartholomew"], move_to=state.places.places_dict["prison"], ), weight=1000) else: self.outcomes.add(Outcome(state, "Lord Bartholomew grants you asylum, but his manor is soon " "stormed by Lord Daniel's guards. You are arrested for "
You are arrested for " "treason.", kill=state.persons.persons_dict["lord_bartholomew"], move_to=state.places.places_dict["prison"], ), weight=1000) class AskForADraw(Action): slot = "d" def __init__(self, state): super(AskForADraw, self).__init__(state) self.name = "Ask for a draw." self.combat_action = True def execute(self, state): self.outcomes.add(Outcome(state, "Lord Carlos has you drawn and quartered for your impudence.", die=True, ), weight=1) class MakeItHard(Action): slot = "d" def __init__(self, state): super(MakeItHard, self).__init__(state) self.name = "Make it hard for Lord Carlos to kill you." self.combat_action = True def execute(self, state): self.outcomes.add(Outcome(state, "Lord Carlos is no slouch, he kills you anyway.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "Screaming gibberish in his face only stuns him for so long.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "Lord Carlos is better at killing than you are at not " "being killed.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You prevent Lord Carlos from killing you, but he calls in " "one of his assassins and has her do it.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You tell Lord Carlos that you're his son, he doesn't care.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You spit in his eyes.", actions=[RunLikeTheDevil(state), 666], ), weight=1) self.outcomes.add(Outcome(state, "You hide behind a painting Lord Carlos that is loathe " "to destroy. He loathes you more.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "He kills you as you try to get into an suit of " "armor.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You flee the country.", move_to=state.places.places_dict["arctic"], ), weight=1) self.outcomes.add(Outcome(state, "You flee to the woods and hide in a deep cave... " "perhaps a little too deep.", move_to=state.places.places_dict["cave"], ), weight=1) class ShowYourForeignCoin(Action): slot = "d" def __init__(self, state): super(ShowYourForeignCoin, self).__init__(state) self.name = "Show him your shiny foreign coin." def execute(self, state): if state.character.person == state.persons.persons_dict["lord_bartholomew"]: self.outcomes.add(Outcome(state, "\"Damn, son. Where'd you find this?\" Lord Bartholomew asks. " "He doesn't wait for your answer. Instead he takes the " "coin and gives you a small fortune.", remove_item=items.foreign_coin, get_money=money.small_fortune, ), weight=1) elif state.character.person == state.persons.persons_dict["lord_daniel"]: self.outcomes.add(Outcome(state, "Lord Daniel has his guards seize you and take your coin. " "They then defenestrate you. Fortunately, you land in a pile " "hay.", move_to=state.places.places_dict["streets"], remove_item=items.foreign_coin, ), weight=1) class DouseHerWithYourLovePotion(Action): slot = "d" def __init__(self, state, lady): super(DouseHerWithYourLovePotion, self).__init__(state) self.lady = lady self.name = "Douse " + self.lady.name + " with your love potion." def execute(self, state): if state.character.person == state.persons.persons_dict["mermaid"]: self.outcomes.add(Outcome(state, "The mermaid falls madly in love with you. " "You run into the mermaid problem, but {0}, " "so you still live happily ever after." 
".".format(random.choice([ "she has a mouth", "she has breasts", "she is fun to be around", ])), win=True, ), weight=1) elif state.character.person == state.persons.persons_dict["eve"]: self.outcomes.add(Outcome(state, "She dodges the potion and starts screaming. You are " "soon assassinated.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "Lord Carlos' daughter falls madly in love with you. " "You flee to another country and get married. She is fun " "to be around since she's magically enchanted to always be " "nice to you. However, she is still horrible to everyone " "else. So your life is always filled with adventure and " "danger.", win=True, ), weight=1) elif state.character.person == state.persons.persons_dict["nymph_queen"]: self.outcomes.add(Outcome(state, "You miss. The nymph queen giggles and turns you into a shrub.", score=100, lose=True, ), weight=1) self.outcomes.add(Outcome(state, "The nymph queen falls madly in love with you. All of the " "woodland creatures attend your wedding.", win=True, ), weight=1) else: self.outcomes.add(Outcome(state, "You fumble around in your bags looking for the love potion, " "but your lack of organization hinders you.", ), weight=1) class DrugHerWithYourLovePotion(Action): slot = "d" def __init__(self, state, lady): super(DrugHerWithYourLovePotion, self).__init__(state) self.lady = lady self.name = "Drug " + self.lady.name + " with your love potion." def execute(self, state): if state.character.person == state.persons.persons_dict["olga"]: self.outcomes.add(Outcome(state, "The pretty lady notices you slipping the potion into her " "drink. She stabs you in the gut and leaves.", die=True, ), weight=1) if state.persons.persons_dict["blind_bartender"].alive: self.outcomes.add(Outcome(state, "You distract her by pointing out a wart on the blind " "bartender's nose. After she takes a drink, she looks " "back at the blind bartender and falls in love with him.", new_person=None, fail=True, remove_item=items.love_potion, ), weight=1) self.outcomes.add(Outcome(state, "You manage to drug her. She becomes very flirty with you.", flirt=(state.persons.persons_dict["olga"], 10), remove_item=items.love_potion, succeed=True, ), weight=1) class LookForNymphs(Action): slot = "d" def __init__(self, state): super(LookForNymphs, self).__init__(state) self.name = "Look for nymphs." def execute(self, state): self.outcomes.add(Outcome(state, "You find the nymph queen {0}. Her beauty is " "{1}.".format(random.choice(["watering flowers in a meadow", "levitating above a pond", "feeding a stag", "teaching a goblin to read", "tanning in a ray of sunshine", "doing tai chi in a meadow"]), random.choice(["intoxicating", "dazzling", "exhilarating", "overwhelming", "only rivaled by her attractiveness" ])), new_person=state.persons.persons_dict["nymph_queen"], ), weight=1) self.outcomes.add(Outcome(state, "You see some nymphs bathing in a waterfall, but they hex " "you for gawking. You climb a ridge and throw yourself " "to your death.", clover=True, die=True, ), weight=1) self.outcomes.add(Outcome(state, "You see some nymphs but they fade away before you can get close.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "You slip and tumble into a hole in the ground.", move_to=state.places.places_dict["cave"], fail=True, ), weight=1) self.outcomes.add(Outcome(state, "{0}.".format(random.choice(["You can't find any", "Your efforts to find nymphs are fruitless", "You find an apple tree instead", "You don't see any nymphs. 
Only trees", ])), ), weight=4) self.outcomes.add(Outcome(state, "You find a witch instead.", new_person=state.persons.persons_dict["witch"], ), weight=1) self.outcomes.add(Outcome(state, "You notice a man in a dark cloak stalking you.", new_person=state.persons.persons_dict["assassin"], threat=True, ), weight=1) class GiveCat(Action): slot = "d" def __init__(self, state, woman): super(GiveCat, self).__init__(state) self.woman = woman self.name = "Give " + woman.name + " your cat." def execute(self, state): self.outcomes.add(Outcome(state, "She thinks the cat is adorable.", remove_item=items.cat, flirt=(state.character.person, 3), ), weight=3) class GiveFlowers(Action): slot = "d" def __init__(self, state, woman): super(GiveFlowers, self).__init__(state) self.woman = woman self.name = "Give " + woman.name + " your bouquet of flowers." def execute(self, state): self.outcomes.add(Outcome(state, "She is pleased with your gift.", remove_item=items.bouquet_of_flowers, flirt=(state.character.person, 3), ), weight=3) class Loot(Action): slot = "d" def __init__(self, state): super(Loot, self).__init__(state) self.name = "Loot." def execute(self, state): item = random.choice(state.persons.persons_dict["local_merchant"].get_sells() + state.persons.persons_dict["wealthy_merchant"].get_sells()) if item.name[0] in "aeiou": self.outcomes.add(Outcome(state, "You get away with an " + item.name + ".", add_item=item, move=1, ), weight=3) else: self.outcomes.add(Outcome(state, "You get away with a " + item.name + ".", add_item=item, move=1, ), weight=3) self.outcomes.add(Outcome(state, "You are killed by a merchant defending her store.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You are arrested for attempting to steal an apple.", move_to=state.places.places_dict["prison"], new_person=state.persons.persons_dict["other_lunatics"], ), weight=1) class WatchAPlay(Action): slot = "d" def __init__(self, state): super(WatchAPlay, self).__init__(state) self.name = "Watch a play." def execute(self, state): self.outcomes.add(Outcome(state, "The play satirizes Lord Daniel's policy on lunacy. " "The actors are arrested at the end of the play.", ), weight=1) self.outcomes.add(Outcome(state, "The play portrays Lord Bartholomew in a glorious light. The " "audience is very pleased and claps for so long that it becomes " "awkward.", ), weight=1) if state.places.places_dict["market"] in state.places.burnable: self.outcomes.add(Outcome(state, "The play is put on by some of Lord Daniel's guards. The " "acting is horrible and the play portrays Lord Bartholomew in " "a negative light. The audience starts a riot.", new_person=state.persons.persons_dict["guards"], actions=[ (Attack(state, state.persons.persons_dict["guards"]), 10000), (BurnThePlaceToTheGround(state, state.places.places_dict["market"]), 10000), (TrashThePlace(state), 10000), (Loot(state), 10000) ], ), weight=1) else: self.outcomes.add(Outcome(state, "The play is put on by some Lord Daniel's guards, the acting is " "terrible and the play portrays Lord Bartholomew in a negative " "light. The audience starts a riot.", new_person=state.persons.persons_dict["guards"], actions=[ (Attack(state, state.persons.persons_dict["guards"]), 10000), (TrashThePlace(state), 10000), (Loot(state), 10000) ], ), weight=1) class FlauntYourWealth(Action): slot = "d" def __init__(self, state): super(FlauntYourWealth, self).__init__(state) self.name = "Flaunt your wealth." def execute(self, state): self.outcomes.add(Outcome(state, "The local peasants mob you. 
They take your money and your life.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "The guards notice you and conclude you must be rich.", new_person=state.persons.persons_dict["guards"], actions=[(TellThemYouAreALunatic(state), 10000)], ), weight=1) if state.persons.persons_dict["st_george"].alive: self.outcomes.add(Outcome(state, "St. George notices you and warns you of the dangers of " "flamboyance.", new_person=state.persons.persons_dict["st_george"], ), weight=1) self.outcomes.add(Outcome(state, "Some truly wealthy people see you and sneer.", fail=True, ), weight=1) class FreezeToDeath(Action): slot = "d" def __init__(self, state): super(FreezeToDeath, self).__init__(state) self.name = "Freeze to death." def execute(self, state): self.outcomes.add(Outcome(state, "While you're trying to freeze to death, you notice some " "penguins nearby.", actions=[(Yell(state, "that there aren't penguins in the arctic"), 100)], ), weight=1) self.outcomes.add(Outcome(state, "While you are waiting to freeze to death, you notice " "the wizard dropping off a boatload of penguins.", actions=[(Yell(state, "Don't leave without me"), 10000)], ), weight=1) self.outcomes.add(Outcome(state, "It's easy.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You get sleepy.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You do.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You freeze to death.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You get mauled by a polar bear before you get a chance to " "freeze to death.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "Some Inuits save you from the cold and take you back to land " "in a kayak. They also give you a fish.", add_item=items.fish, move_to=state.places.places_dict["countryside"], succeed=True, ), weight=2) class Panic(Action): slot = "d" def __init__(self, state): super(Panic, self).__init__(state) self.name = "Panic!" self.combat_action = True def execute(self, state): #under revision """ options = state.places.Place.instances - set([state.character.place]) place = random.sample(options, 1)[0] self.outcomes.add(Outcome(state, "You don't remember what you did, but you seem to have gotten " "away.", move_to=place, succeed=True, ), weight=1) """ self.outcomes.add(Outcome(state, "Panicking doesn't help.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "Panicking doesn't save you.", die=True, ), weight=1) class SingASong(Action): slot = "d" def __init__(self, state, topic=None): super(SingASong, self).__init__(state) self.topic = topic if topic: self.name = "Sing a song about {0}.".format(topic) else: self.name = "Sing a song." def execute(self, state): if state.character.place == state.places.places_dict["church"]: self.outcomes.add(Outcome(state, "A priestess finds your lyrics {0} and has you thrown out of " "the church.".format(random.choice( ["blasphemous", "crude", "idiotic", "offensive", "mildly offensive", "uncreative"])), fail=True, move_to=state.places.places_dict["streets"], ), weight=10) if state.character.place == state.places.places_dict["docks"]: self.outcomes.add(Outcome(state, "You are soon joined in song by a gang of drunken pirates. " "They spill rum on you and ruin your song.", new_person=state.persons.persons_dict["pirates"], ), weight=5) if state.character.place == state.places.places_dict["upstairs"] and \ state.character.person == state.persons.persons_dict["olga"]: self.outcomes.add(Outcome(state, "You sing a romantic ballad. 
Olga is impressed.", flirt=(state.persons.persons_dict["olga"], 2), ), weight=20) self.outcomes.add(Outcome(state, "Olga interrupts your song by kissing you.", flirt=(state.persons.persons_dict["olga"], 2), ), weight=20) if state.character.place == state.places.places_dict["lord_carlos_manor"]: self.outcomes.add(Outcome(state, "This is no place for merry-making. You are soon " "assassinated.", die=True, ), weight=5) self.outcomes.add(Outcome(state, "Your singing alerts Lord Carlos' assassins to your " "presence.", die=True, ), weight=10) if state.character.place == state.places.places_dict["mermaid_rock"]: self.outcomes.add(Outcome(state, "As you sing, a ship sails by. The " "captain is tied to the mast. He is not " "impressed.", fail=True, ), weight=10) if state.character.person == state.persons.persons_dict["mermaid"]: self.outcomes.add(Outcome(state, "The mermaid enjoys your singing and sings with you.", flirt=(state.persons.persons_dict["mermaid"], 2), ), weight=20) self.outcomes.add(Outcome(state, "The mermaid is displeased with your choice of lyrics and " "pushes you into the ocean.", move_to=state.places.places_dict["ocean"], flirt=(state.persons.persons_dict["mermaid"], -1), fail=True, ), weight=10) if state.character.place in state.places.populated: self.outcomes.add(Outcome(state, "Your singing is too loud for you to hear the footsteps of an " "assassin. He assassinates you.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "A crowd gathers to hear your music and throws you a small " "fortune in coins.", get_money=money.small_fortune, ), weight=2) self.outcomes.add(Outcome(state, "The locals hate your voice and soon mob you.", clover=True, die=True, ), weight=1) if not state.character.person: self.outcomes.add(Outcome(state, "While you're singing, some men in black cloaks start to " "edge their way toward you.", new_person=state.persons.persons_dict["assassins"], threat=True, ), weight=3) if self.topic == "assassins": self.outcomes.add(Outcome(state, "An assassin notices you singing about assassins and " "assassinates you", die=True, ), weight=5) if state.character.person == state.persons.persons_dict["wizard"]: self.outcomes.add(Outcome(state, "The wizard complains that you are singing off-key. He turns " "you into a frog and steps on you.", die=True, ), weight=20) if not state.character.place in state.places.locked: self.outcomes.add(Outcome(state, "You wander aimlessly as you work your way through an epic " "ballad.", move=1, ), weight=1) if self.topic is None: self.outcomes.add(Outcome(state, "You sing a song about Lord Arthur, captain of the pirates.", ), weight=1) self.outcomes.add(Outcome(state, "You sing a song about Lord Bartholomew, leader of the " "peasants.", ), weight=1) self.outcomes.add(Outcome(state, "You sing a song about Lord Carlos, kingpin of the assassins.", ), weight=1) self.outcomes.add(Outcome(state, "You sing a song about Lord Daniel, leader of the guards.", ), weight=1) self.outcomes.add(Outcome(state, "You sing your favorite song. No one cares.", ), weight=2) class SwingYourCat(Action): slot = "d" def __init__(self, state): super(SwingYourCat, self).__init__(state) self.name = "Swing your cat." 
def execute(self, state): self.outcomes.add(Outcome(state, "Your cat manages to escape.", remove_item=items.cat, ), weight=2) if state.character.place in state.places.populated: self.outcomes.add(Outcome(state, "You hit an assassin with your cat.", new_person=state.persons.persons_dict["assassin"], threat=True, ), weight=3) self.outcomes.add(Outcome(state, "The local guards notice you swinging your cat around and " "conclude that you must be a lunatic.", new_person=state.persons.persons_dict["guards"], threat=True, topic="mad", ), weight=3) class LookThroughSomeTrash(Action): slot = "d" def __init__(self, state): super(LookThroughSomeTrash, self).__init__(state) self.name = "Look through some trash." def execute(self, state): self.outcomes.add(Outcome(state, "You attempt to look through the trash, but an assassin takes it " "out.", die=True, ), weight=2) self.outcomes.add(Outcome(state, "While you are searching through the trash you find a somewhat " "agreeable cat.", add_item=items.cat, ), weight=1) self.outcomes.add(Outcome(state, "The local guards see you searching through the trash and accuse " "you of being a lunatic.", add_item=items.cat, new_person=state.persons.persons_dict["guards"], threat=True, topic="curious", ), weight=1) self.outcomes.add(Outcome(state, "You do not find anything useful in the trash.", fail=True, topic="trash", ), weight=1) self.outcomes.add(Outcome(state, "You find a mirror in the trash. You see nothing of value.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "You find a bad smell.", fail=True, ), weight=1) self.outcomes.add(Outcome(state, "You find an old ax.", succeed=True, add_item=items.ax, ), weight=1) class DanceAJig(Action): slot = "d" def __init__(self, state): super(DanceAJig, self).__init__(state) self.name = "Dance a jig." def execute(self, state): if state.character.place != state.places.places_dict["ocean"]: self.outcomes.add(Outcome(state, "You get sweaty.", ), weight=9) self.outcomes.add(Outcome(state, "You have a grand old time.", ), weight=5) if state.character.place != state.places.places_dict["void"]: self.outcomes.add(Outcome(state, "You step in a puddle and get your britches wet.", fail=True, ), weight=3) self.outcomes.add(Outcome(state, "You break your ankle and fall to the ground. 
You catch " "yourself but break your wrist, hit your head on the " "ground and your neck.", die=True, ), weight=1) else: self.outcomes.add(Outcome(state, "You drown trying to dance.", die=True, ), weight=5) self.outcomes.add(Outcome(state, "You swim a jig", topic="jigs", ), weight=2) self.outcomes.add(Outcome(state, "You can't dance a jig, you're in the ocean.", fail=True, ), weight=2) if state.character.place == state.places.places_dict["woods"]: fae = random.choice(["fairaes", "sprites", "pixies", "dryads", "nymphs", "spirits"]) self.outcomes.add(Outcome(state, "Some {0} dance with you and then fade away.".format(fae), ), weight=20) self.outcomes.add(Outcome(state, "Some goblins dance with you and then kill you.", die=True, ), weight=3) if (state.character.place in state.places.town and \ state.character.place not in state.places.locked) or \ state.character.place == state.places.places_dict["countryside"]: self.outcomes.add(Outcome(state, "The local peasants are entertained by your antics and toss " "you some coins.", get_money=money.pittance, ), weight=10) self.outcomes.add(Outcome(state, "Many peasants start dancing with you and begin singing about " "Lord Bartholomew.", ), weight=15) if state.character.place == state.places.places_dict["countryside"]: self.outcomes.add(Outcome(state, "Many peasants start dancing with you and begin singing an " "ode to Lord Bartholomew.", ), weight=25) if state.character.person == state.persons.persons_dict["mermaid"]: self.outcomes.add(Outcome(state, "She laughs and claps and seems completely in awe of your " "legs.", ), weight=1) if state.character.person == state.persons.persons_dict["guards"]: # TODO these don't happen # because this is not a # combat action self.outcomes.add(Outcome(state, "\"We got a dancer,\" one of them says. They throw you in " "prison.", move_to=state.places.places_dict["prison"], new_person=state.persons.persons_dict["other_lunatics"], ), weight=100) self.outcomes.add(Outcome(state, "\"Eh, he's all right,\" one of them says. The guards " "go on their way.", topic="guards", ), weight=100) if state.character.place == state.places.places_dict["arctic"]: self.outcomes.add(Outcome(state, "You get sweaty. The sweat freezes on you. " "You freeze to death.", die=True, ), weight=30) if state.character.place == state.places.places_dict["cave"]: self.outcomes.add(Outcome(state, "Dancing fails to cheer you up.", fail=True, ), weight=10) self.outcomes.add(Outcome(state, "You slip on a rock and fall to your death.", clover=True, die=True, ), weight=15) if state.character.place == state.places.places_dict["tavern"] or \ state.character.place == state.places.places_dict["lord_carlos_manor"]: self.outcomes.add(Outcome(state, "Some assassins immediately notice you dancing and assassinate " "you.", die=True, ), weight=15) class Drown(Action): slot = "d" def __init__(self, state): super(Drown, self).__init__(state) self.name = "Drown." def execute(self, state): self.outcomes.add(Outcome(state, "You drown.", die=True, ), weight=1) class Sink(Drown): def __init__(self, state): super(Sink, self).__init__(state) self.name = "Sink." class SaveTheCat(Action): slot = "d" def __init__(self, state): super(SaveTheCat, self).__init__(state) self.name = "Save the cat." 
def execute(self, state): self.outcomes.add(Outcome(state, "You escape with the cat.", succeed=True, add_item=items.cat, ), weight=1) self.outcomes.add(Outcome(state, "You escape with the cat, but the cat escapes you.\n" "You almost got a cat.", fail=True, ), weight=1) class YellAPiratePhrase(Action): slot = "d" def __init__(self, state): super(YellAPiratePhrase, self).__init__(state) self.phrase = random.choice( ["Shiver me timbers", "Dead men tell no tales", "Arr Matey", "Avast", "Aye Aye", "Send 'em to Davy Jones' locker", "Thar she blows", "Hoist the Jolly Roger", "Walk the plank", "Yo, ho, ho, and a bottle of rum", "All hands on deck", "Land ho", "X marks the spot", "Ahoy"]) self.name = "Yell \"{0}!\"".format(self.phrase) def execute(self, state): if state.persons.persons_dict["lord_arthur"].alive: self.outcomes.add(Outcome(state, "Lord Arthur has you thrown off the ship.", move_to=state.places.places_dict["ocean"], fail=True, ), weight=1) self.outcomes.add(Outcome(state, "Lord Arthur tells you that no true pirate " "says \"{0}.\"".format( self.phrase), new_person=state.persons.persons_dict["lord_arthur"], fail=True, ), weight=1) if state.character.is_employed_by(state.persons.persons_dict["lord_arthur"]): self.outcomes.add(Outcome(state, "Lord Arthur tells you that you are no longer a member of " "the crew.", remove_employer=state.persons.persons_dict["lord_arthur"], fail=True, ), weight=1) else: self.outcomes.add(Outcome(state, "Lord Arthur is impressed by your enthusiasm and makes " "you a member of the crew.", new_person=state.persons.persons_dict["lord_arthur"], add_employer=state.persons.persons_dict["lord_arthur"], topic="piracy", ), weight=1) else: self.outcomes.add(Outcome(state, "Since Lord Arthur is dead, you get away with it.", succeed=True, ), weight=1) class SaveTheWitch(Action): slot = "d" def __init__(self, state): super(SaveTheWitch, self).__init__(state) self.name = "Save the witch." def execute(self, state): self.outcomes.add(Outcome(state, "You have trouble untying her and the peasants kill you for " "meddling.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You escape with her. She thanks you and gives you " "a deep-cave newt before you part ways.", add_item=items.deep_cave_newt, move_to=state.places.places_dict["woods"], topic=random.choice(["heroism", "newts", "witches"]), ), weight=1) self.outcomes.add(Outcome(state, "In your rush to save the witch, you trip over a rock. " "You wake up near the smoldering remains of the witch's " "pyre.", fail=True, ), weight=1) class DoSomeFarmWork(Action): slot = "d" def __init__(self, state): super(DoSomeFarmWork, self).__init__(state) self.name = "Do some farm work." def execute(self, state): self.outcomes.add(Outcome(state, "You spend a season picking apples.", get_money=money.pittance, ), weight=1) self.outcomes.add(Outcome(state, "You spend a season milking cows for a farmer woman. " "She keeps trying to marry you to her attractive " "daughter, but her daughter is having none of it.", get_money=money.pittance, ), weight=1) self.outcomes.add(Outcome(state, "You spend a season baling hay.", get_money=money.pittance, add_item=items.pitchfork, ), weight=1) self.outcomes.add(Outcome(state, "You spend a season harvesting wheat. You enjoy the change of " "pace.", get_money=money.pittance, ), weight=1) self.outcomes.add(Outcome(state, "You spend a season slaughtering hogs. 
You find a shiny foreign " "coin in one of the hogs.", get_money=money.pittance, add_item=items.foreign_coin, ), weight=1) self.outcomes.add(Outcome(state, "You find work, but the assassins find you.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "During your duties, you get kicked by a mule. You somehow don't " "die.", get_money=money.pittance, topic="mules", ), weight=1) class DoSomeGambling(Action): slot = "d" def __init__(self, state): super(DoSomeGambling, self).__init__(state) self.name = "Do some gambling." def execute(self, state): self.outcomes.add(Outcome(state, "You win.", get_money=random.choice([money.pittance, money.small_fortune]), succeed=True, ), weight=1) self.outcomes.add(Outcome(state, "You lose.", funcs=[state.character.lose_all_money], fail=True, ), weight=1) if state.character.place == state.places.places_dict["tavern"]: self.outcomes.add(Outcome(state, "You get cleaned out by a pretty lady.", new_person = state.persons.persons_dict["olga"], funcs=[state.character.lose_all_money], fail=True, ), weight=1) if state.character.place == state.places.places_dict["tavern"] or \ state.character.place == state.places.places_dict["lord_carlos_manor"]: self.outcomes.add(Outcome(state, "It was a gamble to stay here. The assassins find you.", die=True, ), weight=1) if state.character.place == state.places.places_dict["docks"]: self.outcomes.add(Outcome(state, "You play some dice with Lord Arthur. He whips you soundly. " "However, you win and earn a small fortune.", get_money=money.small_fortune, succeed=True, ), weight=1) self.outcomes.add(Outcome(state, "You play some dice with Lord Arthur. He whips you soundly.", funcs=[state.character.lose_all_money], fail=True, ), weight=2) self.outcomes.add(Outcome(state, "You dice with some pirates. They easily beat you.", new_person=state.persons.persons_dict["pirates"], funcs=[state.character.lose_all_money], fail=True, ), weight=1) class SneakAround(Action): """ Only use in Lord Carlos' manor """ slot = "d" def __init__(self, state): super(SneakAround, self).__init__(state) self.name = "Sneak around." def execute(self, state): if state.character.place == state.places.places_dict["lord_bartholomews_manor"]: self.outcomes.add(Outcome(state, "While prowling in the shadows of a hallway, you stub your " "pinkie toe.", actions=[(HowlWithPain(state), 10000)], fail=True, ), weight=1) self.outcomes.add(Outcome(state, "While lurking in a shrub, you catch sight of the fair Lady " "Beatrice.", ), weight=1) self.outcomes.add(Outcome(state, "While hiding behind a door, you overhear Lord Bartholomew " "and his men plotting insurrection.", ), weight=1) self.outcomes.add(Outcome(state, "While creeping around in the stables, you find a long " "pitchfork.", add_item=items.long_pitchfork ), weight=1) self.outcomes.add(Outcome(state, "An old man notices you skulking around and starts yelling " "about an assassin. You look behind you, but the old " "man stabs you in the front.", die=True, ), weight=1) if state.character.place == state.places.places_dict["lord_carlos_manor"]: self.outcomes.add(Outcome(state, "One of the assassin guards sees you tiptoeing around in " "broad daylight. He assassinates you.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "Your smell gives you away. You are soon assassinated.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You get the hiccups. 
You are soon assassinated.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You are sneaking through the stables when a man too fat to " "avoid bumps into you. You are soon assassinated.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You find a poisoned dagger in a glass case.", add_item=items.poisoned_dagger, succeed=True, ), weight=1) if state.persons.persons_dict["eve"].alive: self.outcomes.add(Outcome(state, "You manage to sneak into Lord Carlos' " "daughter's bedroom. She is {0}".format(random.choice( ["reading at her desk.", "sharpening a dagger.", "petting her cat.", "putting on jewelry.", "painting a picture of you getting assassinated.",])), new_person=state.persons.persons_dict["eve"], ), weight=2) if state.persons.persons_dict["lord_carlos"].alive: self.outcomes.add(Outcome(state, "Lord Carlos jumps down from some rafters and assassinates you.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You manage to sneak into Lord Carlos' " "study. He is {0}".format(random.choice( ["writing a letter.", "reading a book.", "looking straight at you.", "eating a heart.", "training a weasel.", "pacing around."])), new_person=state.persons.persons_dict["lord_carlos"], threat=True, ), weight=2) class HideUnderTheDeck(Action): """ Note: only use when attacking merchant ship """ slot = "d" def __init__(self, state): super(HideUnderTheDeck, self).__init__(state) self.name = "Hide under the deck." def execute(self, state): self.outcomes.add(Outcome(state, "You miss all of the action." ), weight=2) self.outcomes.add(Outcome(state, "You fight an epic battle against one of the rats on the " "lower decks.", ), weight=1) self.outcomes.add(Outcome(state, "Lord Arthur has you killed when he hears of your cowardice.", die=True, ), weight=1) class SnoopAround(Action): """ Only use in Wizard's lab """ slot = "d" def __init__(self, state): super(SnoopAround, self).__init__(state) self.name = "Snoop around." def execute(self, state): self.outcomes.add(Outcome(state, "The Wizard finds you and conks you on the head with his staff.", move_to=state.places.places_dict["arctic"], ), weight=1) self.outcomes.add(Outcome(state, "You accidentally knock over a bottle of roiling black vapor.", clover=True, die=True, ), weight=1) self.outcomes.add(Outcome(state, "You find a fancy red cloak.", actions=[(TakeIt(state, state.persons.persons_dict["wizard"], items.fire_proof_cloak), 100)], topic="cloaks", ), weight=1) self.outcomes.add(Outcome(state, "You find a frog.", add_item=items.frog, topic="frogs", ), weight=1) # E slot actions class EnterTheVoid(Action): slot = "e" def __init__(self, state): super(EnterTheVoid, self).__init__(state) self.name = "Enter the void." self.combat_action = True def execute(self, state): self.outcomes.add(Outcome(state, None, move_to=state.places.places_dict["void"], ), weight=3) self.outcomes.add(Outcome(state, "There's no air in the void.", die=True, ), weight=1) self.outcomes.add(Outcome(state, "You get lost in limbo forever.", lose=True, ), weight=1)
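The actions file above repeats one mechanic throughout: every Action subclass overrides execute() to fill a weighted pool of Outcome objects, and one outcome is then drawn with probability proportional to its weight. The snippet below is a minimal, self-contained sketch of that weighted draw only; the OutcomePool class and its method names are illustrative assumptions, not the repository's actual Outcome/Action base classes.

import random

class OutcomePool:
    # Stand-in for the weighted outcome set each Action builds in execute().
    def __init__(self):
        self._entries = []  # list of (outcome, weight) pairs

    def add(self, outcome, weight=1):
        # A larger weight makes this outcome proportionally more likely.
        self._entries.append((outcome, weight))

    def pick(self):
        outcomes, weights = zip(*self._entries)
        # random.choices performs the weighted selection in one call.
        return random.choices(outcomes, weights=weights, k=1)[0]

pool = OutcomePool()
pool.add("You get away unscathed.", weight=9)
pool.add("An assassin finds you.", weight=1)
print(pool.pick())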
apache-2.0
-5,675,993,034,953,079,000
32.135994
105
0.531437
false
3.815392
false
false
false
MustafaMustafa/dcgan-tf-benchmark
run_dcgan.py
1
1596
import os import subprocess datafile = 'data/benchmark_GAN_generated_cosmo_maps_128.npy' output_size = 128 epoch = 1 flip_labels = 0.01 batch_size = 128 z_dim = 64 nd_layers = 4 ng_layers = 4 gf_dim = 64 df_dim = 64 save_every_step = 'False' data_format = 'NHWC' transpose_matmul_b = False verbose = 'False' arch = 'KNL' #default, KNL or HSW experiment = 'cosmo_primary_256_200k_batchSize%i_flipLabel%0.3f_'\ 'nd%i_ng%i_gfdim%i_dfdim%i_zdim%i'%(batch_size, flip_labels, nd_layers,\ ng_layers, gf_dim, df_dim, z_dim) command = 'python dcgan/main.py --dataset cosmo --datafile %s '\ '--output_size %i --flip_labels %f --experiment %s '\ '--epoch %i --batch_size %i --z_dim %i '\ '--nd_layers %i --ng_layers %i --gf_dim %i --df_dim %i --save_every_step %s '\ '--data_format %s --transpose_matmul_b %s --verbose %s --arch %s'%(datafile, output_size, flip_labels, experiment,\ epoch, batch_size, z_dim,\ nd_layers, ng_layers, gf_dim, df_dim, save_every_step,\ data_format, transpose_matmul_b, verbose, arch) # if not os.path.isdir('output'): # os.mkdir('output') if os.path.exists('logs'): subprocess.call('rm -rf ./logs'.split()) print(command.split()) # f_out = open('output/'+experiment+'.log', 'w') # subprocess.call(command.split(), stdout=f_out) subprocess.call(command.split())
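run_dcgan.py formats its whole command as one string and then re-tokenizes it with command.split(), which silently breaks if a value such as the datafile path ever contains spaces. A sketch of a safer variant is below: it passes an argument list directly to subprocess.call. The flag names mirror the command string above and the values are the module-level variables already defined in this script; nothing else about the benchmark is assumed.

args = ['python', 'dcgan/main.py',
        '--dataset', 'cosmo',
        '--datafile', datafile,
        '--output_size', str(output_size),
        '--flip_labels', str(flip_labels),
        '--experiment', experiment,
        '--epoch', str(epoch),
        '--batch_size', str(batch_size),
        '--z_dim', str(z_dim),
        '--nd_layers', str(nd_layers),
        '--ng_layers', str(ng_layers),
        '--gf_dim', str(gf_dim),
        '--df_dim', str(df_dim),
        '--save_every_step', save_every_step,
        '--data_format', data_format,
        '--transpose_matmul_b', str(transpose_matmul_b),
        '--verbose', verbose,
        '--arch', arch]
# Each argument reaches main.py exactly as written, spaces and all.
subprocess.call(args)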
mit
-5,536,285,681,664,096,000
37.926829
125
0.543233
false
3.297521
false
true
false
Klafyvel/Sivigik
gallery/migrations/0001_initial.py
1
1051
# -*- coding: utf-8 -*- # Generated by Django 1.11.3 on 2017-07-25 14:02 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import gallery.models class Migration(migrations.Migration): initial = True dependencies = [ ('article', '0001_initial'), ] operations = [ migrations.CreateModel( name='Attachment', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('attachment_type', models.CharField(choices=[('IMG', 'Image'), ('FILE', 'Fichier')], default='FILE', max_length=30)), ('file', models.FileField(null=True, upload_to=gallery.models.Attachment.get_upload_to)), ('image', models.ImageField(null=True, upload_to=gallery.models.Attachment.get_upload_to)), ('article', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='article.Article')), ], ), ]
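Both file fields in this migration point at gallery.models.Attachment.get_upload_to, which is not shown here. The sketch below is only a conventional Django upload_to callable with the standard (instance, filename) signature; the directory layout it returns is a hypothetical example, not the project's actual implementation.

import os

def get_upload_to(instance, filename):
    # Django passes the model instance and the original filename and expects
    # a path relative to MEDIA_ROOT; grouping by article is an assumed layout.
    return os.path.join('attachments', str(instance.article_id), filename)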
agpl-3.0
-7,081,145,542,296,569,000
35.241379
134
0.616556
false
4.01145
false
false
false
thaumos/ansible
lib/ansible/modules/remote_management/redfish/redfish_facts.py
1
12031
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Copyright (c) 2017-2018 Dell EMC Inc.
# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type

ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'metadata_version': '1.1'}

DOCUMENTATION = '''
---
module: redfish_facts
version_added: "2.7"
short_description: Manages Out-Of-Band controllers using Redfish APIs
description:
  - Builds Redfish URIs locally and sends them to remote OOB controllers to
    get information back.
  - Information retrieved is placed in a location specified by the user.
options:
  category:
    required: false
    description:
      - List of categories to execute on OOB controller
    default: ['Systems']
  command:
    required: false
    description:
      - List of commands to execute on OOB controller
  baseuri:
    required: true
    description:
      - Base URI of OOB controller
  username:
    required: true
    description:
      - User for authentication with OOB controller
    version_added: "2.8"
  password:
    required: true
    description:
      - Password for authentication with OOB controller
  timeout:
    description:
      - Timeout in seconds for URL requests to OOB controller
    default: 10
    type: int
    version_added: '2.8'

author: "Jose Delarosa (@jose-delarosa)"
'''

EXAMPLES = '''
  - name: Get CPU inventory
    redfish_facts:
      category: Systems
      command: GetCpuInventory
      baseuri: "{{ baseuri }}"
      username: "{{ username }}"
      password: "{{ password }}"
  - debug:
      msg: "{{ redfish_facts.cpu.entries | to_nice_json }}"

  - name: Get CPU model
    redfish_facts:
      category: Systems
      command: GetCpuInventory
      baseuri: "{{ baseuri }}"
      username: "{{ username }}"
      password: "{{ password }}"
  - debug:
      msg: "{{ redfish_facts.cpu.entries.0.Model }}"

  - name: Get memory inventory
    redfish_facts:
      category: Systems
      command: GetMemoryInventory
      baseuri: "{{ baseuri }}"
      username: "{{ username }}"
      password: "{{ password }}"

  - name: Get fan inventory with a timeout of 20 seconds
    redfish_facts:
      category: Chassis
      command: GetFanInventory
      baseuri: "{{ baseuri }}"
      username: "{{ username }}"
      password: "{{ password }}"
      timeout: 20

  - name: Get default inventory information
    redfish_facts:
      baseuri: "{{ baseuri }}"
      username: "{{ username }}"
      password: "{{ password }}"
  - debug:
      msg: "{{ redfish_facts | to_nice_json }}"

  - name: Get several inventories
    redfish_facts:
      category: Systems
      command: GetNicInventory,GetBiosAttributes
      baseuri: "{{ baseuri }}"
      username: "{{ username }}"
      password: "{{ password }}"

  - name: Get default system inventory and user information
    redfish_facts:
      category: Systems,Accounts
      baseuri: "{{ baseuri }}"
      username: "{{ username }}"
      password: "{{ password }}"

  - name: Get default system, user and firmware information
    redfish_facts:
      category: ["Systems", "Accounts", "Update"]
      baseuri: "{{ baseuri }}"
      username: "{{ username }}"
      password: "{{ password }}"

  - name: Get Manager NIC inventory information
    redfish_facts:
      category: Manager
      command: GetManagerNicInventory
      baseuri: "{{ baseuri }}"
      username: "{{ username }}"
      password: "{{ password }}"

  - name: Get boot override information
    redfish_facts:
      category: Systems
      command: GetBootOverride
      baseuri: "{{ baseuri }}"
      username: "{{ username }}"
      password: "{{ password }}"

  - name: Get chassis inventory
    redfish_facts:
      category: Chassis
      command: GetChassisInventory
      baseuri: "{{ baseuri }}"
      username: "{{ username }}"
      password: "{{ password }}"

  - name: Get all information available in the Manager category
    redfish_facts:
      category: Manager
      command: all
      baseuri: "{{ baseuri }}"
      username: "{{ username }}"
      password: "{{ password }}"

  - name: Get firmware update capability information
    redfish_facts:
      category: Update
      command: GetFirmwareUpdateCapabilities
      baseuri: "{{ baseuri }}"
      username: "{{ username }}"
      password: "{{ password }}"

  - name: Get all information available in all categories
    redfish_facts:
      category: all
      command: all
      baseuri: "{{ baseuri }}"
      username: "{{ username }}"
      password: "{{ password }}"
'''

RETURN = '''
result:
    description: different results depending on task
    returned: always
    type: dict
    sample: List of CPUs on system
'''

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.redfish_utils import RedfishUtils

CATEGORY_COMMANDS_ALL = {
    "Systems": ["GetSystemInventory", "GetPsuInventory", "GetCpuInventory",
                "GetMemoryInventory", "GetNicInventory",
                "GetStorageControllerInventory", "GetDiskInventory",
                "GetBiosAttributes", "GetBootOrder", "GetBootOverride"],
    "Chassis": ["GetFanInventory", "GetPsuInventory", "GetChassisPower",
                "GetChassisThermals", "GetChassisInventory"],
    "Accounts": ["ListUsers"],
    "Update": ["GetFirmwareInventory", "GetFirmwareUpdateCapabilities"],
    "Manager": ["GetManagerNicInventory", "GetLogs"],
}

CATEGORY_COMMANDS_DEFAULT = {
    "Systems": "GetSystemInventory",
    "Chassis": "GetFanInventory",
    "Accounts": "ListUsers",
    "Update": "GetFirmwareInventory",
    "Manager": "GetManagerNicInventory"
}


def main():
    result = {}
    resource = {}
    category_list = []
    module = AnsibleModule(
        argument_spec=dict(
            category=dict(type='list', default=['Systems']),
            command=dict(type='list'),
            baseuri=dict(required=True),
            username=dict(required=True),
            password=dict(required=True, no_log=True),
            timeout=dict(type='int', default=10)
        ),
        supports_check_mode=False
    )

    # admin credentials used for authentication
    creds = {'user': module.params['username'],
             'pswd': module.params['password']}

    # timeout
    timeout = module.params['timeout']

    # Build root URI
    root_uri = "https://" + module.params['baseuri']
    rf_uri = "/redfish/v1/"
    rf_utils = RedfishUtils(creds, root_uri, timeout)

    # Build Category list
    if "all" in module.params['category']:
        for entry in CATEGORY_COMMANDS_ALL:
            category_list.append(entry)
    else:
        # one or more categories specified
        category_list = module.params['category']

    for category in category_list:
        command_list = []
        # Build Command list for each Category
        if category in CATEGORY_COMMANDS_ALL:
            if not module.params['command']:
                # True if we don't specify a command --> use default
                command_list.append(CATEGORY_COMMANDS_DEFAULT[category])
            elif "all" in module.params['command']:
                for entry in range(len(CATEGORY_COMMANDS_ALL[category])):
                    command_list.append(CATEGORY_COMMANDS_ALL[category][entry])
            # one or more commands
            else:
                command_list = module.params['command']
                # Verify that all commands are valid
                for cmd in command_list:
                    # Fail if even one command given is invalid
                    if cmd not in CATEGORY_COMMANDS_ALL[category]:
                        module.fail_json(msg="Invalid Command: %s" % cmd)
        else:
            # Fail if even one category given is invalid
            module.fail_json(msg="Invalid Category: %s" % category)

        # Organize by Categories / Commands
        if category == "Systems":
            # execute only if we find a Systems resource
            resource = rf_utils._find_systems_resource(rf_uri)
            if resource['ret'] is False:
                module.fail_json(msg=resource['msg'])

            for command in command_list:
                if command == "GetSystemInventory":
                    result["system"] = rf_utils.get_multi_system_inventory()
                elif command == "GetCpuInventory":
                    result["cpu"] = rf_utils.get_multi_cpu_inventory()
                elif command == "GetMemoryInventory":
                    result["memory"] = rf_utils.get_multi_memory_inventory()
                elif command == "GetNicInventory":
                    result["nic"] = rf_utils.get_multi_nic_inventory(category)
                elif command == "GetStorageControllerInventory":
                    result["storage_controller"] = rf_utils.get_multi_storage_controller_inventory()
                elif command == "GetDiskInventory":
                    result["disk"] = rf_utils.get_multi_disk_inventory()
                elif command == "GetBiosAttributes":
                    result["bios_attribute"] = rf_utils.get_multi_bios_attributes()
                elif command == "GetBootOrder":
                    result["boot_order"] = rf_utils.get_multi_boot_order()
                elif command == "GetBootOverride":
                    result["boot_override"] = rf_utils.get_multi_boot_override()

        elif category == "Chassis":
            # execute only if we find Chassis resource
            resource = rf_utils._find_chassis_resource(rf_uri)
            if resource['ret'] is False:
                module.fail_json(msg=resource['msg'])

            for command in command_list:
                if command == "GetFanInventory":
                    result["fan"] = rf_utils.get_fan_inventory()
                elif command == "GetPsuInventory":
                    result["psu"] = rf_utils.get_psu_inventory()
                elif command == "GetChassisThermals":
                    result["thermals"] = rf_utils.get_chassis_thermals()
                elif command == "GetChassisPower":
                    result["chassis_power"] = rf_utils.get_chassis_power()
                elif command == "GetChassisInventory":
                    result["chassis"] = rf_utils.get_chassis_inventory()

        elif category == "Accounts":
            # execute only if we find an Account service resource
            resource = rf_utils._find_accountservice_resource(rf_uri)
            if resource['ret'] is False:
                module.fail_json(msg=resource['msg'])

            for command in command_list:
                if command == "ListUsers":
                    result["user"] = rf_utils.list_users()

        elif category == "Update":
            # execute only if we find UpdateService resources
            resource = rf_utils._find_updateservice_resource(rf_uri)
            if resource['ret'] is False:
                module.fail_json(msg=resource['msg'])

            for command in command_list:
                if command == "GetFirmwareInventory":
                    result["firmware"] = rf_utils.get_firmware_inventory()
                elif command == "GetFirmwareUpdateCapabilities":
                    result["firmware_update_capabilities"] = rf_utils.get_firmware_update_capabilities()

        elif category == "Manager":
            # execute only if we find a Manager service resource
            resource = rf_utils._find_managers_resource(rf_uri)
            if resource['ret'] is False:
                module.fail_json(msg=resource['msg'])

            for command in command_list:
                if command == "GetManagerNicInventory":
                    result["manager_nics"] = rf_utils.get_multi_nic_inventory(category)
                elif command == "GetLogs":
                    result["log"] = rf_utils.get_logs()

    # Return data back
    module.exit_json(ansible_facts=dict(redfish_facts=result))


if __name__ == '__main__':
    main()
gpl-3.0
3,895,431,353,467,819,500
33.771676
118
0.591721
false
4.384475
false
false
false
ricardogsilva/PyWPS
pywps/inout/formats/lists.py
1
1471
"""List of supported formats """ from collections import namedtuple _FORMAT = namedtuple('FormatDefinition', 'mime_type,' 'extension, schema') _FORMATS = namedtuple('FORMATS', 'GEOJSON, JSON, SHP, GML, GEOTIFF, WCS,' 'WCS100, WCS110, WCS20, WFS, WFS100,' 'WFS110, WFS20, WMS, WMS130, WMS110,' 'WMS100') FORMATS = _FORMATS( _FORMAT('application/vnd.geo+json', '.geojson', None), _FORMAT('application/json', '.json', None), _FORMAT('application/x-zipped-shp', '.zip', None), _FORMAT('application/gml+xml', '.gml', None), _FORMAT('image/tiff; subtype=geotiff', '.tiff', None), _FORMAT('application/xogc-wcs', '.xml', None), _FORMAT('application/x-ogc-wcs; version=1.0.0', '.xml', None), _FORMAT('application/x-ogc-wcs; version=1.1.0', '.xml', None), _FORMAT('application/x-ogc-wcs; version=2.0', '.xml', None), _FORMAT('application/x-ogc-wfs', '.xml', None), _FORMAT('application/x-ogc-wfs; version=1.0.0', '.xml', None), _FORMAT('application/x-ogc-wfs; version=1.1.0', '.xml', None), _FORMAT('application/x-ogc-wfs; version=2.0', '.xml', None), _FORMAT('application/x-ogc-wms', '.xml', None), _FORMAT('application/x-ogc-wms; version=1.3.0', '.xml', None), _FORMAT('application/x-ogc-wms; version=1.1.0', '.xml', None), _FORMAT('application/x-ogc-wms; version=1.0.0', '.xml', None) )
mit
7,804,016,274,088,547,000
48.033333
73
0.583956
false
3.136461
false
true
false
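A brief usage sketch for the `FORMATS` constant defined in the record above (illustrative only, not part of the record; it assumes PyWPS is installed and the module is importable under the record's path `pywps.inout.formats.lists`):

# Illustrative sketch, not part of the original record.
from pywps.inout.formats.lists import FORMATS

# Each entry is a FormatDefinition namedtuple with mime_type, extension, schema.
print(FORMATS.GEOJSON.mime_type)   # 'application/vnd.geo+json'
print(FORMATS.GEOTIFF.extension)   # '.tiff'

# Build a lookup table keyed by MIME type.
by_mime = {f.mime_type: f for f in FORMATS}
fmt = by_mime.get('application/json')
print(fmt.extension if fmt else 'unknown')  # '.json'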
mark-me/Pi-Jukebox
venv/Lib/site-packages/pygame/freetype.py
1
1814
"""Enhanced Pygame module for loading and rendering computer fonts""" import sys from pygame._freetype import ( Font, STYLE_NORMAL, STYLE_OBLIQUE, STYLE_STRONG, STYLE_UNDERLINE, STYLE_WIDE, STYLE_DEFAULT, init, quit, get_init, was_init, get_cache_size, get_default_font, get_default_resolution, get_error, get_version, set_default_resolution, _PYGAME_C_API, __PYGAMEinit__, ) from pygame.sysfont import match_font, get_fonts, SysFont as _SysFont from pygame import compat def SysFont(name, size, bold=0, italic=0, constructor=None): """pygame.ftfont.SysFont(name, size, bold=False, italic=False, constructor=None) -> Font create a pygame Font from system font resources This will search the system fonts for the given font name. You can also enable bold or italic styles, and the appropriate system font will be selected if available. This will always return a valid Font object, and will fallback on the builtin pygame font if the given font is not found. Name can also be a comma separated list of names, in which case set of names will be searched in order. Pygame uses a small set of common font aliases, if the specific font you ask for is not available, a reasonable alternative may be used. if optional contructor is provided, it must be a function with signature constructor(fontpath, size, bold, italic) which returns a Font instance. If None, a pygame.freetype.Font object is created. """ if constructor is None: def constructor(fontpath, size, bold, italic): font = Font(fontpath, size) font.strong = bold font.oblique = italic return font return _SysFont(name, size, bold, italic, constructor)
agpl-3.0
-821,348,237,561,825,800
39.311111
92
0.690187
false
3.909483
false
false
false
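A minimal usage sketch for the `SysFont` helper in the record above (illustrative only, not part of the record; it assumes pygame with freetype support is installed, and which system font is actually matched depends on the machine):

# Illustrative sketch, not part of the original record.
import pygame
import pygame.freetype

pygame.init()
pygame.freetype.init()

# Ask for a bold system font; SysFont falls back to the builtin pygame font if none matches.
font = pygame.freetype.SysFont("dejavusans, arial", 24, bold=True)

# Render text to a new surface; render() returns a (Surface, Rect) pair.
surface, rect = font.render("Hello, freetype", fgcolor=(255, 255, 255))
print(rect.size)

pygame.quit()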
dnussbaum/classy-python-client-library
classyclient/ClassyClientResponse.py
1
2895
import json

from Exceptions import *


class ClassyClientResponse:
    """Returned after an API request by ClassyClient. Allows for paging of results."""

    def __init__(self, response, session):
        self.total = None
        self.per_page = None
        self.last_page = None
        self.current_page = None

        self.__response = response
        self.__session = session

        self.__refresh()

    def __refresh(self):
        """Refresh the ClassyClientResponse object after paging."""
        json = self.__response.json()

        if "total" in json:
            self.collection = True
            self.total = json["total"]
            self.per_page = json["per_page"]
            self.current_page = json["current_page"]
            self.last_page = json["last_page"]
            self.data = json["data"]
            self.__next_page_url = json["next_page_url"]
            self.__prev_page_url = json["prev_page_url"]
            self.cursor = 0
        else:
            self.collection = False
            self.data = json

    def __str__(self):
        return json.dumps(self.data)

    def __len__(self):
        """Return the length of the result"""
        if self.collection:
            return self.total
        raise ClassyNotACollection("Not a collection")

    def __iter__(self):
        return self

    def next(self):
        """Get the next item. Handles paging when needed."""
        if not self.collection:
            raise ClassyNotACollection("Not a collection")

        if self.cursor < len(self.data):
            item = self.data[self.cursor]
            self.cursor += 1
            return item

        if self.next_page():
            self.cursor = 0
            item = self.data[self.cursor]
            self.cursor += 1
            return item
        else:
            raise StopIteration

    def next_page(self):
        """
        Get the next page of results.
        Returns True on success, False is there isn't a next page.
        """
        if not self.collection:
            raise ClassyNotACollection("Not a collection")

        if self.current_page is self.last_page or not self.__next_page_url:
            return False

        self.__response.request.url = self.__next_page_url
        self.__response = self.__session.send(self.__response.request)
        self.__refresh()

        return True

    def previous_page(self):
        """
        Get the previous page of results.
        Returns True on success, False is there isn't a previous page.
        """
        if not self.collection:
            raise ClassyNotACollection("Not a collection")

        if self.current_page is 1 or not self.__previous_page_url:
            return False

        self.__response.request.url = self.__prev_page_url
        self.__response = self.__session.send(self.__response.request)
        self.__refresh()

        return True
mit
911,507,018,896,841,900
29.797872
86
0.561313
false
4.346847
false
false
false
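An illustrative sketch of how the paging wrapper in the record above might be driven with `requests` (not part of the record; the endpoint URL and the import path `classyclient.ClassyClientResponse` are assumptions, the real ClassyClient builds the response and session itself, and the class implements the Python 2 `next()` iterator protocol):

# Illustrative sketch, not part of the original record.
# The API endpoint is hypothetical; ClassyClient normally constructs these objects.
import requests
from classyclient.ClassyClientResponse import ClassyClientResponse

session = requests.Session()
response = session.get('https://api.example.com/campaigns?per_page=20')

page = ClassyClientResponse(response, session)
if page.collection:
    print('total records:', len(page))
    for record in page:   # next() transparently fetches further pages (Python 2 iteration)
        print(record.get('id'))
else:
    print(page.data)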
tu-darmstadt-ros-pkg/hector_flexbe_behavior
hector_flexbe_states/src/hector_flexbe_states/set_mapping_state.py
1
1316
#!/usr/bin/env python

import rospy

from flexbe_core import EventState, Logger
from flexbe_core.proxy import ProxyPublisher
from flexbe_core.proxy import ProxyServiceCaller

from ethzasl_icp_mapper.srv import SetMode, GetMode, SetModeRequest

from smach import CBState


class SetMappingState(EventState):
    '''
    Activate or deactivate mapping.

    -- active       bool    Mapping changed to active or inactive.

    <= succeeded            Mapping changed.

    '''

    def __init__(self, active):
        # Declare outcomes, input_keys, and output_keys by calling the super constructor with the corresponding arguments.
        super(SetMappingState, self).__init__(outcomes = ['succeeded'])

        self._mappingTopicSet = '/mapper/set_mode'

        self._srvSet = ProxyServiceCaller({self._mappingTopicSet: SetMode})
        #self.set_mapper_mode = rospy.ServiceProxy('/mapper/set_mode', SetMode)

        self._switch = active

    def execute(self, userdata):
        return 'succeeded'

    def on_enter(self, userdata):
        #map_state = userdata.switch
        #resp = self.set_mapper_mode(True, map_state, True)
        request = SetModeRequest(True,self._switch, True)
        #request.map = self._switch
        resp = self._srvSet.call(self._mappingTopicSet, request)

    def on_exit(self, userdata):
        pass

    def on_start(self):
        pass

    def on_stop(self):
        pass
bsd-3-clause
3,987,962,716,252,878,300
20.225806
116
0.712766
false
3.281796
false
false
false
dlukes/pyvert
pyvert/_pyvert.py
1
12731
from itertools import chain from lazy import lazy from tempfile import NamedTemporaryFile as NamedTempFile import regex as re import random from lxml import etree from html import unescape __all__ = ["Structure", "iterstruct", "config"] __version__ = "0.0.0" # disable security preventing DoS attacks with huge files etree.set_default_parser(etree.ETCompatXMLParser(huge_tree=True)) STRUCTS = None class Structure(): """A structure extracted from a vertical. """ def __init__(self, raw_vert, structs): self.raw = raw_vert.strip() + "\n" self.structs = structs first_line = self.raw.split("\n", maxsplit=1)[0] self.name = re.search(r"\w+", first_line).group() self.attr = dict(re.findall(r'(\w+)="(.*?)"', first_line)) @lazy def xml(self): """The structure represented as an ElementTree. """ xml = self._xmlize() try: xml = etree.fromstring(xml) xml.tail = "\n" return xml except etree.XMLSyntaxError as e: with NamedTempFile(mode="w", suffix=".xml", delete=False) as fh: fh.write(xml) e = str(e) e += "\nAn XMLSyntaxError occurred while processing a document. " \ "It has been dumped to {} for inspection.".format(fh.name) raise Exception(e) def chunk(self, child, name, minmax, fallback_orig_id=None): """Split the structure into chunks of a given size. :param name: The name to give to the XML element representing the chunks. :type name: str :param child: The child element of which the chunks will be composed and whose boundaries will be respected. :type child: str :param minmax: The length range the individual chunks should roughly fall into. :type minmax: (int, int) :param fallback_orig_id: If structure has no @id attribute, this will be used instead to generate the @ids of the chunks. :rtype: etree.Element """ def chunk_pos(idx, total): if total <= 2: return "beginning" if idx == 0 else "end" else: # the threshold is 1 non-inclusive for a total of 3, 1 for 4, 2 # for 5 etc. if idx < round(total / 3): return "beginning" # the threshold is 2 non-inclusive for a total of 3, 2 for 4, 4 # for 5 etc. 
elif idx < round(2 * total / 3): return "middle" else: return "end" root = etree.Element(self.xml.tag, attrib=self.xml.attrib) root.text = root.tail = "\n" def loop_vars(name, attrib, minmax): chunk = etree.Element(name, attrib=attrib) chunk.text = chunk.tail = "\n" chunk_length = random.randint(*minmax) return chunk, chunk_length, 0 chunk, chunk_length, positions = loop_vars(name, root.attrib, minmax) for child in self.xml.iter(child): # we'll be modifying this, so no choice but to make a new one, or # else self.xml would end up modified new_child = etree.SubElement(chunk, child.tag, attrib=child.attrib) # the child might not directly contain text -- it might be a # paragraph, not a sentence; or, as in SYN2015, sentences might # contain lower-level structures like <hi> -- so first, get ALL the # text dominated by the child node text = child.xpath("string()") text = text.strip("\n") # collapse newlines text = re.sub(r"\n{2,}", "\n", text) # only NOW compute the number of positions positions += len(text.splitlines()) # set the child's text (remember, if it was a paragraph to start # with, it only had newlines in its text) new_child.text = "\n" + text + "\n" new_child.tail = child.tail if positions >= chunk_length: root.append(chunk) chunk, chunk_length, positions = loop_vars(name, root.attrib, minmax) # after the for-loop ends, check if there's a final non-empty chunk # that didn't accumulate the required number of positions else: if positions > 0: root.append(chunk) # annotate chunks with metadata chunk_count = len(root) for i, chunk in enumerate(root): orig_id = chunk.get("id", fallback_orig_id) if orig_id is None: raise RuntimeWarning( "Original structure has no @id attribute, the @id attributes " "of groups under it might therefore not be unique. Specify a " "``fallback_root_id`` to bypass the issue.") chunk.set(root.tag + "_id", orig_id) chunk.set("id", "{}_{}".format(orig_id, i)) chunk.set("position_in_text", chunk_pos(i, chunk_count)) return root def group(self, target, attr, as_struct, fallback_root_id=False): """Group target structures under the root according to attribute values. :param target: The structures to group. :param attr: Iterable of attributes by whose values to group them by. :param as_struct: The tag name to use for the groups. :param fallback_root_id: If structure has no @id attribute, this will be used instead to generate the @ids of the groups. If None, the @ids will consist solely of the grouping attributes and it is the user's responsibility to ensure they're unique. :rtype: etree.Element """ root = etree.Element(self.xml.tag, attrib=self.xml.attrib) root.text = root.tail = "\n" root_id = root.get("id", fallback_root_id) if root_id is False: raise RuntimeWarning( "Parent structure has no @id attribute, the @id attributes of " "groups under it might therefore not be unique. 
Specify a " "``fallback_root_id`` to bypass the issue, or set it to None " "if uniqueness is ensured otherwise.") def new_group(attrib, id): g = etree.SubElement(root, as_struct, attrib=attrib) g.set("id", id) g.text = g.tail = "\n" return g groups = {} for target in self.xml.iter(target): t_val = tuple(target.get(a, None) for a in attr) id = ",".join(map(str, t_val)) if fallback_root_id is not None: id = root_id + "/" + id if t_val not in groups: # new Python 3.5 syntax; in case of dupes, the last occurrence # of a key takes precedence: # attrib = {**root.attrib, **target.attrib} attrib = dict(root.attrib.items()) attrib.update(target.attrib) group = groups.setdefault(t_val, new_group(attrib, id)) else: group = groups.get(t_val) group.append(target) return root def project(self, child): """Project the root structure's metadata onto its children. The structure is *modified in place*. Projected attributes are prefixed with the parent structure's name, and if necessary, postfixed with underscores so as to avoid collisions with any existing attributes in the child structure. :param child: The child structure onto which to project. """ attrib = self.xml.attrib for child in self.xml.iter(child): for key in attrib: ckey = self.name + "_" + key while ckey in child.attrib: ckey += "_" if key not in child.attrib: child.attrib[ckey] = attrib[key] def _xmlize(self): """Transform vertical into marginally valid XML. """ # get rid of all XML entities and HTML entity references vert = unescape(self.raw) # escape only the bare minimum necessary for successful parsing as XML vert = re.sub(r"&", r"&amp;", vert) vert = re.sub(r"<", "&lt;", vert) vert = re.sub(r">", "&gt;", vert) # now put pointy brackets back where they belong (= only on lines which # we are reasonably sure are structure start / end tags) match = r"^&lt;(/?({})[^\t]*?)&gt;$".format("|".join(self.structs)) vert = re.sub(match, r"<\1>", vert, flags=re.M) return vert class ValidTags: """Keep track of valid tag names in a vertical file. """ def __init__(self, start=None, end=None, void=None, structs=None): self.start = start if start else re.compile(r"<(\w+).*?(?<!/)>") self.end = end if end else re.compile(r"</(\w+)>") self.void = void if void else re.compile(r"<(\w+).*?/>") self.stags = set() self.etags = set() self.vtags = set() def add(self, line): s = self.start.fullmatch(line) e = self.end.fullmatch(line) v = self.void.fullmatch(line) if s: self.stags.add(s.group(1)) elif e: self.etags.add(e.group(1)) elif v: self.vtags.add(v.group(1)) return bool(s or e or v) def resolve(self): return self.stags.intersection(self.etags).union(self.vtags) class DummyValidTags: """Anamorphous to ValidTags but returns structs passed to constructor on self.resolve(). """ def __init__(self, structs): self.structs = structs def add(self, line): pass def resolve(self): return self.structs def iterstruct(vert_file, struct=None, structs=None): """Yield input vertical one struct at a time. :param vert_file: Input vertical. :param struct: The name of the struct into which the vertical will be chopped. If None, the whole vertical is returned, wrapped in a ``<root/>`` element. :param structs: A set of tag names to be considered as valid nested structures under ``struct``. When in doubt, leave ``None`` (automatic discovery), otherwise those you missed might be XML-escaped. 
:rtype: Structure """ # override structs with global STRUCTS if they aren't set (STRUCTS in turn # might not be set, in which case this is a no-op) if structs is None: structs = STRUCTS # if the whole input vertical is to be wrapped and structs were provided, # we can take a shortcut if struct is None and structs: structs.add("root") yield Structure("<root>\n" + vert_file.read().strip() + "\n</root>", structs) raise StopIteration # else, we'll just surround the vertical with <root/> tags and go the # regular way (line by line) elif struct is None: struct = "root" vert_file = chain(["<root>"], vert_file, ["</root>"]) # NOTE: string concatenation inside a for-loop is supposedly slow in # python, but building and then joining lists was comparably slow (mostly # even slower); unless there's a point where it starts to make a big # difference (in terms of the number of concatenations / length of the list # required), there's no real incentive to change this code buffer = "" structs = DummyValidTags(structs) if structs else ValidTags() start = re.compile(r"<{}.*?>".format(struct)) end = re.compile(r"</{}>".format(struct)) for line in vert_file: line = line.strip() # if the buffer already contains something or if the current line # starts with the given structure name, then we're inside a target # structure that we want to collect; otherwise, just skip to the next # line if buffer or start.fullmatch(line): structs.add(line) buffer += line + "\n" if end.fullmatch(line): yield Structure(buffer, structs.resolve()) # NOTE: it might be a good idea to reset structs to a new # ValidTags object at this point, if we truly want to allow for # the possibility that different structures in the same # vertical might allow different nested substructures; in # practice though, indexing tools like manatee have just one # set of valid tag names per vertical buffer = "" def config(**kwargs): for k, v in kwargs.items(): globals()[k.upper()] = v
gpl-3.0
-5,762,219,757,734,732,000
38.660436
85
0.578352
false
4.109425
false
false
false
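A short usage sketch for the `iterstruct` generator and `Structure.chunk` helper in the record above (illustrative only, not part of the record; it assumes the package re-exports these names as `pyvert`, that a file named corpus.vert exists, and that the vertical uses `doc`/`s` structures):

# Illustrative sketch, not part of the original record.
import pyvert

with open('corpus.vert', encoding='utf-8') as vert_file:
    for doc in pyvert.iterstruct(vert_file, struct='doc'):
        # Split each document's sentences into chunks of roughly 2000-3000 positions.
        chunked = doc.chunk(child='s', name='chunk', minmax=(2000, 3000),
                            fallback_orig_id=doc.attr.get('id', 'doc0'))
        print(doc.name, doc.attr.get('id'), len(chunked))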
ttm/music
sandbox/thirdchant.py
1
1450
import sys
keys=tuple(sys.modules.keys())
for key in keys:
    if "music" in key:
        del sys.modules[key]

import music as M
from percolation.rdf import c

H = M.utils.H

durs = 1, 1, 1, 1 ,1,1, 1, 1
notes = 0, 2, 4, 5, 7,9, 11, 12
text = 'sin-ging my ass out for you yes'

sonic_vector=M.singing.sing(text = text, notes=notes,reference=60,
        durs=durs,Q=120,lang='en',M="4/4",transpose=0)
sonic_vector_=M.singing.sing(text = text, notes=notes[::-1],reference=60,
        durs=durs,Q=120,lang='en',M="4/4",transpose=0)
M.utils.write(H(sonic_vector,sonic_vector_)
        ,filename="some.wav",samplerate=44100)

durs = 1, 1, 1, 1 ,1,1, 1, 1
notes = 0, 2, 3, 5, 7,9, 11, 12
notes_ = 12, 10, 8, 7, 5, 3, 2, 0
text = 'sin-ging my ass out for you yes'

sonic_vector=M.singing.sing(text = text, notes=notes,reference=60,
        durs=durs,Q=120,lang='en',M="4/4",transpose=0)
sonic_vector_=M.singing.sing(text = text, notes=notes_,reference=60,
        durs=durs,Q=120,lang='en',M="4/4",transpose=0)
M.utils.write(H(sonic_vector,sonic_vector_)
        ,filename="some_.wav",samplerate=44100)

sonic_vector=M.singing.sing(text = text, notes=notes,reference=60,
        durs=durs,Q=120,lang='en',M="4/4",transpose=0)
sonic_vector_=M.singing.sing(text = text, notes=notes_,reference=60,
        durs=durs,Q=120,lang='en',M="4/4",transpose=0)
M.utils.write(H(sonic_vector,sonic_vector_)
        ,filename="some__.wav",samplerate=44100)
gpl-3.0
-7,002,696,181,112,037,000
38.189189
73
0.644828
false
2.420701
false
false
false
sbc/django-uploadify-s3-example
src/apps/examples/urls.py
1
3353
# Copyright (c) 2010, Sam Charrington (@samcharrington), http://geekfactor.charrington.com
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above copyright
#       notice, this list of conditions and the following disclaimer in the
#       documentation and/or other materials provided with the distribution.
#     * Neither the name of the author nor the names of other contributors
#       may be used to endorse or promote products derived from this software
#       without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

from django.conf.urls.defaults import *
from models import Example, ExampleForm

urlpatterns = patterns('',
    (r'^$', 'django.views.generic.simple.redirect_to', {
        'url': '/examples'
    }),

    #
    # The upload_example view renders the Uploadify file upload form.
    #
    url(r'^examples/(?P<object_id>[\d]*)/upload$',
        'examples.views.upload_example',
        name='examples_example_upload'),

    #
    # The upload_example_done view is a callback that receives POST data
    # from uploadify when the download is complete.
    # See also /media/js/uploadify_event_handlers.js.
    #
    url(r'^examples/(?P<object_id>[\d]*)/upload/done$',
        'examples.views.upload_example_done',
        name='examples_example_upload_done'),

    # The rest are boring generic views
    url(r'^examples/$',
        'django.views.generic.list_detail.object_list', {
            'queryset': Example.objects.all()
        },
        name='examples_example_list'),

    url(r'^examples/new$',
        'django.views.generic.create_update.create_object', {
            'model': Example
        },
        name='examples_example_new'),

    url(r'^examples/(?P<object_id>[\d]*)$',
        'django.views.generic.list_detail.object_detail', {
            'queryset': Example.objects.all()
        },
        name='examples_example_detail'),

    url(r'^examples/(?P<object_id>[\d]*)/edit$',
        'django.views.generic.create_update.update_object', {
            'form_class': ExampleForm
        },
        name='examples_example_edit'),

    url(r'examples/(?P<object_id>[\d]*)/delete$',
        'django.views.generic.create_update.delete_object', {
            'model': Example,
            'post_delete_redirect': '/examples'
        },
        name='examples_example_delete'),
)
bsd-3-clause
-9,205,390,484,445,760,000
43.131579
90
0.682374
false
4.124231
false
false
false
bcharlas/mytrunk
py/timing.py
3
6985
# encoding: utf-8 # 2008 © Václav Šmilauer <[email protected]> """Functions for accessing timing information stored in engines and functors. See :ref:`timing` section of the programmer's manual, `wiki page <http://yade-dem.org/index.php/Speed_profiling_using_TimingInfo_and_TimingDeltas_classes>`_ for some examples. """ from yade.wrapper import * def _resetEngine(e): if e.timingDeltas: e.timingDeltas.reset() if isinstance(e,Functor): return if isinstance(e,Dispatcher): for f in e.functors: _resetEngine(f) elif isinstance(e,ParallelEngine): for s in e.slaves: _resetEngine(s) e.execTime,e.execCount=0,0 def reset(): "Zero all timing data." for e in O.engines: _resetEngine(e) _statCols={'label':40,'count':20,'time':20,'relTime':20} _maxLev=3 def _formatLine(label,time,count,totalTime,level): sp,negSp=' '*level*2,' '*(_maxLev-level)*2 raw=[] raw.append(label) raw.append(str(count) if count>=0 else '') raw.append((str(time/1000)+u'us') if time>=0 else '') raw.append(('%6.2f%%'%(time*100./totalTime)) if totalTime>0 else '') return u' '.join([ (sp+raw[0]).ljust(_statCols['label']), (raw[1]+negSp).rjust(_statCols['count']), (raw[2]+negSp).rjust(_statCols['time']), (raw[3]+negSp).rjust(_statCols['relTime']), ]) def _delta_stats(deltas,totalTime,level): ret=0 deltaTime=sum([d[1] for d in deltas.data]) for d in deltas.data: print _formatLine(d[0],d[1],d[2],totalTime,level); ret+=1 if len(deltas.data)>1: print _formatLine('TOTAL',deltaTime,sum(d[2] for d in deltas.data),totalTime,level); ret+=1 return ret def _engines_stats(engines,totalTime,level): lines=0; hereLines=0 for e in engines: if not isinstance(e,Functor): print _formatLine(u'"'+e.label+'"' if e.label else e.__class__.__name__,e.execTime,e.execCount,totalTime,level); lines+=1; hereLines+=1 if e.timingDeltas: if isinstance(e,Functor): print _formatLine(e.__class__.__name__,sum(d[1] for d in e.timingDeltas.data),sum(d[2] for d in e.timingDeltas.data),totalTime,level); lines+=1; hereLines+=1 execTime=sum([d[1] for d in e.timingDeltas.data]) else: execTime=e.execTime lines+=_delta_stats(e.timingDeltas,execTime,level+1) if isinstance(e,Dispatcher): lines+=_engines_stats(e.functors,e.execTime,level+1) if isinstance(e,InteractionLoop): lines+=_engines_stats(e.geomDispatcher.functors,e.execTime,level+1) lines+=_engines_stats(e.physDispatcher.functors,e.execTime,level+1) lines+=_engines_stats(e.lawDispatcher.functors,e.execTime,level+1) elif isinstance(e,ParallelEngine): lines+=_engines_stats(e.slave,e.execTime,level+1) if hereLines>1 and not isinstance(e,Functor): print _formatLine('TOTAL',totalTime,-1,totalTime,level); lines+=1 return lines def stats(): """Print summary table of timing information from engines and functors. Absolute times as well as percentages are given. Sample output: .. code-block:: none Name Count Time Rel. time ------------------------------------------------------------------------------------------------------- ForceResetter 102 2150us 0.02% "collider" 5 64200us 0.60% InteractionLoop 102 10571887us 98.49% "combEngine" 102 8362us 0.08% "newton" 102 73166us 0.68% "cpmStateUpdater" 1 9605us 0.09% PyRunner 1 136us 0.00% "plotDataCollector" 1 291us 0.00% TOTAL 10733564us 100.00% sample output (compiled with -DENABLE_PROFILING=1 option): .. code-block:: none Name Count Time Rel. 
time ------------------------------------------------------------------------------------------------------- ForceResetter 102 2150us 0.02% "collider" 5 64200us 0.60% InteractionLoop 102 10571887us 98.49% Ig2_Sphere_Sphere_ScGeom 1222186 1723168us 16.30% Ig2_Sphere_Sphere_ScGeom 1222186 1723168us 100.00% Ig2_Facet_Sphere_ScGeom 753 1157us 0.01% Ig2_Facet_Sphere_ScGeom 753 1157us 100.00% Ip2_CpmMat_CpmMat_CpmPhys 11712 26015us 0.25% end of Ip2_CpmPhys 11712 26015us 100.00% Ip2_FrictMat_CpmMat_FrictPhys 0 0us 0.00% Law2_ScGeom_CpmPhys_Cpm 3583872 4819289us 45.59% GO A 1194624 1423738us 29.54% GO B 1194624 1801250us 37.38% rest 1194624 1594300us 33.08% TOTAL 3583872 4819289us 100.00% Law2_ScGeom_FrictPhys_CundallStrack 0 0us 0.00% "combEngine" 102 8362us 0.08% "newton" 102 73166us 0.68% "cpmStateUpdater" 1 9605us 0.09% PyRunner 1 136us 0.00% "plotDataCollector" 1 291us 0.00% TOTAL 10733564us 100.00% """ print 'Name'.ljust(_statCols['label'])+' '+'Count'.rjust(_statCols['count'])+' '+'Time'.rjust(_statCols['time'])+' '+'Rel. time'.rjust(_statCols['relTime']) print '-'*(sum([_statCols[k] for k in _statCols])+len(_statCols)-1) _engines_stats(O.engines,sum([e.execTime for e in O.engines]),0) print
gpl-2.0
-721,391,456,232,729,100
55.764228
175
0.456603
false
3.759828
false
false
false
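A sketch of how the profiling helpers in the record above are typically driven (illustrative only, not part of the record; it is only meaningful inside a running Yade session, where `O` is the global Omega simulation object):

# Illustrative sketch, not part of the original record; run inside a Yade session.
from yade import timing

O.timingEnabled = True   # collect per-engine timing data
timing.reset()           # zero any previously accumulated counters
O.run(1000, True)        # run some steps and wait for completion
timing.stats()           # print the per-engine timing table shown in the docstring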
razzius/PyClassLessons
instructors/course-2015/functions_gens_and_ducks/examples/in_class/color_choice_refactor.py
3
1418
"""Choose 3 colors at random from the six color options - "red","blue","green","purple","yellow","orange" """ import random myColors=(["red","blue","green","purple","yellow","orange"]) a=random.choice(myColors) b=random.choice(myColors) c=random.choice(myColors) d=random.choice(myColors) line=[a,b,c,d] D=dict() for color in line: if color not in D: D[color]=1 else: D[color] +=1 guess=[0]*4 C=dict() def guess_colors(): w=input("Enter a color") x=input("Enter a second color") y=input("Enter a third color") z=input("Enter a fourth color") guess[0]=w guess[1]=x guess[2]=y guess[3]=z for color in guess: if color not in C: C[color]=1 else: C[color] +=1 return(w,x,y,z) def cf_colors(A): results=0 for i,j in zip(guess,line): q=D.get(i) r=C.get(i) if i==j: results+=1 print("Black", None) elif i not in line: print("Wrong", None) else: if r<=q: print("White", None) else: print("Wrong", None) return (results) B=0 n=10 for i in range (n): print("You have "+str(n-i) +" guesses left") if B<4: A=guess_colors() B=cf_colors(A) C.clear() elif B==4: print("You win - you don't need them") break
mit
1,235,833,155,699,136,000
18.985915
60
0.51622
false
3.179372
false
false
false
rilutham/HAC-DM
src/MainWindow.py
1
26850
#!/usr/bin/python # -*- coding: utf-8 -*- """MainWindow.py @author: rilutham """ from PyQt4 import QtGui from RawData import RawData from Preprocessing import Bining, DeriveAttribute from Segmentation import Segmentation from About import About import matplotlib.pyplot as plt from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas from matplotlib.backends.backend_qt4agg import NavigationToolbar2QTAgg as NavigationToolbar class MainWindow(QtGui.QMainWindow): ''' classdocs ''' def __init__(self): ''' Constructor ''' super(MainWindow, self).__init__() self.init_ui() def init_ui(self): # Main window setting self.setGeometry(0, 0, 980, 768) self.setWindowTitle("Sistem Segmentasi Pelanggan") self.center_on_screen() # Menu action import_action = QtGui.QAction(QtGui.QIcon('icons/new.png'), 'Impor data', self) import_action.setShortcut('Ctrl+i') import_action.setStatusTip('Impor data pelanggan') import_action.triggered.connect(self.show_import) exit_action = QtGui.QAction('Keluar', self) exit_action.setShortcut('Ctrl+q') exit_action.setStatusTip('Keluar dari aplikasi') exit_action.triggered.connect(self.close) fill_zero_act = QtGui.QAction('Isi dengan nilai 0', self) fill_zero_act.triggered.connect(self.fill_with_zero) delete_row_act = QtGui.QAction('Hapus baris', self) delete_row_act.triggered.connect(self.delete_missing_row) miss_value_action = QtGui.QMenu('Tangani nilai kosong', self) miss_value_action.addAction(fill_zero_act) miss_value_action.addAction(delete_row_act) bining_act = QtGui.QAction('Bining atribut', self) bining_act.triggered.connect(self.show_bining) derive_act = QtGui.QAction('Penurunan atribut', self) derive_act.triggered.connect(self.derive_attribute) self.seg_action = QtGui.QAction(QtGui.QIcon('icons/run.png'),'Proses', self) self.seg_action.setShortcut('F5') self.seg_action.setStatusTip('Jalankan proses segmentasi') self.seg_action.setEnabled(False) self.seg_action.triggered.connect(self.segmen_customer) self.save_result_action = QtGui.QAction(QtGui.QIcon('icons/save.png'),\ 'Simpan data hasil', self) self.save_result_action.setShortcut('Ctrl+Shift+S') self.save_result_action.setStatusTip('Simpan data hasil segmentasi') self.save_result_action.setEnabled(False) self.save_result_action.triggered.connect(self.save_result_data) about_action = QtGui.QAction('Tentang Aplikasi', self) about_action.triggered.connect(self.show_about) # Menubar setting menubar = self.menuBar() data_menu = menubar.addMenu('&Data') data_menu.addAction(import_action) data_menu.addAction(exit_action) preprocess_menu = menubar.addMenu('&Preprocessing') preprocess_menu.addMenu(miss_value_action) preprocess_menu.addAction(bining_act) preprocess_menu.addAction(derive_act) segmen_menu = menubar.addMenu('&Segmentasi') segmen_menu.addAction(self.seg_action) result_menu = menubar.addMenu('&Hasil Segmentasi') result_menu.addAction(self.save_result_action) help_menu = menubar.addMenu('&Bantuan') help_menu.addAction(about_action) # Toolbar setting toolbar = self.addToolBar('Exit') toolbar.addAction(import_action) toolbar.addAction(self.seg_action) toolbar.addAction(self.save_result_action) # Show statusbar self.statusBar() # Setting TabWidget self.tabs = QtGui.QTabWidget() self.tab1 = QtGui.QWidget() self.tab2 = QtGui.QWidget() self.tab3 = QtGui.QWidget() self.tab4 = QtGui.QWidget() ### Setting Tab 1 ### self.tabs.addTab(self.tab1, "Pengolahan Data") ## Left side # Frame "data detail" self.stat_frame = QtGui.QFrame() self.stat_frame.setMaximumHeight(200) self.txt_data_detail = 
QtGui.QLabel("Rincian Data:") self.txt_data_detail.setStyleSheet("font: bold; ") self.txt_stats = QtGui.QLabel('', self) self.txt_note = QtGui.QLabel("Keterangan Tabel: ", self) self.txt_note.setStyleSheet("font: bold") txt_label_1 = QtGui.QLabel(" Label/ Meta atribut", self) txt_label_1.setStyleSheet("background-color:#19B5FE; color:black") # VLayout for frame "data detail" self.stat_layout = QtGui.QVBoxLayout() self.stat_frame.setLayout(self.stat_layout) self.stat_layout.insertWidget(0, self.txt_data_detail) self.stat_layout.insertWidget(1, self.txt_stats) self.stat_layout.insertWidget(2, self.txt_note) self.stat_layout.insertWidget(3, txt_label_1) # Empty frame self.empty_frame_1 = QtGui.QFrame() # Left side frame in Tab 1 self.left_frame = QtGui.QFrame() self.left_frame.setMaximumWidth(275) self.left_frame.setMinimumWidth(275) self.left_frame.setStyleSheet("background-color:#dadfe1; color:black") self.left_side_layout = QtGui.QVBoxLayout() self.left_frame.setLayout(self.left_side_layout) self.left_side_layout.addWidget(self.stat_frame) self.left_side_layout.addWidget(self.empty_frame_1) ## Tab 1 layout self.txt_table_exist = QtGui.QLabel("""Tidak ada data yang ditampilkan.\n Pilih menu Data > Impor data (Ctrl+i) untuk mengimpor data""", self) self.txt_table_exist.setStyleSheet("color: gray; font: italic;") self.raw_data_table = QtGui.QTableWidget(self) self.v_box_layout_1 = QtGui.QHBoxLayout() self.v_box_layout_1.addWidget(self.txt_table_exist) self.v_box_layout_1.addWidget(self.left_frame) self.v_box_layout_1.addWidget(self.raw_data_table) self.left_frame.hide() self.raw_data_table.hide() ### Setting Tab 2 ### self.tabs.addTab(self.tab2, "Visualisasi Model") self.txt_visual_exist = QtGui.QLabel("""Tidak ada visualisasi model yang ditampilkan.\n Impor data pelanggan, kemudian lakukan proses segmentasi dengan menekan F5 atau pilih menu Segmentasi > Proses""", self) self.txt_visual_exist.setStyleSheet("color: gray; font: italic;") self.txt_set_distance = QtGui.QLabel('Masukkan jarak potong dendrogram: ', self) self.treshold_edit = QtGui.QLineEdit() self.btn_treshold = QtGui.QPushButton("Submit", self) self.btn_treshold.clicked.connect(self.set_treshold) self.figure = plt.figure(facecolor='#dadfe1') self.canvas_for_dendrogram = FigureCanvas(self.figure) self.toolbar = NavigationToolbar(self.canvas_for_dendrogram, self) self.treshold_frame = QtGui.QFrame() self.treshold_layout = QtGui.QHBoxLayout() self.treshold_frame.setLayout(self.treshold_layout) self.treshold_layout.addWidget(self.txt_set_distance) self.treshold_layout.addWidget(self.treshold_edit) self.treshold_layout.addWidget(self.btn_treshold) #self.treshold_layout.addWidget(self.txt_n_cluster) self.v_box_layout_2 = QtGui.QVBoxLayout() self.v_box_layout_2.addWidget(self.txt_visual_exist) self.v_box_layout_2.addWidget(self.treshold_frame) self.v_box_layout_2.addWidget(self.canvas_for_dendrogram) self.v_box_layout_2.addWidget(self.toolbar) self.treshold_frame.hide() self.canvas_for_dendrogram.hide() self.toolbar.hide() ### Setting Tab 3 ### self.tabs.addTab(self.tab3, "Informasi Hasil Segmentasi") self.txt_information_exist = QtGui.QLabel("""Tidak ada informasi yang ditampilkan.\n Impor data pelanggan, kemudian lakukan proses segmentasi dengan menekan F5 atau pilih menu Segmentasi > Proses.""", self) self.txt_information_exist.setStyleSheet("color: gray; font: italic;") self.txt_summary = QtGui.QLabel("Hasil Segmentasi Pelanggan: ", self) self.txt_summary.setStyleSheet("font: bold;") self.txt_n_cluster = QtGui.QLabel('',self) 
self.cluster_list = QtGui.QListWidget() self.cluster_list.setMaximumHeight(225) txt_note_3 = QtGui.QLabel("Keterangan Tabel: ", self) txt_note_3.setStyleSheet("font: bold") txt_label_3 = QtGui.QLabel(" ID_Segmen", self) txt_label_3.setStyleSheet("background-color:#3FC380; color:black") self.summary_frame = QtGui.QFrame() self.summary_frame.setMaximumHeight(300) self.summary_layout = QtGui.QVBoxLayout() self.summary_frame.setLayout(self.summary_layout) self.summary_layout.insertWidget(0, self.txt_summary) self.summary_layout.insertWidget(1, self.txt_n_cluster) self.summary_layout.insertWidget(2, self.cluster_list) self.summary_layout.insertWidget(3, txt_note_3) self.summary_layout.insertWidget(4, txt_label_3) # Empty frame self.empty_frame_3 = QtGui.QFrame() # Left side frame in Tab 3 self.left_frame_3 = QtGui.QFrame() self.left_frame_3.setMaximumWidth(275) self.left_frame_3.setMinimumWidth(275) self.left_frame_3.setStyleSheet("background-color:#dadfe1; color:black") self.left_side_3_layout = QtGui.QVBoxLayout() self.left_frame_3.setLayout(self.left_side_3_layout) self.left_side_3_layout.addWidget(self.summary_frame) self.left_side_3_layout.addWidget(self.empty_frame_3) # Right side self.txt_summary_stat = QtGui.QLabel("Statistik: ", self) self.txt_summary_stat.setStyleSheet("font: bold;") self.knowledge_table = QtGui.QTableWidget(self) self.txt_summary_exp = QtGui.QLabel("Keterangan: ", self) self.txt_summary_exp.setStyleSheet("font: bold;") self.knowledge_text = QtGui.QTextBrowser(self) self.right_frame_3 = QtGui.QFrame() self.right_side_3_layout = QtGui.QVBoxLayout() self.right_frame_3.setLayout(self.right_side_3_layout) self.right_side_3_layout.addWidget(self.txt_summary_stat) self.right_side_3_layout.addWidget(self.knowledge_table) self.right_side_3_layout.addWidget(self.txt_summary_exp) self.right_side_3_layout.addWidget(self.knowledge_text) self.v_box_layout_3 = QtGui.QHBoxLayout() self.v_box_layout_3.addWidget(self.txt_information_exist) self.v_box_layout_3.addWidget(self.left_frame_3) self.v_box_layout_3.addWidget(self.right_frame_3) self.left_frame_3.hide() self.knowledge_table.hide() self.knowledge_text.hide() self.txt_summary_exp.hide() self.txt_summary_stat.hide() ### Setting Tab 4 ### self.tabs.addTab(self.tab4, "Data Hasil Segmentasi") self.txt_result_exist = QtGui.QLabel("""Tidak ada data hasil yang ditampilkan.\n Impor data pelanggan, kemudian lakukan proses segmentasi dengan menekan F5 atau pilih menu Segmentasi > Proses.""", self) self.txt_result_exist.setStyleSheet("color: gray; font: italic;") self.result_data_table = QtGui.QTableWidget(self) self.v_box_layout_4 = QtGui.QHBoxLayout() self.v_box_layout_4.insertWidget(0, self.txt_result_exist) self.v_box_layout_4.insertWidget(1, self.result_data_table) self.result_data_table.hide() #Set Layout for each tab self.tab1.setLayout(self.v_box_layout_1) self.tab2.setLayout(self.v_box_layout_2) self.tab3.setLayout(self.v_box_layout_3) self.tab4.setLayout(self.v_box_layout_4) # main layout self.main_layout = QtGui.QVBoxLayout() # add all main to the main vLayout self.main_layout.addWidget(self.tabs) # central widget self.central_widget = QtGui.QWidget() self.central_widget.setLayout(self.main_layout) # set central widget self.setCentralWidget(self.central_widget) def show_import(self): self.imp = RawData() self.imp.exec_() if self.imp.display_table == True: if len(self.imp.selected_col) == 0: msgBox = QtGui.QMessageBox(self) msgBox.setText("Tidak ada atribut yang dipilih!") msgBox.setInformativeText("Silahkan pilih minimal 2 
atribut") msgBox.setIcon(2) msgBox.exec_() elif len(self.imp.selected_col) == 1: msgBox = QtGui.QMessageBox(self) msgBox.setText("Hanya satu atribut yang dipilih!") msgBox.setInformativeText("Silahkan pilih minimal 2 atribut") msgBox.setIcon(2) msgBox.exec_() else: self.display_raw_data(self.imp.df_selected_data) # Count data statistics self.imp.count_stats(self.imp.df_selected_data) self.txt_stats.setText(self.imp.stats) # Set to Tab 1 self.tabs.setCurrentWidget(self.tab1) self.left_frame.show() self.raw_data_table.show() self.txt_visual_exist.show() self.txt_information_exist.show() self.txt_result_exist.show() # Close widget self.txt_table_exist.hide() self.treshold_frame.hide() self.canvas_for_dendrogram.hide() self.toolbar.hide() self.figure.clf() self.left_frame_3.hide() self.result_data_table.hide() self.knowledge_table.hide() self.knowledge_text.hide() self.txt_summary_exp.hide() self.txt_summary_stat.hide() # Enable/ disable some menu self.seg_action.setEnabled(True) self.save_result_action.setEnabled(False) def display_raw_data(self, data): self.ready_data = data # Specify the number of rows and columns of table self.raw_data_table.setRowCount(len(self.ready_data.index)) self.raw_data_table.setColumnCount(len(self.ready_data.columns)) # Set cell value of table for i in range(len(self.ready_data.index)): for j in range(len(self.ready_data.columns)): self.raw_data_table.setItem(i, j, QtGui.QTableWidgetItem(str(self.ready_data.iget_value(i, j)))) # Color first column for i in range(len(self.ready_data.index)): for j in range(len(self.ready_data.columns)): self.raw_data_table.item(i,0).setBackground(QtGui.QColor(25,181,254)) # Create the columns header self.raw_data_table.setHorizontalHeaderLabels(list(self.ready_data.columns.values)) return self.ready_data def fill_with_zero(self): self.tabs.setCurrentWidget(self.tab1) if self.imp.missing_row_num == 0: msgBox = QtGui.QMessageBox(self) msgBox.setText("Tidak terdapat data kosong!") msgBox.setIcon(2) msgBox.exec_() elif self.imp.missing_row_num > 0: self.df_clean_data = self.imp.df_selected_data.fillna(0) # Count data statistics self.imp.count_stats(self.df_clean_data) self.txt_stats.setText(self.imp.stats) self.display_raw_data(self.df_clean_data) def delete_missing_row(self): self.tabs.setCurrentWidget(self.tab1) msgBox = QtGui.QMessageBox(self) msgBox.setInformativeText("Apakah Anda yakin untuk menghapus data kosong?") msgBox.setIcon(4) msgBox.setStandardButtons(QtGui.QMessageBox.Ok | QtGui.QMessageBox.Cancel) ret = msgBox.exec_() if ret == QtGui.QMessageBox.Ok: #Save was clicked if self.imp.missing_row_num == 0: msgBox = QtGui.QMessageBox(self) msgBox.setText("Tidak terdapat data kosong!") msgBox.setIcon(2) msgBox.exec_() elif self.imp.missing_row_num > 0: self.df_clean_data = self.imp.df_selected_data.dropna(axis=0) # Count data statistics self.imp.count_stats(self.df_clean_data) self.txt_stats.setText(self.imp.stats) self.display_raw_data(self.df_clean_data) def show_bining(self): self.tabs.setCurrentWidget(self.tab1) self.bin = Bining() self.bin.add_attribute_to_list(self.ready_data) self.bin.exec_() if self.bin.display_table: if not self.bin.selected_col: msgBox = QtGui.QMessageBox(self) msgBox.setText("Tidak ada atribut yang dipilih!") msgBox.setInformativeText("Silahkan pilih atribut") msgBox.setIcon(2) msgBox.exec_() else: self.display_raw_data(self.bin.data) def derive_attribute(self): self.tabs.setCurrentWidget(self.tab1) self.derv = DeriveAttribute() self.derv.add_attribute_to_list(self.ready_data) self.derv.exec_() if 
self.derv.display_table: if not self.derv.selected_col: print("Tidak ada atribut dipilih!") #Seharusnya tampilkan dalam dialog else: self.display_raw_data(self.derv.data) def segmen_customer(self): if self.imp.missing_num > 0: msgBox = QtGui.QMessageBox(self) msgBox.setText("Masih terdapat data kosong!") msgBox.setIcon(3) msgBox.exec_() elif self.imp.missing_num == 0: self.sgm = Segmentation(self.ready_data) self.sgm # Set Tab 2 and Tab 3 self.left_frame_3.show() self.txt_n_cluster.setText(self.sgm.n_cluster) self.txt_visual_exist.hide() self.treshold_frame.show() plt.ylabel("Jarak antar pelanggan") plt.xlabel("Pelanggan") self.canvas_for_dendrogram.show() self.toolbar.show() self.txt_result_exist.hide() self.result_data_table.show() self.txt_information_exist.hide() self.knowledge_table.show() self.knowledge_text.show() self.txt_summary_exp.show() self.txt_summary_stat.show() self.show_result_summary() # Draw dendrogram on canvas self.canvas_for_dendrogram.draw() # Set to Tab 2 self.tabs.setCurrentWidget(self.tab2) # Display result data in QTableWidget self.display_result_data(self.sgm.df_result_data) # Enable some menu item self.save_result_action.setEnabled(True) self.show_knowledge(self.sgm.df_result_data) def show_result_summary(self): self.cluster_list.clear() # Segment result summary for x in self.sgm.summary_list: self.item = QtGui.QListWidgetItem(x) self.cluster_list.addItem(self.item) def set_treshold(self): self.figure.clf() self.treshold_text = str(self.treshold_edit.text()) self.treshold_value = float(self.treshold_text) self.sgm.refresh_result_data(self.treshold_value) self.txt_n_cluster.setText(self.sgm.n_cluster) self.show_result_summary() # Draw dendrogram on canvas plt.ylabel("Jarak antar pelanggan") plt.xlabel("Pelanggan") self.canvas_for_dendrogram.draw() # Set to Tab 2 self.tabs.setCurrentWidget(self.tab2) # Display result data in QTableWidget self.display_result_data(self.sgm.df_result_data) self.show_knowledge(self.sgm.df_result_data) def display_result_data(self, data): # Specify the number of rows and columns of table self.result_data_table.setRowCount(len(data.index)) self.result_data_table.setColumnCount(len(data.columns)) # Set cell value of table for i in range(len(data.index)): for j in range(len(data.columns)): self.result_data_table.setItem\ (i, j, QtGui.QTableWidgetItem(str(data.iget_value(i, j)))) if (self.sgm.cluster_index[i] % 10 == 1): # Green self.result_data_table.item(i,j).setBackground(QtGui.QColor(177, 245, 67)) elif (self.sgm.cluster_index[i] % 10 == 2): # Blue self.result_data_table.item(i,j).setBackground(QtGui.QColor(0, 214, 221)) elif (self.sgm.cluster_index[i] % 10 == 3): # Pink self.result_data_table.item(i,j).setBackground(QtGui.QColor(255, 53, 139)) elif (self.sgm.cluster_index[i] % 10 == 4): # Yellow self.result_data_table.item(i,j).setBackground(QtGui.QColor(242, 255, 46)) elif (self.sgm.cluster_index[i] % 10 == 5): # Fresh Orange self.result_data_table.item(i,j).setBackground(QtGui.QColor(255, 94, 0)) elif (self.sgm.cluster_index[i] % 10 == 6): # Gray self.result_data_table.item(i,j).setBackground(QtGui.QColor(151, 104, 209)) elif (self.sgm.cluster_index[i] % 10 == 7): # Cream self.result_data_table.item(i,j).setBackground(QtGui.QColor(252, 125, 73)) elif (self.sgm.cluster_index[i] % 10 == 8): # Dark gray self.result_data_table.item(i,j).setBackground(QtGui.QColor(126, 138, 162)) elif (self.sgm.cluster_index[i] % 10 == 9): # Dark Green self.result_data_table.item(i,j).setBackground(QtGui.QColor(2, 166, 118)) elif 
(self.sgm.cluster_index[i] % 10 == 0): # Blue self.result_data_table.item(i,j).setBackground(QtGui.QColor(102, 12, 232)) # Create the columns header self.result_data_table.setHorizontalHeaderLabels(list(data.columns.values)) def show_knowledge(self, data): # Show knowledge result grouped_data = data.groupby('ID_Segmen') summed_data = grouped_data.sum() # Add new column (cluster_ID) to knowledge data #summed_data['ID_Segmen'] = #self.cluster_index # Specify the number of rows and columns of table self.knowledge_table.setRowCount(grouped_data.ngroups) self.knowledge_table.setColumnCount(len(grouped_data.sum().columns) + 1) # Set cell value of table for i in range(grouped_data.ngroups): self.knowledge_table.setItem(i,0,QtGui.QTableWidgetItem(str(i+1))) for j in range(len(grouped_data.sum().columns)): self.knowledge_table.setItem\ (i, j+1, QtGui.QTableWidgetItem(str(summed_data.get_values()[i][j]))) # Create the columns header knowledge_column_name = list(grouped_data.sum().columns.values) knowledge_column_name.insert(0,"Segmen ke-") self.knowledge_table.setHorizontalHeaderLabels(knowledge_column_name) # Color first column for i in range(grouped_data.ngroups): for j in range(len(grouped_data.sum().columns) + 1): self.knowledge_table.item(i,0).setBackground(QtGui.QColor(63,195,128)) self.knowledge_text.clear() for i in range(grouped_data.ngroups): size_of_each_group = list(grouped_data.size())[i] item1 = int(summed_data['jumlah_item_1'].get_values()[i]) item2 = int(summed_data['jumlah_item_2'].get_values()[i]) item3 = int(summed_data['jumlah_item_lbs3'].get_values()[i]) transaksi1 = int(summed_data['jumlah_transaksi_1'].get_values()[i]) transaksi2 = int(summed_data['jumlah_transaksi_lbs2'].get_values()[i]) custom_name = int(summed_data['custom_name'].get_values()[i]) segmen_title = "Segmen ke-{0}".format(i+1) self.knowledge_text.append(segmen_title) ket1 = "> Pelanggan pada segmen ke-{0} membeli: ".format(i+1) self.knowledge_text.append(ket1) if item1 > 0: item1_note = " 1 jersey: {0} pelanggan ({1:.2f}%)".format(item1, float(item1) / size_of_each_group * 100) self.knowledge_text.append(item1_note) if item2 > 0: item2_note = " 2 jersey: {0} pelanggan ({1:.2f}%)".format(item2, float(item2) / size_of_each_group * 100) self.knowledge_text.append(item2_note) if item3 > 0: item3_note = " lebih dari 2 jersey: {0} pelanggan ({1:.2f}%)".format(item3, float(item3) / size_of_each_group * 100) self.knowledge_text.append(item3_note) self.knowledge_text.append("dalam:") if transaksi1 > 0: transaksi1_note = " 1x transaksi: {0} pelanggan ({1:.2f}%)".format(transaksi1, float(transaksi1) / size_of_each_group * 100) self.knowledge_text.append(transaksi1_note) if transaksi2 > 0: transaksi2_note = " lebih dari 1x transaksi: {0} pelanggan ({1:.2f}%)".format(transaksi2, float(transaksi2) / size_of_each_group * 100) self.knowledge_text.append(transaksi2_note) if custom_name > 0: custom_name_note = "> Pelanggan yang membeli custom name sebanyak {0} pelanggan ({1:.2f}%)".format(custom_name, float(custom_name) / size_of_each_group * 100) self.knowledge_text.append(custom_name_note) new_line = "\n" self.knowledge_text.append(new_line) def save_result_data(self): # Provides a dialog that allow users to give file name and location on disk. self.file_name_save = QtGui.QFileDialog.getSaveFileName\ (self, "Save File", ".", "(Comma Separated Value *.csv)") # Write DataFrame into *.csv file. 
self.sgm.df_result_data.to_csv(self.file_name_save, sep=',', index=False) def show_about(self): abt = About() abt.exec_() def center_on_screen(self): # Centers the window on the screen. size = self.frameSize() desktop_size = QtGui.QDesktopWidget().screenGeometry() left = (desktop_size.width()/2)-(size.width()/2) top = (desktop_size.height()/2)-(size.height()/2) self.move(left, top)
mit
8,529,156,216,676,085,000
44.508475
174
0.6127
false
3.474379
false
false
false
levilucio/SyVOLT
ExFamToPerson/transformation/HCityCompany2Association.py
1
3243
from core.himesis import Himesis
import uuid

class HCityCompany2Association(Himesis):
    def __init__(self):
        """
        Creates the himesis graph representing the DSLTrans rule CityCompany2Association.
        """
        # Flag this instance as compiled now
        self.is_compiled = True

        super(HCityCompany2Association, self).__init__(name='HCityCompany2Association', num_nodes=0, edges=[])

        # Set the graph attributes
        self["mm__"] = ['HimesisMM']

        self["name"] = """CityCompany2Association"""
        self["GUID__"] = uuid.uuid3(uuid.NAMESPACE_DNS,'CityCompany2Association')

        # match model. We only support one match model
        self.add_node()
        self.vs[0]["mm__"] = """MatchModel"""

        # apply model node
        self.add_node()
        self.vs[1]["mm__"] = """ApplyModel"""

        # paired with relation between match and apply models
        self.add_node()
        self.vs[2]["mm__"] = """paired_with"""

        # match class City() node
        self.add_node()
        self.vs[3]["mm__"] = """City"""
        self.vs[3]["attr1"] = """+"""
        # match_contains node for class City()
        self.add_node()
        self.vs[4]["mm__"] = """match_contains"""

        # match class Company() node
        self.add_node()
        self.vs[5]["mm__"] = """Company"""
        self.vs[5]["attr1"] = """+"""
        # match_contains node for class Company()
        self.add_node()
        self.vs[6]["mm__"] = """match_contains"""

        # apply class Association() node
        self.add_node()
        self.vs[7]["mm__"] = """Association"""
        self.vs[7]["attr1"] = """1"""
        # apply_contains node for class Association()
        self.add_node()
        self.vs[8]["mm__"] = """apply_contains"""

        # match association City--companies-->Company node
        self.add_node()
        self.vs[9]["attr1"] = """companies"""
        self.vs[9]["mm__"] = """directLink_S"""

        # match association Company--isIn-->City node
        self.add_node()
        self.vs[10]["attr1"] = """isIn"""
        self.vs[10]["mm__"] = """directLink_S"""

        # Add the edges
        self.add_edges([
            (0,4),      # matchmodel -> match_contains
            (4,3),      # match_contains -> match_class City()
            (0,6),      # matchmodel -> match_contains
            (6,5),      # match_contains -> match_class Company()
            (1,8),      # applymodel -> apply_contains
            (8,7),      # apply_contains -> apply_class Association()
            (3,9),      # match_class City() -> association companies
            (9,5),      # association companies -> match_class Company()
            (5,10),     # match_class Company() -> association isIn
            (10,3),     # association isIn -> match_class City()
            (0,2),      # matchmodel -> pairedwith
            (2,1)       # pairedwith -> applyModel
        ])

        # Add the attribute equations
        self["equations"] = [((7,'name'),('concat',((3,'name'),(5,'name')))), ]
mit
-7,335,360,096,594,106,000
31.43
110
0.488437
false
3.998767
false
false
false
emacsmirror/stgit
stgit/commands/common.py
1
16342
"""Function/variables common to all the commands""" import email.utils import os import re import sys from stgit import templates from stgit.compat import decode_utf8_with_latin1 from stgit.config import config from stgit.exception import StgException from stgit.lib.git import CommitData, MergeException, RepositoryException from stgit.lib.stack import StackRepository from stgit.lib.transaction import ( StackTransaction, TransactionException, TransactionHalted, ) from stgit.out import out from stgit.run import Run, RunException from stgit.utils import ( EditorException, add_trailer, edit_string, get_hook, run_hook_on_bytes, strip_prefix, ) __copyright__ = """ Copyright (C) 2005, Catalin Marinas <[email protected]> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License version 2 as published by the Free Software Foundation. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see http://www.gnu.org/licenses/. """ class CmdException(StgException): pass def parse_rev(rev): """Parse a revision specification into its branch:patch parts.""" try: branch, patch = rev.split(':', 1) except ValueError: branch = None patch = rev return (branch, patch) def git_commit(name, repository, branch_name=None): """Return the a Commit object if 'name' is a patch name or Git commit. The patch names allowed are in the form '<branch>:<patch>' and can be followed by standard symbols used by ``git rev-parse``. If <patch> is '{base}', it represents the bottom of the stack. """ # Try a [branch:]patch name first branch, patch = parse_rev(name) if not branch: branch = branch_name or repository.current_branch_name # The stack base if patch.startswith('{base}'): base_id = repository.get_stack(branch).base.sha1 return repository.rev_parse(base_id + strip_prefix('{base}', patch)) # Other combination of branch and patch try: return repository.rev_parse( 'patches/%s/%s' % (branch, patch), discard_stderr=True ) except RepositoryException: pass # Try a Git commit try: return repository.rev_parse(name, discard_stderr=True) except RepositoryException: raise CmdException('%s: Unknown patch or revision name' % name) def color_diff_flags(): """Return the Git flags for coloured diff output if allowed.""" stdout_is_tty = (sys.stdout.isatty() and 'true') or 'false' if config.get_colorbool('color.diff', stdout_is_tty) == 'true': return ['--color'] else: return [] def check_local_changes(repository): out.start('Checking for changes in the working directory') iw = repository.default_iw iw.refresh_index() tree = repository.refs.get(repository.head_ref).data.tree local_changes = iw.changed_files(tree) out.done() if local_changes: raise CmdException('local changes in the tree. Use "refresh" or "reset --hard"') def check_head_top_equal(stack): if stack.head != stack.top: raise CmdException( 'HEAD and top are not the same. This can happen if you modify a ' 'branch with git. "stg repair --help" explains more about what to ' 'do next.' ) def check_conflicts(iw): if iw.index.conflicts(): raise CmdException( 'Unsolved conflicts. Please fix the conflicts then use "git add ' '--update <files>" or revert the changes with "reset --hard".' 
) def print_current_patch(stack): if stack.patchorder.applied: out.info('Now at patch "%s"' % stack.patchorder.applied[-1]) else: out.info('No patches applied') def parse_patches(patch_args, patch_list, boundary=0, ordered=False): """Parse patch_args list for patch names in patch_list. The names can be individual patches and/or in the 'patch1..patch2' format. """ # in case it receives a tuple patch_list = list(patch_list) patches = [] for name in patch_args: pair = name.split('..') for p in pair: if p and p not in patch_list: raise CmdException('Unknown patch name: %s' % p) if len(pair) == 1: # single patch name pl = pair elif len(pair) == 2: # patch range [p1]..[p2] # inclusive boundary if pair[0]: first = patch_list.index(pair[0]) else: first = -1 # exclusive boundary if pair[1]: last = patch_list.index(pair[1]) + 1 else: last = -1 # only cross the boundary if explicitly asked if not boundary: boundary = len(patch_list) if first < 0: if last <= boundary: first = 0 else: first = boundary if last < 0: if first < boundary: last = boundary else: last = len(patch_list) if last > first: pl = patch_list[first:last] else: pl = patch_list[(last - 1) : (first + 1)] pl.reverse() else: raise CmdException('Malformed patch name: %s' % name) for p in pl: if p in patches: raise CmdException('Duplicate patch name: %s' % p) patches += pl if ordered: patches = [p for p in patch_list if p in patches] return patches def name_email(address): p = email.utils.parseaddr(address) if p[1]: return p else: raise CmdException( 'Incorrect "name <email>"/"email (name)" string: %s' % address ) def address_or_alias(addr_pair): """Pass-through name/email address or lookup alias from config. Returns a name-email tuple if the e-mail address is valid, otherwise looks up the alias in the config files. """ addr = addr_pair[1] if '@' in addr: # it's an e-mail address return addr_pair alias = config.get('mail.alias.' 
+ addr) if alias: # it's an alias return name_email(alias) raise CmdException('unknown e-mail alias: %s' % addr) def apply_patch(stack, diff, base=None, reject=False, strip=None, context_lines=None): iw = stack.repository.default_iw iw.refresh_index() if base: orig_head = stack.head iw.checkout(orig_head.data.tree, base.data.tree) stack.set_head(base, msg='apply patch') try: iw.apply( diff, quiet=False, reject=reject, strip=strip, context_lines=context_lines ) except MergeException: if base: iw.checkout_hard(orig_head.data.tree) raise if base: iw.update_index(iw.changed_files(base.data.tree)) top = stack.repository.commit( CommitData( tree=stack.repository.default_index.write_tree(), message='temporary commit used for applying a patch', parents=[base], ) ) iw.checkout(top.data.tree, orig_head.data.tree) stack.set_head(orig_head, msg='post apply') iw.merge(base.data.tree, orig_head.data.tree, top.data.tree) def prepare_rebase(stack, cmd_name): # pop all patches iw = stack.repository.default_iw trans = StackTransaction(stack, '%s (pop)' % cmd_name, check_clean_iw=iw) out.start('Popping all applied patches') try: trans.reorder_patches( applied=[], unapplied=trans.applied + trans.unapplied, iw=iw, allow_interactive=True, ) except TransactionException: pass retval = trans.run(iw, print_current_patch=False) if retval: out.done('Failed to pop applied patches') else: out.done() return retval def rebase(stack, iw, target_commit=None): command = config.get('branch.%s.stgit.rebasecmd' % stack.name) or config.get( 'stgit.rebasecmd' ) if not command and not target_commit: raise CmdException('Default rebasing requires a commit') elif target_commit: out.start('Rebasing to "%s"' % target_commit.sha1) else: out.start('Rebasing to the default target') if command: command = command.split() if target_commit is not None: command.append(target_commit.sha1) iw.run(command).run() else: iw.checkout_hard(target_commit) stack.set_head(target_commit, 'rebase') out.done() def post_rebase(stack, applied, cmd_name, check_merged): iw = stack.repository.default_iw trans = StackTransaction(stack, '%s (reapply)' % cmd_name) try: if check_merged: merged = set(trans.check_merged(applied)) else: merged = set() for pn in applied: trans.push_patch( pn, iw, allow_interactive=True, already_merged=pn in merged ) except TransactionHalted: pass return trans.run(iw) def delete_patches(stack, iw, patches): def allow_conflicts(trans): # Allow conflicts if the topmost patch stays the same. 
if stack.patchorder.applied: return trans.applied and trans.applied[-1] == stack.patchorder.applied[-1] else: return not trans.applied trans = StackTransaction(stack, 'delete', allow_conflicts=allow_conflicts) try: to_push = trans.delete_patches(lambda pn: pn in patches) for pn in to_push: trans.push_patch(pn, iw) except TransactionHalted: pass return trans.run(iw) # # Patch description/e-mail/diff parsing # def __split_descr_diff(string): """Return the description and the diff from the given string.""" m = re.search( br''' ^ (?: --- \s* | --- \s \w | diff \s - | Index: \s ) ''', string, re.MULTILINE | re.VERBOSE, ) if m: desc = string[: m.start()] diff = string.split(m.group(), 1)[1] else: desc = string diff = b'' return desc, diff def __parse_description(descr): """Parse the patch description for author information.""" subject = '' authname = authemail = authdate = None descr_lines = [line.rstrip() for line in descr.splitlines()] lasthdr = 0 descr_strip = 0 # Parse the patch header for pos, line in enumerate(descr_lines): if not line: continue # check for a "From|Author:" line if re.match(r'\s*(?:from|author):\s+', line, re.I): auth = re.findall(r'^.*?:\s+(.*)$', line)[0] authname, authemail = name_email(auth) lasthdr = pos + 1 continue # check for a "Date:" line if re.match(r'\s*date:\s+', line, re.I): authdate = re.findall(r'^.*?:\s+(.*)$', line)[0] lasthdr = pos + 1 continue if subject: break # get the subject subject = line[descr_strip:] if re.match(r'commit [\da-f]{40}$', subject): # 'git show' output, look for the real subject subject = '' descr_strip = 4 lasthdr = pos + 1 body = ''.join(line[descr_strip:] + '\n' for line in descr_lines[lasthdr:]) message = subject + '\n' + body return (message, authname, authemail, authdate) def parse_patch(patch_data, contains_diff): """Parse patch data. Returns (description, authname, authemail, authdate, diff) """ assert isinstance(patch_data, bytes) if contains_diff: (descr, diff) = __split_descr_diff(patch_data) else: descr = patch_data diff = None (descr, authname, authemail, authdate) = __parse_description( decode_utf8_with_latin1(descr) ) # we don't yet have an agreed place for the creation date. # Just return None return (descr, authname, authemail, authdate, diff) def run_commit_msg_hook(repo, cd, editor_is_used=True): """Run the commit-msg hook (if any) on a commit. :param cd: The :class:`stgit.lib.git.CommitData` to run the hook on. :returns: the new :class:`stgit.lib.git.CommitData` """ env = dict(cd.env) if not editor_is_used: env['GIT_EDITOR'] = ':' commit_msg_hook = get_hook(repo, 'commit-msg', env) if commit_msg_hook: try: new_msg = run_hook_on_bytes(commit_msg_hook, cd.message) except RunException as exc: raise EditorException(str(exc)) return cd.set_message(new_msg) else: return cd COMMIT_MESSAGE_INSTRUCTIONS = """ # Please enter the commit message for your changes. Lines starting # with '#' will be ignored. # # """ COMMIT_MESSAGE_DEMARCATION_LINE = """ # ------------------------ >8 ------------------------""" COMMIT_MESSAGE_INSTRUCTIONS_2 = """ # Do not modify or remove the line above. # Everything below it will be ignored. """ def _git_status(): out = Run('git', 'status').output_lines() # strip out git's "instruction" lines, such as '(use "git add <file>..." 
to update' return [line for line in out if '(use' not in line] def _git_diff(): return Run('git', 'diff').output_lines() def update_commit_data( cd, message=None, author=None, sign_str=None, edit=False, verbose=False ): """Create updated CommitData according to the command line options.""" # Set the commit message from commandline. if message is not None: cd = cd.set_message(message) # Modify author data. if author is not None: cd = cd.set_author(author) # Add Signed-off-by: or similar. if sign_str is None: sign_str = config.get("stgit.autosign") if sign_str: cd = cd.set_message( add_trailer(cd.message_str, sign_str, cd.committer.name, cd.committer.email) ) if edit: message_str = cd.message_str tmpl = templates.get_template('patchdescr.tmpl') if tmpl: message_str += tmpl status = '\n# '.join(_git_status()) message_str += COMMIT_MESSAGE_INSTRUCTIONS + status if verbose: # include a diff message_str += ( COMMIT_MESSAGE_DEMARCATION_LINE + COMMIT_MESSAGE_INSTRUCTIONS_2 ) message_str += '\n'.join(_git_diff()) new_message = edit_string(message_str, '.stgit-new.txt') new_message = new_message.split(COMMIT_MESSAGE_DEMARCATION_LINE)[0] new_message = '\n'.join( line for line in new_message.splitlines() if not line.startswith('#') ) cd = cd.set_message(new_message) return cd class DirectoryException(StgException): pass class DirectoryAnywhere: def setup(self): pass class DirectoryHasRepository: def setup(self): # This will throw an exception if we don't have a repository. self.repository = StackRepository.default() def cd_to_topdir(self): worktree_top = ( Run('git', 'rev-parse', '--show-cdup') .discard_stderr() .raw_output() .rstrip() ) if worktree_top: os.chdir(worktree_top) class DirectoryInWorktree(DirectoryHasRepository): def setup(self): DirectoryHasRepository.setup(self) if not self._is_inside_worktree(): raise DirectoryException('Not inside a git worktree') def _is_inside_worktree(self): return ( Run('git', 'rev-parse', '--is-inside-work-tree').output_one_line() == 'true' ) class DirectoryGotoTopLevel(DirectoryInWorktree): def setup(self): DirectoryInWorktree.setup(self) self.cd_to_topdir()
gpl-2.0
6,991,844,058,826,038,000
27.821869
88
0.594052
false
3.814659
true
false
false
bhavin04890/finaldashboard
modules/eden/vulnerability.py
3
17256
# -*- coding: utf-8 -*- """ Sahana Eden Vulnerability Model @copyright: 2012 (c) Sahana Software Foundation @license: MIT Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ __all__ = ["S3VulnerabilityModel", ] from gluon import * from gluon.storage import Storage from ..s3 import * # ============================================================================= class S3VulnerabilityModel(S3Model): """ Vulnerability Management """ names = ["vulnerability_indicator", "vulnerability_aggregated_indicator", "vulnerability_data", "vulnerability_resilience_id", "vulnerability_ids", "vulnerability_resilience", ] resilience_pid = None # id of the resilience indicator indicator_pids = None # List of ids used to calculate the resilence indicator def model(self): T = current.T db = current.db configure = self.configure crud_strings = current.response.s3.crud_strings define_table = self.define_table super_link = self.super_link # --------------------------------------------------------------------- # Vulnerability Indicator # tablename = "vulnerability_indicator" table = define_table(tablename, super_link("parameter_id", "stats_parameter"), Field("posn", "integer"), Field("name", label = T("Name")), s3_comments("description", label = T("Description")), *s3_meta_fields() ) # CRUD Strings ADD_VULNERABILITY = T("Add Vulnerability Indicator") crud_strings[tablename] = Storage( title_create = ADD_VULNERABILITY, title_display = T("Vulnerability Indicator Details"), title_list = T("Vulnerability Indicators"), title_update = T("Edit Vulnerability Indicator"), title_search = T("Search Vulnerability Indicators"), title_upload = T("Import Vulnerability Indicator"), subtitle_create = T("Add New Vulnerability Indicator"), label_list_button = T("List Vulnerability Indicators"), label_create_button = ADD_VULNERABILITY, msg_record_created = T("Vulnerability Indicator added"), msg_record_modified = T("Vulnerability Indicator updated"), msg_record_deleted = T("Vulnerability Indicator deleted"), msg_list_empty = T("No vulnerability indicators currently defined")) configure(tablename, super_entity = "stats_parameter", deduplicate = self.vulnerability_indicator_duplicate, ) # --------------------------------------------------------------------- # Vulnerability Aggregated Indicator # tablename = "vulnerability_aggregated_indicator" table = define_table(tablename, super_link("parameter_id", "stats_parameter"), Field("name", label = T("Name")), s3_comments("description", label = T("Description")), *s3_meta_fields() ) # CRUD Strings ADD_VULNERABILITY = T("Add Vulnerability 
Aggregated Indicator") crud_strings[tablename] = Storage( title_create = ADD_VULNERABILITY, title_display = T("Vulnerability Aggregated Indicator Details"), title_list = T("Vulnerability Aggregated Indicators"), title_update = T("Edit Vulnerability Aggregated Indicator"), title_search = T("Search Vulnerability Aggregated Indicators"), title_upload = T("Import Vulnerability Aggregated Indicator"), subtitle_create = T("Add New Vulnerability Aggregated Indicator"), label_list_button = T("List Vulnerability Aggregated Indicators"), label_create_button = ADD_VULNERABILITY, msg_record_created = T("Vulnerability Aggregated Indicator added"), msg_record_modified = T("Vulnerability Aggregated Indicator updated"), msg_record_deleted = T("Vulnerability Aggregated Indicator deleted"), msg_list_empty = T("No vulnerability aggregated indicators currently defined")) configure(tablename, super_entity = "stats_parameter", deduplicate = self.vulnerability_indicator_duplicate, ) # --------------------------------------------------------------------- # Vulnerability Data # tablename = "vulnerability_data" table = define_table(tablename, super_link("data_id", "stats_data"), self.stats_param_id( label = T("Indicator"), requires = IS_ONE_OF(db, "stats_parameter.parameter_id", self.stats_parameter_represent, filterby="instance_type", filter_opts=["vulnerability_indicator"], orderby="stats_parameter.name", sort=True) ), self.gis_location_id( widget = S3LocationAutocompleteWidget(), requires = IS_LOCATION() ), Field("value", "double", label = T("Value")), s3_date(), # Unused but needed for the stats_data SE Field("date_end", "date", readable=False, writable=False ), self.stats_group_id(), *s3_meta_fields() ) # CRUD Strings ADD_DATA = T("Add Vulnerability Data") crud_strings[tablename] = Storage( title_create = ADD_DATA, title_display = T("Vulnerability Data Details"), title_list = T("Vulnerability Data"), title_update = T("Edit Vulnerability Data"), title_search = T("Search Vulnerability Data"), title_upload = T("Import Vulnerability Data"), subtitle_create = T("Add New Vulnerability Data"), label_list_button = T("List Vulnerability Data"), label_create_button = ADD_DATA, msg_record_created = T("Vulnerability Data added"), msg_record_modified = T("Vulnerability Data updated"), msg_record_deleted = T("Vulnerability Data deleted"), msg_list_empty = T("No vulnerability data currently defined")) configure(tablename, super_entity = "stats_data", deduplicate = self.vulnerability_data_duplicate, requires_approval=True, ) # --------------------------------------------------------------------- # Pass model-global names to response.s3 # return Storage( vulnerability_resilience_id = self.vulnerability_resilience_id, vulnerability_ids = self.vulnerability_ids, vulnerability_resilience = self.vulnerability_resilience, ) # ------------------------------------------------------------------------- def defaults(self): """ Safe defaults if the module is disabled """ return Storage( vulnerability_resilience_id = lambda i: [], vulnerability_ids = lambda i: None, ) # ------------------------------------------------------------------------- @staticmethod def vulnerability_resilience_id(): """ Return the parameter_id of the resilience indicator """ if S3VulnerabilityModel.resilience_pid is None: # Get the parameter_id of the aggregated_indicator table = current.s3db.vulnerability_aggregated_indicator query = (table.uuid == "Resilience") & \ (table.deleted == False) row = current.db(query).select(table.parameter_id, limitby=(0, 
1)).first() try: S3VulnerabilityModel.resilience_pid = row.parameter_id except: # DB not initialised pass return S3VulnerabilityModel.resilience_pid # ------------------------------------------------------------------------- @staticmethod def vulnerability_ids(): """ Return a list of the parameter_id's that are to be used when calculating the resilience indicator """ if S3VulnerabilityModel.indicator_pids is None: table = current.s3db.vulnerability_indicator query = (table.deleted == False) rows = current.db(query).select(table.parameter_id) S3VulnerabilityModel.indicator_pids = [i.parameter_id for i in rows] return S3VulnerabilityModel.indicator_pids # ------------------------------------------------------------------------- @staticmethod def vulnerability_resilience(loc_level, location_id, resilience_pid, indicator_pids, date_period_start, date_period_end, use_location, ): """ Calculates the resilience held in the vulnerability_data table for a specific location and time period. This is run async Where appropriate add test cases to modules/unit_tests/eden/stats.py """ db = current.db s3db = current.s3db vtable = s3db.vulnerability_data stable = s3db.stats_aggregate # Get the data from the vulnerability_data table query = (vtable.deleted != True) & \ (vtable.approved_by != None) & \ (vtable.parameter_id.belongs(indicator_pids)) ward_count = 1 if use_location: query &= (vtable.location_id == location_id) else: # Get all the child locations child_locations = current.gis.get_children(location_id, loc_level) child_ids = [row.id for row in child_locations] ward_count = len(child_ids) query &= (vtable.location_id.belongs(child_ids)) if date_period_end is None: pass elif date_period_end == "None": date_period_end = None else: query &= (vtable.date <= date_period_end) rows = db(query).select(vtable.parameter_id, vtable.location_id, vtable.value, vtable.date, orderby=(vtable.location_id, vtable.parameter_id, ~vtable.date ) ) # The query may return duplicate records for the same # location+parameter: use the most recent, which because # of the ordering will be the first values = [] append = values.append locations = [] new_location = locations.append last_record = (0, 0) for row in rows: value = row.value if not value: continue l = row.location_id key = (l, row.parameter_id) if last_record != key: last_record = key append(value) if l not in locations: new_location(l) # Aggregate the values values_len = len(values) if not values_len: return import numpy values_sum = sum(values) values_min = min(values) values_max = max(values) values_avg = float(values_sum) / values_len values_med = numpy.median(values) values_mad = numpy.median([abs(v - values_med) for v in values]) reported_count = len(locations) # Store Resilience value in the stats_aggregate table query = (stable.location_id == location_id) & \ (stable.date == date_period_start) & \ (stable.parameter_id == resilience_pid) record = db(query).select(stable.id, limitby=(0, 1)).first() if record: # Update db(query).update(date = date_period_start, end_date = date_period_end, reported_count = reported_count, ward_count = ward_count, min = values_min, max = values_max, mean = values_avg, median = values_med, mad = values_mad, ) else: # Insert new id = stable.insert(agg_type = 4, # indicator parameter_id = resilience_pid, location_id = location_id, date = date_period_start, end_date = date_period_end, reported_count = reported_count, ward_count = ward_count, min = values_min, max = values_max, mean = values_avg, median = values_med, mad = values_mad, ) 
return # ------------------------------------------------------------------------- @staticmethod def vulnerability_indicator_duplicate(item): """ Import item de-duplication """ if (item.tablename == "vulnerability_indicator") or \ (item.tablename == "vulnerability_aggregated_indicator"): table = item.table name = item.data.get("name", None) query = (table.name.lower() == name.lower()) duplicate = current.db(query).select(table.id, limitby=(0, 1)).first() if duplicate: item.id = duplicate.id item.method = item.METHOD.UPDATE # ------------------------------------------------------------------------- @staticmethod def vulnerability_data_duplicate(item): """ Import item de-duplication """ if item.tablename == "vulnerability_data": data = item.data param = data.get("parameter_id", None) location = data.get("location_id", None) date = data.get("date", None) table = item.table query = (table.parameter_id == param) & \ (table.location_id == location) & \ (table.date == date) duplicate = current.db(query).select(table.id, limitby=(0, 1)).first() if duplicate: item.id = duplicate.id item.method = item.METHOD.UPDATE # END =========================================================================
mit
6,863,977,461,972,140,000
41.294118
93
0.480934
false
5.192898
false
false
false
Knio/osxactivity
window.py
1
3156
import os import time import subprocess import logging import json import sys import structpack from collections import defaultdict IDLE_TIME = 15 APP_TIME = 1 log = logging.getLogger() logging.basicConfig( level=logging.DEBUG, # format='%(message)s', datefmt='%Y-%m-%d %H:%M:%s', filename='logs/window.log', ) class Entry(structpack.msg): idle = structpack.bool app = structpack.str window = structpack.str start_time = structpack.int end_time = structpack.int def __init__(self, idle=False, app='', window='', start_time=0, end_time=0): self.idle = idle self.app = app self.window = window self.start_time = int(start_time) self.end_time = int(end_time) def __eq__(self, that): return \ (self.idle, self.app, self.window) == \ (that.idle, that.app, that.window) def duration(self): return self.end_time - self.start_time def get_idle_time(): import Quartz idle = Quartz.CoreGraphics.CGEventSourceSecondsSinceLastEventType(1, 0xFFFFFFFF) return idle def get_active_window(): line = subprocess.check_output('osascript window.scpt', stderr=subprocess.STDOUT, shell=True).strip() try: app, window = line.split(':', 1) except: import traceback traceback.print_exc() return 'ERROR', line return app, window def get_entry(): idle = get_idle_time() > IDLE_TIME if idle: app, window = ('IDLE', 'IDLE') else: app, window = get_active_window() return Entry(idle, app, window, time.time()) def get_entries(): entry = get_entry() while 1: time.sleep(APP_TIME) entry.end_time = time.time() next = get_entry() if entry.duration() > 300: yield entry entry = next if not next == entry: yield entry entry = next def log(): for entry in get_entries(): log.info('Entry:' + json.dumps(entry.pack())) def load_entries(inp): for line in inp: entry_json = line.split('Entry:', 1)[1] entry_data = json.loads(entry_json) entry = Entry.load(entry_data) yield entry def display_time(d, t): h = d / 60 / 60 m = d / 60 % 60 p = d * 100.0 / t return '%2dh %2dm (%2d%%)' % (h, m, p) def stats(entries): time = defaultdict(lambda:defaultdict(int)) app_time = defaultdict(int) total = 0 for e in entries: d = e.duration() total += d app_time[e.app] += d time[e.app][e.window] += d for app, d in sorted(app_time.items(), reverse=True): if d < 5 * 60: continue print '### %-50s ### %s' % (app, display_time(d, total)) for window, d in sorted(time[app].items(), reverse=True): if d < 60: continue print ' %-50s %s' % (window[:50], display_time(d, total)) print if __name__ == '__main__': if len(sys.argv) == 2: stats(load_entries(file(sys.argv[1]))) else: log()
mit
-6,299,528,443,896,209,000
19.36129
105
0.553549
false
3.510567
false
false
false
mosbasik/rvagaming-flask
app/views.py
1
2055
from flask import render_template, flash, redirect, session, url_for, request, g from flask.ext.login import login_user, logout_user, current_user, login_required from app import app, db, lm, oid from forms import LoginForm from models import Site_User @lm.user_loader def load_user(id): return User.query.get(int(id)) @app.route('/') @app.route('/index') def index(): user = {'nickname': 'marseilles'} Steam_Users = [{ 'nickname': 'redsurfable', 'steam_64': '76561198047777536', 'steam_32': '87511808', 'is_main': '1', }, { 'nickname': 'mos_basik', 'steam_64': '76561198062133508', 'steam_32': '101867780', 'is_main': '1', }, { 'nickname': 'Swambulance', 'steam_64': '76561198027956481', 'steam_32': '67690753', 'is_main': '1', }] return render_template('index.html', title='Home', user=user, Steam_Users=Steam_Users) @app.route('/login', methods=['GET', 'POST']) @oid.loginhandler def login(): if g.user is not None and g.user.is_authenticated(): return redirect(url_for('index')) form = LoginForm() if form.validate_on_submit(): session['remember_me'] = form.remember_me.data return oid.try_login(form.openid.data, ask_for=['nickname']) return render_template('login.html', title='Sign In', form=form, providers=app.config['OPENID_PROVIDERS']) @oid.after_login def after_login(resp): if resp.email is None or resp.email == "": flash('Invalid login. Please try again.') return redirect(url_for('login')) user = User.query.filter_by(email=resp.email).first() if user == None: nickname = resp.nickname
mit
5,976,283,463,642,106,000
33.25
90
0.516788
false
3.819703
false
false
false
salabim/salabim
sample models/Demo animate 2.py
1
1276
# Demo animate 2.py import salabim as sim class AnimateWaitSquare(sim.Animate): def __init__(self, i): self.i = i sim.Animate.__init__( self, rectangle0=(-10, -10, 10, 10), x0=300 - 30 * i, y0=100, fillcolor0="red", linewidth0=0 ) def visible(self, t): return q[self.i] is not None class AnimateWaitText(sim.Animate): def __init__(self, i): self.i = i sim.Animate.__init__(self, text="", x0=300 - 30 * i, y0=100, textcolor0="white") def text(self, t): component_i = q[self.i] if component_i is None: return "" else: return component_i.name() def do_animation(): env.animation_parameters() for i in range(10): AnimateWaitSquare(i) AnimateWaitText(i) show_length = sim.Animate(text="", x0=330, y0=100, textcolor0="black", anchor="w") show_length.text = lambda t: "Length= " + str(len(q)) class Person(sim.Component): def process(self): self.enter(q) yield self.hold(15) self.leave(q) env = sim.Environment(trace=True) q = sim.Queue("q") for i in range(15): Person(name="{:02d}".format(i), at=i) do_animation() env.run()
mit
-374,682,658,797,282,800
21.62963
104
0.547806
false
3.182045
false
false
false
rdmorganiser/rdmo
rdmo/questions/migrations/0007_refactoring.py
2
11515
# -*- coding: utf-8 -*- # Generated by Django 1.9 on 2017-01-26 16:01 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('questions', '0006_auto_20160803_1619'), ] operations = [ migrations.AddField( model_name='catalog', name='comment', field=models.TextField(blank=True, help_text='Additional information about this catalog.', null=True, verbose_name='Comment'), ), migrations.AddField( model_name='catalog', name='key', field=models.SlugField(blank=True, help_text='The internal identifier of this catalog. The URI will be generated from this key.', max_length=128, null=True, verbose_name='Key'), ), migrations.AddField( model_name='catalog', name='uri', field=models.URLField(blank=True, help_text='The Uniform Resource Identifier of this catalog (auto-generated).', max_length=640, null=True, verbose_name='URI'), ), migrations.AddField( model_name='catalog', name='uri_prefix', field=models.URLField(blank=True, help_text='The prefix for the URI of this catalog.', max_length=256, null=True, verbose_name='URI Prefix'), ), migrations.AddField( model_name='questionentity', name='comment', field=models.TextField(blank=True, help_text='Additional information about this question/questionset.', null=True, verbose_name='Comment'), ), migrations.AddField( model_name='questionentity', name='key', field=models.SlugField(blank=True, help_text='The internal identifier of this question/questionset. The URI will be generated from this key.', max_length=128, null=True, verbose_name='Key'), ), migrations.AddField( model_name='questionentity', name='uri', field=models.URLField(blank=True, help_text='The Uniform Resource Identifier of this question/questionset (auto-generated).', max_length=640, null=True, verbose_name='URI'), ), migrations.AddField( model_name='questionentity', name='uri_prefix', field=models.URLField(blank=True, help_text='The prefix for the URI of this question/questionset.', max_length=256, null=True, verbose_name='URI Prefix'), ), migrations.AddField( model_name='section', name='comment', field=models.TextField(blank=True, help_text='Additional information about this section.', null=True, verbose_name='Comment'), ), migrations.AddField( model_name='section', name='key', field=models.SlugField(blank=True, help_text='The internal identifier of this section. The URI will be generated from this key.', max_length=128, null=True, verbose_name='Key'), ), migrations.AddField( model_name='section', name='uri', field=models.URLField(blank=True, help_text='The Uniform Resource Identifier of this section (auto-generated).', max_length=640, null=True, verbose_name='URI'), ), migrations.AddField( model_name='section', name='uri_prefix', field=models.URLField(blank=True, help_text='The prefix for the URI of this section.', max_length=256, null=True, verbose_name='URI Prefix'), ), migrations.AddField( model_name='subsection', name='comment', field=models.TextField(blank=True, help_text='Additional information about this subsection.', null=True, verbose_name='Comment'), ), migrations.AddField( model_name='subsection', name='key', field=models.SlugField(blank=True, help_text='The internal identifier of this subsection. 
The URI will be generated from this key.', max_length=128, null=True, verbose_name='Key'), ), migrations.AddField( model_name='subsection', name='uri', field=models.URLField(blank=True, help_text='The Uniform Resource Identifier of this subsection (auto-generated).', max_length=640, null=True, verbose_name='URI'), ), migrations.AddField( model_name='subsection', name='uri_prefix', field=models.URLField(blank=True, help_text='The prefix for the URI of this subsection.', max_length=256, null=True, verbose_name='URI Prefix'), ), migrations.AlterField( model_name='catalog', name='order', field=models.IntegerField(default=0, help_text='The position of this catalog in lists.', verbose_name='Order'), ), migrations.AlterField( model_name='catalog', name='title_de', field=models.CharField(help_text='The German title for this catalog.', max_length=256, verbose_name='Title (de)'), ), migrations.AlterField( model_name='catalog', name='title_en', field=models.CharField(help_text='The English title for this catalog.', max_length=256, verbose_name='Title (en)'), ), migrations.AlterField( model_name='question', name='parent', field=models.ForeignKey(blank=True, help_text='The question set this question belongs to.', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='questions', to='questions.QuestionEntity', verbose_name='Parent'), ), migrations.AlterField( model_name='question', name='text_de', field=models.TextField(help_text='The German text for this question.', verbose_name='Text (de)'), ), migrations.AlterField( model_name='question', name='text_en', field=models.TextField(help_text='The English text for this question.', verbose_name='Text (en)'), ), migrations.AlterField( model_name='question', name='widget_type', field=models.CharField(choices=[('text', 'Text'), ('textarea', 'Textarea'), ('yesno', 'Yes/No'), ('checkbox', 'Checkboxes'), ('radio', 'Radio buttons'), ('select', 'Select drop-down'), ('range', 'Range slider'), ('date', 'Date picker')], help_text='Type of widget for this question.', max_length=12, verbose_name='Widget type'), ), migrations.AlterField( model_name='questionentity', name='attribute_entity', field=models.ForeignKey(blank=True, help_text='The attribute/entity this question belongs to.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='domain.AttributeEntity', verbose_name='Attribute entity'), ), migrations.AlterField( model_name='questionentity', name='help_de', field=models.TextField(blank=True, help_text='The German help text for this question/questionset.', null=True, verbose_name='Help (de)'), ), migrations.AlterField( model_name='questionentity', name='help_en', field=models.TextField(blank=True, help_text='The English help text for this question/questionset.', null=True, verbose_name='Help (en)'), ), migrations.AlterField( model_name='questionentity', name='label_de', field=models.TextField(help_text='The German label for this question/questionset (auto-generated).', verbose_name='Label (de)'), ), migrations.AlterField( model_name='questionentity', name='label_en', field=models.TextField(help_text='The English label for this question/questionset (auto-generated).', verbose_name='Label (en)'), ), migrations.AlterField( model_name='questionentity', name='order', field=models.IntegerField(default=0, help_text='The position of this subsection in lists.', verbose_name='Order'), ), migrations.AlterField( model_name='questionentity', name='subsection', field=models.ForeignKey(help_text='The section this question belongs to.', 
on_delete=django.db.models.deletion.CASCADE, related_name='entities', to='questions.Subsection', verbose_name='Catalog'), ), migrations.AlterField( model_name='section', name='catalog', field=models.ForeignKey(help_text='The catalog this section belongs to.', on_delete=django.db.models.deletion.CASCADE, related_name='sections', to='questions.Catalog', verbose_name='Catalog'), ), migrations.AlterField( model_name='section', name='label_de', field=models.TextField(help_text='The German label for this section (auto-generated).', verbose_name='Label (de)'), ), migrations.AlterField( model_name='section', name='label_en', field=models.TextField(help_text='The English label for this section (auto-generated).', verbose_name='Label (en)'), ), migrations.AlterField( model_name='section', name='order', field=models.IntegerField(default=0, help_text='The position of this section in lists.', verbose_name='Order'), ), migrations.AlterField( model_name='section', name='title_de', field=models.CharField(help_text='The German title for this section.', max_length=256, verbose_name='Title (de)'), ), migrations.AlterField( model_name='section', name='title_en', field=models.CharField(help_text='The English title for this section.', max_length=256, verbose_name='Title (en)'), ), migrations.AlterField( model_name='subsection', name='label_de', field=models.TextField(help_text='The German label for this subsection (auto-generated).', verbose_name='Label (de)'), ), migrations.AlterField( model_name='subsection', name='label_en', field=models.TextField(help_text='The English label for this subsection (auto-generated).', verbose_name='Label (en)'), ), migrations.AlterField( model_name='subsection', name='order', field=models.IntegerField(default=0, help_text='The position of this subsection in lists.', verbose_name='Order'), ), migrations.AlterField( model_name='subsection', name='section', field=models.ForeignKey(help_text='The section this subsection belongs to.', on_delete=django.db.models.deletion.CASCADE, related_name='subsections', to='questions.Section', verbose_name='Catalog'), ), migrations.AlterField( model_name='subsection', name='title_de', field=models.CharField(help_text='The German title for this subsection.', max_length=256, verbose_name='Title (de)'), ), migrations.AlterField( model_name='subsection', name='title_en', field=models.CharField(help_text='The English title for this subsection.', max_length=256, verbose_name='Title (en)'), ), ]
apache-2.0
-6,882,792,998,716,746,000
49.951327
340
0.608597
false
4.405126
false
false
false