prompt | completion
---|---
large_string (lengths 70–991k) | large_string (lengths 0–1.02k)
<|file_name|>PMCCTest.java<|end_file_name|><|fim▁begin|>/*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Copyright (C) 2015 University of Waikato, Hamilton, New Zealand
*/
package meka.classifiers.multilabel;
import junit.framework.Test;
import junit.framework.TestSuite;
import weka.classifiers.Classifier;
/**
* Tests PMCC. Run from the command line with:<p/>
* java meka.classifiers.multilabel.PMCCTest
*
* @author FracPete (fracpete at waikato dot ac dot nz)
* @version $Revision: 117 $
*/
public class PMCCTest
extends AbstractMultiLabelClassifierTest {
/**
* Initializes the test.
*
* @param name the name of the test
*/
public PMCCTest(String name) {
super(name);
}<|fim▁hole|>
/**
* Creates a default classifier.
*
* @return the classifier
*/
@Override
public Classifier getClassifier() {
return new PMCC();
}
public static Test suite() {
return new TestSuite(PMCCTest.class);
}
public static void main(String[] args){
junit.textui.TestRunner.run(suite());
}
}<|fim▁end|> | |
<|file_name|>templates.go<|end_file_name|><|fim▁begin|>package django
const Requirements = `
# Requirements
<|fim▁hole|>Django==1.8.3
PyMySQL==0.6.6
python-memcached==1.54
pytz==2015.4
#whitenoise==2.0.2
webassets==0.10.1
cssmin==0.2.0
jsmin==2.1.2
django-assets==0.10
django-markdown==0.8.4
django-easy-pjax==1.2.0
#django-material==0.4.1
djangorestframework==3.2.0
django-debug-toolbar==1.3.2
`
const DevSettings = `
`<|fim▁end|> | |
<|file_name|>recorder.py<|end_file_name|><|fim▁begin|>import cv2<|fim▁hole|>import os
RecorderConfig = namedtuple('RecorderConfig',
['file_limit',
'time_limit',
'directory',
'file_base'])
class Recorder:
def __init__(self, config, fps, resolution):
self._logger = logging.getLogger(__name__)
self._file_limit = config.file_limit
self._directory = config.directory
self._file_base = config.file_base
self._frame_limit = config.time_limit * fps
self._resolution = resolution
self._fps = fps
self._ext = '.avi'
self._scan_video_files()
self._open_new_video_file()
def _scan_video_files(self):
directory = self._directory
base = self._file_base
ext = self._ext
regex = re.compile(base + r'(\d+)')
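# e.g. with file_base 'cam' this matches stems like 'cam0' or 'cam17'
# (extensions are split off below) and captures the numeric rotation index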
self._logger.info("Video files dir: %s. File base: %s",
directory, base)
lowest_idx = 0x7fffffff
highest_idx = 0
nbr_of_files = 0
for anyfile in os.listdir(directory):
(anyfile_base, anyfile_ext) = os.path.splitext(anyfile)
if not anyfile_ext == ext:
continue
m = regex.match(anyfile_base)
if m is None:
continue
idx = int(m.group(1))
if idx < lowest_idx:
lowest_idx = idx
if idx > highest_idx:
highest_idx = idx
nbr_of_files += 1
self._nbr_of_outfiles = nbr_of_files
if nbr_of_files == 0:
# There are no logfiles stored in the log file directory
self._logger.info("Videofile dir empty.")
self._cur_outfile_index = 0
self._cur_outfile_lowest_index = 0
else:
self._cur_outfile_index = highest_idx + 1
self._cur_outfile_lowest_index = lowest_idx
self._logger.info("Cur indices: highest = %d, lowest = %d",
self._cur_outfile_index, self._cur_outfile_lowest_index)
def _open_new_video_file(self):
directory = self._directory
base = self._file_base
ext = self._ext
new_file_name = directory + '/' + base + str(self._cur_outfile_index) + ext
self._logger.info("Opening new output file: %s", new_file_name)
fourcc = cv2.VideoWriter_fourcc(*'MJPG')  # motion-JPEG, matching the .avi container
self._logger.info("recording resoluton: {}".format(self._resolution))
self._outfile = cv2.VideoWriter(new_file_name, fourcc,
self._fps,
self._resolution)
self._nbr_of_outfiles += 1
self._cur_nbr_of_recorded_frames = 0
def _remove_old_video_file(self):
directory = self._directory
base = self._file_base
ext = self._ext
oldest_filename = directory + '/' + base + str(self._cur_outfile_lowest_index) + ext
self._logger.info("Removing old output file: %s", oldest_filename)
os.remove(oldest_filename)
# Update oldest and current index by rescanning all outfiles
self._scan_video_files()
def record_frame(self, frame):
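# Rotation policy: roll over to a new file once the current one has used
# its frame budget, then prune the oldest file when the file-count budget
# is exceeded.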
if self._cur_nbr_of_recorded_frames > self._frame_limit:
self._logger.info("Switching output file")
self._outfile.release()
self._cur_outfile_index += 1
self._open_new_video_file()
self._outfile.write(frame)
self._cur_nbr_of_recorded_frames += 1
if self._nbr_of_outfiles > self._file_limit:
self._remove_old_video_file()
def close(self):
if self._outfile is not None:
self._logger.info("Closing video output file")
self._outfile.release()<|fim▁end|> | from collections import namedtuple
import logging
import re |
<|file_name|>test_unit_gpssh.py<|end_file_name|><|fim▁begin|>import imp
import os
import io
<|fim▁hole|>from mock import patch
from gp_unittest import GpTestCase
class GpSshTestCase(GpTestCase):
def setUp(self):
# because gpssh does not have a .py extension, we have to use imp to import it
# if we had a gpssh.py, this is equivalent to:
# import gpssh
# self.subject = gpssh
gpssh_file = os.path.abspath(os.path.dirname(__file__) + "/../../../gpssh")
self.subject = imp.load_source('gpssh', gpssh_file)
self.old_sys_argv = sys.argv
sys.argv = []
def tearDown(self):
sys.argv = self.old_sys_argv
@patch('sys.exit')
def test_when_run_without_args_prints_help_text(self, sys_exit_mock):
sys_exit_mock.side_effect = Exception("on purpose")
# GOOD_MOCK_EXAMPLE of stdout
with patch('sys.stdout', new=io.BytesIO()) as mock_stdout:
with self.assertRaisesRegexp(Exception, "on purpose"):
self.subject.main()
self.assertIn('gpssh -- ssh access to multiple hosts at once', mock_stdout.getvalue())
@patch('sys.exit')
def test_happy_ssh_to_localhost_succeeds(self, sys_mock):
sys.argv = ['', '-h', 'localhost', 'uptime']
self.subject.main()
sys_mock.assert_called_with(0)<|fim▁end|> | import sys |
<|file_name|>test_events.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import unittest
from datetime import datetime
from wechatpy import parse_message
class EventsTestCase(unittest.TestCase):
def test_scan_code_push_event(self):
from wechatpy.events import ScanCodePushEvent
xml = """<xml>
<ToUserName><![CDATA[gh_e136c6e50636]]></ToUserName>
<FromUserName><![CDATA[oMgHVjngRipVsoxg6TuX3vz6glDg]]></FromUserName>
<CreateTime>1408090502</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[scancode_push]]></Event>
<EventKey><![CDATA[6]]></EventKey>
<ScanCodeInfo><ScanType><![CDATA[qrcode]]></ScanType>
<ScanResult><![CDATA[1]]></ScanResult>
</ScanCodeInfo>
</xml>"""
event = parse_message(xml)
self.assertTrue(isinstance(event, ScanCodePushEvent))
self.assertEqual("qrcode", event.scan_type)
self.assertEqual("1", event.scan_result)
def test_scan_code_waitmsg_event(self):
from wechatpy.events import ScanCodeWaitMsgEvent
xml = """<xml>
<ToUserName><![CDATA[gh_e136c6e50636]]></ToUserName>
<FromUserName><![CDATA[oMgHVjngRipVsoxg6TuX3vz6glDg]]></FromUserName>
<CreateTime>1408090606</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[scancode_waitmsg]]></Event>
<EventKey><![CDATA[6]]></EventKey>
<ScanCodeInfo><ScanType><![CDATA[qrcode]]></ScanType>
<ScanResult><![CDATA[2]]></ScanResult>
</ScanCodeInfo>
</xml>"""
event = parse_message(xml)
self.assertTrue(isinstance(event, ScanCodeWaitMsgEvent))
self.assertEqual("qrcode", event.scan_type)
self.assertEqual("2", event.scan_result)
def test_pic_sysphoto_event(self):
from wechatpy.events import PicSysPhotoEvent
xml = """<xml>
<ToUserName><![CDATA[gh_e136c6e50636]]></ToUserName>
<FromUserName><![CDATA[oMgHVjngRipVsoxg6TuX3vz6glDg]]></FromUserName>
<CreateTime>1408090651</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[pic_sysphoto]]></Event>
<EventKey><![CDATA[6]]></EventKey>
<SendPicsInfo><Count>1</Count>
<PicList>
<item>
<PicMd5Sum><![CDATA[1b5f7c23b5bf75682a53e7b6d163e185]]></PicMd5Sum>
</item>
</PicList>
</SendPicsInfo>
</xml>"""
event = parse_message(xml)
self.assertTrue(isinstance(event, PicSysPhotoEvent))<|fim▁hole|> def test_pic_photo_or_album_event(self):
from wechatpy.events import PicPhotoOrAlbumEvent
xml = """<xml>
<ToUserName><![CDATA[gh_e136c6e50636]]></ToUserName>
<FromUserName><![CDATA[oMgHVjngRipVsoxg6TuX3vz6glDg]]></FromUserName>
<CreateTime>1408090816</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[pic_photo_or_album]]></Event>
<EventKey><![CDATA[6]]></EventKey>
<SendPicsInfo><Count>1</Count>
<PicList>
<item>
<PicMd5Sum><![CDATA[5a75aaca956d97be686719218f275c6b]]></PicMd5Sum>
</item>
</PicList>
</SendPicsInfo>
</xml>"""
event = parse_message(xml)
self.assertTrue(isinstance(event, PicPhotoOrAlbumEvent))
self.assertEqual(1, event.count)
self.assertEqual("5a75aaca956d97be686719218f275c6b", event.pictures[0]["PicMd5Sum"])
def test_pic_wechat_event(self):
from wechatpy.events import PicWeChatEvent
xml = """<xml>
<ToUserName><![CDATA[gh_e136c6e50636]]></ToUserName>
<FromUserName><![CDATA[oMgHVjngRipVsoxg6TuX3vz6glDg]]></FromUserName>
<CreateTime>1408090816</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[pic_weixin]]></Event>
<EventKey><![CDATA[6]]></EventKey>
<SendPicsInfo><Count>1</Count>
<PicList>
<item>
<PicMd5Sum><![CDATA[5a75aaca956d97be686719218f275c6b]]></PicMd5Sum>
</item>
</PicList>
</SendPicsInfo>
</xml>"""
event = parse_message(xml)
self.assertTrue(isinstance(event, PicWeChatEvent))
self.assertEqual(1, event.count)
self.assertEqual("5a75aaca956d97be686719218f275c6b", event.pictures[0]["PicMd5Sum"])
def test_location_select_event(self):
from wechatpy.events import LocationSelectEvent
xml = """<xml>
<ToUserName><![CDATA[gh_e136c6e50636]]></ToUserName>
<FromUserName><![CDATA[oMgHVjngRipVsoxg6TuX3vz6glDg]]></FromUserName>
<CreateTime>1408091189</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[location_select]]></Event>
<EventKey><![CDATA[6]]></EventKey>
<SendLocationInfo><Location_X><![CDATA[23]]></Location_X>
<Location_Y><![CDATA[113]]></Location_Y>
<Scale><![CDATA[15]]></Scale>
<Label><![CDATA[广州市海珠区客村艺苑路 106号]]></Label>
<Poiname><![CDATA[]]></Poiname>
</SendLocationInfo>
</xml>"""
event = parse_message(xml)
self.assertTrue(isinstance(event, LocationSelectEvent))
self.assertEqual(("23", "113"), event.location)
self.assertEqual("15", event.scale)
self.assertTrue(event.poiname is None)
self.assertEqual("广州市海珠区客村艺苑路 106号", event.label)
def test_merchant_order_event(self):
from wechatpy.events import MerchantOrderEvent
xml = """<xml>
<ToUserName><![CDATA[weixin_media1]]></ToUserName>
<FromUserName><![CDATA[oDF3iYyVlek46AyTBbMRVV8VZVlI]]></FromUserName>
<CreateTime>1398144192</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[merchant_order]]></Event>
<OrderId><![CDATA[test_order_id]]></OrderId>
<OrderStatus>2</OrderStatus>
<ProductId><![CDATA[test_product_id]]></ProductId>
<SkuInfo><![CDATA[10001:1000012;10002:100021]]></SkuInfo>
</xml>"""
event = parse_message(xml)
self.assertTrue(isinstance(event, MerchantOrderEvent))
self.assertEqual("test_order_id", event.order_id)
self.assertEqual(2, event.order_status)
self.assertEqual("test_product_id", event.product_id)
self.assertEqual("10001:1000012;10002:100021", event.sku_info)
def test_kf_create_session_event(self):
from wechatpy.events import KfCreateSessionEvent
xml = """<xml>
<ToUserName><![CDATA[touser]]></ToUserName>
<FromUserName><![CDATA[fromuser]]></FromUserName>
<CreateTime>1399197672</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[kf_create_session]]></Event>
<KfAccount><![CDATA[test1@test]]></KfAccount>
</xml>"""
event = parse_message(xml)
self.assertTrue(isinstance(event, KfCreateSessionEvent))
self.assertEqual("test1@test", event.account)
def test_kf_close_session_event(self):
from wechatpy.events import KfCloseSessionEvent
xml = """<xml>
<ToUserName><![CDATA[touser]]></ToUserName>
<FromUserName><![CDATA[fromuser]]></FromUserName>
<CreateTime>1399197672</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[kf_close_session]]></Event>
<KfAccount><![CDATA[test1@test]]></KfAccount>
</xml>"""
event = parse_message(xml)
self.assertTrue(isinstance(event, KfCloseSessionEvent))
self.assertEqual("test1@test", event.account)
def test_kf_switch_session_event(self):
from wechatpy.events import KfSwitchSessionEvent
xml = """<xml>
<ToUserName><![CDATA[touser]]></ToUserName>
<FromUserName><![CDATA[fromuser]]></FromUserName>
<CreateTime>1399197672</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[kf_switch_session]]></Event>
<FromKfAccount><![CDATA[test1@test]]></FromKfAccount>
<ToKfAccount><![CDATA[test2@test]]></ToKfAccount>
</xml>"""
event = parse_message(xml)
self.assertTrue(isinstance(event, KfSwitchSessionEvent))
self.assertEqual("test1@test", event.from_account)
self.assertEqual("test2@test", event.to_account)
def test_template_send_job_finish_event(self):
from wechatpy.events import TemplateSendJobFinishEvent
xml = """<xml>
<ToUserName><![CDATA[touser]]></ToUserName>
<FromUserName><![CDATA[fromuser]]></FromUserName>
<CreateTime>1395658920</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[TEMPLATESENDJOBFINISH]]></Event>
<MsgID>200163836</MsgID>
<Status><![CDATA[success]]></Status>
</xml>"""
event = parse_message(xml)
self.assertTrue(isinstance(event, TemplateSendJobFinishEvent))
self.assertEqual(200163836, event.id)
self.assertEqual("success", event.status)
def test_template_subscribe_msg_popup_event(self):
from wechatpy.events import SubscribeMsgPopupEvent
xml = """<xml>
<ToUserName><![CDATA[gh_123456789abc]]></ToUserName>
<FromUserName><![CDATA[otFpruAK8D-E6EfStSYonYSBZ8_4]]></FromUserName>
<CreateTime>1610969440</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[subscribe_msg_popup_event]]></Event>
<SubscribeMsgPopupEvent>
<List>
<TemplateId><![CDATA[VRR0UEO9VJOLs0MHlU0OilqX6MVFDwH3_3gz3Oc0NIc]]></TemplateId>
<SubscribeStatusString><![CDATA[accept]]></SubscribeStatusString>
<PopupScene>2</PopupScene>
</List>
<List>
<TemplateId><![CDATA[9nLIlbOQZC5Y89AZteFEux3WCXRRRG5Wfzkpssu4bLI]]></TemplateId>
<SubscribeStatusString><![CDATA[reject]]></SubscribeStatusString>
<PopupScene>2</PopupScene>
</List>
</SubscribeMsgPopupEvent>
</xml>"""
event = parse_message(xml)
self.assertIsInstance(event, SubscribeMsgPopupEvent)
self.assertEqual(2, len(event.subscribes))
self.assertEqual("VRR0UEO9VJOLs0MHlU0OilqX6MVFDwH3_3gz3Oc0NIc", event.subscribes[0]["TemplateId"])
def test_template_subscribe_msg_change_event(self):
from wechatpy.events import SubscribeMsgChangeEvent
xml = """<xml>
<ToUserName><![CDATA[gh_123456789abc]]></ToUserName>
<FromUserName><![CDATA[otFpruAK8D-E6EfStSYonYSBZ8_4]]></FromUserName>
<CreateTime>1610969440</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[subscribe_msg_change_event]]></Event>
<SubscribeMsgChangeEvent>
<List>
<TemplateId><![CDATA[VRR0UEO9VJOLs0MHlU0OilqX6MVFDwH3_3gz3Oc0NIc]]></TemplateId>
<SubscribeStatusString><![CDATA[reject]]></SubscribeStatusString>
</List>
</SubscribeMsgChangeEvent>
</xml>"""
event = parse_message(xml)
self.assertIsInstance(event, SubscribeMsgChangeEvent)
self.assertEqual(1, len(event.subscribes))
self.assertEqual("VRR0UEO9VJOLs0MHlU0OilqX6MVFDwH3_3gz3Oc0NIc", event.subscribes[0]["TemplateId"])
self.assertEqual("reject", event.subscribes[0]["SubscribeStatusString"])
def test_template_subscribe_msg_sent_event(self):
from wechatpy.events import SubscribeMsgSentEvent
xml = """<xml>
<ToUserName><![CDATA[gh_123456789abc]]></ToUserName>
<FromUserName><![CDATA[otFpruAK8D-E6EfStSYonYSBZ8_4]]></FromUserName>
<CreateTime>1610969468</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[subscribe_msg_sent_event]]></Event>
<SubscribeMsgSentEvent>
<List>
<TemplateId><![CDATA[VRR0UEO9VJOLs0MHlU0OilqX6MVFDwH3_3gz3Oc0NIc]]></TemplateId>
<MsgID>1700827132819554304</MsgID>
<ErrorCode>0</ErrorCode>
<ErrorStatus><![CDATA[success]]></ErrorStatus>
</List>
</SubscribeMsgSentEvent>
</xml>"""
event = parse_message(xml)
self.assertIsInstance(event, SubscribeMsgSentEvent)
self.assertEqual(1, len(event.subscribes))
self.assertEqual("VRR0UEO9VJOLs0MHlU0OilqX6MVFDwH3_3gz3Oc0NIc", event.subscribes[0]["TemplateId"])
self.assertEqual("1700827132819554304", event.subscribes[0]["MsgID"])
def test_shakearound_user_shake_event(self):
from wechatpy.events import ShakearoundUserShakeEvent
xml = """<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[fromUser]]></FromUserName>
<CreateTime>1433332012</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[ShakearoundUserShake]]></Event>
<ChosenBeacon>
<Uuid><![CDATA[uuid]]></Uuid>
<Major>major</Major>
<Minor>minor</Minor>
<Distance>0.057</Distance>
</ChosenBeacon>
<AroundBeacons>
<AroundBeacon>
<Uuid><![CDATA[uuid]]></Uuid>
<Major>major</Major>
<Minor>minor</Minor>
<Distance>166.816</Distance>
</AroundBeacon>
<AroundBeacon>
<Uuid><![CDATA[uuid]]></Uuid>
<Major>major</Major>
<Minor>minor</Minor>
<Distance>15.013</Distance>
</AroundBeacon>
</AroundBeacons>
</xml>"""
event = parse_message(xml)
self.assertTrue(isinstance(event, ShakearoundUserShakeEvent))
chosen_beacon = {
"uuid": "uuid",
"major": "major",
"minor": "minor",
"distance": 0.057,
}
self.assertEqual(chosen_beacon, event.chosen_beacon)
self.assertEqual(2, len(event.around_beacons))
def test_wifi_connected_event(self):
from wechatpy.events import WiFiConnectedEvent
xml = """
<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[FromUser]]></FromUserName>
<CreateTime>123456789</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[WifiConnected]]></Event>
<ConnectTime>0</ConnectTime>
<ExpireTime>0</ExpireTime>
<VendorId><![CDATA[3001224419]]></VendorId>
<PlaceId><![CDATA[1234]]></PlaceId>
<DeviceNo><![CDATA[00:1f:7a:ad:5c:a8]]></DeviceNo>
</xml>"""
event = parse_message(xml)
self.assertTrue(isinstance(event, WiFiConnectedEvent))
self.assertEqual(0, event.connect_time)
self.assertEqual("1234", event.shop_id)
self.assertEqual("00:1f:7a:ad:5c:a8", event.bssid)
def test_qualification_verify_success_event(self):
from wechatpy.events import QualificationVerifySuccessEvent
xml = """
<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[FromUser]]></FromUserName>
<CreateTime>1442401156</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[qualification_verify_success]]></Event>
<ExpiredTime>1442401156</ExpiredTime>
</xml>"""
event = parse_message(xml)
self.assertTrue(isinstance(event, QualificationVerifySuccessEvent))
self.assertTrue(isinstance(event.expired_time, datetime))
def test_qualification_verify_fail_event(self):
from wechatpy.events import QualificationVerifyFailEvent
xml = """
<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[FromUser]]></FromUserName>
<CreateTime>1442401156</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[qualification_verify_fail]]></Event>
<FailTime>1442401122</FailTime>
<FailReason><![CDATA[by time]]></FailReason>
</xml>"""
event = parse_message(xml)
self.assertTrue(isinstance(event, QualificationVerifyFailEvent))
self.assertTrue(isinstance(event.fail_time, datetime))
self.assertEqual(event.fail_reason, "by time")
def test_naming_verify_success_event(self):
from wechatpy.events import NamingVerifySuccessEvent
xml = """
<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[FromUser]]></FromUserName>
<CreateTime>1442401093</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[naming_verify_success]]></Event>
<ExpiredTime>1442401093</ExpiredTime>
</xml>"""
event = parse_message(xml)
self.assertTrue(isinstance(event, NamingVerifySuccessEvent))
self.assertTrue(isinstance(event.expired_time, datetime))
def test_naming_verify_fail_event(self):
from wechatpy.events import NamingVerifyFailEvent
xml = """
<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[FromUser]]></FromUserName>
<CreateTime>1442401061</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[naming_verify_fail]]></Event>
<FailTime>1442401061</FailTime>
<FailReason><![CDATA[by time]]></FailReason>
</xml>"""
event = parse_message(xml)
self.assertTrue(isinstance(event, NamingVerifyFailEvent))
self.assertTrue(isinstance(event.fail_time, datetime))
self.assertEqual(event.fail_reason, "by time")
def test_annual_renew_event(self):
from wechatpy.events import AnnualRenewEvent
xml = """
<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[FromUser]]></FromUserName>
<CreateTime>1442401004</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[annual_renew]]></Event>
<ExpiredTime>1442401004</ExpiredTime>
</xml>"""
event = parse_message(xml)
self.assertTrue(isinstance(event, AnnualRenewEvent))
self.assertTrue(isinstance(event.expired_time, datetime))
def test_verify_expired_event(self):
from wechatpy.events import VerifyExpiredEvent
xml = """
<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[FromUser]]></FromUserName>
<CreateTime>1442400900</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[verify_expired]]></Event>
<ExpiredTime>1442400900</ExpiredTime>
</xml>"""
event = parse_message(xml)
self.assertTrue(isinstance(event, VerifyExpiredEvent))
self.assertTrue(isinstance(event.expired_time, datetime))<|fim▁end|> | self.assertEqual(1, event.count)
self.assertEqual("1b5f7c23b5bf75682a53e7b6d163e185", event.pictures[0]["PicMd5Sum"])
|
<|file_name|>CWE78_OS_Command_Injection__char_file_execl_43.cpp<|end_file_name|><|fim▁begin|>/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE78_OS_Command_Injection__char_file_execl_43.cpp
Label Definition File: CWE78_OS_Command_Injection.strings.label.xml
Template File: sources-sink-43.tmpl.cpp
*/
/*
<|fim▁hole|> * CWE: 78 OS Command Injection
* BadSource: file Read input from a file
* GoodSource: Fixed string
* Sinks: execl
* BadSink : execute command with execl
* Flow Variant: 43 Data flow: data flows using a C++ reference from one function to another in the same source file
*
* */
#include "std_testcase.h"
#include <wchar.h>
#ifdef _WIN32
#define COMMAND_INT_PATH "%WINDIR%\\system32\\cmd.exe"
#define COMMAND_INT "cmd.exe"
#define COMMAND_ARG1 "/c"
#define COMMAND_ARG2 "dir "
#define COMMAND_ARG3 data
#else /* NOT _WIN32 */
#include <unistd.h>
#define COMMAND_INT_PATH "/bin/sh"
#define COMMAND_INT "sh"
#define COMMAND_ARG1 "-c"
#define COMMAND_ARG2 "ls "
#define COMMAND_ARG3 data
#endif
#ifdef _WIN32
#define FILENAME "C:\\temp\\file.txt"
#else
#define FILENAME "/tmp/file.txt"
#endif
#ifdef _WIN32
#include <process.h>
#define EXECL _execl
#else /* NOT _WIN32 */
#define EXECL execl
#endif
namespace CWE78_OS_Command_Injection__char_file_execl_43
{
#ifndef OMITBAD
static void badSource(char * &data)
{
{
/* Read input from a file */
size_t dataLen = strlen(data);
FILE * pFile;
/* if there is room in data, attempt to read the input from a file */
if (100-dataLen > 1)
{
pFile = fopen(FILENAME, "r");
if (pFile != NULL)
{
/* POTENTIAL FLAW: Read data from a file */
if (fgets(data+dataLen, (int)(100-dataLen), pFile) == NULL)
{
printLine("fgets() failed");
/* Restore NUL terminator if fgets fails */
data[dataLen] = '\0';
}
fclose(pFile);
}
}
}
}
void bad()
{
char * data;
char dataBuffer[100] = COMMAND_ARG2;
data = dataBuffer;
badSource(data);
/* execl - specify the path where the command is located */
/* POTENTIAL FLAW: Execute command without validating input possibly leading to command injection */
EXECL(COMMAND_INT_PATH, COMMAND_INT_PATH, COMMAND_ARG1, COMMAND_ARG3, NULL);
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodG2B() uses the GoodSource with the BadSink */
static void goodG2BSource(char * &data)
{
/* FIX: Append a fixed string to data (not user / external input) */
strcat(data, "*.*");
}
static void goodG2B()
{
char * data;
char dataBuffer[100] = COMMAND_ARG2;
data = dataBuffer;
goodG2BSource(data);
/* execl - specify the path where the command is located */
/* POTENTIAL FLAW: Execute command without validating input possibly leading to command injection */
EXECL(COMMAND_INT_PATH, COMMAND_INT_PATH, COMMAND_ARG1, COMMAND_ARG3, NULL);
}
void good()
{
goodG2B();
}
#endif /* OMITGOOD */
} /* close namespace */
/* Below is the main(). It is only used when building this testcase on
* its own for testing or for building a binary to use in testing binary
* analysis tools. It is not used when compiling all the testcases as one
* application, which is how source code analysis tools are tested.
*/
#ifdef INCLUDEMAIN
using namespace CWE78_OS_Command_Injection__char_file_execl_43; /* so that we can use good and bad easily */
int main(int argc, char * argv[])
{
/* seed randomness */
srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
printLine("Calling good()...");
good();
printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
printLine("Calling bad()...");
bad();
printLine("Finished bad()");
#endif /* OMITBAD */
return 0;
}
#endif<|fim▁end|> | * @description
|
<|file_name|>server.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import os
import asyncio
import struct
from .logger import get_logger
from .protocol import PeerStreamIterator
from .message import (MessageID,
InterestedMessage,
HandshakeMessage,
BitFieldMessage,
NotInterestedMessage,
ChokeMessage,
UnchokeMessage,
HaveMessage,
RequestMessage,
PieceMessage,
CancelMessage,
KeepAliveMessage)
logger = get_logger()
class SourceFileReader:
def __init__(self, torrent):
self.torrent = torrent
self.fd = os.open(self.torrent.name, os.O_RDONLY)
def read(self, begin, index, length):
pos = index * self.torrent.info.piece_length
os.lseek(self.fd, pos, os.SEEK_SET)
return os.read(self.fd, length)
def has_all_pieces(self):
"""Check the size on the disk is equal or greater than
(piece_length - 1) * piece_length.
The assumption is clients wrote the last piece to disk
after checking integrating
Returns True or False.
"""
min_length = (len(self.torrent.info.pieces) - 1) * self.torrent.info.piece_length
return os.path.getsize(self.torrent.name) > min_length
def calculate_have_pieces(self):
pass
def get_have_pieces(self):
"""Get all have pieces
Returns list of all bool values with size of piece+1.
The last element in the list is False and other positions contains
True or False.
Available piece is represented as True and missing piece
is represented as False.
"""
if self.has_all_pieces():
pieces_availability = [True] * len(self.torrent.info.pieces)
pieces_availability.append(False)
return pieces_availability
return self.calculate_have_pieces()
class RequestHandler:
def __init__(self, torrent):
self.torrent = torrent
self.file_reader = SourceFileReader(torrent=self.torrent)
def parse(self, buffer):
"""
Tries to parse a protocol message once enough bytes have been read into
the buffer.
:return The parsed message, or None if no message could be parsed
"""
# Each message is structured as:
# <length prefix><message ID><payload>
#
# The `length prefix` is a four byte big-endian value
# The `message ID` is a decimal byte
# The `payload` is the value of `length prefix`
#
# The message length is not part of the actual length. So another
# 4 bytes needs to be included when slicing the buffer.
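# For example, an 'interested' message (ID 2, empty payload) is
# struct.pack('>Ib', 1, 2), i.e. b'\x00\x00\x00\x01\x02' on the wire,
# and a keep-alive is just the 4-byte length prefix b'\x00\x00\x00\x00'.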
self.buffer = buffer
header_length = 4
if len(self.buffer) == 68:
return HandshakeMessage.decode(self.buffer)
elif len(self.buffer) > 4: # 4 bytes is needed to identify the message
message_length = struct.unpack('>I', self.buffer[0:4])[0]
if message_length == 0:
return KeepAliveMessage()
if len(self.buffer) >= message_length:
message_id = struct.unpack('>b', self.buffer[4:5])[0]
def _consume():
"""Consume the current message from the read buffer"""
self.buffer = self.buffer[header_length + message_length:]
def _data():
""""Extract the current message from the read buffer"""
return self.buffer[:header_length + message_length]
if message_id is MessageID.BitField.value:
data = _data()
_consume()
return BitFieldMessage.decode(data)
elif message_id is MessageID.Interested.value:
_consume()
return InterestedMessage()
elif message_id is MessageID.NotInterested.value:<|fim▁hole|> return NotInterestedMessage()
elif message_id is MessageID.Choke.value:
_consume()
return ChokeMessage()
elif message_id is MessageID.Unchoke.value:
_consume()
return UnchokeMessage()
elif message_id is MessageID.Have.value:
data = _data()
_consume()
return HaveMessage.decode(data)
elif message_id is MessageID.Piece.value:
data = _data()
_consume()
return PieceMessage.decode(data)
elif message_id is MessageID.Request.value:
data = _data()
_consume()
return RequestMessage.decode(data)
elif message_id is MessageID.Cancel.value:
data = _data()
_consume()
return CancelMessage.decode(data)
else:
logger.debug('Unsupported message!')
else:
return None
logger.debug('Not enough data in buffer to parse a message')
return None
def get_piece(self, begin, index, length):
data = self.file_reader.read(begin=begin, index=index, length=length)
return PieceMessage(begin=begin, index=index, block=data)
def handle_message(self, buffer):
message = self.parse(buffer)
if isinstance(message, NotInterestedMessage):
logger.debug('Remove interested state')
elif isinstance(message, HandshakeMessage):
logger.debug('Received Handshake')
elif isinstance(message, ChokeMessage):
logger.debug('Received choke message')
# TODO: record the choked state for this peer
elif isinstance(message, UnchokeMessage):
logger.debug('Received unchoke message')
elif isinstance(message, HaveMessage):
logger.debug('Received have message')
elif isinstance(message, BitFieldMessage):
logger.debug('Received bit field message: {}'.format(message))
elif isinstance(message, PieceMessage):
pass
elif isinstance(message, InterestedMessage):
return BitFieldMessage(val=self.file_reader.get_have_pieces())
elif isinstance(message, RequestMessage):
return self.get_piece(begin=message.begin, index=message.index,
length=message.length)
elif isinstance(message, CancelMessage):
# TODO: Implement cancel data
pass
return message
class TorrentServer(asyncio.Protocol):
def __init__(self, torrent):
self.torrent = torrent
super().__init__()
def __call__(self):
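# loop.create_server() expects a protocol factory; returning self lets this
# one instance act as that factory, so all connections share its state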
self.connections = set([])
self.request_handler = RequestHandler(torrent=self.torrent)
logger.debug('Init server')
return self
def connection_made(self, transport):
self.transport = transport
peer = transport.get_extra_info('peername')
self.connections.add(peer)
def data_received(self, data):
message = self.request_handler.handle_message(data)
logger.debug(message)
if message:
logger.info('Serving {}'.format(message))
self.transport.write(message.encode())
def eof_received(self):
logger.debug('eof received')
def connection_lost(self, exc):
logger.debug('connection lost')
async def run_server(port, torrent):
"""Run a server to respond to all clients
"""
logger.info('Starting server in port {}'.format(port))
loop = asyncio.get_event_loop()
server = await loop.create_server(
TorrentServer(torrent), host='127.0.0.1', port=port)
return server<|fim▁end|> | _consume() |
<|file_name|>system.py<|end_file_name|><|fim▁begin|>"""
System plugin
Copyright (C) 2016 Walid Benghabrit
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from accmon.plugins.plugin import *
class System(Plugin):
<|fim▁hole|>
def handle_request(self, request):
res = super(System, self).handle_request(request)
if res is not None: return res<|fim▁end|> | def __init__(self):
super().__init__() |
<|file_name|>spark_webservice_demo.py<|end_file_name|><|fim▁begin|># Copyright 2015 David Wang. All rights reserved.
# Use of this source code is governed by MIT license.
# Please see LICENSE file
# WebSpark
# Spark web service demo
# version 0.2
# use REPL or define sc SparkContext
import urllib2, urllib
import math
import time
import traceback
# Spark Web Application demo with parallel processing
# see demoservice function
ServerAddr="http://<enter WebSpark IP address here>:8001"
RegisterURL=ServerAddr + "/addapi?"
RespondURL=ServerAddr + "/respond?"
errwaitseconds = 3
element = '<li class="list-group-item">first prime above %d is %d</li>'
<|fim▁hole|> if num<2:
return False
for i in range(2, int(math.sqrt(num))+1):
if num%i==0:
return False
return True
def firstprimeabove(num):
i=num+1
while True:
if slow_isprime(i):
return i
i+=1
servicename = 'demo'
# Spark Web Application demo
def demo(url):
rawdata = range(1000, 20000, 1100)
data = sc.parallelize(rawdata)
above=data.map(lambda x: (x, firstprimeabove(x))).collect()
primelist=[element%x for x in above]
response = template % ' '.join(primelist)
return response
def parserequest(rawrequest):
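# Expected WebSpark hand-off layout (inferred from this parser; one field
# per line): line 0 = service name, line 1 = client URL, line 2 = remote
# address, lines 3+ = request headers.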
lines = rawrequest.split('\n')
if len(lines)<4:
print 'incorrect WebSpark request'
else:
name = lines[0]
url = lines[1]
remoteaddr = lines[2]
header = lines[3:]
return name, url, remoteaddr, header
# publish web service with WebSpark
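# long-poll cycle: register with WebSpark, block until a client request is
# handed over, render the page via Spark, then POST the response back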
while True:
try:
url = RegisterURL + urllib.urlencode({'name': servicename})
conn = urllib2.urlopen(url)
data = conn.read()
conn.close()
name, clienturl, remoteaddr, header = parserequest(data)
print name, clienturl, remoteaddr, header
response = demo(clienturl)
url = RespondURL + urllib.urlencode({'name': name})
conn = urllib2.urlopen(url, response)
conn.close()
except Exception as ex:
print 'error connecting to WebSpark at', ServerAddr
traceback.print_exc()
time.sleep(errwaitseconds)
continue<|fim▁end|> | with open('template.html') as f:
template = f.read()
def slow_isprime(num): |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>import nose
from os import path
<|fim▁hole|>tests_path = path.join(path.abspath(path.dirname(file_path)), "tests")
nose.main(argv=[path.abspath(__file__), "--with-coverage", "--cover-erase", "--cover-package=frapalyzer", tests_path])<|fim▁end|> | file_path = path.abspath(__file__) |
<|file_name|>MWAttributeHandle.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 1998, 2012 Oracle and/or its affiliates. All rights reserved.
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
* which accompanies this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*
* Contributors:
* Oracle - initial API and implementation from Oracle TopLink
******************************************************************************/
package org.eclipse.persistence.tools.workbench.mappingsmodel.handles;
import org.eclipse.persistence.tools.workbench.mappingsmodel.MWModel;
import org.eclipse.persistence.tools.workbench.mappingsmodel.meta.MWClass;
import org.eclipse.persistence.tools.workbench.mappingsmodel.meta.MWClassAttribute;
import org.eclipse.persistence.tools.workbench.utility.node.Node;
import org.eclipse.persistence.descriptors.ClassDescriptor;
import org.eclipse.persistence.mappings.OneToOneMapping;
import org.eclipse.persistence.oxm.XMLDescriptor;
/**
* MWAttributeHandle is used to isolate the painful bits of code
* necessary to correctly handle references to MWClassAttributes.
*
* Since a MWClassAttribute is nested within the XML file
* for a MWClass, we need to store a reference to a particular
* attribute as a pair of instance variables:
* - the name of the declaring MWClass
* - the name of the attribute
*
* This causes no end of pain when dealing with TopLink, property
* change listeners, backward-compatibility, etc.
*/
public final class MWAttributeHandle extends MWHandle {
/**
* This is the actual attribute.
* It is built from the declaring type and attribute names, below.
*/
private volatile MWClassAttribute attribute;
/**
* The declaring type and attribute names are transient. They
* are used only to hold their values until postProjectBuild()
* is called and we can resolve the actual attribute.
* We do not keep these in synch with the attribute itself because
* we cannot know when the attribute has been renamed etc.
*/
private volatile String attributeDeclaringTypeName;
private volatile String attributeName;
// ********** constructors **********
/**
* default constructor - for TopLink use only
*/
private MWAttributeHandle() {
super();
}
<|fim▁hole|> public MWAttributeHandle(MWModel parent, NodeReferenceScrubber scrubber) {
super(parent, scrubber);
}
public MWAttributeHandle(MWModel parent, MWClassAttribute attribute, NodeReferenceScrubber scrubber) {
super(parent, scrubber);
this.attribute = attribute;
}
// ********** instance methods **********
public MWClassAttribute getAttribute() {
return this.attribute;
}
public void setAttribute(MWClassAttribute attribute) {
this.attribute = attribute;
}
protected Node node() {
return getAttribute();
}
public MWAttributeHandle setScrubber(NodeReferenceScrubber scrubber) {
this.setScrubberInternal(scrubber);
return this;
}
public void postProjectBuild() {
super.postProjectBuild();
if (this.attributeDeclaringTypeName != null && this.attributeName != null) {
// the type will never be null - the repository will auto-generate one if necessary
this.attribute = this.typeNamed(this.attributeDeclaringTypeName).attributeNamedFromCombinedAll(this.attributeName);
}
// Ensure attributeDeclaringTypeName and attributeName are not
// used by setting them to null....
// If the XML is corrupt and only one of these attributes is populated,
// this will cause the populated attribute to be cleared out if the
// objects are rewritten.
this.attributeDeclaringTypeName = null;
this.attributeName = null;
}
/**
* Override to delegate comparison to the attribute itself.
* If the handles being compared are in a collection that is being sorted,
* NEITHER attribute should be null.
*/
public int compareTo(Object o) {
return this.attribute.compareTo(((MWAttributeHandle) o).attribute);
}
public void toString(StringBuffer sb) {
if (this.attribute == null) {
sb.append("null");
} else {
this.attribute.toString(sb);
}
}
// ********** TopLink methods **********
public static XMLDescriptor buildDescriptor(){
XMLDescriptor descriptor = new XMLDescriptor();
descriptor.setJavaClass(MWAttributeHandle.class);
descriptor.addDirectMapping("attributeDeclaringTypeName", "getAttributeDeclaringTypeNameForTopLink", "setAttributeDeclaringTypeNameForTopLink", "attribute-declaring-type-name/text()");
descriptor.addDirectMapping("attributeName", "getAttributeNameForTopLink", "setAttributeNameForTopLink", "attribute-name/text()");
return descriptor;
}
private String getAttributeDeclaringTypeNameForTopLink(){
return (this.attribute == null) ? null : this.attribute.getDeclaringType().getName();
}
private void setAttributeDeclaringTypeNameForTopLink(String attributeDeclaringTypeName){
this.attributeDeclaringTypeName = attributeDeclaringTypeName;
}
private String getAttributeNameForTopLink() {
return (this.attribute == null) ? null : attribute.getName();
}
private void setAttributeNameForTopLink(String attributeName) {
this.attributeName = attributeName;
}
}<|fim▁end|> | |
<|file_name|>constants.py<|end_file_name|><|fim▁begin|>from datetime import date, time
<|fim▁hole|>NO_DATE = date(1970, 01, 01)
NO_TIME = time(0, 0)
NO_URL = u"__NO_URL__"
UNFINISHED_TAG = u"unfinished"
GHOST_LINK_TAG = u"ghost link"
GHOST_LINK_TITLE = u"__GHOST_LINK__"
GHOST_LINK_URL = u"__GHOST_LINK__"
PAYWALLED_CONTENT = u"__PAYWALLED__"
RENDERED_STORIFY_TITLE = u"__RENDERED_STORIFY__"
RENDERED_TWEET_TITLE = u"__RENDERED_TWEET__"
EMBEDDED_VIDEO_TITLE = u"__EMBEDDED_VIDEO_TITLE__"
EMBEDDED_VIDEO_URL = u"__EMBEDDED_VIDEO_URL__"<|fim▁end|> | NO_TITLE = u"__NO_TITLE__"
NO_AUTHOR_NAME = 'None'
NO_CATEGORY_NAME = 'None'
NON_EXISTENT_ARTICLE_TITLE = 'NON_EXISTENT' |
<|file_name|>__main__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Command line interface for mcuxpresso_builder."""
import argparse
import pathlib
import sys
from pw_build_mcuxpresso import components
def _parse_args() -> argparse.Namespace:
"""Setup argparse and parse command line args."""
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(dest='command',
metavar='<command>',
required=True)
project_parser = subparsers.add_parser(
'project', help='output components of an MCUXpresso project')
project_parser.add_argument('manifest_filename', type=pathlib.Path)
project_parser.add_argument('--include', type=str, action='append')<|fim▁hole|> return parser.parse_args()
def main():
"""Main command line function."""
args = _parse_args()
if args.command == 'project':
components.project(args.manifest_filename,
include=args.include,
exclude=args.exclude,
path_prefix=args.path_prefix)
sys.exit(0)
if __name__ == '__main__':
main()<|fim▁end|> | project_parser.add_argument('--exclude', type=str, action='append')
project_parser.add_argument('--prefix', dest='path_prefix', type=str)
|
<|file_name|>24.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>export = EarthFilled24;<|fim▁end|> | import { EarthFilled24 } from "../../";
|
<|file_name|>89874000.jsonp.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | jsonp({"cep":"89874000","cidade":"Maravilha","uf":"SC","estado":"Santa Catarina"}); |
<|file_name|>reentrant.rs<|end_file_name|><|fim▁begin|>//! Module to handle reentrant/recursion limits while deserializing.
use std::cell::Cell;
use std::rc::Rc;
use crate::errors::*;
/// Sets a limit on the amount of recursion during deserialization. This does
/// not do any synchronization -- it is intended purely for single-threaded use.
pub struct ReentrantLimit(Rc<Cell<usize>>);
impl ReentrantLimit {
/// Create a new reentrant limit.
pub fn new(limit: usize) -> Self {
ReentrantLimit(Rc::new(Cell::new(limit)))
}
/// Try to decrease the limit by 1. Returns an RAII guard that restores
/// the slot (increases the limit by 1) when dropped.
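///
/// A minimal usage sketch (error handling and the surrounding
/// `Result`-returning function elided; the kind label is arbitrary):
///
/// ```ignore
/// let mut limit = ReentrantLimit::new(16);
/// let _guard = limit.acquire("my-kind")?;
/// // recursion is protected while `_guard` is alive; slot restored on drop
/// ```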
pub fn acquire<S: Into<String>>(&mut self, kind: S) -> Result<ReentrantGuard> {
if self.0.get() == 0 {
bail!(ErrorKind::DeRecursionLimitExceeded(kind.into()));
}
self.0.set(self.0.get() - 1);
Ok(ReentrantGuard(self.0.clone()))
}<|fim▁hole|>}
/// RAII guard for reentrant limits.
pub struct ReentrantGuard(Rc<Cell<usize>>);
impl Drop for ReentrantGuard {
fn drop(&mut self) {
self.0.set(self.0.get() + 1);
}
}<|fim▁end|> | |
<|file_name|>lsolveAll.js<|end_file_name|><|fim▁begin|>import { factory } from '../../../utils/factory.js'
import { createSolveValidation } from './utils/solveValidation.js'
const name = 'lsolveAll'
const dependencies = [
'typed',
'matrix',
'divideScalar',
'multiplyScalar',
'subtract',
'equalScalar',
'DenseMatrix'
]
export const createLsolveAll = /* #__PURE__ */ factory(name, dependencies, ({ typed, matrix, divideScalar, multiplyScalar, subtract, equalScalar, DenseMatrix }) => {
const solveValidation = createSolveValidation({ DenseMatrix })
/**
* Finds all solutions of a linear equation system by forwards substitution. Matrix must be a lower triangular matrix.
*
* `L * x = b`
*
* Syntax:
*
* math.lsolveAll(L, b)
*
* Examples:
*
* const a = [[-2, 3], [2, 1]]
* const b = [11, 9]
* const x = lsolveAll(a, b) // [ [[-5.5], [20]] ]
*
* See also:
*
* lsolve, lup, slu, usolve, lusolve
*
* @param {Matrix, Array} L A N x N matrix or array (L)
* @param {Matrix, Array} b A column vector with the b values
*
* @return {DenseMatrix[] | Array[]} An array of affine-independent column vectors (x) that solve the linear system
*/
return typed(name, {
'SparseMatrix, Array | Matrix': function (m, b) {
return _sparseForwardSubstitution(m, b)
},
'DenseMatrix, Array | Matrix': function (m, b) {
return _denseForwardSubstitution(m, b)
},
'Array, Array | Matrix': function (a, b) {
const m = matrix(a)
const R = _denseForwardSubstitution(m, b)
return R.map(r => r.valueOf())
}
})
function _denseForwardSubstitution (m, b_) {
// the algorithm is derived from
// https://www.overleaf.com/read/csvgqdxggyjv
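  // In short: with M lower triangular, each solution satisfies
  //   x[i] = (b[i] - sum_{j<i} M[i][j] * x[j]) / M[i][i]
  // computed column-by-column below; a zero pivot with a zero RHS marks a
  // free variable, and an extra solution with x[i] = 1 is branched off.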
// array of right-hand sides
const B = [solveValidation(m, b_, true)._data.map(e => e[0])]
const M = m._data
const rows = m._size[0]
const columns = m._size[1]
// loop columns
for (let i = 0; i < columns; i++) {
let L = B.length
// loop right-hand sides
for (let k = 0; k < L; k++) {
const b = B[k]
if (!equalScalar(M[i][i], 0)) {
// non-singular row
b[i] = divideScalar(b[i], M[i][i])
for (let j = i + 1; j < columns; j++) {
// b[j] -= b[i] * M[j,i]
b[j] = subtract(b[j], multiplyScalar(b[i], M[j][i]))
}
} else if (!equalScalar(b[i], 0)) {
// singular row, nonzero RHS
if (k === 0) {
// There is no valid solution
return []
} else {
// This RHS is invalid but other solutions may still exist
B.splice(k, 1)
k -= 1
L -= 1
}
} else if (k === 0) {
// singular row, RHS is zero
const bNew = [...b]
bNew[i] = 1
for (let j = i + 1; j < columns; j++) {
bNew[j] = subtract(bNew[j], M[j][i])
}
B.push(bNew)
}
}
}
return B.map(x => new DenseMatrix({ data: x.map(e => [e]), size: [rows, 1] }))
}
function _sparseForwardSubstitution (m, b_) {
// array of right-hand sides
const B = [solveValidation(m, b_, true)._data.map(e => e[0])]
const rows = m._size[0]
const columns = m._size[1]
const values = m._values
const index = m._index
const ptr = m._ptr
// loop columns
for (let i = 0; i < columns; i++) {
let L = B.length
// loop right-hand sides
for (let k = 0; k < L; k++) {
const b = B[k]
// values & indices (column i)
const iValues = []
const iIndices = []
// first & last indeces in column
const firstIndex = ptr[i]<|fim▁hole|> for (let j = firstIndex; j < lastIndex; j++) {
const J = index[j]
// check row
if (J === i) {
Mii = values[j]
} else if (J > i) {
// store lower triangular
iValues.push(values[j])
iIndices.push(J)
}
}
if (!equalScalar(Mii, 0)) {
// non-singular row
b[i] = divideScalar(b[i], Mii)
for (let j = 0, lastIndex = iIndices.length; j < lastIndex; j++) {
const J = iIndices[j]
b[J] = subtract(b[J], multiplyScalar(b[i], iValues[j]))
}
} else if (!equalScalar(b[i], 0)) {
// singular row, nonzero RHS
if (k === 0) {
// There is no valid solution
return []
} else {
// This RHS is invalid but other solutions may still exist
B.splice(k, 1)
k -= 1
L -= 1
}
} else if (k === 0) {
// singular row, RHS is zero
const bNew = [...b]
bNew[i] = 1
for (let j = 0, lastIndex = iIndices.length; j < lastIndex; j++) {
const J = iIndices[j]
bNew[J] = subtract(bNew[J], iValues[j])
}
B.push(bNew)
}
}
}
return B.map(x => new DenseMatrix({ data: x.map(e => [e]), size: [rows, 1] }))
}
})<|fim▁end|> | const lastIndex = ptr[i + 1]
// find the value at [i, i]
let Mii = 0 |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.shortcuts import render_to_response
from django.template import RequestContext
from markitup import settings
from markitup.markup import filter_func
from markitup.sanitize import sanitize_html
def apply_filter(request):
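    # sanitize first (stripping disallowed tags) so the markup filter only
    # ever sees whitelisted HTML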
cleaned_data = sanitize_html(request.POST.get('data', ''), strip=True)
markup = filter_func(cleaned_data)
return render_to_response( 'markitup/preview.html',<|fim▁hole|><|fim▁end|> | {'preview': markup},
context_instance=RequestContext(request)) |
<|file_name|>dispatcher.js<|end_file_name|><|fim▁begin|>/**
* App Dispatcher
* Extends Facebook's Flux Dispatcher
*/
'use strict';
var Dispatcher = require('flux').Dispatcher;<|fim▁hole|><|fim▁end|> | var AppDispatcher = new Dispatcher();
module.exports = AppDispatcher; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
# coding=utf-8
#
# Copyright 2014 Sascha Schirra
#
# This file is part of Ropper.
#
# Ropper is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by<|fim▁hole|># the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ropper is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.<|fim▁end|> | |
<|file_name|>cash_flow_mapper.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
from frappe.model.document import Document
<|fim▁hole|><|fim▁end|> |
class CashFlowMapper(Document):
pass |
<|file_name|>order.py<|end_file_name|><|fim▁begin|>import falcon
import msgpack
import json
from btree import BinaryTree
import ZODB, ZODB.FileStorage
import transaction
from persistent import Persistent
import uuid
import urllib
import btree
from pprint import pprint
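# API sketch (inferred from the handlers below):
#   POST /order/          {"data": [...]}          -> token + first pair to compare
#   PUT  /order/<token>   {"left": a, "right": b}  -> records a < b, returns next pair
#   GET  /order/<token>                            -> current in-order traversal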
class Collection (object):
def on_post(self, req, resp):
# req.stream corresponds to the WSGI wsgi.input environ variable,
# and allows you to read bytes from the request body.
#
# See also: PEP 3333
if req.content_length in (None, 0):
# Nothing to do
print "nothin"
return
body = req.stream.read()
if not body:
raise falcon.HTTPBadRequest('Empty request body',
'A valid JSON document is required.')
try:
pprint(body)
req.context['doc'] = json.loads(body.decode('utf-8'))
token = str(uuid.uuid4())
storage = ZODB.FileStorage.FileStorage('trees/'+token+'.fs')
db = ZODB.DB(storage)
connection = db.open()
root = connection.root
unordered_list = req.context['doc']['data']
root.tree = BinaryTree(unordered_list.pop())
tree = root.tree
tree.unordered_list = unordered_list
#tree.setList()
if len(unordered_list) <2:
raise falcon.HTTPBadRequest('Empty request body', 'We need more than 2 data elements')
except (ValueError, UnicodeDecodeError):
raise falcon.HTTPError(falcon.HTTP_753,
'Malformed JSON',
'Could not decode the request body. The '
'JSON was incorrect or not encoded as '
'UTF-8.')
tree.current = tree
tree.treeroot = tree.current
tree.next = tree.unordered_list.pop()
tree.ordered = False
tree.jresp = {'remain':tree.unordered_list, 'item':tree.current.getNodeValue(), 'compare':tree.next, 'token':token, 'ordered':tree.ordered,
'links':[{"self":"/order/"},
{'order':'/order/%s'%(urllib.quote(token))},
{'lt':'/order/%s/%s/%s'%(urllib.quote(token), tree.current.getNodeValue(), tree.next)},
{'gt':'/order/%s/%s/%s'%(urllib.quote(token), tree.next, tree.current.getNodeValue())}]}
transaction.commit()
connection.close()
db.close()
storage.close()
resp.body = json.dumps(tree.jresp)
def on_get(self, req, resp, token):
storage = ZODB.FileStorage.FileStorage('trees/'+token+'.fs')
db = ZODB.DB(storage)
connection = db.open()
root = connection.root
if hasattr(root, 'tree'):
tree = root.tree
else:
resp.body = "Initialize first"
connection.close()
db.close()
storage.close()
return
lst = list(btree.inorder(tree))
tree.jresp = {'data':lst, 'item':tree.current.getNodeValue(), 'compare':tree.next, 'token':token, 'ordered':tree.ordered,
'links':[{"new":"/order/"},
{"self":"/order/%s"%(urllib.quote(token))},
{"lt":"/order/%s/%s/%s"%(urllib.quote(token), tree.current.getNodeValue(), tree.next)},
{"gt":"/order/%s/%s/%s"%(urllib.quote(token), tree.next, tree.current.getNodeValue())}]}
transaction.commit()
connection.close()
db.close()
storage.close()
resp.body = json.dumps(tree.jresp)<|fim▁hole|>
def on_put(self, req, resp, token):
if req.content_length in (None, 0):
# Nothing to do
return
body = req.stream.read()
if not body:
raise falcon.HTTPBadRequest('Empty request body',
'A valid JSON document is required.')
try:
req.context['doc'] = json.loads(body.decode('utf-8'))
left = req.context['doc']['left']
right = req.context['doc']['right']
except (ValueError, UnicodeDecodeError):
raise falcon.HTTPError(falcon.HTTP_753,
'Malformed JSON',
'Could not decode the request body. The '
'JSON was incorrect or not encoded as '
'UTF-8.')
storage = ZODB.FileStorage.FileStorage('trees/'+token+'.fs')
db = ZODB.DB(storage)
connection = db.open()
root = connection.root
if hasattr(root, 'tree'):
tree = root.tree
else:
resp.body = "Initialize first"
connection.close()
db.close()
storage.close()
return
if tree.next not in [left, right]:
resp.body = json.dumps(tree.jresp)
connection.close()
db.close()
storage.close()
return
if left == tree.current.getNodeValue():
if tree.current.getRightChild() == None:
tree.current.insertRight(right)
tree.current = tree.treeroot
if len(tree.unordered_list)>0:
tree.next = tree.unordered_list.pop()
else:
tree.ordered = True
tree.next = "None"
else:
tree.current = tree.current.getRightChild()
elif right == tree.current.getNodeValue():
if tree.current.getLeftChild()== None:
tree.current.insertLeft(left)
tree.current = tree.treeroot
if len(tree.unordered_list)>0:
tree.next = tree.unordered_list.pop()
else:
tree.ordered = True
tree.next = "None"
else:
tree.current = tree.current.getLeftChild()
tree.jresp = {'remain':tree.unordered_list, 'item':tree.current.getNodeValue(), 'compare':tree.next, 'token':token, 'ordered':tree.ordered,
'links':[{"new":"/order/"},
{"order":"/order/%s"%(urllib.quote(token))},
{"lt":"/order/%s/%s/%s"%(urllib.quote(token), tree.current.getNodeValue(), tree.next)},
{"gt":"/order/%s/%s/%s"%(urllib.quote(token), tree.next, tree.current.getNodeValue())}]}
transaction.commit()
connection.close()
db.close()
storage.close()
resp.body = json.dumps(tree.jresp)<|fim▁end|> | |
<|file_name|>php.js<|end_file_name|><|fim▁begin|>// in all regexp "\" must be replaced by "\\"
var datas= {
"default": { // the name of this definition group. It's posisble to have different rules inside the same definition file
"REGEXP": { "before_word": "[^a-zA-Z0-9_]|^" // \\s|\\.|
,"possible_words_letters": "[a-zA-Z0-9_]+"
,"letter_after_word_must_match": "[^a-zA-Z0-9_]|$"
,"prefix_separator": "\\.|->"
}
,"CASE_SENSITIVE": true
<|fim▁hole|> // [
// 0 : the keyword the user is typing
// 1 : the string inserted in code ("{_@_}" being the new position of the cursor)
// 2 : the needed prefix
// 3 : the text the appear in the suggestion box (if empty, the string to insert will be displayed
['Array', 'Array()', '', 'alert( String message )']
,['alert', 'alert({_@_})', '', 'alert(message)']
,['ascrollTo', 'scrollTo({_@_})', '', 'scrollTo(x,y)']
,['alert', 'alert({_@_},bouh);', '', 'alert(message, message2)']
,['aclose', 'close({_@_})', '', 'alert(message)']
,['aconfirm', 'confirm({_@_})', '', 'alert(message)']
,['aonfocus', 'onfocus', '', '']
,['aonerror', 'onerror', '', 'blabla']
,['aonerror', 'onerror', '', '']
,['window', '', '', '']
,['location', 'location', 'window', '']
,['document', 'document', 'window', '']
,['href', 'href', 'location', '']
]
}
};
// the second identifier must be the same as the one of the syntax coloring definition file
EditArea_autocompletion._load_auto_complete_file( datas, "php" );<|fim▁end|> | ,"MAX_TEXT_LENGTH": 100 // the length of the text being analyzed before the cursor position
,"KEYWORDS": [
|
<|file_name|>ingest_file_browser.js<|end_file_name|><|fim▁begin|>/*
This file is part of Archivematica.
Copyright 2010-2013 Artefactual Systems Inc. <http://artefactual.com>
Archivematica is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Archivematica is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Archivematica. If not, see <http://www.gnu.org/licenses/>.
*/
var enableElements = function(cssSelectors) {
for (var index in cssSelectors) {
$(cssSelectors[index]).removeAttr('disabled');
}
};
var disableElements = function(cssSelectors) {
for (var index in cssSelectors) {
$(cssSelectors[index]).attr('disabled', 'disabled');
}
};
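// Illustrative usage (not from the original source): toggle a whole group of
// action buttons in one call, e.g.
//   disableElements(['#arrange_delete_button', '#arrange_create_sip_button']);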
function setupBacklogBrowser() {
var backlogBrowserEntryClickHandler = function(event) {
if (typeof event.data != 'undefined') {
var explorer = event.data.self.container
, explorerId = explorer.id
var entryEl = this
, entryId = $(this).attr('id')
, borderCssSpec = '1px solid red';
if (explorer.selectedEntryId == entryId) {
// un-highlight selected entry
$(entryEl).css('border', '');
// remove selected entry
explorer.selectedEntryId = undefined;
} else {
// remove highlighting of existing entries
$('#' + explorerId).find('.backbone-file-explorer-entry').css('border', '');
// highlight selected entry
$(entryEl).css('border', borderCssSpec);
// change selected entry
explorer.selectedEntryId = entryId;
// enable/disable arrange panel action buttons
if (explorer.id == 'originals') {
enableOrDisableOriginalsPanelActionButtons(explorer);
}
// enable/disable arrange panel action buttons
if (explorer.id == 'arrange') {
enableOrDisableArrangePanelActionButtons(explorer);
}
}
}
}
function enableOrDisableOriginalsPanelActionButtons(originals) {
var selectedType = originals.getTypeForCssId(originals.selectedEntryId);
// enable/disable hide button
if (typeof originals.selectedEntryId !== 'undefined') {
enableElements(['#originals_hide_button']);
} else {
disableElements(['#originals_hide_button']);
}
// enable/disable buttons for actions that only work with files
if (typeof originals.selectedEntryId !== 'undefined' && selectedType == 'file') {
enableElements(['#open_originals_file_button']);
} else {
disableElements(['#open_originals_file_button']);
}
}
function enableOrDisableArrangePanelActionButtons(arrange) {
var selectedType = arrange.getTypeForCssId(arrange.selectedEntryId);
// enable/disable delete button
if (typeof arrange.selectedEntryId !== 'undefined') {
enableElements(['#arrange_delete_button']);
} else {
disableElements(['#arrange_delete_button']);
}
// enable/disable create SIP button
if (selectedType == 'directory') {
enableElements(['#arrange_create_sip_button']);
} else {
disableElements(['#arrange_create_sip_button']);
}
// enable/disable metadata button
if (typeof arrange.selectedEntryId !== 'undefined') {
enableElements(['#arrange_edit_metadata_button']);
} else {
disableElements(['#arrange_edit_metadata_button']);
}
// enable/disable create directory button
// (if nothing is selected, it'll create in top level)
if (typeof arrange.selectedEntryId === 'undefined' || selectedType == 'directory') {
enableElements(['#arrange_create_directory_button']);
} else {
disableElements(['#arrange_create_directory_button']);
}
}
function moveHandler(move) {
// don't allow moving anything into the originals directory
if (move.self.id == 'originals') {
move.self.alert('Error', "You can't copy into the originals directory.");
return;
}
if (!move.allowed) {
move.self.alert('Error', "You can't move a directory into its subdirectory.");
return;
}
// move.self is the arrange browser
move.self.busy();
// determine whether a move or copy should be performed
var source,
actionUrlPath = '/filesystem/copy_to_arrange/',
arrangeDir = '/'+Base64.decode(move.self.structure.name);
// do a move if drag and drop occurs within the arrange pane
if (
move.droppedPath.indexOf(arrangeDir) == 0
&& move.containerPath.indexOf(arrangeDir) == 0
) {
// arrange -> arrange
source = move.self.getByPath(move.droppedPath)
} else {
// originals -> arrange
// TODO don't use global if possible
source = originals.getByPath(move.droppedPath)
}
var destination = move.self.getByPath(move.containerPath);
// Add trailing / to directories
if (source.type() == 'directory') {
move.droppedPath+='/'
}
if (typeof destination == 'undefined') {
// Moving into the parent directory arrange/
// Error if source is a file
if (source.type() != 'directory') {
move.self.alert('Error', "Files must go in a SIP, not the parent directory.");
}
move.containerPath = arrangeDir+'/'
} else if (destination.type() == 'directory') {
move.containerPath+='/'
} else if (destination.type() == 'file') {
move.containerPath = move.containerPath.match(/.*\//)[0];
}
$.post(
actionUrlPath,
{
filepath: Base64.encode(move.droppedPath),
destination: Base64.encode(move.containerPath)
},
function(result) {
if (result.error == undefined) {
move.self.idle();
move.self.render();
$('#search_submit').click(); // Fetches from backlog again and renders it
} else {
alert(result.message);
move.self.idle();
}
}
);
}
var originals = new FileExplorer({
el: $('#originals'),
levelTemplate: $('#template-dir-level').html(),
entryTemplate: $('#template-dir-entry').html(),
entryClickHandler: backlogBrowserEntryClickHandler,
nameClickHandler: backlogBrowserEntryClickHandler,
// Data will be populated by backlog.js when a search is conducted
});
originals.structure = {
'name': Base64.encode('originals'),
'parent': '',
'children': []
};
originals.itemsPerPage = 10;
originals.moveHandler = moveHandler;
originals.options.actionHandlers = [];
originals.render();
enableOrDisableOriginalsPanelActionButtons(originals);
var arrange = new FileExplorer({<|fim▁hole|> entryClickHandler: backlogBrowserEntryClickHandler,
nameClickHandler: backlogBrowserEntryClickHandler,
ajaxDeleteUrl: '/filesystem/delete/arrange/',
ajaxChildDataUrl: '/filesystem/contents/arrange/'
});
arrange.structure = {
'name': Base64.encode('arrange'),
'parent': '',
'children': []
};
arrange.itemsPerPage = 10;
arrange.options.actionHandlers = [];
arrange.moveHandler = moveHandler;
arrange.render();
enableOrDisableArrangePanelActionButtons(arrange);
// search results widget
var originals_search_results = new fileBrowser.EntryList({
el: $('#originals_search_results'),
moveHandler: moveHandler,
levelTemplate: $('#template-dir-level').html(),
entryTemplate: $('#template-dir-entry').html(),
itemsPerPage: 20
});
return {
'originals': originals,
'arrange': arrange
};
}
// spawn browsers
var originals_browser,
arrange_browser;
$(document).ready(function() {
// Monkey-patch entry toggling logic to allow auto-search of backlog
(function(originalToggleDirectoryLogic) {
var backlogSearched = false;
fileBrowser.EntryView.prototype.toggleDirectory = function($el) {
var result = originalToggleDirectoryLogic.apply(this, arguments);
// if toggling in the original panels, check to see if backlog entries have been
// added to it yet and, if not, perform search
if (this.container.id == 'originals' &&
this.container.structure.children.length == 0 &&
backlogSearched == false
) {
backlogSearched = true;
$('#search_submit').click();
}
return result;
};
})(fileBrowser.EntryView.prototype.toggleDirectory);
var browsers = setupBacklogBrowser();
originals_browser = browsers['originals'];
arrange_browser = browsers['arrange'];
originals_browser.display_data = function(data) {
// Accept and display data from an external source
// Assumes it is properly formatted already
this.structure.children = data;
// Open top level folder
this.openFolder($('#'+this.id+'__'+Base64.decode(this.structure.name)))
this.render();
}
$('#arrange_edit_metadata_button').click(function() {
// if metadata button isn't disabled, execute
if (typeof $('#arrange_edit_metadata_button').attr('disabled') === 'undefined') {
if (typeof arrange_browser.selectedEntryId === 'undefined') {
arrange_browser.alert('Edit metadata', 'Please select a directory or file to edit.');
return;
}
var path = arrange_browser.getPathForCssId(arrange_browser.selectedEntryId);
directoryMetadataForm.show(path, function(levelOfDescription) {
var entry = arrange_browser.getByPath(path);
entry.set({'levelOfDescription': levelOfDescription});
arrange_browser.render();
});
}
});
$('#arrange_create_directory_button').click(function() {
// if create directory button isn't disabled, execute
if (typeof $('#arrange_create_directory_button').attr('disabled') === 'undefined') {
var selectedType = arrange_browser.getTypeForCssId(arrange_browser.selectedEntryId);
if (selectedType != 'directory' && typeof arrange_browser.selectedEntryId !== 'undefined') {
arrange_browser.alert('Create Directory', "You can't create a directory in a file.");
} else {
var path = prompt('Name of new directory?');
if (path) {
var path_root = arrange_browser.getPathForCssId(arrange_browser.selectedEntryId) || '/' + Base64.decode(arrange_browser.structure.name)
, relative_path = path_root + '/' + path;
$.ajax({
url: '/filesystem/create_directory_within_arrange/',
type: 'POST',
async: false,
cache: false,
data: {
path: Base64.encode(relative_path)
},
success: function(results) {
arrange_browser.dirView.model.addDir({'name': path});
arrange_browser.render();
},
error: function(results) {
originals_browser.alert('Error', results.message);
}
});
}
}
}
});
$('#arrange_delete_button').click(function() {
if (typeof arrange_browser.selectedEntryId === 'undefined') {
arrange_browser.alert('Delete', 'Please select a directory or file to delete.');
return;
}
var path = arrange_browser.getPathForCssId(arrange_browser.selectedEntryId)
, type = arrange_browser.getTypeForCssId(arrange_browser.selectedEntryId);
arrange_browser.confirm(
'Delete',
'Are you sure you want to delete this directory or file?',
function() {
if( type == 'directory') {
path += '/'
}
arrange_browser.deleteEntry(path, type);
arrange_browser.selectedEntryId = undefined;
$('#search_submit').click();
}
);
});
// Hide the selected object
$('#originals_hide_button').click(function () {
// Have to hide all its children too or weird behaviour
$('#' + originals_browser.selectedEntryId).next().hide();
$('#' + originals_browser.selectedEntryId).hide();
});
// create SIP button functionality
$('#arrange_create_sip_button').click(function() {
// if create SIP button isn't disabled, execute
if (typeof $('#arrange_create_sip_button').attr('disabled') === 'undefined') {
if (typeof arrange_browser.selectedEntryId === 'undefined') {
arrange_browser.alert('Create SIP', 'Please select a directory before creating a SIP.');
return
}
var entryDiv = $('#' + arrange_browser.selectedEntryId)
, path = arrange_browser.getPathForCssId(arrange_browser.selectedEntryId)
, entryObject = arrange_browser.getByPath(path)
if (entryObject.type() != 'directory') {
arrange_browser.alert('Create SIP', 'SIPs can only be created from directories, not files.')
return
}
arrange_browser.confirm(
'Create SIP',
'Are you sure you want to create a SIP?',
function() {
$('.activity-indicator').show();
$.post(
'/filesystem/copy_from_arrange/',
{filepath: Base64.encode(path+'/')},
function(result) {
$('.activity-indicator').hide();
var title = (result.error) ? 'Error' : ''
arrange_browser.alert(
title,
result.message
)
if (!result.error) {
$(entryDiv).next().hide()
$(entryDiv).hide()
}
}
)
}
)
}
});
var createOpenHandler = function(buttonCssSelector, browser) {
return function() {
// if view button isn't disabled, execute
if (typeof $(buttonCssSelector).attr('disabled') === 'undefined') {
if (typeof browser.selectedEntryId === 'undefined') {
        browser.alert('Error', 'Please specify a file to view.');
} else {
var entryDiv = $('#' + browser.selectedEntryId)
, path = browser.getPathForCssId(browser.selectedEntryId)
, type = browser.getTypeForCssId(browser.selectedEntryId);
if (type == 'directory') {
          browser.alert('Error', 'Please specify a file to view.');
} else {
window.open(
'/filesystem/download_ss/?filepath=' + encodeURIComponent(Base64.encode(path)),
'_blank'
);
}
}
}
};
};
// open originals file button functionality
$('#open_originals_file_button').click(createOpenHandler('#open_originals_file_button', originals_browser));
});<|fim▁end|> | el: $('#arrange'),
levelTemplate: $('#template-dir-level').html(),
entryTemplate: $('#template-dir-entry').html(), |
<|file_name|>json.rs<|end_file_name|><|fim▁begin|>/* Copyright (C) 2017 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
//! Expose portions of the libjansson API to Rust so Rust code can
//! populate a json_t and return it for logging by Suricata.
use std::ffi::CString;
use std::os::raw::c_char;
/// The Rust placeholder for the json_t pointer.
pub enum JsonT {}
/// Expose the jansson functions we need.
extern {
fn json_object() -> *mut JsonT;
fn json_object_set_new(js: *mut JsonT, key: *const c_char,
val: *mut JsonT) -> u32;
fn json_array() -> *mut JsonT;
fn json_array_append_new(array: *mut JsonT, value: *mut JsonT);
fn json_string(value: *const c_char) -> *mut JsonT;
fn json_integer(val: u64) -> *mut JsonT;
fn SCJsonDecref(value: *mut JsonT);
fn SCJsonBool(val: bool) -> *mut JsonT;
}
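// Note: the json_* functions above come from libjansson itself, while
// SCJsonDecref and SCJsonBool appear to be small C-side wrappers provided by
// Suricata (an assumption based on the SC prefix: jansson's own decref and
// boolean helpers are macros/inline functions, which cannot be called
// directly through FFI).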
pub struct Json {
pub js: *mut JsonT,
}
impl Json {
pub fn decref(val: Json) {
unsafe{SCJsonDecref(val.js)};
}
pub fn object() -> Json {
return Json{
js: unsafe{json_object()},
}
}
pub fn array() -> Json {
return Json{
js: unsafe{json_array()},
}
}
pub fn string(val: &str) -> Json {
return Json{
js: unsafe{json_string(to_cstring(val.as_bytes()).as_ptr())}
};
}
pub fn string_from_bytes(val: &[u8]) -> Json {
return Json{
js: unsafe{json_string(to_cstring(val).as_ptr())}
};
}
pub fn unwrap(&self) -> *mut JsonT {
return self.js;
}
pub fn set(&self, key: &str, val: Json) {
unsafe {
json_object_set_new(self.js,
CString::new(key).unwrap().as_ptr(),<|fim▁hole|>
pub fn set_string_from_bytes(&self, key: &str, val: &[u8]) {
unsafe {
json_object_set_new(self.js,
CString::new(key).unwrap().as_ptr(),
json_string(to_cstring(val).as_ptr()));
}
}
pub fn set_string(&self, key: &str, val: &str) {
unsafe {
json_object_set_new(self.js,
CString::new(key).unwrap().as_ptr(),
json_string(to_cstring(val.as_bytes()).as_ptr()));
}
}
pub fn set_integer(&self, key: &str, val: u64) {
unsafe {
json_object_set_new(self.js,
CString::new(key).unwrap().as_ptr(),
json_integer(val));
}
}
pub fn set_boolean(&self, key: &str, val: bool) {
unsafe {
json_object_set_new(self.js,
CString::new(key).unwrap().as_ptr(),
SCJsonBool(val));
}
}
pub fn array_append(&self, val: Json) {
unsafe {
json_array_append_new(self.js, val.js);
}
}
pub fn array_append_string(&self, val: &str) {
unsafe {
json_array_append_new(self.js, json_string(to_cstring(val.as_bytes()).as_ptr()));
}
}
}
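// Illustrative sketch (not part of the original bindings) of building an
// object for logging; the field names below are made up for the example:
//
//     let js = Json::object();
//     js.set_string("proto", "dns");
//     js.set_integer("tx_id", 1);
//     let answers = Json::array();
//     answers.array_append_string("10.0.0.1");
//     js.set("answers", answers);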
/// Convert an array of bytes into an ascii printable string replacing
/// non-printable characters (including NULL) with hex value.
///
/// Newer versions of Jansson have a json_stringn that will allow us
/// to create a string out of a byte array of unicode compliant bytes,
/// but until we can use it across all platforms this is probably the
/// best we can do.
fn to_cstring(val: &[u8]) -> CString {
let mut safe = Vec::with_capacity(val.len());
for c in val {
if *c == 0 || *c > 0x7f {
safe.extend(format!("\\x{:02x}", *c).as_bytes());
} else {
safe.push(*c);
}
}
match CString::new(safe) {
Ok(cstr) => cstr,
_ => {
CString::new("<failed to encode string>").unwrap()
}
}
}
#[cfg(test)]
mod tests {
use json::to_cstring;
#[test]
fn test_to_string() {
assert_eq!("A\\x00A",
to_cstring(&[0x41, 0x00, 0x41]).into_string().unwrap());
assert_eq!("", to_cstring(&[]).into_string().unwrap());
assert_eq!("\\x80\\xf1\\xf2\\xf3",
to_cstring(&[0x80, 0xf1, 0xf2, 0xf3]).into_string().unwrap());
}
}<|fim▁end|> | val.js);
}
} |
<|file_name|>git.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
FILES_COMMAND = 'git ls-files'
DEFAULT_DESCRIBE = 'git describe --dirty --tags --long --match *.*'
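# With --long, the describe output has the fixed shape TAG-N-gHASH (for
# example "v1.2.3-4-g1a2b3c4"), optionally suffixed with "-dirty"; parse()
# below splits on exactly that shape.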
def parse(root, describe_command=DEFAULT_DESCRIBE):
real_root, _, ret = do_ex('git rev-parse --show-toplevel', root)
if ret:
return
trace('real root', real_root)
if abspath(realpath(real_root)) != abspath(realpath(root)):
return
rev_node, _, ret = do_ex('git rev-parse --verify --quiet HEAD', root)
if ret:
return meta('0.0')
rev_node = rev_node[:7]
out, err, ret = do_ex(describe_command, root)
if '-' not in out and '.' not in out:
revs = do('git rev-list HEAD', root)
count = revs.count('\n')
if ret:
out = rev_node
return meta('0.0', distance=count + 1, node=out)
if ret:
return
dirty = out.endswith('-dirty')
if dirty:
out = out.rsplit('-', 1)[0]
tag, number, node = out.rsplit('-', 2)
number = int(number)
if number:
return meta(tag, distance=number, node=node, dirty=dirty)
else:
return meta(tag, dirty=dirty, node=node)<|fim▁end|> | from .utils import do, do_ex, trace
from .version import meta
from os.path import abspath, realpath |
<|file_name|>TestConceptFactory.java<|end_file_name|><|fim▁begin|>package org.auscope.portal.core.services.responses.vocab;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathException;
import org.auscope.portal.core.services.namespaces.VocabNamespaceContext;
import org.auscope.portal.core.test.PortalTestClass;
import org.auscope.portal.core.util.DOMUtil;
import org.auscope.portal.core.util.ResourceUtil;
import org.junit.Assert;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;
/**
* Unit tests for ConceptFactory
*
* @author Josh Vote
*
*/
public class TestConceptFactory extends PortalTestClass {
private void assertSameConcept(Concept[] expected, Concept[] actual, List<String> traversedUrns) {
String errMsg = String.format("%1$s != %2$s", Arrays.toString(expected), Arrays.toString(actual));<|fim▁hole|> assertSameConcept(expected[i], actual[i], traversedUrns);
}
}
private void assertSameConcept(Concept expected, Concept actual, List<String> traversedUrns) {
String errMsg = String.format("%1$s != %2$s", expected, actual);
Assert.assertEquals(errMsg, expected, actual);
Assert.assertEquals(errMsg, expected.getLabel(), actual.getLabel());
Assert.assertEquals(errMsg, expected.getPreferredLabel(), actual.getPreferredLabel());
Assert.assertEquals(errMsg, expected.isHref(), actual.isHref());
Assert.assertEquals(errMsg, expected.getDefinition(), actual.getDefinition());
//To deal with cycles in the hierarchy
if (traversedUrns.contains(expected.getUrn())) {
return;
} else {
traversedUrns.add(expected.getUrn());
}
assertSameConcept(expected.getBroader(), actual.getBroader(), traversedUrns);
assertSameConcept(expected.getNarrower(), actual.getNarrower(), traversedUrns);
assertSameConcept(expected.getRelated(), actual.getRelated(), traversedUrns);
}
/**
* Runs the factory through a standard SISSVoc response XML
* @throws IOException
* @throws SAXException
* @throws ParserConfigurationException
* @throws XPathException
*/
@Test
public void testSISSVocRDF() throws IOException, ParserConfigurationException, SAXException, XPathException {
//Build our expectation
Concept concept1 = new Concept("urn:concept:1");
Concept concept2 = new Concept("urn:concept:2");
Concept concept3 = new Concept("urn:concept:3");
Concept concept4 = new Concept("urn:concept:4");
NamedIndividual ni1 = new NamedIndividual("urn:ni:1");
NamedIndividual ni2 = new NamedIndividual("urn:ni:2");
NamedIndividual ni3 = new NamedIndividual("urn:ni:3");
concept1.setNarrower(new Concept[] {concept2, concept3, ni2});
concept1.setLabel("LabelConcept1");
concept1.setPreferredLabel("PrefLabelConcept1");
concept2.setBroader(new Concept[] {concept1});
concept2.setRelated(new Concept[] {concept3});
concept2.setLabel("LabelConcept2");
concept2.setPreferredLabel("PrefLabelConcept2");
concept2.setDefinition("DefinitionConcept2");
concept3.setBroader(new Concept[] {concept1});
concept3.setRelated(new Concept[] {concept2});
concept3.setNarrower(new Concept[] {ni1});
concept3.setLabel("LabelConcept3");
concept3.setPreferredLabel("PrefLabelConcept3");
concept4.setNarrower(new Concept[] {ni3});
concept4.setLabel("LabelConcept4");
concept4.setPreferredLabel("PrefLabelConcept4");
concept4.setDefinition("DefinitionConcept4");
ni1.setBroader(new Concept[] {concept3});
ni1.setLabel("LabelNamedIndividual1");
ni1.setPreferredLabel("PrefLabelNamedIndividual1");
ni2.setBroader(new Concept[] {concept1});
ni2.setLabel("LabelNamedIndividual2");
ni2.setPreferredLabel("PrefLabelNamedIndividual2");
ni3.setBroader(new Concept[] {concept4});
ni3.setLabel("LabelNamedIndividual3");
ni3.setPreferredLabel("PrefLabelNamedIndividual3");
Concept[] expectation = new Concept[] {concept1, concept4};
//Build our actual list
String responseXml = ResourceUtil
.loadResourceAsString("org/auscope/portal/core/test/responses/sissvoc/SISSVocResponse.xml");
Document responseDoc = DOMUtil.buildDomFromString(responseXml);
Node rdfNode = (Node) DOMUtil.compileXPathExpr("rdf:RDF", new VocabNamespaceContext()).evaluate(responseDoc,
XPathConstants.NODE);
ConceptFactory cf = new ConceptFactory();
Concept[] actualConcepts = cf.parseFromRDF(rdfNode);
Assert.assertNotNull(actualConcepts);
assertSameConcept(expectation, actualConcepts, new ArrayList<String>());
}
/**
* This is a legacy test for the older vocabularyServiceResponse.xml
*
* It tests our concepts still return EVEN if we don't define top level concepts
* @throws IOException
* @throws SAXException
* @throws ParserConfigurationException
* @throws XPathException
*/
@Test
public void testGetConcepts() throws IOException, ParserConfigurationException, SAXException, XPathException {
String responseXml = ResourceUtil
.loadResourceAsString("org/auscope/portal/core/test/responses/sissvoc/vocabularyServiceResponse.xml");
Document responseDoc = DOMUtil.buildDomFromString(responseXml);
Node rdfNode = (Node) DOMUtil.compileXPathExpr("rdf:RDF", new VocabNamespaceContext()).evaluate(responseDoc,
XPathConstants.NODE);
ConceptFactory cf = new ConceptFactory();
Concept[] actualConcepts = cf.parseFromRDF(rdfNode);
Assert.assertEquals("There are 27 concepts", 27, actualConcepts.length);
//Must contain: Siltstone - concrete aggregate
boolean found = false;
for (Concept concept : actualConcepts) {
if (concept.getPreferredLabel().equals("Siltstone - concrete aggregate")) {
found = true;
break;
}
}
Assert.assertTrue("Must contain: Siltstone - concrete aggregate", found);
//Must contain: Gneiss - crusher dust
found = false;
for (Concept concept : actualConcepts) {
if (concept.getPreferredLabel().equals("Gneiss - crusher dust")) {
found = true;
break;
}
}
Assert.assertTrue("Must contain: Gneiss - crusher dust", found);
}
}<|fim▁end|> |
Assert.assertArrayEquals(errMsg, expected, actual);
for (int i = 0; i < expected.length; i++) { |
<|file_name|>geant3tasks.C<|end_file_name|><|fim▁begin|>/// \file
/// \ingroup tutorial_legacy
/// This script is a representation, using TTasks, of the Geant3 simulation program.
/// This example uses TTask objects directly.
/// A real implementation would require one class per task derived from TTask.
///
/// \macro_code
///
/// \author Rene Brun
void geant3tasks()
{
TTask *geant3 = new TTask("geant3","Geant3 simulation main program");
gROOT->GetListOfTasks()->Add(geant3);
TTask *uginit = new TTask("uginit","Initialisation manager");
TTask *grun = new TTask("grun","Run manager");
TTask *uglast = new TTask("uglast","Termination manager");
geant3->Add(uginit);
geant3->Add(grun);
geant3->Add(uglast);
TTask *ginit = new TTask("ginit","Geant3 initialisation");
TTask *ugeom = new TTask("ugeom","Geometry initialisation manager");
TTask *gphysi = new TTask("gphysi","Initialise cross-sections and energy loss tables");
TTask *ggclos = new TTask("ggclos","Geometry analyzer and optimizer");
uginit->Add(ginit);
uginit->Add(ugeom);
uginit->Add(gphysi);
uginit->Add(ggclos);
TTask *gtrigi = new TTask("gtrigi","Event initialisation");
TTask *gtrig = new TTask("gtrig","Event manager");
TTask *gtrigc = new TTask("gtrigc","Event cleaner");
grun->Add(gtrigi);
grun->Add(gtrig);
grun->Add(gtrigc);
TTask *glast = new TTask("glast","Geant3 termination");
TTask *igend = new TTask("igend","Graphics package termination");
uglast->Add(glast);
uglast->Add(igend);
TTask *gukine = new TTask("gukine","Event generator manager");
TTask *gutrev = new TTask("gutrev","Event application manager");
TTask *gudigi = new TTask("gudigi","Event digitisation manager");
TTask *guout = new TTask("guout","Event termination manager");
gtrig->Add(gukine);
gtrig->Add(gutrev);
gtrig->Add(gudigi);
gtrig->Add(guout);
TTask *gtreve = new TTask("gtreve","Geant3 event manager");
gutrev->Add(gtreve);
TTask *gltrac = new TTask("gltrac","Initialize tracking parameters");
TTask *gftrac = new TTask("gftrac","select next track segment from stack JTRACK");
TTask *gutrak = new TTask("gutrak","Application track manager");
gtreve->Add(gltrac);
gtreve->Add(gftrac);
gtreve->Add(gutrak);
TTask *gtrack = new TTask("gtrack","Geant3 track manager");
gutrak->Add(gtrack);
TTask *gtgama = new TTask("gtgama","photon propagator");
TTask *gtelec = new TTask("gtelec","electron propagator");
TTask *gtneut = new TTask("gtneut","neutron propagator");
TTask *gthadr = new TTask("gthadr","hadron propagator");
TTask *gtmuon = new TTask("gtmuon","muon propagator");
TTask *gtnino = new TTask("gtnino","geantino propagator");
TTask *gtckov = new TTask("gtckov","Cherenkov light propagator");
TTask *gthion = new TTask("gthion","heavy ion propagator");
TTask *gustep = new TTask("gustep","Application step manager");
TTask *gtmedi = new TTask("gtmedi","Geometry volume finder");
gtrack->Add(gtgama);
gtrack->Add(gtelec);
gtrack->Add(gtneut);
gtrack->Add(gthadr);
gtrack->Add(gtmuon);
gtrack->Add(gtnino);
gtrack->Add(gtckov);
gtrack->Add(gthion);
gtrack->Add(gustep);
gtrack->Add(gtmedi);
   TTask *gtnext = new TTask("gtnext","Geometry boundary manager");
TTask *gpairg = new TTask("gpairg","Generate pair production");
TTask *gcomp = new TTask("gcomp","Generate Compton scattering");
TTask *gphot = new TTask("gphot","Generate photo effect");
TTask *grayl = new TTask("grayl","Generate Rayleigh effect");
TTask *gpfis = new TTask("gpfis","Generate photo fission");
gtgama->Add(gtnext);
gtgama->Add(gpairg);
gtgama->Add(gcomp);
gtgama->Add(gphot);
gtgama->Add(grayl);
gtgama->Add(gpfis);
TTask *guswim = new TTask("guswim","magnetic field propagator");
TTask *ggckov = new TTask("ggckov","Generate Cherenkov photons");
TTask *gsync = new TTask("gsync","Generate synchrotron radiation");
TTask *gmults = new TTask("gmults","Apply multiple scattering");
TTask *gbreme = new TTask("gbreme","Generate Bremsstrahlung");
TTask *gdray = new TTask("gdray","Generate delta ray");
TTask *ganni = new TTask("ganni","Generate Positron annihilation");
TTask *gannir = new TTask("gannir","Stopped tracks and annihilation at rest");
gtelec->Add(gtnext);
gtelec->Add(guswim);
gtelec->Add(ggckov);
gtelec->Add(gsync);
gtelec->Add(gmults);
gtelec->Add(gbreme);
gtelec->Add(gdray);
gtelec->Add(ganni);
gtelec->Add(gannir);
TTask *guphad = new TTask("guphad","Hadronic cross-section manager");
TTask *guhadr = new TTask("guhadr","Hadronic cascade manager");
TTask *gdecay = new TTask("gdecay","Particle decay");
gtneut->Add(gtnext);
gtneut->Add(guphad);
gtneut->Add(guhadr);
gtneut->Add(gdecay);
gthadr->Add(gtnext);
gthadr->Add(guphad);
gthadr->Add(guswim);
gthadr->Add(ggckov);
gthadr->Add(gmults);
gthadr->Add(guhadr);
gthadr->Add(gdecay);
gthadr->Add(gdray);
TTask *gbremm = new TTask("gbremm","Generate Bremsstrahlung");
TTask *gpairm = new TTask("gpairm","Generate Pair production");
TTask *gmunu = new TTask("gmunu","Generate Nuclear interaction");
gtmuon->Add(gtnext);
gtmuon->Add(guswim);
gtmuon->Add(ggckov);
gtmuon->Add(gmults);
gtmuon->Add(gbremm);
gtmuon->Add(gpairm);
gtmuon->Add(gdecay);
gtmuon->Add(gdray);<|fim▁hole|>
gtnino->Add(gtnext);
TTask *glisur = new TTask("glisur","Photon is reflected");
gtckov->Add(gtnext);
gtckov->Add(glisur);
gthion->Add(gtnext);
gthion->Add(guswim);
gthion->Add(gmults);
gthion->Add(guhadr);
gthion->Add(gdray);
new TBrowser;
gDebug = 2;
}<|fim▁end|> | gtmuon->Add(gmunu);
gtmuon->Add(gdecay); |
<|file_name|>groups_74.js<|end_file_name|><|fim▁begin|>var searchData=<|fim▁hole|> ['transfer_20commands',['Transfer Commands',['../group___d_a_p__transfer__gr.html',1,'']]]
];<|fim▁end|> | [ |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
Asciimatics is a package to help people create full-screen text UIs (from interactive forms to
ASCII animations) on any platform. It is licensed under the Apache Software Foundation License 2.0.
"""
__author__ = 'Peter Brittain'
<|fim▁hole|>except ImportError:
# Someone is running straight from the GIT repo - dummy out the version
version = "0.0.0"
__version__ = version<|fim▁end|> | try:
from .version import version |
<|file_name|>test_uri_eval.py<|end_file_name|><|fim▁begin|>"""Functional tests for URIEval Plugin"""
from __future__ import absolute_import
import random
import sys
import unittest
from string import ascii_letters
from string import digits
import tests.util
# Load plugin and report matched RULES and SCORE
PRE_CONFIG = """
loadplugin oa.plugins.uri_eval.URIEvalPlugin
report _SCORE_
report _TESTS_
"""
# Define rules for plugin
CONFIG = """
body CHECK_FOR_HTTP_REDIRECTOR eval:check_for_http_redirector()
body CHECK_HTTPS_IP_MISMATCH eval:check_https_ip_mismatch()
body CHECK_URI_TRUNCATED eval:check_uri_truncated()
"""
class TestFunctionalURIEval(tests.util.TestBase):
"""Class containing functional tests for the URI Plugin"""
mytext = [random.choice(ascii_letters + digits) for _ in range(8182)]
long_text = "".join(mytext)
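    # 8182 random characters plus the surrounding "http://" and ".com" push
    # the URI just past what appears to be an 8192-byte limit; the
    # "superior limit" tests below use 8181 characters to sit exactly at the
    # boundary without triggering truncation.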
def test_check_for_http_redirector(self):
email = """From: [email protected]
\nhttp://utility.baidu.com/traf/click.php?id=215&url=https://log0.wordpress.com"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_http_redirector_in_a_label_closed_commas(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://utility.baidu.com/traf/click.php?id=215&url=https://log0.wordpress.com"></a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])
@unittest.skipIf(sys.version_info.major == 3 and sys.version_info.minor < 3,
'Incompatible with python 3.2.*')
def test_check_for_http_redirector_in_a_label_no_commas(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href=http://utility.baidu.com/traf/click.php?id=215&url=https://log0.wordpress.com></a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_http_redirector_links_combined(self):
email = """From: [email protected]
\nhttp://utility.baidu.com/traf/click.php?id=215&urlhttps://log0.wordpress.com"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_http_redirector_no_http(self):
email = """From: [email protected]
\nhttp://utility.baidu.com/traf/click.php?id=215&url=://log0.wordpress.com"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_http_redirector_with_ftp(self):
email = """From: [email protected]
\nhttp://utility.baidu.com/traf/click.php?id=215&url=ftp://log0.wordpress.com"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_http_redirector_only_http(self):
email = """From: [email protected]
\nhttp://utility.baidu.com/traf/click.php?id=215&url=https://"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_http_redirector_incomplete_link(self):
email = """From: [email protected]
\nhttp://utility.baidu.com/traf/click.php?id=215&url=https://ceva"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_http_redirector_different_links(self):
email = """From: [email protected]
\nhttp://utility.baidu.com/traf/click.php?id=215&url= https://ceva.com"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_http_redirector_middle_of_body(self):
email = """From: [email protected]
\nFYI, this week is Learning Week @LinkedIn, so if you are interested in taking some free courses, hurry up
asfglajds;galsg a;slfa;sl laddg http://utility.baidu.com/traf/click.php?id=215&url=https://ceva.com asdgksal;fjlaskfdghs"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_https_ip_mismatch(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited http://utility.baidu.com/traf/click.php?id=215&url=https://log0.wordpress.com:
<a href="http://45.42.12.12/login/account-unlock">https://www.paypal.com/login/account-unlock</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 2, ['CHECK_HTTPS_IP_MISMATCH', 'CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_https_ip_mismatch_and_redirector_in_a_label(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://google.com=https://log0.wordpress.com/">https://ceva.com/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_https_ip_mismatch_and_redirector_in_a_label_with_invalid_expression(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://@1.2.3.4=https://log0.wordpress.com/">https://ceva.com/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_https_ip_mismatch_and_redirector_in_a_label_ip_left(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/https://log0.wordpress.com/">https://ceva.com/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 2, ['CHECK_HTTPS_IP_MISMATCH', 'CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_https_ip_mismatch_and_redirector_in_link_label_same_address(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<link rel=parent href="http://log0.wordpress.com/https://log0.wordpress.com/">
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_and_redirector_in_link_label(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<link rel=parent href="http://google.com=https://log0.wordpress.com/">https://ceva.com/
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_https_ip_mismatch_and_redirector_in_link_label_with_invalid_expression(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<link rel=parent href="http://@1.2.3.4=https://log0.wordpress.com/">https://ceva.com/
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_https_ip_mismatch_and_redirector_in_link_label_ip_left(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<link rel=parent href="http://1.2.3.4=https://log0.wordpress.com/">https://ceva.com/
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_https_ip_mismatch_domains(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://google.com/">https://www.google.com/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_domains_incomplete_right(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://google.com/"> cevatest https://ceva/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_ip_right(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://google.com/">http://300.58.209.206/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_both_ips(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://5.79.73.204/">http://300.58.209.206/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_incomplete_domain(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://5.79.73.204/">https://ceva/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_https_ip_mismatch_ipv6_left(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://2001:1af8:4700:a02d:2::1/">https://1.2.3.4/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_ipv6_left_domain_right(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://2001:1af8:4700:a02d:2::1/">https://yahoo.com/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_https_ip_mismatch_ipv6_left_multiple_labels(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://2001:1af8:4700:a02d:2::1/">https://1.2.3.4/</a>
<a href="http://2001:1af8:4700:a02d:2::1/">https://yahoo.com/</a>
<a href="http://2001:1af8:4700:a02d:2::1/">https://6.6.6.6/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_https_ip_mismatch_ipv6_with_redirector(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://2001:1af8:4700:a02d:2::1/https://test">https://1.2.3.4/</a>
<a href="http://2001:1af8:4700:a02d:2::1/">https://yahoo.com/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 2, ['CHECK_HTTPS_IP_MISMATCH', 'CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_https_ip_mismatch_ipv6_with_redirector_and_link_label(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://2001:1af8:4700:a02d:2::1/https://test">https://1.2.3.4/</a>
<a href="http://2001:1af8:4700:a02d:2::1/">https://yahoo.com/</a>
<link href="http://2001:1af8:4700:a02d:2::1/">https://yahoo.com/
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 2, ['CHECK_HTTPS_IP_MISMATCH', 'CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_https_ip_mismatch_ipv6_with_false_redirector(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://2001:1af8:4700:a02d:2::1/https://2001:1af8:4700:a02d:2::1">https://1.2.3.4/</a>
<a href="http://2001:1af8:4700:a02d:2::1/">https://yahoo.com/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_https_ip_mismatch_incorrect_ipv4_domain_right(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://2001:1af8:4700:a02d/https://2001:1af8:4700:a02d/">https://yahoo.com/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_no_domain(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/">https://</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_https_ip_mismatch_incorrect_ip(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3/">https://</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_unfinished_ip(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3./">https://</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_inverted_commas_16_ip(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.'2'.3.4/">https://test.com</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_inverted_commas_ip_right(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/">https://'1'.2.3.4</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_https_ip_mismatch_inverted_commas_on_all_ip(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://'1.2.3.4'/">https://test.com</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_invalid_expression_ip(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://@1.2.3.4/">https://test.com</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_ipv6_right(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/">https://2001:1af8:4700:a02d:2::1/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_same_ipv6_right_and_left(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://2001:1af8:4700:a02d:2::1/">https://2001:1af8:4700:a02d:2::1/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_same_ipv6_right_and_left_with_redirector(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://2001:1af8:4700:a02d:2::1/https://2901:1af8:4711:a02d:2::1">https://2901:1af8:4711:a02d:2::1/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_https_ip_mismatch_same_ipv6_right_and_left_with_redirector_negative(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://2001:1af8:4700:a02d:2::1/https://2001:1af8:4700:a02d:2::1/">https://2901:1af8:4711:a02d:2::1/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_text_between_links_domain_right(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/"> cevatest https://google.com/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_https_ip_mismatch_text_between_links_ip_right(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/"> cevatest https://1.2.3.4/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_with_multiple_uri(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/"> cevatest https://1.2.3.4/ https://test.com/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_redirector_with_multiple_redirector(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/https://1.2.3.4/https://test.com/https://1.2.3.4/"> cevatest https://1.2.3.4/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_redirector_with_multiple_redirector_negative(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/https://1.2.3.4/https://1.2.3.4/"> cevatest https://1.2.3.4/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_label_not_closed(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/">https://google
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_https_ip_mismatch_incorrect_link_label(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<link href="http://1.2.3.4/">https://google.com/</link>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_https_ip_mismatch_multiple_labels_redirector_in_link_label(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/">https://5.5.5.5/</a>
<link href="http://1.2.3.4/https://google.com/">
<a href="http://1.2.3.4/">https://6.6.6.6/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_https_ip_mismatch_multiple_labels_match_on_a(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/">https://google.com/</a>
<link href="http://1.2.3.4/">https://test.com/
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_https_ip_mismatch_multiple_labels_match_on_both(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/">https://google.com/</a>
<link href="http://1.2.3.4/https://test.com/">
<a href="http://6.6.6.6/"></a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 2, ['CHECK_HTTPS_IP_MISMATCH', 'CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_https_ip_mismatch_multiple_labels(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/">https://5.5.5.5/</a>
<a href="http://1.2.3.4/">https://google.com/</a>
<a href="http://1.2.3.4/">https://6.6.6.6/></a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_https_ip_mismatch_multiple_labels_match_last(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/">https://5.5.5.5/</a>
<a href="http://1.2.3.4/">https://google.com/</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_https_ip_mismatch_multiple_labels_match_first(self):
<|fim▁hole|>Content-Type: text/html
\n<html>
<a href="http://1.2.3.4/">https://google.com/</a>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/">https://5.5.5.5/</a>
<a href="http://1.2.3.4/">https://1.2.3.4./</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_uri_truncated_negative(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="https://www.PAYPAL.com/login/account-unlock">https://www.PAYPAL.com/...</a>
</html>"""
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_uri_truncated_superior_limit(self):
mytext1 = [random.choice(ascii_letters + digits) for _ in range(8181)]
long_text1 = "".join(mytext1)
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://%s.com">https://test.com</a>
</html>"""
email = email % (long_text1)
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_uri_truncated(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://%s.com">https://test.com</a>
</html>"""
email = email % (self.long_text)
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_URI_TRUNCATED'])
def test_check_for_uri_truncated_and_redirector_after(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://%s.com/https://ceva.com">https://test.com</a>
</html>"""
email = email % (self.long_text)
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 2, ['CHECK_URI_TRUNCATED', 'CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_uri_truncated_redirector_before_and_ip_mismatch(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<a href="http://1.2.3.4/https://%s.com/">https://test.com</a>
</html>"""
email = email % (self.long_text)
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 3, ['CHECK_URI_TRUNCATED', 'CHECK_FOR_HTTP_REDIRECTOR','CHECK_HTTPS_IP_MISMATCH'])
def test_check_for_uri_truncated_link_label(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<link href="http://%s.com">
</html>"""
email = email % (self.long_text)
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['CHECK_URI_TRUNCATED'])
def test_check_for_uri_truncated_superior_limit_link_label(self):
mytext1 = [random.choice(ascii_letters + digits) for _ in range(8181)]
long_text1 = "".join(mytext1)
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<link href="http://%s.com">
</html>"""
email = email % (long_text1)
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_uri_truncated_and_redirector_after_link_label(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<link href="http://%s.com/https://%s.com/https://ceva.com">
</html>"""
email = email % (self.long_text, self.long_text)
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 2, ['CHECK_URI_TRUNCATED', 'CHECK_FOR_HTTP_REDIRECTOR'])
def test_check_for_uri_truncated_redirector_before_link_label(self):
email = """From: [email protected]
Content-Type: text/html
\n<html>
Dear user,
Your account has been limited please follow the instructions on the next link:
<link href="http://1.2.3.4/https://1.2.3.4/https://%s.com/">
</html>"""
email = email % (self.long_text)
self.setup_conf(config=CONFIG, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 2, ['CHECK_URI_TRUNCATED', 'CHECK_FOR_HTTP_REDIRECTOR'])
def suite():
"""Gather all the tests from this package in a test suite."""
test_suite = unittest.TestSuite()
test_suite.addTest(unittest.makeSuite(TestFunctionalURIEval, "test"))
return test_suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')<|fim▁end|> | email = """From: [email protected] |
<|file_name|>ser.rs<|end_file_name|><|fim▁begin|>//! The main serializer mux.
//
// This Source Code Form is subject to the terms of the Mozilla Public License,
// v. 2.0. If a copy of the MPL was not distributed with this file, You can
// obtain one at https://mozilla.org/MPL/2.0/.
use std::result;
use byteorder::{ByteOrder, BigEndian, LittleEndian};
use serde::Serialize;
use serde;
use error::Error;
use defs::*;
use seq_serializer::*;
use map_serializer::*;
/// The corepack Serializer. Contains a closure that receives byte buffers as the output is created.
pub struct Serializer<F: FnMut(&[u8]) -> Result<(), Error>> {
output: F,
}
impl<F: FnMut(&[u8]) -> Result<(), Error>> Serializer<F> {
    /// Create a new Serializer given an output function.
pub fn new(output: F) -> Serializer<F> {
Serializer { output: output }
}
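    /// Write `value` using the smallest msgpack encoding that can represent
    /// it, trying the signed format at each width before the unsigned one.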
fn serialize_signed(&mut self, value: i64) -> Result<(), Error> {
if value >= FIXINT_MIN as i64 && value <= FIXINT_MAX as i64 {
let mut buf = [0; U16_BYTES];
LittleEndian::write_i16(&mut buf, value as i16);
(self.output)(&buf[..1])
} else if value >= i8::min_value() as i64 && value <= i8::max_value() as i64 {
let mut buf = [0; U16_BYTES];
LittleEndian::write_i16(&mut buf, value as i16);
(self.output)(&[INT8, buf[0]])
} else if value >= 0 && value <= u8::max_value() as i64 {
let mut buf = [0; U16_BYTES];
LittleEndian::write_i16(&mut buf, value as i16);
(self.output)(&[UINT8, buf[0]])
} else if value >= i16::min_value() as i64 && value <= i16::max_value() as i64 {
let mut buf = [INT16; U16_BYTES + 1];
BigEndian::write_i16(&mut buf[1..], value as i16);
(self.output)(&buf)
} else if value >= 0 && value <= u16::max_value() as i64 {
let mut buf = [UINT16; U16_BYTES + 1];
BigEndian::write_u16(&mut buf[1..], value as u16);
(self.output)(&buf)
} else if value >= i32::min_value() as i64 && value <= i32::max_value() as i64 {
let mut buf = [INT32; U32_BYTES + 1];
BigEndian::write_i32(&mut buf[1..], value as i32);
(self.output)(&buf)
} else if value >= 0 && value <= u32::max_value() as i64 {
            let mut buf = [UINT32; U32_BYTES + 1];
BigEndian::write_u32(&mut buf[1..], value as u32);
(self.output)(&buf)
} else {
let mut buf = [INT64; U64_BYTES + 1];
BigEndian::write_i64(&mut buf[1..], value);
(self.output)(&buf)
}
}
fn serialize_unsigned(&mut self, value: u64) -> Result<(), Error> {
if value <= FIXINT_MAX as u64 {
(self.output)(&[value as u8])
} else if value <= u8::max_value() as u64 {
(self.output)(&[UINT8, value as u8])
} else if value <= u16::max_value() as u64 {
let mut buf = [UINT16; U16_BYTES + 1];
BigEndian::write_u16(&mut buf[1..], value as u16);
(self.output)(&buf)
} else if value <= u32::max_value() as u64 {
let mut buf = [UINT32; U32_BYTES + 1];
BigEndian::write_u32(&mut buf[1..], value as u32);
(self.output)(&buf)
} else {
let mut buf = [UINT64; U64_BYTES + 1];
BigEndian::write_u64(&mut buf[1..], value);
(self.output)(&buf)
}
}
fn serialize_bool(&mut self, value: bool) -> Result<(), Error> {
if value {
(self.output)(&[TRUE])
} else {
(self.output)(&[FALSE])
}
}
fn serialize_f32(&mut self, value: f32) -> Result<(), Error> {
let mut buf = [FLOAT32; U32_BYTES + 1];
BigEndian::write_f32(&mut buf[1..], value);
(self.output)(&buf)
}
fn serialize_f64(&mut self, value: f64) -> Result<(), Error> {
let mut buf = [FLOAT64; U64_BYTES + 1];
BigEndian::write_f64(&mut buf[1..], value);
(self.output)(&buf)
}
fn serialize_bytes(&mut self, value: &[u8]) -> Result<(), Error> {
if value.len() <= MAX_BIN8 {
try!((self.output)(&[BIN8, value.len() as u8]));
} else if value.len() <= MAX_BIN16 {
let mut buf = [BIN16; U16_BYTES + 1];
BigEndian::write_u16(&mut buf[1..], value.len() as u16);
try!((self.output)(&buf));
} else if value.len() <= MAX_BIN32 {
let mut buf = [BIN32; U32_BYTES + 1];
BigEndian::write_u32(&mut buf[1..], value.len() as u32);
try!((self.output)(&buf));
} else {
return Err(Error::TooBig);
}
(self.output)(value)
}
fn serialize_str(&mut self, value: &str) -> Result<(), Error> {
if value.len() <= MAX_FIXSTR {
try!((self.output)(&[value.len() as u8 | FIXSTR_MASK]));
} else if value.len() <= MAX_STR8 {
try!((self.output)(&[STR8, value.len() as u8]));
} else if value.len() <= MAX_STR16 {
let mut buf = [STR16; U16_BYTES + 1];
BigEndian::write_u16(&mut buf[1..], value.len() as u16);
try!((self.output)(&buf));
} else if value.len() <= MAX_STR32 {
let mut buf = [STR32; U32_BYTES + 1];
BigEndian::write_u32(&mut buf[1..], value.len() as u32);
try!((self.output)(&buf));
} else {
return Err(Error::TooBig);
}
(self.output)(value.as_bytes())
}
fn serialize_unit(&mut self) -> Result<(), Error> {
(self.output)(&[NIL])
}
fn serialize_variant(&mut self, variant_index: u32) -> Result<(), Error> {
// Serialize variants as two-tuples with the variant index and its contents.
        // Because msgpack values are self-delimiting, we don't have to track
        // the variant once the array header has been written.
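        // For example, with the standard msgpack constants in `defs`, variant
        // index 1 starts as [0x92, 0x01]: a fixarray(2) header, then fixint 1.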
// start a two element array
(self.output)(&[2u8 | FIXARRAY_MASK])?;
// encode the variant and done
self.serialize_unsigned(variant_index as u64)
}
}
impl<'a, F: 'a + FnMut(&[u8]) -> Result<(), Error>> serde::Serializer for &'a mut Serializer<F> {
type Ok = ();
type Error = Error;
type SerializeSeq = SeqSerializer<'a, F>;
type SerializeTuple = Self::SerializeSeq;
type SerializeTupleStruct = Self::SerializeTuple;
type SerializeTupleVariant = Self::SerializeTuple;
type SerializeMap = MapSerializer<'a, F>;
type SerializeStruct = Self::SerializeMap;
type SerializeStructVariant = Self::SerializeMap;
fn serialize_seq(self, size: Option<usize>) -> result::Result<Self::SerializeSeq, Self::Error> {
let mut seq = SeqSerializer::new(&mut self.output);
seq.hint_size(size)?;
Ok(seq)
}
fn serialize_map(self, size: Option<usize>) -> result::Result<Self::SerializeMap, Self::Error> {
let mut map = MapSerializer::new(&mut self.output);
map.hint_size(size)?;
Ok(map)
}
fn serialize_bool(self, v: bool) -> Result<(), Error> {
Serializer::serialize_bool(self, v)
}
fn serialize_i64(self, value: i64) -> Result<(), Error> {
Serializer::serialize_signed(self, value)
}
fn serialize_u64(self, value: u64) -> Result<(), Error> {
Serializer::serialize_unsigned(self, value)
}
fn serialize_f32(self, value: f32) -> Result<(), Error> {
Serializer::serialize_f32(self, value)
}
fn serialize_f64(self, value: f64) -> Result<(), Error> {
Serializer::serialize_f64(self, value)
}
fn serialize_bytes(self, value: &[u8]) -> Result<(), Error> {
Serializer::serialize_bytes(self, value)
}
fn serialize_str(self, value: &str) -> Result<(), Error> {
Serializer::serialize_str(self, value)
}
fn serialize_unit(self) -> Result<(), Error> {
Serializer::serialize_unit(self)
}
fn serialize_i8(self, value: i8) -> Result<(), Error> {
Serializer::serialize_signed(self, value as i64)
}
fn serialize_i16(self, value: i16) -> Result<(), Error> {
Serializer::serialize_signed(self, value as i64)
}
fn serialize_i32(self, value: i32) -> Result<(), Error> {
Serializer::serialize_signed(self, value as i64)
}
fn serialize_u8(self, value: u8) -> Result<(), Error> {
Serializer::serialize_unsigned(self, value as u64)
}
fn serialize_u16(self, value: u16) -> Result<(), Error> {
Serializer::serialize_unsigned(self, value as u64)
}
fn serialize_u32(self, value: u32) -> Result<(), Error> {
Serializer::serialize_unsigned(self, value as u64)
}
fn serialize_char(self, v: char) -> Result<(), Error> {
let mut buf = [0; 4];
self.serialize_str(v.encode_utf8(&mut buf))
}
fn serialize_unit_struct(self, _: &'static str) -> Result<(), Error> {
self.serialize_unit()
}
fn serialize_unit_variant(self,
_: &'static str,
index: u32,
_: &'static str)
-> Result<(), Error> {
self.serialize_variant(index)?;
self.serialize_unit()
}
fn serialize_newtype_struct<T>(self, _: &'static str, value: &T) -> Result<(), Error>
where T: ?Sized + serde::Serialize
{
// serialize newtypes directly
value.serialize(self)
}
fn serialize_newtype_variant<T>(self,
name: &'static str,
variant_index: u32,
_: &'static str,
value: &T)
-> Result<(), Error>
where T: ?Sized + serde::Serialize
{
self.serialize_variant(variant_index)?;
self.serialize_newtype_struct(name, value)
}
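    // Option is modelled as a tuple carrying a presence flag: `None`
    // serializes as (false,) and `Some(v)` as (true, v).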
fn serialize_none(self) -> Result<(), Error> {
(false,).serialize(self)
}
fn serialize_some<V>(self, value: &V) -> Result<(), Self::Error>
where V: ?Sized + serde::Serialize
{
(true, value).serialize(self)
}
fn serialize_tuple(self, len: usize) -> result::Result<Self::SerializeTuple, Self::Error> {
self.serialize_seq(Some(len))
}
fn serialize_tuple_struct(self,
_: &'static str,
len: usize)
-> result::Result<Self::SerializeTupleStruct, Self::Error> {
self.serialize_tuple(len)
}
fn serialize_tuple_variant(self,
name: &'static str,
index: u32,
_: &'static str,
len: usize)
-> result::Result<Self::SerializeTupleVariant, Self::Error> {
self.serialize_variant(index)?;
self.serialize_tuple_struct(name, len)
}
fn serialize_struct(self,
_: &'static str,
len: usize)
-> result::Result<Self::SerializeStruct, Self::Error> {
self.serialize_map(Some(len))
}
fn serialize_struct_variant(self,
name: &'static str,
index: u32,
_: &'static str,
len: usize)
-> result::Result<Self::SerializeStructVariant, Self::Error> {
self.serialize_variant(index)?;
self.serialize_struct(name, len)
}
}
#[cfg(test)]
mod test {
use std::collections::BTreeMap;
#[test]
fn positive_fixint_test() {
let v: u8 = 23;
assert_eq!(::to_bytes(v).unwrap(), &[0x17]);
}
#[test]
fn negative_fixint_test() {
let v: i8 = -5;
assert_eq!(::to_bytes(v).unwrap(), &[0xfb]);
}
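    // Negative fixints cover -32..=-1 and store the value's two's complement
    // in a single byte, hence -5 => 0xfb.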
#[test]
fn uint8_test() {
let v: u8 = 154;
assert_eq!(::to_bytes(v).unwrap(), &[0xcc, 0x9a]);
}
#[test]
fn fixstr_test() {
let s: &str = "Hello World!";
assert_eq!(::to_bytes(s).unwrap(),
&[0xac, 0x48, 0x65, 0x6c, 0x6c, 0x6f, 0x20, 0x57, 0x6f, 0x72, 0x6c, 0x64, 0x21]);
}
#[test]
fn str8_test() {
let s: &str = "The quick brown fox jumps over the lazy dog";
let mut fixture: Vec<u8> = vec![];
fixture.push(0xd9);<|fim▁hole|> fixture.push(s.len() as u8);
fixture.extend_from_slice(s.as_bytes());
assert_eq!(::to_bytes(s).unwrap(), fixture);
}
#[test]
fn fixarr_test() {
let v: Vec<u8> = vec![5, 8, 20, 231];
assert_eq!(::to_bytes(v).unwrap(),
&[0x94, 0x05, 0x08, 0x14, 0xcc, 0xe7]);
}
#[test]
fn array16_test() {
let v: Vec<isize> = vec![-5, 16, 101, -45, 184, 89, 62, -233, -33, 304, 76, 90, 23, 108,
45, -3, 2];
assert_eq!(::to_bytes(v).unwrap(),
&[0xdc, 0x00, 0x11, 0xfb, 0x10, 0x65, 0xd0, 0xd3, 0xcc, 0xb8, 0x59, 0x3e,
0xd1, 0xff, 0x17, 0xd0, 0xdf, 0xd1, 0x01, 0x30, 0x4c, 0x5a, 0x17, 0x6c,
0x2d, 0xfd, 0x02]);
}
#[test]
fn fixmap_test() {
let mut map: BTreeMap<String, usize> = BTreeMap::new();
map.insert("one".into(), 1);
map.insert("two".into(), 2);
map.insert("three".into(), 3);
assert_eq!(::to_bytes(map).unwrap(),
&[0x83, 0xa3, 0x6f, 0x6e, 0x65, 0x01, 0xa5, 0x74, 0x68, 0x72, 0x65, 0x65,
0x03, 0xa3, 0x74, 0x77, 0x6f, 0x02]);
}
}<|fim▁end|> | |
<|file_name|>test_vxlan.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import socket
import unittest
from framework import VppTestCase, VppTestRunner
from template_bd import BridgeDomain<|fim▁hole|>from scapy.layers.l2 import Ether
from scapy.layers.inet import IP, UDP
from scapy.layers.vxlan import VXLAN
from scapy.utils import atol
class TestVxlan(BridgeDomain, VppTestCase):
""" VXLAN Test Case """
def __init__(self, *args):
BridgeDomain.__init__(self)
VppTestCase.__init__(self, *args)
def encapsulate(self, pkt, vni):
"""
Encapsulate the original payload frame by adding VXLAN header with its
UDP, IP and Ethernet fields
"""
return (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) /
IP(src=self.pg0.remote_ip4, dst=self.pg0.local_ip4) /
UDP(sport=self.dport, dport=self.dport, chksum=0) /
VXLAN(vni=vni, flags=self.flags) /
pkt)
def encap_mcast(self, pkt, src_ip, src_mac, vni):
"""
Encapsulate the original payload frame by adding VXLAN header with its
UDP, IP and Ethernet fields
"""
return (Ether(src=src_mac, dst=self.mcast_mac4) /
IP(src=src_ip, dst=self.mcast_ip4) /
UDP(sport=self.dport, dport=self.dport, chksum=0) /
VXLAN(vni=vni, flags=self.flags) /
pkt)
def decapsulate(self, pkt):
"""
Decapsulate the original payload frame by removing VXLAN header
"""
# check if is set I flag
self.assertEqual(pkt[VXLAN].flags, int('0x8', 16))
return pkt[VXLAN].payload
# Method for checking VXLAN encapsulation.
#
def check_encapsulation(self, pkt, vni, local_only=False):
# TODO: add error messages
# Verify source MAC is VPP_MAC and destination MAC is MY_MAC resolved
# by VPP using ARP.
self.assertEqual(pkt[Ether].src, self.pg0.local_mac)
if not local_only:
self.assertEqual(pkt[Ether].dst, self.pg0.remote_mac)
# Verify VXLAN tunnel source IP is VPP_IP and destination IP is MY_IP.
self.assertEqual(pkt[IP].src, self.pg0.local_ip4)
if not local_only:
self.assertEqual(pkt[IP].dst, self.pg0.remote_ip4)
# Verify UDP destination port is VXLAN 4789, source UDP port could be
# arbitrary.
self.assertEqual(pkt[UDP].dport, type(self).dport)
# TODO: checksum check
# Verify VNI
self.assertEqual(pkt[VXLAN].vni, vni)
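    # ip4_range relies on Python 2 byte strings: it keeps the first three
    # bytes of the packed address and appends each byte of range(s, e),
    # yielding consecutive IPv4 addresses in network byte order.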
@staticmethod
def ip4_range(ip4n, s=10, e=20):
base = str(bytearray(ip4n)[:3])
return ((base + ip) for ip in str(bytearray(range(s, e))))
@classmethod
def create_vxlan_flood_test_bd(cls, vni):
# Create 10 ucast vxlan tunnels under bd
ip_range_start = 10
ip_range_end = 20
next_hop_address = cls.pg0.remote_ip4n
for dest_addr in cls.ip4_range(next_hop_address, ip_range_start,
ip_range_end):
# add host route so dest_addr will not be resolved
cls.vapi.ip_add_del_route(dest_addr, 32, next_hop_address)
r = cls.vapi.vxlan_add_del_tunnel(
src_addr=cls.pg0.local_ip4n,
dst_addr=dest_addr,
vni=vni)
cls.vapi.sw_interface_set_l2_bridge(r.sw_if_index, bd_id=vni)
@classmethod
def add_del_mcast_load(cls, is_add):
ip_range_start = 10
ip_range_end = 210
for dest_addr in cls.ip4_range(cls.mcast_ip4n, ip_range_start,
ip_range_end):
vni = bytearray(dest_addr)[3]
cls.vapi.vxlan_add_del_tunnel(
src_addr=cls.pg0.local_ip4n,
dst_addr=dest_addr,
mcast_sw_if_index=1,
vni=vni,
is_add=is_add)
@classmethod
def add_mcast_load(cls):
cls.add_del_mcast_load(is_add=1)
@classmethod
def del_mcast_load(cls):
cls.add_del_mcast_load(is_add=0)
# Class method to start the VXLAN test case.
# Overrides setUpClass method in VppTestCase class.
# Python try..except statement is used to ensure that the tear down of
# the class will be executed even if exception is raised.
# @param cls The class pointer.
@classmethod
def setUpClass(cls):
super(TestVxlan, cls).setUpClass()
try:
cls.dport = 4789
cls.flags = 0x8
            # Create 4 pg interfaces.
cls.create_pg_interfaces(range(4))
for pg in cls.pg_interfaces:
pg.admin_up()
# Configure IPv4 addresses on VPP pg0.
cls.pg0.config_ip4()
# Resolve MAC address for VPP's IP address on pg0.
cls.pg0.resolve_arp()
# Our Multicast address
cls.mcast_ip4 = '239.1.1.1'
cls.mcast_ip4n = socket.inet_pton(socket.AF_INET, cls.mcast_ip4)
iplong = atol(cls.mcast_ip4)
cls.mcast_mac4 = "01:00:5e:%02x:%02x:%02x" % (
(iplong >> 16) & 0x7F, (iplong >> 8) & 0xFF, iplong & 0xFF)
# Create VXLAN VTEP on VPP pg0, and put vxlan_tunnel0 and pg1
# into BD.
cls.single_tunnel_bd = 1
r = cls.vapi.vxlan_add_del_tunnel(
src_addr=cls.pg0.local_ip4n,
dst_addr=cls.pg0.remote_ip4n,
vni=cls.single_tunnel_bd)
cls.vapi.sw_interface_set_l2_bridge(r.sw_if_index,
bd_id=cls.single_tunnel_bd)
cls.vapi.sw_interface_set_l2_bridge(cls.pg1.sw_if_index,
bd_id=cls.single_tunnel_bd)
# Setup vni 2 to test multicast flooding
cls.mcast_flood_bd = 2
cls.create_vxlan_flood_test_bd(cls.mcast_flood_bd)
r = cls.vapi.vxlan_add_del_tunnel(
src_addr=cls.pg0.local_ip4n,
dst_addr=cls.mcast_ip4n,
mcast_sw_if_index=1,
vni=cls.mcast_flood_bd)
cls.vapi.sw_interface_set_l2_bridge(r.sw_if_index,
bd_id=cls.mcast_flood_bd)
cls.vapi.sw_interface_set_l2_bridge(cls.pg2.sw_if_index,
bd_id=cls.mcast_flood_bd)
# Add and delete mcast tunnels to check stability
cls.add_mcast_load()
cls.del_mcast_load()
# Setup vni 3 to test unicast flooding
cls.ucast_flood_bd = 3
cls.create_vxlan_flood_test_bd(cls.ucast_flood_bd)
cls.vapi.sw_interface_set_l2_bridge(cls.pg3.sw_if_index,
bd_id=cls.ucast_flood_bd)
except Exception:
super(TestVxlan, cls).tearDownClass()
raise
# Method to define VPP actions before tear down of the test case.
# Overrides tearDown method in VppTestCase class.
# @param self The object pointer.
def tearDown(self):
super(TestVxlan, self).tearDown()
if not self.vpp_dead:
self.logger.info(self.vapi.cli("show bridge-domain 1 detail"))
self.logger.info(self.vapi.cli("show bridge-domain 2 detail"))
self.logger.info(self.vapi.cli("show bridge-domain 3 detail"))
self.logger.info(self.vapi.cli("show vxlan tunnel"))
if __name__ == '__main__':
unittest.main(testRunner=VppTestRunner)<|fim▁end|> | |
<|file_name|>cache.go<|end_file_name|><|fim▁begin|>package gocdn
import (
"io/ioutil"
"os"
"path"
"log"
)
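// cacheFile writes data to fileName, creating any missing parent
// directories first.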
func cacheFile(fileName string, data []byte) (err error) {
fileName = path.Clean(fileName)
dir := path.Dir(fileName)
if err = os.MkdirAll(dir, os.FileMode(0775)); err != nil {<|fim▁hole|>
if err = ioutil.WriteFile(fileName, data, 0644); err != nil {
log.Printf("Could not write file: %s", dir)
return
}
return
}<|fim▁end|> | log.Printf("Could not create directory: %s", dir)
return
} |
<|file_name|>matege2015p.js<|end_file_name|><|fim▁begin|>if(!window.nabor)<|fim▁hole|> adres:'../zdn/matege2015p/',
name:'matege2015p',
prefix:'',
});<|fim▁end|> | window.nabor={};
window.nabor.importFrom({
nZad:14, |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub use self::manager::ContextManager;
pub use self::manager::ViewContext;
pub use self::manager::ViewContextMut;
mod manager;
//mod proxies;
//use mopa;
use std::collections::HashMap;
use store::StoreValueStatic;
use {
Store,
StoreValue,
AttributeGetResult,
AttributeMutResult,
AttributeSetResult
};
use lookup::PropertyAccessor;
/// This trait is used to provide a possible interface for Context
/// objects managed by the `ContextManager`. It is implemented by
/// the `AmbientModel` to give an example of such a `Context`.
/// **Note:**
/// If the "Context" type for the `ContextManager` implement this trait,
/// then those function can be used also on the `ContextManager`.
pub trait Context {
/// Register a single value at the key
fn register_value<V: Into<StoreValueStatic>>(&mut self, key: String, value: V);
/// Register a store:
fn register_store<S: Store>(&mut self, key: String, store: S);
/// Return a previously registered store:
/// This can be useful when you want to modify an existing store but without
/// retaining a reference to it.
fn get_store_mut(&mut self, key: String) -> Option<&mut Box<Store + 'static>>;
}
/// Default version of the `ContextManager` where the template
/// parameter is set to `AmbientModel`.
pub type DefaultContextManager = ContextManager<AmbientModel, AmbientModel>;
/// An `AmbientModel` instance is a root object that is used
/// by the `DefaultContextManager`.
/// Internally it uses one HashMap for single `StoreValue`s
/// and another HashMap for boxed types implementing the trait `Store`.
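///
/// A possible usage sketch (assuming a `From<String>` impl exists for
/// `StoreValueStatic`):
///
/// ```ignore
/// let mut model = AmbientModel::default();
/// model.register_value("title".to_string(), "hello".to_string());
/// ```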
#[derive(Default)]
pub struct AmbientModel {
values: HashMap<String, StoreValueStatic>,
stores: HashMap<String, Box<Store>>,
}
/// Minimal constraint to be used in a `ContextManager`:
/// implement the trait `Store`.
impl Store for AmbientModel {
fn get_attribute<'a>(&'a self, k: PropertyAccessor) -> AttributeGetResult<'a> {
let value = self.stores.get_attribute(k.clone());
if value.is_found() {
value
} else {
self.values.get_attribute(k)
}
}
fn get_attribute_mut<'a>(&'a mut self, k: PropertyAccessor) -> AttributeMutResult<'a> {
let value = self.stores.get_attribute_mut(k.clone());
if value.is_found() {
value
} else {
self.values.get_attribute_mut(k)
}
}
fn set_attribute<'a>(&mut self, k: PropertyAccessor, value: StoreValue<'a>) -> AttributeSetResult<'a> {
match self.stores.set_attribute(k.clone(), value) {
AttributeSetResult::NoSuchProperty(v) => {
self.values.set_attribute(k, v)
}
_ => AttributeSetResult::Stored
}
}
}
// Context implementation
impl Context for AmbientModel {<|fim▁hole|>
fn register_value<V: Into<StoreValueStatic>>(&mut self, key: String, value: V) {
self.values.insert(key, value.into());
}
fn register_store<S: Store + 'static>(&mut self, key: String, store: S) {
self.stores.insert(key, Box::new(store) as Box<Store>);
}
fn get_store_mut(&mut self, key: String) -> Option<&mut Box<Store + 'static>> {
self.stores.get_mut(&key)
}
}<|fim▁end|> | |
<|file_name|>errors.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 Mozilla
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the<|fim▁hole|>// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
use std; // To refer to std::result::Result.
use rusqlite;
use mentat_core::{
ValueTypeSet,
};
use mentat_db;
use mentat_query::{
PlainSymbol,
};
use mentat_query_pull;
use aggregates::{
SimpleAggregationOp,
};
#[macro_export]
macro_rules! bail {
($e:expr) => (
return Err($e.into());
)
}
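// Example: `bail!(ProjectorError::NotYetImplemented("pull".to_string()))`
// expands to an early `return Err(...)` from the enclosing function.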
pub type Result<T> = std::result::Result<T, ProjectorError>;
#[derive(Debug, Fail)]
pub enum ProjectorError {
/// We're just not done yet. Message that the feature is recognized but not yet
/// implemented.
#[fail(display = "not yet implemented: {}", _0)]
NotYetImplemented(String),
#[fail(display = "no possible types for value provided to {:?}", _0)]
CannotProjectImpossibleBinding(SimpleAggregationOp),
#[fail(display = "cannot apply projection operation {:?} to types {:?}", _0, _1)]
CannotApplyAggregateOperationToTypes(SimpleAggregationOp, ValueTypeSet),
#[fail(display = "invalid projection: {}", _0)]
InvalidProjection(String),
#[fail(display = "cannot project unbound variable {:?}", _0)]
UnboundVariable(PlainSymbol),
#[fail(display = "cannot find type for variable {:?}", _0)]
NoTypeAvailableForVariable(PlainSymbol),
#[fail(display = "expected {}, got {}", _0, _1)]
UnexpectedResultsType(&'static str, &'static str),
#[fail(display = "expected tuple of length {}, got tuple of length {}", _0, _1)]
UnexpectedResultsTupleLength(usize, usize),
#[fail(display = "min/max expressions: {} (max 1), corresponding: {}", _0, _1)]
AmbiguousAggregates(usize, usize),
// It would be better to capture the underlying `rusqlite::Error`, but that type doesn't
// implement many useful traits, including `Clone`, `Eq`, and `PartialEq`.
#[fail(display = "SQL error: {}", _0)]
RusqliteError(String),
#[fail(display = "{}", _0)]
DbError(#[cause] mentat_db::DbError),
#[fail(display = "{}", _0)]
PullError(#[cause] mentat_query_pull::PullError),
}
impl From<rusqlite::Error> for ProjectorError {
fn from(error: rusqlite::Error) -> ProjectorError {
ProjectorError::RusqliteError(error.to_string())
}
}
impl From<mentat_db::DbError> for ProjectorError {
fn from(error: mentat_db::DbError) -> ProjectorError {
ProjectorError::DbError(error)
}
}
impl From<mentat_query_pull::PullError> for ProjectorError {
fn from(error: mentat_query_pull::PullError) -> ProjectorError {
ProjectorError::PullError(error)
}
}<|fim▁end|> | // License at http://www.apache.org/licenses/LICENSE-2.0 |
<|file_name|>helpers.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from itertools import product
<|fim▁hole|>import requests
import shutil
def api_list(apiargs):
"""Google Street View Image API results.
Constructs a list of `Google Street View Image API queries <https://developers.google.com/maps/documentation/streetview/>`_
from a dictionary.
Args:
    apiargs (dict):
Dict containing `street view URL parameters <https://developers.google.com/maps/documentation/streetview/intro>`_.
Each parameter can have multiple values if separated by ``;``.
Returns:
A ``listof dict`` containing single query requests per dictionary for Google Street View Image API.
Examples:
::
# Import google_streetview for the api and helper module
import google_streetview.api
import google_streetview.helpers
# Create a dictionary with multiple parameters separated by ;
apiargs = {
'location': '46.414382,10.013988;40.720032,-73.988354',
'size': '640x300;640x640',
'heading': '0;90;180;270',
'fov': '0;90;120',
'pitch': '-90;0;90'
}
# Get a list of all possible queries from multiple parameters
api_list = google_streetview.helpers.api_list(apiargs)
# Create a results object for all possible queries
results = google_streetview.api.results(api_list)
# Preview results
results.preview()
# Download images to directory 'downloads'
results.download_links('downloads')
# Save metadata
results.save_metadata('metadata.json')
"""
# (api_query) Query combinations for each parameter
api_queries = {}
keywords = [k for k in apiargs]
for k in keywords:
if k in apiargs:
api_queries[k] = apiargs[k].split(';')
apiargs.pop(k, None)
# (api_list) Build list of api requests based on query combinations
out = []
keys = [k for k in api_queries]
queries = [api_queries[k] for k in api_queries]
combinations = product(*queries)
for combo in combinations:
api_copy = apiargs.copy()
for k, parameter in zip(keys, combo):
api_copy[k] = parameter
out.append(api_copy)
return(out)
def download(url, file_path):
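  """Download the file at `url` to `file_path`, streaming the response body."""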
r = requests.get(url, stream=True)
if r.status_code == 200: # if request is successful
with open(file_path, 'wb') as f:
r.raw.decode_content = True
shutil.copyfileobj(r.raw, f)<|fim▁end|> | |
<|file_name|>test_catchup.py<|end_file_name|><|fim▁begin|>import asyncio
import json
import uuid
import pytest
from photonpump import exceptions as exn
from photonpump import messages as msg
from photonpump import messages_pb2 as proto
from photonpump.conversations import CatchupSubscription
from ..fakes import TeeQueue
async def anext(it, count=1):
if count == 1:
return await asyncio.wait_for(it.anext(), 1)
result = []
while len(result) < count:
result.append(await asyncio.wait_for(it.anext(), 1))
return result
async def reply_to(convo, message, output):
command, payload = message
await convo.respond_to(msg.InboundMessage(uuid.uuid4(), command, payload), output)
def read_as(cls, message):
body = cls()
body.ParseFromString(message.payload)
return body
async def drop_subscription(
convo, output, reason=msg.SubscriptionDropReason.Unsubscribed
):
response = proto.SubscriptionDropped()
response.reason = reason
await convo.respond_to(
msg.InboundMessage(
uuid.uuid4(),
msg.TcpCommand.SubscriptionDropped,
response.SerializeToString(),
),
output,
)
async def confirm_subscription(convo, output_queue=None, event_number=1, commit_pos=1):
response = proto.SubscriptionConfirmation()
response.last_event_number = event_number
response.last_commit_position = commit_pos
await convo.respond_to(
msg.InboundMessage(
uuid.uuid4(),
msg.TcpCommand.SubscriptionConfirmation,
response.SerializeToString(),
),
output_queue,
)
return await convo.result
def event_appeared(
commit_position=1,
prepare_position=1,
event_number=10,
event_id=None,
type="some-event",
data=None,
stream="stream-123",
):
response = proto.StreamEventAppeared()
response.event.event.event_stream_id = stream
response.event.event.event_number = event_number
response.event.event.event_id = (event_id or uuid.uuid4()).bytes_le
response.event.event.event_type = type
response.event.event.data_content_type = msg.ContentType.Json
response.event.event.metadata_content_type = msg.ContentType.Binary
response.event.commit_position = commit_position
response.event.prepare_position = prepare_position
response.event.event.data = json.dumps(data).encode("UTF-8") if data else bytes()
return (msg.TcpCommand.StreamEventAppeared, response.SerializeToString())
class ReadStreamEventsResponseBuilder:
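    """Fluent builder for fake ReadStreamEventsCompleted replies used as server responses in these tests."""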
def __init__(self, stream=None):
self.result = msg.ReadStreamResult.Success
self.next_event_number = 10
self.last_event_number = 9
self.is_end_of_stream = False
self.last_commit_position = 8
self.stream = stream or "some-stream"
self.events = []
def at_end_of_stream(self):
self.is_end_of_stream = True
return self
def with_next_event_number(self, num):
self.next_event_number = num
return self
def with_last_position(self, event_number=9, commit_position=8):
self.last_event_number = event_number
self.last_commit_position = commit_position
return self
def with_event(
self,
event_number=10,
event_id=None,
type="some-event",
data=None,
link_event_number=None,
):
event = proto.ResolvedIndexedEvent()
event.event.event_stream_id = self.stream
event.event.event_number = event_number
event.event.event_id = (event_id or uuid.uuid4()).bytes_le
event.event.event_type = type
event.event.data_content_type = msg.ContentType.Json
event.event.metadata_content_type = msg.ContentType.Binary
event.event.data = json.dumps(data).encode("UTF-8") if data else bytes()
if link_event_number is not None:
event.link.event_number = link_event_number
event.link.event_stream_id = "some-stream-name"
event.link.event_id = uuid.uuid4().bytes_le
event.link.event_type = "$>"
event.link.data_content_type = msg.ContentType.Json
event.link.metadata_content_type = msg.ContentType.Binary
event.link.data = f"{event_number}@{self.stream}".encode("UTF-8")
self.events.append(event)
return self
def build(self):
response = proto.ReadStreamEventsCompleted()
response.result = self.result
response.next_event_number = self.next_event_number
response.last_event_number = self.last_event_number
response.is_end_of_stream = self.is_end_of_stream
response.last_commit_position = self.last_commit_position
response.events.extend(self.events)
return (
msg.TcpCommand.ReadStreamEventsForwardCompleted,
response.SerializeToString(),
)
EMPTY_STREAM_PAGE = (
ReadStreamEventsResponseBuilder(stream="stream-123")
.with_next_event_number(0)
.at_end_of_stream()
.build()
)
@pytest.mark.asyncio
async def test_start_read_phase():
"""
A "catchup" subscription starts by iterating the events in the stream until
it reaches the most recent event.
This is the "Read" phase.
"""
output = TeeQueue()
conversation_id = uuid.uuid4()
convo = CatchupSubscription(
"my-stream", start_from=0, conversation_id=conversation_id
)
await convo.start(output)
[request] = output.items
body = proto.ReadStreamEvents()
body.ParseFromString(request.payload)
assert request.command is msg.TcpCommand.ReadStreamEventsForward
assert body.event_stream_id == "my-stream"
assert body.from_event_number == 0
assert body.resolve_link_tos is True
assert body.require_master is False
assert body.max_count == 100
@pytest.mark.asyncio
async def test_end_of_stream():
"""
During the Read phase, we yield the events to the subscription so that the
user is unaware of the chicanery in the background.
When we reach the end of the stream, we should send a subscribe message to
start the volatile subscription.
"""
convo = CatchupSubscription("my-stream")
output = TeeQueue()
await convo.start(output)
event_1_id = uuid.uuid4()
event_2_id = uuid.uuid4()
response = (
ReadStreamEventsResponseBuilder(stream="stream-123")
.at_end_of_stream()
.with_event(event_id=event_1_id, event_number=32)
.with_event(event_id=event_2_id, event_number=33)
).build()
await reply_to(convo, response, output)
subscription = await convo.result
event_1 = await anext(subscription.events)
event_2 = await anext(subscription.events)
assert event_1.stream == "stream-123"
assert event_1.id == event_1_id
assert event_1.event_number == 32
assert event_2.stream == "stream-123"
assert event_2.id == event_2_id
assert event_2.event_number == 33
@pytest.mark.asyncio
async def test_paging():
"""
During the read phase, we expect to page through multiple batches of
events. In this scenario we have two batches, each of two events.
"""
convo = CatchupSubscription("my-stream")
output = TeeQueue()
await convo.start(output)
await output.get()
event_1_id = uuid.uuid4()
event_2_id = uuid.uuid4()
event_3_id = uuid.uuid4()
event_4_id = uuid.uuid4()
first_response = (
ReadStreamEventsResponseBuilder()
.with_event(event_id=event_1_id, event_number=32)
.with_event(event_id=event_2_id, event_number=33)
.with_next_event_number(34)
).build()
second_response = (
ReadStreamEventsResponseBuilder()
.with_event(event_id=event_3_id, event_number=34)
.with_event(event_id=event_4_id, event_number=35)
).build()
await reply_to(convo, first_response, output)
subscription = await convo.result
event_1 = await anext(subscription.events)
event_2 = await anext(subscription.events)
assert event_1.id == event_1_id
assert event_2.id == event_2_id
reply = await output.get()
body = proto.ReadStreamEvents()
body.ParseFromString(reply.payload)
assert body.from_event_number == 34
await reply_to(convo, second_response, output)
event_3 = await anext(subscription.events)
event_4 = await anext(subscription.events)
assert event_3.id == event_3_id
assert event_4.id == event_4_id
@pytest.mark.asyncio
async def test_subscribes_at_end_of_stream():
"""
When we have read all the events in the stream, we should send a
request to subscribe for new events.
"""
convo = CatchupSubscription("my-stream")
output = TeeQueue()
await convo.start(output)
await output.get()
await reply_to(
convo, ReadStreamEventsResponseBuilder().at_end_of_stream().build(), output
)
reply = await output.get()
payload = proto.SubscribeToStream()
payload.ParseFromString(reply.payload)
assert reply.command == msg.TcpCommand.SubscribeToStream
assert payload.event_stream_id == "my-stream"
assert payload.resolve_link_tos is True
@pytest.mark.asyncio
async def test_should_perform_a_catchup_when_subscription_is_confirmed():
"""
When we have read all the events in the stream, we should send a
request to subscribe for new events.
We should start reading catchup events from the `next_event_number` returned
by the historical event read.
"""
convo = CatchupSubscription("my-stream")
output = TeeQueue()
await convo.start(output)
await reply_to(
convo,
ReadStreamEventsResponseBuilder()
.with_next_event_number(17)
.at_end_of_stream()
.build(),
output,
)
await confirm_subscription(convo, output, event_number=42, commit_pos=40)
[read_historial, subscribe, catch_up] = await output.next_event(3)
assert read_historial.command == msg.TcpCommand.ReadStreamEventsForward
assert subscribe.command == msg.TcpCommand.SubscribeToStream
assert catch_up.command == msg.TcpCommand.ReadStreamEventsForward
payload = proto.ReadStreamEvents()
payload.ParseFromString(catch_up.payload)
assert payload.event_stream_id == "my-stream"
assert payload.from_event_number == 17
@pytest.mark.asyncio
async def test_should_return_catchup_events_before_subscribed_events():
"""
It's possible that the following sequence of events occurs:
* The client reads the last batch of events from a stream containing
50 events.
* The client sends SubscribeToStream
* Event 51 is written to the stream
* The server creates a subscription starting at event 51 and
responds with SubscriptionConfirmed
* Event 52 is written to the stream
* The client receives event 52.
To solve this problem, the client needs to perform an additional read
to fetch any missing events created between the last batch and the
subscription confirmation.
--------------
In this scenario, we read a single event (1) from the end of the stream
and expect to create a subscription.
We receive event 4 immediately on the subscription. We expect that the
client requests missing events.
We receive two pages, of one event each: 3, and 4, and then drop the subscription.
Lastly, we expect that the events are yielded in the correct order
despite being received out of order and that we have no duplicates.
"""
convo = CatchupSubscription("my-stream")
output = TeeQueue()
await convo.start(output)
await output.get()
last_page = (
ReadStreamEventsResponseBuilder()
.at_end_of_stream()
.with_event(event_number=1, type="a")
.build()
)
subscribed_event = event_appeared(event_number=4, type="d")
first_catchup = (
ReadStreamEventsResponseBuilder().with_event(event_number=2, type="b").build()
)
second_catchup = (
ReadStreamEventsResponseBuilder()
.with_event(event_number=3, type="c")
.with_event(event_number=4, type="d")
).build()
await reply_to(convo, last_page, output)
assert (await output.get()).command == msg.TcpCommand.SubscribeToStream
await confirm_subscription(convo, output, event_number=3)
await reply_to(convo, subscribed_event, output)
assert (await output.get()).command == msg.TcpCommand.ReadStreamEventsForward
await reply_to(convo, first_catchup, output)
assert (await output.get()).command == msg.TcpCommand.ReadStreamEventsForward
await reply_to(convo, second_catchup, output)
await drop_subscription(convo, output)
events = []
subscription = await convo.result
async for e in subscription.events:
events.append(e)
assert len(events) == 4
[a, b, c, d] = events
assert a.event_number == 1
assert b.event_number == 2
assert c.event_number == 3
assert d.event_number == 4
@pytest.mark.asyncio
async def test_subscription_dropped_mid_stream():
convo = CatchupSubscription("my-stream")
output = TeeQueue()
empty_page = (
ReadStreamEventsResponseBuilder(stream="stream-123").at_end_of_stream().build()
)
await reply_to(convo, empty_page, output)
await confirm_subscription(convo, output, event_number=10, commit_pos=10)
await reply_to(convo, empty_page, output)
subscription = convo.result.result()
await reply_to(convo, event_appeared(), output)
await drop_subscription(convo, output)
events = [e async for e in subscription.events]
assert len(events) == 1
@pytest.mark.asyncio
async def test_subscription_failure_mid_stream():
output = TeeQueue()
convo = CatchupSubscription("my-stream")
event_id = uuid.uuid4()
await reply_to(convo, EMPTY_STREAM_PAGE, output)
await confirm_subscription(convo, output, event_number=10, commit_pos=10)
await reply_to(convo, EMPTY_STREAM_PAGE, output)
subscription = convo.result.result()
await reply_to(convo, event_appeared(event_id=event_id), output)
await drop_subscription(
convo, output, msg.SubscriptionDropReason.SubscriberMaxCountReached
)
with pytest.raises(exn.SubscriptionFailed):
event = await anext(subscription.events)
assert event.id == event_id
await anext(subscription.events)
@pytest.mark.asyncio<|fim▁hole|> correlation_id = uuid.uuid4()
output = TeeQueue()
convo = CatchupSubscription("my-stream", conversation_id=correlation_id)
await convo.start(output)
await reply_to(convo, EMPTY_STREAM_PAGE, output)
await confirm_subscription(convo, output, event_number=10, commit_pos=10)
await reply_to(convo, EMPTY_STREAM_PAGE, output)
sub = convo.result.result()
await sub.unsubscribe()
[read_historical, subscribe, catch_up, unsubscribe] = output.items
assert unsubscribe.command == msg.TcpCommand.UnsubscribeFromStream
assert unsubscribe.conversation_id == correlation_id
@pytest.mark.asyncio
async def test_subscribe_with_context_manager():
conversation_id = uuid.uuid4()
output = TeeQueue()
convo = CatchupSubscription("my-stream", conversation_id=conversation_id)
await convo.start(output)
# Create a subscription with three events in it
await reply_to(convo, EMPTY_STREAM_PAGE, output)
await confirm_subscription(convo, output, event_number=10, commit_pos=10)
await reply_to(convo, EMPTY_STREAM_PAGE, output)
for i in range(0, 3):
await reply_to(
convo, event_appeared(event_id=uuid.uuid4(), event_number=i), output
)
async with (await convo.result) as subscription:
events_seen = 0
async for _ in subscription.events:
events_seen += 1
if events_seen == 3:
break
# Having exited the context manager it should send
# an unsubscribe message
[read_historical, subscribe, catch_up, unsubscribe] = output.items
assert unsubscribe.command == msg.TcpCommand.UnsubscribeFromStream
assert unsubscribe.conversation_id == conversation_id
@pytest.mark.asyncio
async def test_restart_from_historical():
"""
If we ask the conversation to start again while we're reading historical events
we should re-send the most recent page request.
In this scenario, we start reading the stream at event 10, we receive a
page with 2 events, we request the next page starting at 12.
When we restart the conversation, we should again request the page starting at 12.
"""
conversation_id = uuid.uuid4()
output = TeeQueue()
convo = CatchupSubscription(
"my-stream", start_from=10, conversation_id=conversation_id
)
await convo.start(output)
await reply_to(
convo,
(
ReadStreamEventsResponseBuilder(stream="stream-123")
.with_event(event_number=10)
.with_event(event_number=11)
.with_next_event_number(12)
.build()
),
output,
)
await convo.start(output)
[first_page, second_page, second_page_again] = [
read_as(proto.ReadStreamEvents, m) for m in output.items
]
assert second_page.from_event_number == second_page_again.from_event_number
@pytest.mark.asyncio
async def test_restart_from_catchup():
"""
If the connection drops during the catchup phase, we need to unsubscribe
from the stream and then go back to reading historical events starting from
the last page.
=> Request historical events
<= Receive 1 event, next_event = 1
=> Subscribe
<= Confirmed
=> Catch up from 1
** Restart **
=> Unsubscribe
<= Confirmed
=> Read historical from 1
<= Empty page
=> Subscribe
"""
conversation_id = uuid.uuid4()
output = TeeQueue()
convo = CatchupSubscription("my-stream", conversation_id=conversation_id)
await convo.start(output)
await output.get()
page_one = (
ReadStreamEventsResponseBuilder()
.with_event(event_number=1)
.with_next_event_number(1)
.at_end_of_stream()
.build()
)
await reply_to(convo, page_one, output)
await output.get()
await confirm_subscription(convo, output, event_number=10, commit_pos=10)
first_catch_up = read_as(proto.ReadStreamEvents, await output.get())
await reply_to(convo, page_one, output)
# Restart
await convo.start(output)
unsubscribe = await output.get()
assert first_catch_up.from_event_number == 1
assert unsubscribe.command == msg.TcpCommand.UnsubscribeFromStream
await drop_subscription(convo, output)
second_catchup = read_as(proto.ReadStreamEvents, await output.get())
assert second_catchup.from_event_number == 1
@pytest.mark.asyncio
async def test_historical_duplicates():
"""
It's possible that we receive the reply to a ReadStreamEvents request after we've
resent the request. This will result in our receiving a duplicate page.
In this instance, we should not raise duplicate events.
=> Request historical
RESTART
=> Request historical
<= 2 events
<= 3 events
Should only see the 3 unique events
"""
two_events = (
ReadStreamEventsResponseBuilder()
.with_event(event_number=1)
.with_event(event_number=2)
.with_next_event_number(2)
.at_end_of_stream()
.build()
)
three_events = (
ReadStreamEventsResponseBuilder()
.with_event(event_number=1)
.with_event(event_number=2)
.with_event(event_number=3)
.with_next_event_number(3)
.at_end_of_stream()
.build()
)
output = TeeQueue()
convo = CatchupSubscription("my-stream")
await convo.start(output)
await convo.start(output)
await reply_to(convo, two_events, output)
await reply_to(convo, three_events, output)
[event_1, event_2, event_3] = await anext(convo.subscription.events, 3)
assert event_1.event_number == 1
assert event_2.event_number == 2
assert event_3.event_number == 3
@pytest.mark.asyncio
async def test_subscription_duplicates():
"""
If we receive subscription events while catching up, we buffer them internally.
If we restart the conversation at that point we need to make sure we clear our buffer
and do not raise duplicate events.
=> Request historical
<= Empty
=> Subscribe to stream
<= Confirmed
=> Request catchup
<= Subscribed event 2 appeared
<= Event 1, not end of stream
RESTART
=> Drop subscription
<= Dropped
=> Request historical from_event = 1
<= Receive event 2 at end of stream
=> Subscribe
<= Confirmed
=> Catchup
<= Subscribed event 3 appeared
<= Empty
Should yield [event 1, event 2, event 3]
"""
event_1_not_end_of_stream = (
ReadStreamEventsResponseBuilder()
.with_event(event_number=1)
.with_next_event_number(2)
.build()
)
event_2_at_end_of_stream = (
ReadStreamEventsResponseBuilder()
.with_event(event_number=2)
.with_next_event_number(2)
.at_end_of_stream()
.build()
)
output = TeeQueue()
convo = CatchupSubscription("my-stream")
await convo.start(output)
await reply_to(convo, EMPTY_STREAM_PAGE, output)
await confirm_subscription(convo, output, event_number=10, commit_pos=10)
await reply_to(convo, event_appeared(event_number=2), output)
await reply_to(convo, event_1_not_end_of_stream, output)
# RESTART
await convo.start(output)
output.items.clear()
await drop_subscription(convo, output)
second_read_historical = read_as(proto.ReadStreamEvents, output.items[0])
await reply_to(convo, event_2_at_end_of_stream, output)
await confirm_subscription(convo, output, event_number=10, commit_pos=10)
await reply_to(convo, event_appeared(event_number=3), output)
await reply_to(convo, EMPTY_STREAM_PAGE, output)
[event_1, event_2, event_3] = await anext(convo.subscription.events, 3)
assert event_1.event_number == 1
assert event_2.event_number == 2
assert event_3.event_number == 3
assert second_read_historical.from_event_number == 2
@pytest.mark.asyncio
async def test_live_restart():
"""
If we reset the conversation while we are live, we should first unsubscribe
then start a historical read from the last read event.
=> Read historial
<= empty
=> subscribe
<= confirmed
=> catchup
<= empty
<= event 1 appeared
<= event 2 appeared
RESTART
=> unsubscribe
<= dropped
=> Read historical from 2
"""
output = TeeQueue()
convo = CatchupSubscription("my-stream")
await convo.start(output)
await reply_to(convo, EMPTY_STREAM_PAGE, output)
await confirm_subscription(convo, output, event_number=10, commit_pos=10)
await reply_to(convo, EMPTY_STREAM_PAGE, output)
await reply_to(convo, event_appeared(event_number=1), output)
await reply_to(convo, event_appeared(event_number=2), output)
output.items.clear()
await convo.start(output)
await drop_subscription(convo, output)
[unsubscribe, read_historical] = output.items
read_historical = read_as(proto.ReadStreamEvents, read_historical)
assert unsubscribe.command == msg.TcpCommand.UnsubscribeFromStream
assert read_historical.from_event_number == 2
@pytest.mark.asyncio
async def test_paging_projection():
""" """
convo = CatchupSubscription("my-stream")
output = TeeQueue()
await convo.start(output)
await output.get()
event_1_id = uuid.uuid4()
event_2_id = uuid.uuid4()
event_3_id = uuid.uuid4()
event_4_id = uuid.uuid4()
first_response = (
ReadStreamEventsResponseBuilder()
.with_event(event_id=event_1_id, event_number=0, link_event_number=32)
.with_event(event_id=event_2_id, event_number=0, link_event_number=33)
.with_next_event_number(34)
).build()
second_response = (
ReadStreamEventsResponseBuilder()
.with_event(event_id=event_3_id, event_number=0, link_event_number=34)
.with_event(event_id=event_4_id, event_number=0, link_event_number=35)
).build()
await reply_to(convo, first_response, output)
subscription = await convo.result
event_1 = await anext(subscription.events)
event_2 = await anext(subscription.events)
assert event_1.id == event_1_id
assert event_2.id == event_2_id
reply = await output.get()
body = proto.ReadStreamEvents()
body.ParseFromString(reply.payload)
assert body.from_event_number == 34
await reply_to(convo, second_response, output)
event_3 = await anext(subscription.events)
event_4 = await anext(subscription.events)
assert event_3.id == event_3_id
assert event_4.id == event_4_id<|fim▁end|> | async def test_unsubscription(): |
<|file_name|>custom-input-manager.ts<|end_file_name|><|fim▁begin|>import { inject, injectable } from 'inversify';
import TYPES from '../../di/types';
import * as i from '../../i';
import { RunOptions } from '../../models';
import { IInputConfig } from '../../user-extensibility';<|fim▁hole|>var NestedError = require('nested-error-stacks');
@injectable()
export class CustomInputManager extends BaseInputManager {
constructor(
@inject(TYPES.HandlerService) private handlerService: i.IHandlerService
) {
super();
}
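    /**
     * Resolves the handler module named by the template's input config
     * (relative to the template root) and delegates prompting to it.
     */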
async ask(config: IInputConfig, options: RunOptions): Promise<{ [key: string]: any }> {
try {
const handler: Function = await this.handlerService
.resolveAndLoad(this.tmplRootPath, config.handler);
return handler(config);
} catch (ex) {
throw new NestedError("Error running handler for input configuration", ex);
}
}
}<|fim▁end|> | import { BaseInputManager } from '../base-input-manager'; |
<|file_name|>react_jqxgrid.d.ts<|end_file_name|><|fim▁begin|>import * as React from 'react';
declare class JqxGrid extends React.PureComponent<IGridProps, IState> {
protected static getDerivedStateFromProps(props: IGridProps, state: IState): null | IState;
private _jqx;
private _id;
private _componentSelector;
constructor(props: IGridProps);
componentDidMount(): void;
componentDidUpdate(): void;
render(): React.ReactNode;
setOptions(options: IGridProps): void;
getOptions(option: string): any;
autoresizecolumns(type?: string): void;
autoresizecolumn(dataField: string, type?: string): void;
beginupdate(): void;
clear(): void;
createChart(type: string, dataSource?: any): void;
destroy(): void;
endupdate(): void;
ensurerowvisible(rowBoundIndex: number): void;
focus(): void;
getcolumnindex(dataField: string): number;
getcolumn(dataField: string): IGridGetColumn;
getcolumnproperty(dataField: string, propertyName: string): any;
getrowid(rowBoundIndex: number): string;
getrowdata(rowBoundIndex: number): any;
getrowdatabyid(rowID: string): any;
getrowboundindexbyid(rowID: string): number;
getrowboundindex(rowDisplayIndex: number): number;
getrows(): any[];
getboundrows(): any[];
getdisplayrows(): any[];
getdatainformation(): IGridGetDataInformation;
getsortinformation(): IGridGetSortInformation;
getpaginginformation(): IGridGetPagingInformation;
hidecolumn(dataField: string): void;
hideloadelement(): void;
hiderowdetails(rowBoundIndex: number): void;
iscolumnvisible(dataField: string): boolean;
iscolumnpinned(dataField: string): boolean;
localizestrings(localizationobject: IGridLocalizationobject): void;
pincolumn(dataField: string): void;
refreshdata(): void;
refresh(): void;
renderWidget(): void;
scrolloffset(top: number, left: number): void;
scrollposition(): IGridScrollPosition;
showloadelement(): void;
showrowdetails(rowBoundIndex: number): void;
setcolumnindex(dataField: string, index: number): void;
setcolumnproperty(dataField: string, propertyName: any, propertyValue: any): void;
showcolumn(dataField: string): void;
unpincolumn(dataField: string): void;
updatebounddata(type?: any): void;
updating(): boolean;
getsortcolumn(): string;
removesort(): void;
sortby(dataField: string, sortOrder: string): void;
addgroup(dataField: string): void;
cleargroups(): void;
collapsegroup(group: number | string): void;
collapseallgroups(): void;
<|fim▁hole|> expandallgroups(): void;
expandgroup(group: number | string): void;
getrootgroupscount(): number;
getgroup(groupIndex: number): IGridGetGroup;
insertgroup(groupIndex: number, dataField: string): void;
iscolumngroupable(): boolean;
removegroupat(groupIndex: number): void;
removegroup(dataField: string): void;
addfilter(dataField: string, filterGroup: any, refreshGrid?: boolean): void;
applyfilters(): void;
clearfilters(): void;
getfilterinformation(): any;
getcolumnat(index: number): any;
removefilter(dataField: string, refreshGrid: boolean): void;
refreshfilterrow(): void;
gotopage(pagenumber: number): void;
gotoprevpage(): void;
gotonextpage(): void;
addrow(rowIds: any, data: any, rowPosition?: any): void;
begincelledit(rowBoundIndex: number, dataField: string): void;
beginrowedit(rowBoundIndex: number): void;
closemenu(): void;
deleterow(rowIds: string | number | Array<number | string>): void;
endcelledit(rowBoundIndex: number, dataField: string, confirmChanges: boolean): void;
endrowedit(rowBoundIndex: number, confirmChanges: boolean): void;
getcell(rowBoundIndex: number, datafield: string): IGridGetCell;
getcellatposition(left: number, top: number): IGridGetCell;
getcelltext(rowBoundIndex: number, dataField: string): string;
getcelltextbyid(rowID: string, dataField: string): string;
getcellvaluebyid(rowID: string, dataField: string): any;
getcellvalue(rowBoundIndex: number, dataField: string): any;
isBindingCompleted(): boolean;
openmenu(dataField: string): void;
setcellvalue(rowBoundIndex: number, dataField: string, value: any): void;
setcellvaluebyid(rowID: string, dataField: string, value: any): void;
showvalidationpopup(rowBoundIndex: number, dataField: string, validationMessage: string): void;
updaterow(rowIds: string | number | Array<number | string>, data: any): void;
clearselection(): void;
getselectedrowindex(): number;
getselectedrowindexes(): number[];
getselectedcell(): IGridGetSelectedCell;
getselectedcells(): IGridGetSelectedCell[];
selectcell(rowBoundIndex: number, dataField: string): void;
selectallrows(): void;
selectrow(rowBoundIndex: number): void;
unselectrow(rowBoundIndex: number): void;
unselectcell(rowBoundIndex: number, dataField: string): void;
getcolumnaggregateddata(dataField: string, aggregates: any[]): string;
refreshaggregates(): void;
renderaggregates(): void;
exportdata(dataType: string, fileName?: string, exportHeader?: boolean, rows?: number[], exportHiddenColumns?: boolean, serverURL?: string, charSet?: string): any;
exportview(dataType: string, fileName?: string): any;
openColumnChooser(columns?: any, header?: string): void;
getstate(): IGridGetState;
loadstate(stateobject: any): void;
savestate(): IGridGetState;
private _manageProps;
private _wireEvents;
}
export default JqxGrid;
export declare const jqx: any;
export declare const JQXLite: any;
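// A minimal usage sketch (hypothetical adapter and column definitions; see
// IGridProps below for the full set of supported props):
//   <JqxGrid width={850} source={dataAdapter} columns={columns} />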
interface IState {
lastProps: object;
}
export interface IGridCharting {
appendTo?: string;
colorScheme?: string;
dialog?: (width: number, height: number, header: string, position: any, enabled: boolean) => void;
formatSettings?: any;
ready?: any;
}
export interface IGridColumn {
text?: string;
datafield?: string;
displayfield?: string;
threestatecheckbox?: boolean;
sortable?: boolean;
filterable?: boolean;
filter?: (cellValue?: any, rowData?: any, dataField?: string, filterGroup?: any, defaultFilterResult?: any) => any;
buttonclick?: (row: number) => void;
hideable?: boolean;
hidden?: boolean;
groupable?: boolean;
menu?: boolean;
exportable?: boolean;
columngroup?: string;
enabletooltips?: boolean;
columntype?: 'number' | 'checkbox' | 'button' | 'numberinput' | 'dropdownlist' | 'combobox' | 'datetimeinput' | 'textbox' | 'rating' | 'progressbar' | 'template' | 'custom';
renderer?: (defaultText?: string, alignment?: string, height?: number) => string;
rendered?: (columnHeaderElement?: any) => void;
cellsrenderer?: (row?: number, columnfield?: string, value?: any, defaulthtml?: string, columnproperties?: any, rowdata?: any) => string;
aggregatesrenderer?: (aggregates?: any, column?: any, element?: any, summaryData?: any) => string;
validation?: (cell?: any, value?: number) => any;
createwidget?: (row: any, column: any, value: string, cellElement: any) => void;
initwidget?: (row: number, column: string, value: string, cellElement: any) => void;
createfilterwidget?: (column: any, htmlElement: HTMLElement, editor: any) => void;
createfilterpanel?: (datafield: string, filterPanel: any) => void;
initeditor?: (row: number, cellvalue: any, editor: any, celltext: any, pressedChar: string, callback: any) => void;
createeditor?: (row: number, cellvalue: any, editor: any, celltext: any, cellwidth: any, cellheight: any) => void;
destroyeditor?: (row: number, callback: any) => void;
geteditorvalue?: (row: number, cellvalue: any, editor: any) => any;
cellbeginedit?: (row: number, datafield: string, columntype: string, value: any) => boolean;
cellendedit?: (row: number, datafield: string, columntype: string, oldvalue: any, newvalue: any) => boolean;
cellvaluechanging?: (row: number, datafield: string, columntype: string, oldvalue: any, newvalue: any) => string | void;
createeverpresentrowwidget?: (datafield: string, htmlElement: HTMLElement, popup: any, addRowCallback: any) => any;
initeverpresentrowwidget?: (datafield: string, htmlElement: HTMLElement, popup: any) => void;
reseteverpresentrowwidgetvalue?: (datafield: string, htmlElement: HTMLElement) => void;
geteverpresentrowwidgetvalue?: (datafield: string, htmlElement: HTMLElement) => any;
destroyeverpresentrowwidget?: (htmlElement: HTMLElement) => void;
validateeverpresentrowwidgetvalue?: (datafield: string, value: any, rowValues: any) => boolean | object;
cellsformat?: string;
cellclassname?: any;
aggregates?: any;
align?: 'left' | 'center' | 'right';
cellsalign?: 'left' | 'center' | 'right';
width?: number | string;
minwidth?: any;
maxwidth?: any;
resizable?: boolean;
draggable?: boolean;
editable?: boolean;
classname?: string;
pinned?: boolean;
nullable?: boolean;
filteritems?: any;
filterdelay?: number;
filtertype?: 'textbox' | 'input' | 'checkedlist' | 'list' | 'number' | 'bool' | 'date' | 'range' | 'custom';
    filtercondition?: 'EMPTY' | 'NOT_EMPTY' | 'CONTAINS' | 'CONTAINS_CASE_SENSITIVE' | 'DOES_NOT_CONTAIN' | 'DOES_NOT_CONTAIN_CASE_SENSITIVE' | 'STARTS_WITH' | 'STARTS_WITH_CASE_SENSITIVE' | 'ENDS_WITH' | 'ENDS_WITH_CASE_SENSITIVE' | 'EQUAL' | 'EQUAL_CASE_SENSITIVE' | 'NOT_EQUAL' | 'LESS_THAN' | 'LESS_THAN_OR_EQUAL' | 'GREATER_THAN' | 'GREATER_THAN_OR_EQUAL' | 'NULL' | 'NOT_NULL';
}
export interface IGridSourceDataFields {
name?: string;
type?: 'string' | 'date' | 'int' | 'float' | 'number' | 'bool';
format?: string;
map?: string;
id?: string;
text?: string;
source?: any[];
}
export interface IGridSource {
url?: string;
data?: any;
localdata?: any;
datatype?: 'xml' | 'json' | 'jsonp' | 'tsv' | 'csv' | 'local' | 'array' | 'observablearray';
type?: 'GET' | 'POST';
id?: string;
root?: string;
record?: string;
datafields?: IGridSourceDataFields[];
pagenum?: number;
pagesize?: number;
pager?: (pagenum?: number, pagesize?: number, oldpagenum?: number) => any;
sortcolumn?: string;
sortdirection?: 'asc' | 'desc';
sort?: (column?: any, direction?: any) => void;
filter?: (filters?: any, recordsArray?: any) => void;
addrow?: (rowid?: any, rowdata?: any, position?: any, commit?: boolean) => void;
deleterow?: (rowid?: any, commit?: boolean) => void;
updaterow?: (rowid?: any, newdata?: any, commit?: any) => void;
processdata?: (data: any) => void;
formatdata?: (data: any) => any;
async?: boolean;
totalrecords?: number;
unboundmode?: boolean;
}
export interface IGridGetColumn {
datafield?: string;
displayfield?: string;
text?: string;
sortable?: boolean;
filterable?: boolean;
exportable?: boolean;
editable?: boolean;
groupable?: boolean;
resizable?: boolean;
draggable?: boolean;
classname?: string;
cellclassname?: any;
width?: number | string;
menu?: boolean;
}
export interface IGridGetDataInformation {
rowscount?: string;
sortinformation?: any;
sortcolumn?: any;
sortdirection?: any;
paginginformation?: any;
pagenum?: any;
pagesize?: any;
pagescount?: any;
}
export interface IGridGetSortInformation {
sortcolumn?: string;
sortdirection?: any;
}
export interface IGridGetPagingInformation {
pagenum?: string;
pagesize?: any;
pagescount?: any;
}
export interface IGridDateNaming {
names?: string[];
namesAbbr?: string[];
namesShort?: string[];
}
export interface IGridLocalizationobject {
filterstringcomparisonoperators?: any;
filternumericcomparisonoperators?: any;
filterdatecomparisonoperators?: any;
filterbooleancomparisonoperators?: any;
pagergotopagestring?: string;
pagershowrowsstring?: string;
pagerrangestring?: string;
pagernextbuttonstring?: string;
pagerpreviousbuttonstring?: string;
sortascendingstring?: string;
sortdescendingstring?: string;
sortremovestring?: string;
firstDay?: number;
percentsymbol?: string;
currencysymbol?: string;
currencysymbolposition?: string;
decimalseparator?: string;
thousandsseparator?: string;
days?: IGridDateNaming;
months?: IGridDateNaming;
addrowstring?: string;
updaterowstring?: string;
deleterowstring?: string;
resetrowstring?: string;
everpresentrowplaceholder?: string;
emptydatastring?: string;
}
export interface IGridScrollPosition {
top?: number;
left?: number;
}
export interface IGridGetGroup {
group?: number;
level?: number;
expanded?: number;
subgroups?: number;
subrows?: number;
}
export interface IGridGetCell {
value?: number;
row?: number;
column?: number;
}
export interface IGridGetSelectedCell {
rowindex?: number;
datafield?: string;
}
export interface IGridGetStateColumns {
width?: number | string;
hidden?: boolean;
index?: number;
pinned?: boolean;
groupable?: boolean;
resizable?: boolean;
draggable?: boolean;
text?: string;
align?: string;
cellsalign?: string;
}
export interface IGridGetState {
width?: number | string;
height?: number | string;
pagenum?: number;
pagesize?: number;
pagesizeoptions?: string[];
sortcolumn?: any;
sortdirection?: any;
filters?: any;
groups?: any;
columns?: IGridGetStateColumns;
}
export interface IGridColumnmenuopening {
menu?: any;
datafield?: any;
height?: any;
}
export interface IGridColumnmenuclosing {
menu?: any;
datafield?: any;
height?: any;
}
export interface IGridCellhover {
cellhtmlElement?: any;
x?: any;
y?: any;
}
export interface IGridGroupsrenderer {
text?: string;
group?: number;
expanded?: boolean;
data?: object;
}
export interface IGridGroupcolumnrenderer {
text?: any;
}
export interface IGridHandlekeyboardnavigation {
event?: any;
}
export interface IGridScrollfeedback {
row?: object;
}
export interface IGridFilter {
cellValue?: any;
rowData?: any;
dataField?: string;
filterGroup?: any;
defaultFilterResult?: boolean;
}
export interface IGridRendertoolbar {
toolbar?: any;
}
export interface IGridRenderstatusbar {
statusbar?: any;
}
interface IGridOptions {
altrows?: boolean;
altstart?: number;
altstep?: number;
autoshowloadelement?: boolean;
autoshowfiltericon?: boolean;
autoshowcolumnsmenubutton?: boolean;
showcolumnlines?: boolean;
showrowlines?: boolean;
showcolumnheaderlines?: boolean;
adaptive?: boolean;
adaptivewidth?: number;
clipboard?: boolean;
closeablegroups?: boolean;
columnsmenuwidth?: number;
columnmenuopening?: (menu?: IGridColumnmenuopening['menu'], datafield?: IGridColumnmenuopening['datafield'], height?: IGridColumnmenuopening['height']) => boolean | void;
columnmenuclosing?: (menu?: IGridColumnmenuclosing['menu'], datafield?: IGridColumnmenuclosing['datafield'], height?: IGridColumnmenuclosing['height']) => boolean;
cellhover?: (cellhtmlElement?: IGridCellhover['cellhtmlElement'], x?: IGridCellhover['x'], y?: IGridCellhover['y']) => void;
enablekeyboarddelete?: boolean;
enableellipsis?: boolean;
enablemousewheel?: boolean;
enableanimations?: boolean;
enabletooltips?: boolean;
enablehover?: boolean;
enablebrowserselection?: boolean;
everpresentrowposition?: 'top' | 'bottom' | 'topAboveFilterRow';
everpresentrowheight?: number;
everpresentrowactions?: string;
everpresentrowactionsmode?: 'popup' | 'columns';
filterrowheight?: number;
filtermode?: 'default' | 'excel';
groupsrenderer?: (text?: IGridGroupsrenderer['text'], group?: IGridGroupsrenderer['group'], expanded?: IGridGroupsrenderer['expanded'], data?: IGridGroupsrenderer['data']) => string;
groupcolumnrenderer?: (text?: IGridGroupcolumnrenderer['text']) => string;
groupsexpandedbydefault?: boolean;
handlekeyboardnavigation?: (event: IGridHandlekeyboardnavigation['event']) => boolean;
pagerrenderer?: () => any[];
rtl?: boolean;
showdefaultloadelement?: boolean;
showfiltercolumnbackground?: boolean;
showfiltermenuitems?: boolean;
showpinnedcolumnbackground?: boolean;
showsortcolumnbackground?: boolean;
showsortmenuitems?: boolean;
showgroupmenuitems?: boolean;
showrowdetailscolumn?: boolean;
showheader?: boolean;
showgroupsheader?: boolean;
showaggregates?: boolean;
showgroupaggregates?: boolean;
showeverpresentrow?: boolean;
showfilterrow?: boolean;
showemptyrow?: boolean;
showstatusbar?: boolean;
statusbarheight?: number;
showtoolbar?: boolean;
showfilterbar?: boolean;
filterbarmode?: string;
selectionmode?: 'none' | 'singlerow' | 'multiplerows' | 'multiplerowsextended' | 'singlecell' | 'multiplecells' | 'multiplecellsextended' | 'multiplecellsadvanced' | 'checkbox';
updatefilterconditions?: (type?: string, defaultconditions?: any) => any;
updatefilterpanel?: (filtertypedropdown1?: any, filtertypedropdown2?: any, filteroperatordropdown?: any, filterinputfield1?: any, filterinputfield2?: any, filterbutton?: any, clearbutton?: any, columnfilter?: any, filtertype?: any, filterconditions?: any) => any;
theme?: string;
toolbarheight?: number;
autoheight?: boolean;
autorowheight?: boolean;
columnsheight?: number;
deferreddatafields?: string[];
groupsheaderheight?: number;
groupindentwidth?: number;
height?: number | string;
pagerheight?: number | string;
rowsheight?: number;
scrollbarsize?: number | string;
scrollmode?: 'default' | 'logical' | 'deferred';
scrollfeedback?: (row: IGridScrollfeedback['row']) => string;
width?: string | number;
autosavestate?: boolean;
autoloadstate?: boolean;
columns?: IGridColumn[];
enableSanitize?: boolean;
cardview?: boolean;
cardviewcolumns?: any;
cardheight?: number;
cardsize?: number;
columngroups?: any[];
columnsmenu?: boolean;
columnsresize?: boolean;
columnsautoresize?: boolean;
columnsreorder?: boolean;
charting?: IGridCharting;
disabled?: boolean;
editable?: boolean;
editmode?: 'click' | 'selectedcell' | 'selectedrow' | 'dblclick' | 'programmatic';
filter?: (cellValue?: IGridFilter['cellValue'], rowData?: IGridFilter['rowData'], dataField?: IGridFilter['dataField'], filterGroup?: IGridFilter['filterGroup'], defaultFilterResult?: IGridFilter['defaultFilterResult']) => any;
filterable?: boolean;
groupable?: boolean;
groups?: string[];
horizontalscrollbarstep?: number;
horizontalscrollbarlargestep?: number;
initrowdetails?: (index?: number, parentElement?: any, gridElement?: any, datarecord?: any) => void;
keyboardnavigation?: boolean;
localization?: IGridLocalizationobject;
pagesize?: number;
pagesizeoptions?: Array<number | string>;
pagermode?: 'simple' | 'default' | 'material';
pagerbuttonscount?: number;
pageable?: boolean;
autofill?: boolean;
rowdetails?: boolean;
rowdetailstemplate?: any;
ready?: () => void;
rendered?: (type: any) => void;
renderstatusbar?: (statusbar?: IGridRenderstatusbar['statusbar']) => void;
rendertoolbar?: (toolbar?: IGridRendertoolbar['toolbar']) => void;
rendergridrows?: (params?: any) => any;
sortable?: boolean;
sortmode?: string;
selectedrowindex?: number;
selectedrowindexes?: number[];
source?: IGridSource;
sorttogglestates?: '0' | '1' | '2';
updatedelay?: number;
virtualmode?: boolean;
verticalscrollbarstep?: number;
verticalscrollbarlargestep?: number;
}
export interface IGridProps extends IGridOptions {
className?: string;
style?: React.CSSProperties;
onBindingcomplete?: (e?: Event) => void;
onColumnresized?: (e?: Event) => void;
onColumnreordered?: (e?: Event) => void;
onColumnclick?: (e?: Event) => void;
onCellclick?: (e?: Event) => void;
onCelldoubleclick?: (e?: Event) => void;
onCellselect?: (e?: Event) => void;
onCellunselect?: (e?: Event) => void;
onCellvaluechanged?: (e?: Event) => void;
onCellbeginedit?: (e?: Event) => void;
onCellendedit?: (e?: Event) => void;
onFilter?: (e?: Event) => void;
onGroupschanged?: (e?: Event) => void;
onGroupexpand?: (e?: Event) => void;
onGroupcollapse?: (e?: Event) => void;
onPagechanged?: (e?: Event) => void;
onPagesizechanged?: (e?: Event) => void;
onRowclick?: (e?: Event) => void;
onRowdoubleclick?: (e?: Event) => void;
onRowselect?: (e?: Event) => void;
onRowunselect?: (e?: Event) => void;
onRowexpand?: (e?: Event) => void;
onRowcollapse?: (e?: Event) => void;
onSort?: (e?: Event) => void;
}<|fim▁end|> | |
<|file_name|>config.py<|end_file_name|><|fim▁begin|># Environment configuration
# Copyright (c) 2016, Tieto Corporation
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
#
# Currently static definition, in the future this could be a config file,
# or even common database with host management.
#
import logging
logger = logging.getLogger()<|fim▁hole|>
#
# You can put your settings in cfg.py file with setup_params, devices
# definitions in the format as below. In other case HWSIM cfg will be used.
#
setup_params = {"setup_hw" : "./tests/setup_hw.sh",
"hostapd" : "./tests/hostapd",
"wpa_supplicant" : "./tests/wpa_supplicant",
"iperf" : "iperf",
"wlantest" : "./tests/wlantest",
"wlantest_cli" : "./tests/wlantest_cli",
"country" : "US",
"log_dir" : "/tmp/",
"ipv4_test_net" : "192.168.12.0",
"trace_start" : "./tests/trace_start.sh",
"trace_stop" : "./tests/trace_stop.sh",
"perf_start" : "./tests/perf_start.sh",
"perf_stop" : "./tests/perf_stop.sh"}
#
#devices = [{"hostname": "192.168.254.58", "ifname" : "wlan0", "port": "9877", "name" : "t2-ath9k", "flags" : "AP_HT40 STA_HT40"},
# {"hostname": "192.168.254.58", "ifname" : "wlan1", "port": "9877", "name" : "t2-ath10k", "flags" : "AP_VHT80"},
# {"hostname": "192.168.254.58", "ifname" : "wlan3", "port": "9877", "name" : "t2-intel7260", "flags" : "STA_VHT80"},
# {"hostname": "192.168.254.55", "ifname" : "wlan0, wlan1, wlan2", "port": "", "name" : "t3-monitor"},
# {"hostname": "192.168.254.50", "ifname" : "wlan0", "port": "9877", "name" : "t1-ath9k"},
# {"hostname": "192.168.254.50", "ifname" : "wlan1", "port": "9877", "name" : "t1-ath10k"}]
#
# HWSIM - ifaces available after modprobe mac80211_hwsim
#
devices = [{"hostname": "localhost", "ifname": "wlan0", "port": "9868", "name": "hwsim0", "flags": "AP_VHT80 STA_VHT80"},
{"hostname": "localhost", "ifname": "wlan1", "port": "9878", "name": "hwsim1", "flags": "AP_VHT80 STA_VHT80"},
{"hostname": "localhost", "ifname": "wlan2", "port": "9888", "name": "hwsim2", "flags": "AP_VHT80 STA_VHT80"},
{"hostname": "localhost", "ifname": "wlan3", "port": "9898", "name": "hwsim3", "flags": "AP_VHT80 STA_VHT80"},
{"hostname": "localhost", "ifname": "wlan4", "port": "9908", "name": "hwsim4", "flags": "AP_VHT80 STA_VHT80"}]
def get_setup_params(filename="cfg.py"):
try:
mod = __import__(filename.split(".")[0])
return mod.setup_params
except:
logger.debug("__import__(" + filename + ") failed, using static settings")
pass
return setup_params
def get_devices(filename="cfg.py"):
try:
mod = __import__(filename.split(".")[0])
return mod.devices
except:
logger.debug("__import__(" + filename + ") failed, using static settings")
pass
return devices
def get_device(devices, name=None, flags=None, lock=False):
if name is None and flags is None:
raise Exception("Failed to get device")
for device in devices:
if device['name'] == name:
return device
for device in devices:
try:
device_flags = device['flags']
if device_flags.find(flags) != -1:
return device
except:
pass
raise Exception("Failed to get device " + name)
def put_device(devices, name):
pass<|fim▁end|> | |
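
# Illustrative usage (editor's addition, not part of the original file).
# A minimal sketch assuming this module is importable as `config`; it only
# exercises the helpers defined above against the static HWSIM table.
import config

devices = config.get_devices()                      # cfg.py overrides if present
ap = config.get_device(devices, flags="AP_VHT80")   # first device whose flags contain AP_VHT80
sta = config.get_device(devices, name="hwsim1")     # exact-name lookup
print(ap["ifname"], sta["hostname"])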
<|file_name|>try-dict.py<|end_file_name|><|fim▁begin|>import plt, ipp
import os, string
print "Starting try-trace.py: dir() = %s" % dir()
# try-trace.py: test program for pypy plt
print "- - - - -"
print "NO_COMRESSION = %d" % plt.NO_COMPRESSION
#base = "/Users/jbro111" # OSX
base = "/home/nevil" # Ubuntu
#fn = "pypy/small-sample.erf"
#fn = "tcp-analyse/fdt-p5.pcap"
#fn = "pypy/small-sample.pcap"
fn = "pypy/1000packets.pcap.gz"
full_fn = base + '/' + fn
print "%s: isfile %s" % (full_fn, os.path.isfile(full_fn))
#try:
# with open(full_fn) as file:
# print "File opened OK"
# file.close()
#except IOError as e:
# print "Unable to open file" #Does not exist OR no read permissions
trace_format = "pcapfile"
#trace_format = "erf"
uri = trace_format + ':' + full_fn
print ">> uri = %s" % uri
t = plt.trace(uri)
t.start()
test_dict = {}
def print_first(s, n):
for x in range(n):
if x%8 == 0:
print "",
print "%02x" % ord(s[x]),
for n,pkt in enumerate(t):
ip = pkt.ip
print "--- n=%d ---" % n
print "pkt linktype %d, ethertype %04x, vlan_id %d" % (
pkt.linktype, pkt.ethertype, pkt.vlan_id)
print "ip.seconds = %.6f, ip.ts_sec = %d, ip.time = %s" % (
ip.seconds, ip.ts_sec, ip.time)
print "ip.erf_time = %s" % ip.erf_time
print "ip.wire_len = %s, ip.capture_len = %s, direction = %s" % (
ip.wire_len, ip.capture_len, ip.direction)
ba = ip.data
print "@@ 1 @@ ba = %s" % ba
print "IP.data:",
for b in ba:
print "%02x" % b, # ba[x],
print
sa = ip.src_prefix; da = ip.dst_prefix
print "*** %s -> %s" % (sa, da)
print "sa.addr = %s" % sa.addr[0:4]
for v in sa.addr:
print "%02x" % v,
print
print "- - - - -"
bsa = bytearray(sa.addr)
for v in bsa:
print "%02x" % v,
print
print "ba = %s" % plt.ipp.IPprefix(4, bsa)
    # If we import plt, ipp above, we could say ipp.IPprefix here
print "= = = = ="
#exit()
s = str(sa.addr) + '|' + str(da.addr) # Have to convert to str explicitly
print "*** %s -> %s %s" % (sa, da, s)
print_first(s, 9)
print " ident = %04x" % ip.ident
v = test_dict.get(s)
if not v:
test_dict[s] = 1
else:
test_dict[s] = v+1
<|fim▁hole|>
#exit()
def ca2str(cdata_array):
s = string.join(cdata_array, '')
n = string.find(s, '\x00')
return s[0:n]
keys = sorted(test_dict)
for k in keys:
print "%8d %s" % (test_dict[k], k)
    ka = k.split('|')  # keys were built above with '|' as the separator
for j in range(0,4):
print "%02x" % ord(ka[0][j]),
print
psa = ipp.IPprefix(4, bytearray(ka[0]))
print "psa = %s" % psa
print "%8d %s -> %s" % (test_dict[k],
ka[0], ka[1])<|fim▁end|> | if n == 0: # Zero-org
break
print "EOF - - -" |
<|file_name|>records-view.component.ts<|end_file_name|><|fim▁begin|>import {Component, OnInit} from '@angular/core';
import {ActivityService} from '../../services/activity.service';
import {Activity} from "../../models/activity";
import {BarChartComponent} from "../bar-chart/bar-chart.component";
@Component({
selector: 'records-view',<|fim▁hole|> styleUrls: ['records-view.component.css'],
directives: [BarChartComponent]
})
export class RecordsViewComponent implements OnInit {
calBurnActs:Activity[];
longestActs:Activity[];
constructor(private activityService:ActivityService) {
}
getData() {
this.activityService.getActivities('totalCalories','desc',6).then(
data => this.calBurnActs = data
);
this.activityService.getActivities('totalDistance','desc',6).then(
data => this.longestActs = data
);
}
ngOnInit() {
this.getData();
}
}<|fim▁end|> | moduleId: module.id,
templateUrl: 'records-view.component.html', |
<|file_name|>helpdialog.cpp<|end_file_name|><|fim▁begin|>#include <QWebView>
#include "helpdialog.h"
/**
* Constructor of Help Dialog
*/
HelpDialog::HelpDialog(QWidget *parent) : QWebPage(parent) {
view = new QWebView();
}
/**
* Shows the help dialog
*/
void HelpDialog::showHelpDialog() {
createWindow(QWebPage::WebModalDialog);
view->setPage(this);
view->setWindowIcon(QIcon(":/images/help-icon.png"));
view->setWindowTitle("Othello FPG - Help");
view->load(QUrl("qrc:/html/index.html"));
view->show();<|fim▁hole|><|fim▁end|> | } |
<|file_name|>preferences.py<|end_file_name|><|fim▁begin|># Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import anki.lang
import aqt
from aqt import AnkiQt
from aqt.profiles import RecordingDriver, VideoDriver
from aqt.qt import *
from aqt.utils import (
TR,
HelpPage,
disable_help_button,
openHelp,
showInfo,
showWarning,
tr,
)
def video_driver_name_for_platform(driver: VideoDriver) -> str:
if driver == VideoDriver.ANGLE:
return tr(TR.PREFERENCES_VIDEO_DRIVER_ANGLE)
elif driver == VideoDriver.Software:
if isMac:
return tr(TR.PREFERENCES_VIDEO_DRIVER_SOFTWARE_MAC)
else:
return tr(TR.PREFERENCES_VIDEO_DRIVER_SOFTWARE_OTHER)
else:
if isMac:
return tr(TR.PREFERENCES_VIDEO_DRIVER_OPENGL_MAC)
else:
return tr(TR.PREFERENCES_VIDEO_DRIVER_OPENGL_OTHER)
class Preferences(QDialog):
def __init__(self, mw: AnkiQt) -> None:
QDialog.__init__(self, mw, Qt.Window)
self.mw = mw
self.prof = self.mw.pm.profile
self.form = aqt.forms.preferences.Ui_Preferences()
self.form.setupUi(self)
disable_help_button(self)
self.form.buttonBox.button(QDialogButtonBox.Help).setAutoDefault(False)
self.form.buttonBox.button(QDialogButtonBox.Close).setAutoDefault(False)
qconnect(
self.form.buttonBox.helpRequested, lambda: openHelp(HelpPage.PREFERENCES)
)
self.silentlyClose = True
self.prefs = self.mw.col.get_preferences()
self.setupLang()
self.setupCollection()
self.setupNetwork()
self.setupBackup()
self.setupOptions()
self.show()
def accept(self) -> None:
# avoid exception if main window is already closed
if not self.mw.col:
return
self.updateCollection()
self.updateNetwork()
self.updateBackup()
self.updateOptions()
self.mw.pm.save()
self.mw.reset()
self.done(0)
aqt.dialogs.markClosed("Preferences")
def reject(self) -> None:
self.accept()
# Language
######################################################################
def setupLang(self) -> None:
f = self.form
f.lang.addItems([x[0] for x in anki.lang.langs])
f.lang.setCurrentIndex(self.langIdx())
qconnect(f.lang.currentIndexChanged, self.onLangIdxChanged)
def langIdx(self) -> int:
codes = [x[1] for x in anki.lang.langs]
lang = anki.lang.currentLang
if lang in anki.lang.compatMap:
lang = anki.lang.compatMap[lang]
else:
lang = lang.replace("-", "_")
try:
return codes.index(lang)
except:
return codes.index("en_US")
def onLangIdxChanged(self, idx: int) -> None:
code = anki.lang.langs[idx][1]
self.mw.pm.setLang(code)
showInfo(
tr(TR.PREFERENCES_PLEASE_RESTART_ANKI_TO_COMPLETE_LANGUAGE), parent=self
)
# Collection options
######################################################################
def setupCollection(self) -> None:
import anki.consts as c
f = self.form
qc = self.mw.col.conf
self.setup_video_driver()
f.newSpread.addItems(list(c.newCardSchedulingLabels(self.mw.col).values()))
f.useCurrent.setCurrentIndex(int(not qc.get("addToCur", True)))
s = self.prefs.sched
f.lrnCutoff.setValue(int(s.learn_ahead_secs / 60.0))
f.timeLimit.setValue(int(s.time_limit_secs / 60.0))
f.showEstimates.setChecked(s.show_intervals_on_buttons)
f.showProgress.setChecked(s.show_remaining_due_counts)
f.newSpread.setCurrentIndex(s.new_review_mix)
f.dayLearnFirst.setChecked(s.day_learn_first)
f.dayOffset.setValue(s.rollover)
if s.scheduler_version < 2:
f.dayLearnFirst.setVisible(False)
f.legacy_timezone.setVisible(False)
else:
f.legacy_timezone.setChecked(not s.new_timezone)
def setup_video_driver(self) -> None:
self.video_drivers = VideoDriver.all_for_platform()
names = [
tr(TR.PREFERENCES_VIDEO_DRIVER, driver=video_driver_name_for_platform(d))
for d in self.video_drivers
]
self.form.video_driver.addItems(names)
self.form.video_driver.setCurrentIndex(
self.video_drivers.index(self.mw.pm.video_driver())
)
def update_video_driver(self) -> None:
new_driver = self.video_drivers[self.form.video_driver.currentIndex()]
if new_driver != self.mw.pm.video_driver():
self.mw.pm.set_video_driver(new_driver)
showInfo(tr(TR.PREFERENCES_CHANGES_WILL_TAKE_EFFECT_WHEN_YOU))
def updateCollection(self) -> None:
f = self.form
d = self.mw.col
self.update_video_driver()
qc = d.conf
qc["addToCur"] = not f.useCurrent.currentIndex()
s = self.prefs.sched
s.show_remaining_due_counts = f.showProgress.isChecked()
s.show_intervals_on_buttons = f.showEstimates.isChecked()
s.new_review_mix = f.newSpread.currentIndex()
s.time_limit_secs = f.timeLimit.value() * 60
s.learn_ahead_secs = f.lrnCutoff.value() * 60
s.day_learn_first = f.dayLearnFirst.isChecked()
s.rollover = f.dayOffset.value()
s.new_timezone = not f.legacy_timezone.isChecked()
self.mw.col.set_preferences(self.prefs)
d.setMod()
# Network
######################################################################
def setupNetwork(self) -> None:
self.form.media_log.setText(tr(TR.SYNC_MEDIA_LOG_BUTTON))<|fim▁hole|> self.form.syncOnProgramOpen.setChecked(self.prof["autoSync"])
self.form.syncMedia.setChecked(self.prof["syncMedia"])
self.form.autoSyncMedia.setChecked(self.mw.pm.auto_sync_media_minutes() != 0)
if not self.prof["syncKey"]:
self._hideAuth()
else:
self.form.syncUser.setText(self.prof.get("syncUser", ""))
qconnect(self.form.syncDeauth.clicked, self.onSyncDeauth)
self.form.syncDeauth.setText(tr(TR.SYNC_LOG_OUT_BUTTON))
def on_media_log(self) -> None:
self.mw.media_syncer.show_sync_log()
def _hideAuth(self) -> None:
self.form.syncDeauth.setVisible(False)
self.form.syncUser.setText("")
self.form.syncLabel.setText(
tr(TR.PREFERENCES_SYNCHRONIZATIONNOT_CURRENTLY_ENABLED_CLICK_THE_SYNC)
)
def onSyncDeauth(self) -> None:
if self.mw.media_syncer.is_syncing():
showWarning("Can't log out while sync in progress.")
return
self.prof["syncKey"] = None
self.mw.col.media.force_resync()
self._hideAuth()
def updateNetwork(self) -> None:
self.prof["autoSync"] = self.form.syncOnProgramOpen.isChecked()
self.prof["syncMedia"] = self.form.syncMedia.isChecked()
self.mw.pm.set_auto_sync_media_minutes(
self.form.autoSyncMedia.isChecked() and 15 or 0
)
if self.form.fullSync.isChecked():
self.mw.col.modSchema(check=False)
self.mw.col.setMod()
# Backup
######################################################################
def setupBackup(self) -> None:
self.form.numBackups.setValue(self.prof["numBackups"])
def updateBackup(self) -> None:
self.prof["numBackups"] = self.form.numBackups.value()
# Basic & Advanced Options
######################################################################
def setupOptions(self) -> None:
self.form.pastePNG.setChecked(self.prof.get("pastePNG", False))
self.form.uiScale.setValue(int(self.mw.pm.uiScale() * 100))
self.form.pasteInvert.setChecked(self.prof.get("pasteInvert", False))
self.form.showPlayButtons.setChecked(self.prof.get("showPlayButtons", True))
self.form.nightMode.setChecked(self.mw.pm.night_mode())
self.form.interrupt_audio.setChecked(self.mw.pm.interrupt_audio())
self._recording_drivers = [
RecordingDriver.QtAudioInput,
RecordingDriver.PyAudio,
]
# The plan is to phase out PyAudio soon, so will hold off on
# making this string translatable for now.
self.form.recording_driver.addItems(
[
f"Voice recording driver: {driver.value}"
for driver in self._recording_drivers
]
)
self.form.recording_driver.setCurrentIndex(
self._recording_drivers.index(self.mw.pm.recording_driver())
)
def updateOptions(self) -> None:
restart_required = False
self.prof["pastePNG"] = self.form.pastePNG.isChecked()
self.prof["pasteInvert"] = self.form.pasteInvert.isChecked()
newScale = self.form.uiScale.value() / 100
if newScale != self.mw.pm.uiScale():
self.mw.pm.setUiScale(newScale)
restart_required = True
self.prof["showPlayButtons"] = self.form.showPlayButtons.isChecked()
if self.mw.pm.night_mode() != self.form.nightMode.isChecked():
self.mw.pm.set_night_mode(not self.mw.pm.night_mode())
restart_required = True
self.mw.pm.set_interrupt_audio(self.form.interrupt_audio.isChecked())
new_audio_driver = self._recording_drivers[
self.form.recording_driver.currentIndex()
]
if self.mw.pm.recording_driver() != new_audio_driver:
self.mw.pm.set_recording_driver(new_audio_driver)
if new_audio_driver == RecordingDriver.PyAudio:
showInfo(
"""\
The PyAudio driver will likely be removed in a future update. If you find it works better \
for you than the default driver, please let us know on the Anki forums."""
)
if restart_required:
showInfo(tr(TR.PREFERENCES_CHANGES_WILL_TAKE_EFFECT_WHEN_YOU))<|fim▁end|> | qconnect(self.form.media_log.clicked, self.on_media_log) |
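
# Editor's note (added): settings are persisted in two places — profile-level
# options go through self.prof / self.mw.pm (UI scale, night mode, sync
# credentials, backups), while per-collection scheduling options travel via
# self.prefs and self.mw.col.set_preferences(). Each update* method above
# mirrors its setup* counterpart one-to-one.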
<|file_name|>test_liveness.py<|end_file_name|><|fim▁begin|>import graph
import dot
from core import *
import dataflow
def make_inst(g, addr, dest, op, *args):
def make_arg(a):
if a is None:
return None
if isinstance(a, int):
return VALUE(a)
if isinstance(a, str):
return REG(a)
return a
b = BBlock(addr)
args = [make_arg(a) for a in args]
b.add(Inst(make_arg(dest), op, args, addr))
g.add_node(addr, val=b)
def test_nielson_2_1_4():
g = graph.Graph()
make_inst(g, 1, "x", "=", 2)
make_inst(g, 2, "y", "=", 4)
make_inst(g, 3, "x", "=", 1)
make_inst(g, 4, None, "if", COND(EXPR(">", REG("x"), REG("y"))))
make_inst(g, 5, "z", "=", REG("y"))
make_inst(g, 6, "z", "*", REG("y"), REG("y"))
make_inst(g, 7, "x", "=", REG("z"))
g.add_edge(1, 2)
g.add_edge(2, 3)
g.add_edge(3, 4)
g.add_edge(4, 5)
g.add_edge(4, 6)
g.add_edge(5, 7)
g.add_edge(6, 7)
#dot.dot(g)
#ana = dataflow.LiveVarAnalysis(g)
#ana.init()
#g.print_nodes()
#print("===")
ana = dataflow.LiveVarAnalysis(g)
ana.solve()
#g.print_nodes()
LV_entry = {
1: set(),
2: set(),
3: {REG("y")},<|fim▁hole|> }
LV_exit = {
1: set(),
2: {REG("y")},
3: {REG("x"), REG("y")},
4: {REG("y")},
5: {REG("z")},
6: {REG("z")},
7: set(),
}
GEN_LV = {
1: set(),
2: set(),
3: set(),
4: {REG("x"), REG("y")},
5: {REG("y")},
6: {REG("y")},
7: {REG("z")},
}
KILL_LV = {
1: {REG("x")},
2: {REG("y")},
3: {REG("x")},
4: set(),
5: {REG("z")},
6: {REG("z")},
7: {REG("x")},
}
for i, info in g.iter_sorted_nodes():
assert info["live_gen"] == GEN_LV[i]
assert info["live_kill"] == KILL_LV[i]
assert info["live_in"] == LV_entry[i], (info["live_in"], LV_entry[i])
assert info["live_out"] == LV_exit[i]<|fim▁end|> | 4: {REG("x"), REG("y")},
5: {REG("y")},
6: {REG("y")},
7: {REG("z")}, |
<|file_name|>pattern.rs<|end_file_name|><|fim▁begin|>pub struct Pattern {
chars: Vec<char>,
}
impl Pattern {
pub fn test(&self, haystack: &[char]) -> bool {
test_fuzzy_ignorecase(haystack, &self.chars)
}
}
pub fn patterns_from_str(s: &str) -> Vec<Pattern> {
s.split_whitespace()
.map(|t| Pattern {
chars: t.chars().collect(),
})
.collect()
}
fn test_fuzzy_ignorecase(haystack: &[char], needle: &[char]) -> bool {
debug_assert!(!needle.is_empty());
let mut nidx = 0;
for ch in haystack.iter() {
let ch = ch.to_ascii_lowercase();
if ch == needle[nidx] {<|fim▁hole|> return true;
}
}
}
false
}<|fim▁end|> | nidx += 1;
if nidx == needle.len() { |
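
# Editor's sketch (added): the Rust matcher above is a case-insensitive
# subsequence test; the same idea expressed in Python for clarity (this is a
# re-sketch, not part of the crate):
def fuzzy_match(haystack: str, needle: str) -> bool:
    it = iter(haystack.lower())
    return all(ch in it for ch in needle.lower())   # chars must appear in order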
<|file_name|>version.rs<|end_file_name|><|fim▁begin|>/*
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
use chrono::{DateTime, Utc};
use exempi::Xmp;
use std::path::Path;
use crate::audit::{
audit_get_array_value, audit_get_bool_value, audit_get_date_value, audit_get_dict_value,
audit_get_int_value, audit_get_str_value, Report, SkipReason,
};
use crate::custominfo::CustomInfoProperties;
use crate::exif::ExifProperties;
use crate::iptc::IptcProperties;
use crate::plutils::Value;
use crate::store;
use crate::xmp::ToXmp;
use crate::AplibObject;
use crate::AplibType;
use crate::PlistLoadable;
/// A rendered image. There is one for the original, and one per
/// actual version. Each `Version` is associated with a `Master`.
pub struct Version {
uuid: Option<String>,
model_id: Option<i64>,
/// The associated `Master`.
master_uuid: Option<String>,
    /// uuid of the `Folder` project this resides in.
pub project_uuid: Option<String>,
/// uuid of the raw `Master`.
pub raw_master_uuid: Option<String>,
/// uuid of the non raw `Master`.
pub nonraw_master_uuid: Option<String>,
pub timezone_name: Option<String>,
pub create_date: Option<DateTime<Utc>>,
pub image_date: Option<DateTime<Utc>>,
pub export_image_change_date: Option<DateTime<Utc>>,
pub export_metadata_change_date: Option<DateTime<Utc>>,
pub version_number: Option<i64>,
pub db_version: Option<i64>,
pub db_minor_version: Option<i64>,
pub is_flagged: Option<bool>,
    /// Indicates whether this version is the original.
pub is_original: Option<bool>,
pub is_editable: Option<bool>,
pub is_hidden: Option<bool>,
pub is_in_trash: Option<bool>,
pub file_name: Option<String>,
pub name: Option<String>,
pub rating: Option<i64>,
pub rotation: Option<i64>,
pub colour_label_index: Option<i64>,
pub iptc: Option<IptcProperties>,
pub exif: Option<ExifProperties>,
pub custom_info: Option<CustomInfoProperties>,
pub keywords: Option<Vec<Value>>,
}
impl PlistLoadable for Version {
/// Load the version object from the plist at plist_path.
fn from_path<P>(plist_path: P, mut auditor: Option<&mut Report>) -> Option<Version>
where
P: AsRef<Path>,
{
use crate::plutils::*;
let plist = parse_plist(plist_path);
match plist {
Value::Dictionary(ref dict) => {
let iptc = audit_get_dict_value(dict, "iptcProperties", &mut auditor);
let exif = audit_get_dict_value(dict, "exifProperties", &mut auditor);
let custom_info = audit_get_dict_value(dict, "customInfo", &mut auditor);
let result = Some(Version {
uuid: audit_get_str_value(dict, "uuid", &mut auditor),
master_uuid: audit_get_str_value(dict, "masterUuid", &mut auditor),
project_uuid: audit_get_str_value(dict, "projectUuid", &mut auditor),
raw_master_uuid: audit_get_str_value(dict, "rawMasterUuid", &mut auditor),
nonraw_master_uuid: audit_get_str_value(dict, "nonRawMasterUuid", &mut auditor),
timezone_name: audit_get_str_value(dict, "imageTimeZoneName", &mut auditor),
create_date: audit_get_date_value(dict, "createDate", &mut auditor),
image_date: audit_get_date_value(dict, "imageDate", &mut auditor),
export_image_change_date: audit_get_date_value(
dict,
"exportImageChangeDate",
&mut auditor,
),
export_metadata_change_date: audit_get_date_value(
dict,
"exportMetadataChangeDate",
&mut auditor,
),
version_number: audit_get_int_value(dict, "versionNumber", &mut auditor),
db_version: audit_get_int_value(dict, "version", &mut auditor),
db_minor_version: audit_get_int_value(dict, "minorVersion", &mut auditor),
is_flagged: audit_get_bool_value(dict, "isFlagged", &mut auditor),
is_original: audit_get_bool_value(dict, "isOriginal", &mut auditor),
is_editable: audit_get_bool_value(dict, "isEditable", &mut auditor),
is_hidden: audit_get_bool_value(dict, "isHidden", &mut auditor),
is_in_trash: audit_get_bool_value(dict, "isInTrash", &mut auditor),
file_name: audit_get_str_value(dict, "fileName", &mut auditor),
name: audit_get_str_value(dict, "name", &mut auditor),
model_id: audit_get_int_value(dict, "modelId", &mut auditor),
rating: audit_get_int_value(dict, "mainRating", &mut auditor),
rotation: audit_get_int_value(dict, "rotation", &mut auditor),
colour_label_index: audit_get_int_value(dict, "colorLabelIndex", &mut auditor),
iptc: IptcProperties::from(&iptc, &mut auditor),
exif: ExifProperties::from(&exif, &mut auditor),
custom_info: CustomInfoProperties::from(&custom_info, &mut auditor),
keywords: audit_get_array_value(dict, "keywords", &mut auditor),
});
if let Some(auditor) = &mut auditor {
auditor.skip("statistics", SkipReason::Ignore);
auditor.skip("thumbnailGroup", SkipReason::Ignore);
auditor.skip("faceDetectionIsFromPreview", SkipReason::Ignore);
auditor.skip("processedHeight", SkipReason::Ignore);
auditor.skip("processedWidth", SkipReason::Ignore);
auditor.skip("masterHeight", SkipReason::Ignore);
auditor.skip("masterWidth", SkipReason::Ignore);
auditor.skip("supportedStatus", SkipReason::Ignore);
auditor.skip("showInLibrary", SkipReason::Ignore);
auditor.skip("adjustmentProperties", SkipReason::Ignore); // don't know what to do yet
auditor.skip("RKImageAdjustments", SkipReason::Ignore);
auditor.skip("hasAdjustments", SkipReason::Ignore);
auditor.skip("hasEnabledAdjustments", SkipReason::Ignore);
auditor.skip("renderVersion", SkipReason::Ignore);
auditor.skip("imageProxyState", SkipReason::Ignore);
auditor.skip("plistWriteTimestamp", SkipReason::Ignore);
auditor.audit_ignored(dict, None);
}
result
}
_ => None,
}
}
}
impl AplibObject for Version {
fn obj_type(&self) -> AplibType {
AplibType::Version
}
fn uuid(&self) -> &Option<String> {
&self.uuid
}
fn parent(&self) -> &Option<String> {
&self.master_uuid
}
fn model_id(&self) -> i64 {
self.model_id.unwrap_or(0)
}
fn is_valid(&self) -> bool {
self.uuid.is_some()
}
fn wrap(obj: Version) -> store::Wrapper {
store::Wrapper::Version(Box::new(obj))
}
}
<|fim▁hole|>impl ToXmp for Version {
fn to_xmp(&self, xmp: &mut Xmp) -> bool {
// Here we make sure the Exif data are
// processed before Iptc.
if let Some(ref exif) = self.exif {
exif.to_xmp(xmp);
}
if let Some(ref iptc) = self.iptc {
iptc.to_xmp(xmp);
}
true
}
}
#[cfg(test)]
#[test]
fn test_version_parse() {
use crate::testutils;
use crate::xmp;
use exempi;
let version = Version::from_path(
testutils::get_test_file_path("Version-0.apversion").as_path(),
None,
);
assert!(version.is_some());
let version = version.unwrap();
assert_eq!(version.uuid.as_ref().unwrap(), "MHMIbw5CQaiMgQ3n7g2w2A");
assert!(version.is_original.unwrap());
assert_eq!(
version.master_uuid.as_ref().unwrap(),
"WZMCPPRHR%C3nffgeeS4IQ"
);
assert_eq!(version.name.as_ref().unwrap(), "img_3136");
assert!(version.iptc.is_some());
let iptc = version.iptc.as_ref().unwrap();
assert!(iptc.bag.contains_key("Byline"));
assert!(iptc.bag.contains_key("CiAdrCity"));
let exif = version.exif.as_ref().unwrap();
assert!(exif.bag.contains_key("ApertureValue"));
assert!(exif.bag.contains_key("Depth"));
// XXX fix when have actual audit.
// println!("report {:?}", report);
exempi::init();
let mut xmp = Xmp::new();
let result = version.to_xmp(&mut xmp);
assert!(result);
let mut options: exempi::PropFlags = exempi::PROP_NONE;
let value = xmp.get_property(xmp::ns::NS_DC, "creator", &mut options);
assert!(value.is_ok());
assert_eq!(value.unwrap().to_str(), "Hubert Figuiere");
options = exempi::PROP_NONE;
let value = xmp.get_property(xmp::ns::NS_EXIF, "ApertureValue", &mut options);
assert!(value.is_ok());
assert_eq!(value.unwrap().to_str(), "4");
}<|fim▁end|> | |
<|file_name|>SF-89.py<|end_file_name|><|fim▁begin|>'''
Created on 20 Sep 2013
@author: jowr
'''
# New example with R407F mixture
from pyrp.refpropClasses import RefpropSI
import CoolProp.CoolProp as cp
p = 30000
T = 273.15<|fim▁hole|>ref = False
if ref:
xkg = [0.473194694453358, 0.205109095413331, 0.321696210133311]
names = "R32|R125|R134a"
RP = RefpropSI()
RP.SETUPFLEX(xkg=xkg, FluidNames=names)
T_A, p_A, D_A, Dl_A, Dv_A, q_A, e_A, h_A, s_A, cv_A, cp_A, w_A = RP.PQFLSH(p, 0)
T_B, p_B, D_B, Dl_B, Dv_B, q_B, e_B, h_B, s_B, cv_B, cp_B, w_B = RP.PQFLSH(p, 1)
T_C, p_C, D_C, Dl_C, Dv_C, q_C, e_C, h_C, s_C, cv_C, cp_C, w_C = RP.TQFLSH(T, 0)
hlb = h_A / 1000.
hrb = h_B / 1000.
h200 = h_C / 1000.
print("Refprop: %s %s %s" % (hlb, hrb, h200))
else:
R407F = 'REFPROP-MIX:R32[0.473194694453358]&R125[0.205109095413331]&R134a[0.321696210133311]'
# R407F='REFPROP-MIX:R32[0.651669604033581]&R125[0.122438378639971]&R134a[0.225892017326446]'
hlb = cp.Props('H', 'P', 30, 'Q', 0, R407F) # 30 kPa saturated liquid
hrb = cp.Props('H', 'P', 30, 'Q', 1, R407F) # 30 kPa saturated vapour
h200 = cp.Props('H', 'T', 273.15, 'Q', 0, R407F) # saturated liquid at 0C IIR
print("CoolProp: %s %s %s" % (hlb, hrb, h200))<|fim▁end|> | |
<|file_name|>jquery.initialize-0.1.0.js<|end_file_name|><|fim▁begin|>/*!
* jquery.initialize. An basic element initializer plugin for jQuery.
*
* Copyright (c) 2014 Barış Güler
* http://hwclass.github.io
*
* Licensed under MIT
* http://www.opensource.org/licenses/mit-license.php
*
* http://docs.jquery.com/Plugins/Authoring
* jQuery authoring guidelines
*
* Launch : July 2014
* Version : 0.1.0
* Released: July 29th, 2014
*
*
 * initializes an element by binding the event handlers defined in its options
*/
(function ($) {
$.fn.initialize = function (options) {
var currentElement = $(this),
opts = options;
var getSize = function(obj) {
var size = 0, key;
for (key in obj) {
if (obj.hasOwnProperty(key)) size++;
}
return size;
};
var setEvents = function () {
for (var countForEventsObj = 0, len = getSize(opts.events); countForEventsObj < len; countForEventsObj++) {
                // bind to the captured element: setEvents() is invoked as a plain
                // function, so $(this) here would resolve to window, not the target
                currentElement.on(opts.events[countForEventsObj].name, opts.events[countForEventsObj].funcBody);
}
}
if (opts.init) {
setEvents();
}<|fim▁hole|>
return this;
}
})(jQuery);<|fim▁end|> | |
<|file_name|>test_filter.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from unittest import TestCase
# from nose.tools import eq_
import numpy as np
from pysas import waveread, World
from pysas.mcep import estimate_alpha, spec2mcep_from_matrix, mcep2coef
from pysas.synthesis.mlsa import MLSAFilter
from pysas.synthesis import Synthesis
from pysas.excite import ExcitePulse
class SynthesisTest(TestCase):
def setUp(self):
signal, samplingrate, _ = waveread("test/cmu_arctic/arctic_a0001.wav")
self.world = World(samplingrate)
self.alpha = estimate_alpha(samplingrate)
self.samplingrate = samplingrate
self.signal = signal
self.f0, self.spec_mat, _ = self.world.analyze(signal)
self.ep = ExcitePulse(16000, 80, False)
self.order = 24
def test_synthesis_filter(self):
excite = self.ep.gen(self.f0)
mcep_mat = spec2mcep_from_matrix(self.spec_mat, self.order, self.alpha)
coef_mat = []
for i in range(mcep_mat.shape[0]):
coef_mat.append(mcep2coef(mcep_mat[i], 0.41))
coef_mat = np.array(coef_mat)<|fim▁hole|> syn = Synthesis(80, mlsa)
syn.synthesis(excite, coef_mat)<|fim▁end|> | mlsa = MLSAFilter(self.order, self.alpha, 5) |
<|file_name|>test_tenant_negative.py<|end_file_name|><|fim▁begin|># Copyright 2013 Huawei Technologies Co.,LTD.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from tempest_lib import exceptions as lib_exc
from tempest.api.identity import base
from tempest.common.utils import data_utils
from tempest import test
class TenantsNegativeTestJSON(base.BaseIdentityV2AdminTest):
@test.attr(type=['negative'])
@test.idempotent_id('ca9bb202-63dd-4240-8a07-8ef9c19c04bb')
def test_list_tenants_by_unauthorized_user(self):
# Non-administrator user should not be able to list tenants
self.assertRaises(lib_exc.Forbidden,
self.non_admin_client.list_tenants)
@test.attr(type=['negative'])
@test.idempotent_id('df33926c-1c96-4d8d-a762-79cc6b0c3cf4')
def test_list_tenant_request_without_token(self):
# Request to list tenants without a valid token should fail
token = self.client.auth_provider.get_token()
self.client.delete_token(token)
self.assertRaises(lib_exc.Unauthorized, self.client.list_tenants)
self.client.auth_provider.clear_auth()
@test.attr(type=['negative'])
@test.idempotent_id('162ba316-f18b-4987-8c0c-fd9140cd63ed')
def test_tenant_delete_by_unauthorized_user(self):
# Non-administrator user should not be able to delete a tenant
tenant_name = data_utils.rand_name(name='tenant')
tenant = self.client.create_tenant(tenant_name)['tenant']
self.data.tenants.append(tenant)
self.assertRaises(lib_exc.Forbidden,
self.non_admin_client.delete_tenant, tenant['id'])
@test.attr(type=['negative'])
@test.idempotent_id('e450db62-2e9d-418f-893a-54772d6386b1')
def test_tenant_delete_request_without_token(self):
# Request to delete a tenant without a valid token should fail
tenant_name = data_utils.rand_name(name='tenant')
tenant = self.client.create_tenant(tenant_name)['tenant']<|fim▁hole|> self.assertRaises(lib_exc.Unauthorized, self.client.delete_tenant,
tenant['id'])
self.client.auth_provider.clear_auth()
@test.attr(type=['negative'])
@test.idempotent_id('9c9a2aed-6e3c-467a-8f5c-89da9d1b516b')
def test_delete_non_existent_tenant(self):
# Attempt to delete a non existent tenant should fail
self.assertRaises(lib_exc.NotFound, self.client.delete_tenant,
str(uuid.uuid4().hex))
@test.attr(type=['negative'])
@test.idempotent_id('af16f44b-a849-46cb-9f13-a751c388f739')
def test_tenant_create_duplicate(self):
# Tenant names should be unique
tenant_name = data_utils.rand_name(name='tenant')
body = self.client.create_tenant(tenant_name)['tenant']
tenant = body
self.data.tenants.append(tenant)
tenant1_id = body.get('id')
self.addCleanup(self.client.delete_tenant, tenant1_id)
self.addCleanup(self.data.tenants.remove, tenant)
self.assertRaises(lib_exc.Conflict, self.client.create_tenant,
tenant_name)
@test.attr(type=['negative'])
@test.idempotent_id('d26b278a-6389-4702-8d6e-5980d80137e0')
def test_create_tenant_by_unauthorized_user(self):
# Non-administrator user should not be authorized to create a tenant
tenant_name = data_utils.rand_name(name='tenant')
self.assertRaises(lib_exc.Forbidden,
self.non_admin_client.create_tenant, tenant_name)
@test.attr(type=['negative'])
@test.idempotent_id('a3ee9d7e-6920-4dd5-9321-d4b2b7f0a638')
def test_create_tenant_request_without_token(self):
# Create tenant request without a token should not be authorized
tenant_name = data_utils.rand_name(name='tenant')
token = self.client.auth_provider.get_token()
self.client.delete_token(token)
self.assertRaises(lib_exc.Unauthorized, self.client.create_tenant,
tenant_name)
self.client.auth_provider.clear_auth()
@test.attr(type=['negative'])
@test.idempotent_id('5a2e4ca9-b0c0-486c-9c48-64a94fba2395')
def test_create_tenant_with_empty_name(self):
# Tenant name should not be empty
self.assertRaises(lib_exc.BadRequest, self.client.create_tenant,
name='')
@test.attr(type=['negative'])
@test.idempotent_id('2ff18d1e-dfe3-4359-9dc3-abf582c196b9')
def test_create_tenants_name_length_over_64(self):
# Tenant name length should not be greater than 64 characters
tenant_name = 'a' * 65
self.assertRaises(lib_exc.BadRequest, self.client.create_tenant,
tenant_name)
@test.attr(type=['negative'])
@test.idempotent_id('bd20dc2a-9557-4db7-b755-f48d952ad706')
def test_update_non_existent_tenant(self):
# Attempt to update a non existent tenant should fail
self.assertRaises(lib_exc.NotFound, self.client.update_tenant,
str(uuid.uuid4().hex))
@test.attr(type=['negative'])
@test.idempotent_id('41704dc5-c5f7-4f79-abfa-76e6fedc570b')
def test_tenant_update_by_unauthorized_user(self):
# Non-administrator user should not be able to update a tenant
tenant_name = data_utils.rand_name(name='tenant')
tenant = self.client.create_tenant(tenant_name)['tenant']
self.data.tenants.append(tenant)
self.assertRaises(lib_exc.Forbidden,
self.non_admin_client.update_tenant, tenant['id'])
@test.attr(type=['negative'])
@test.idempotent_id('7a421573-72c7-4c22-a98e-ce539219c657')
def test_tenant_update_request_without_token(self):
# Request to update a tenant without a valid token should fail
tenant_name = data_utils.rand_name(name='tenant')
tenant = self.client.create_tenant(tenant_name)['tenant']
self.data.tenants.append(tenant)
token = self.client.auth_provider.get_token()
self.client.delete_token(token)
self.assertRaises(lib_exc.Unauthorized, self.client.update_tenant,
tenant['id'])
self.client.auth_provider.clear_auth()<|fim▁end|> | self.data.tenants.append(tenant)
token = self.client.auth_provider.get_token()
self.client.delete_token(token) |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.contrib.auth.models import User
import MySQLdb
# Create your models here.
class Comentario(models.Model):
"""Comentario"""
contenido = models.TextField(help_text='Escribe un comentario')
fecha_coment = models.DateField(auto_now=True)
def __unicode__(self):
return self.contenido
class Estado(models.Model):
"""Estado"""<|fim▁hole|> def __unicode__(self):
        return self.nom_estado
class Categoria(models.Model):
"""Categoria"""
nombre = models.CharField(max_length=50)
descripcion = models.TextField(help_text='Escribe una descripcion de la categoria')
class Entrada(models.Model):
"""Entrada"""
autor = models.ForeignKey(User)
comentario = models.ForeignKey(Comentario)
estado = models.ForeignKey(Estado)
titulo = models.CharField(max_length=100)
contenido = models.TextField(help_text='Redacta el contenido')
fecha_pub = models.DateField(auto_now=True)
def __unicode__(self):
return self.titulo
class Agregador(models.Model):
"""agreador"""
entrada = models.ForeignKey(Entrada)
categoria = models.ManyToManyField(Categoria)<|fim▁end|> | nom_estado = models.CharField(max_length=50)
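
# Editor's illustration (added): a minimal ORM round-trip for the models
# above. It assumes Django is configured, migrations are applied, and the
# models are importable; all values are invented for the example.
from django.contrib.auth.models import User

user = User.objects.create_user("ana", password="secreto")
estado = Estado.objects.create(nom_estado="publicado")
coment = Comentario.objects.create(contenido="Buen post")
entrada = Entrada.objects.create(autor=user, comentario=coment, estado=estado,
                                 titulo="Hola", contenido="Texto de prueba")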
|
<|file_name|>audioReducer.js<|end_file_name|><|fim▁begin|>import * as types from '../constants/actionTypes';
const initialState = {
startTime: 0,
endTime: null
};
export default function counter(state = initialState, action) {
switch (action.type) {
case types.SET_PLAY_TIME:
return {
startTime: action.startTime,
endTime: action.endTime
};
default:<|fim▁hole|><|fim▁end|> | return state;
}
}; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Support for Geofency."""
import logging
from aiohttp import web
import voluptuous as vol
from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER
from homeassistant.const import (
ATTR_LATITUDE,
ATTR_LONGITUDE,
ATTR_NAME,
CONF_WEBHOOK_ID,
HTTP_OK,
HTTP_UNPROCESSABLE_ENTITY,
STATE_NOT_HOME,
)
from homeassistant.helpers import config_entry_flow
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.util import slugify
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
CONF_MOBILE_BEACONS = "mobile_beacons"
CONFIG_SCHEMA = vol.Schema(
{
vol.Optional(DOMAIN): vol.Schema(
{
vol.Optional(CONF_MOBILE_BEACONS, default=[]): vol.All(
cv.ensure_list, [cv.string]
)
}
)
},
extra=vol.ALLOW_EXTRA,
)
ATTR_ADDRESS = "address"
ATTR_BEACON_ID = "beaconUUID"
ATTR_CURRENT_LATITUDE = "currentLatitude"
ATTR_CURRENT_LONGITUDE = "currentLongitude"
ATTR_DEVICE = "device"
ATTR_ENTRY = "entry"
BEACON_DEV_PREFIX = "beacon"
LOCATION_ENTRY = "1"
LOCATION_EXIT = "0"
TRACKER_UPDATE = f"{DOMAIN}_tracker_update"
def _address(value: str) -> str:
r"""Coerce address by replacing '\n' with ' '."""
return value.replace("\n", " ")
WEBHOOK_SCHEMA = vol.Schema(
{
vol.Required(ATTR_ADDRESS): vol.All(cv.string, _address),
vol.Required(ATTR_DEVICE): vol.All(cv.string, slugify),
vol.Required(ATTR_ENTRY): vol.Any(LOCATION_ENTRY, LOCATION_EXIT),
vol.Required(ATTR_LATITUDE): cv.latitude,
vol.Required(ATTR_LONGITUDE): cv.longitude,
vol.Required(ATTR_NAME): vol.All(cv.string, slugify),
vol.Optional(ATTR_CURRENT_LATITUDE): cv.latitude,
vol.Optional(ATTR_CURRENT_LONGITUDE): cv.longitude,
vol.Optional(ATTR_BEACON_ID): cv.string,
},
extra=vol.ALLOW_EXTRA,
)<|fim▁hole|>
async def async_setup(hass, hass_config):
"""Set up the Geofency component."""
config = hass_config.get(DOMAIN, {})
mobile_beacons = config.get(CONF_MOBILE_BEACONS, [])
hass.data[DOMAIN] = {
"beacons": [slugify(beacon) for beacon in mobile_beacons],
"devices": set(),
"unsub_device_tracker": {},
}
return True
async def handle_webhook(hass, webhook_id, request):
"""Handle incoming webhook from Geofency."""
try:
data = WEBHOOK_SCHEMA(dict(await request.post()))
except vol.MultipleInvalid as error:
return web.Response(text=error.error_message, status=HTTP_UNPROCESSABLE_ENTITY)
if _is_mobile_beacon(data, hass.data[DOMAIN]["beacons"]):
return _set_location(hass, data, None)
if data["entry"] == LOCATION_ENTRY:
location_name = data["name"]
else:
location_name = STATE_NOT_HOME
if ATTR_CURRENT_LATITUDE in data:
data[ATTR_LATITUDE] = data[ATTR_CURRENT_LATITUDE]
data[ATTR_LONGITUDE] = data[ATTR_CURRENT_LONGITUDE]
return _set_location(hass, data, location_name)
def _is_mobile_beacon(data, mobile_beacons):
"""Check if we have a mobile beacon."""
return ATTR_BEACON_ID in data and data["name"] in mobile_beacons
def _device_name(data):
"""Return name of device tracker."""
if ATTR_BEACON_ID in data:
return f"{BEACON_DEV_PREFIX}_{data['name']}"
return data["device"]
def _set_location(hass, data, location_name):
"""Fire HA event to set location."""
device = _device_name(data)
async_dispatcher_send(
hass,
TRACKER_UPDATE,
device,
(data[ATTR_LATITUDE], data[ATTR_LONGITUDE]),
location_name,
data,
)
return web.Response(text=f"Setting location for {device}", status=HTTP_OK)
async def async_setup_entry(hass, entry):
"""Configure based on config entry."""
hass.components.webhook.async_register(
DOMAIN, "Geofency", entry.data[CONF_WEBHOOK_ID], handle_webhook
)
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, DEVICE_TRACKER)
)
return True
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
hass.components.webhook.async_unregister(entry.data[CONF_WEBHOOK_ID])
hass.data[DOMAIN]["unsub_device_tracker"].pop(entry.entry_id)()
await hass.config_entries.async_forward_entry_unload(entry, DEVICE_TRACKER)
return True
# pylint: disable=invalid-name
async_remove_entry = config_entry_flow.webhook_async_remove_entry<|fim▁end|> | |
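
# Editor's illustration (added): the minimal form-encoded POST accepted by
# WEBHOOK_SCHEMA above. The host and webhook id are hypothetical; field
# values are made up.
import requests

requests.post(
    "http://hass.local:8123/api/webhook/abcd1234",
    data={
        "address": "Main St 1",
        "device": "my-phone",
        "entry": "1",            # LOCATION_ENTRY -> sets the zone name
        "latitude": "52.51",
        "longitude": "13.40",
        "name": "home",
    },
)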
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>""" Define a Check monad and corresponding functions.
"""
from functools import (reduce, partial)
class Check:
""" This super class is not really necessary but helps make the structure
clear.
data Check a = Pass a | Fail Message
"""
pass
class Pass(Check):
def __init__(self, value):
self.value = value
class Fail(Check):
def __init__(self, message):
self.message = message
def is_(t, x):
""" Check whether the type of a given x is a given type t.
"""
return type(x) is t
is_check = partial(is_, Check)
is_pass = partial(is_, Pass)
is_fail = partial(is_, Fail)
<|fim▁hole|> """ Monadic return for the Check monad.
return :: a -> m a
return = Pass
"""
return Pass(x)
def bind(f):
""" Monadic bind for the Check monad.
(>>=) :: m a -> (a -> m b) -> m b
Fail x >>= f = Fail x
Pass x >>= f = f x
"""
def bind_impl(x):
if is_fail(x):
return x
if is_pass(x):
return f(x.value)
raise ValueError('Check has to be of type Pass | Fail.')
return bind_impl
def compose(f, g):
""" Kleisli composition of two (Check-)monadic functions f and g.
(>=>) :: (a -> m b) -> (b -> m c) -> (a -> m c)
"""
def compose_impl(x):
return bind(g)(f(x))
return compose_impl
def compose_many(*fs):
""" Reduces a variable number of functions with composition.
Same as repeatedly calling `compose` on pairs.
"""
return reduce(compose, fs)
def lift(f, message):
""" Lifts a boolean function into the realm of the Check monad.
lift :: (a -> bool) -> String -> (a -> Check a)
"""
def lift_impl(x):
if f(x):
return return_(x)
return Fail(message)
return lift_impl<|fim▁end|> |
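
# Editor's illustration (added): composing lifted predicates with the helpers
# above; the predicate names are invented for the example.
is_positive = lift(lambda x: x > 0, "must be positive")
is_even = lift(lambda x: x % 2 == 0, "must be even")
validate = compose_many(is_positive, is_even)

assert is_pass(validate(4))     # both checks hold -> Pass(4)
assert is_fail(validate(-3))    # first check fails -> Fail("must be positive")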
def return_(x): |
<|file_name|>lockfile_metadata.py<|end_file_name|><|fim▁begin|># Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import hashlib
import json
from dataclasses import dataclass
from enum import Enum
from typing import Any, Callable, Iterable, Set, TypeVar
from pkg_resources import Requirement
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
from pants.util.ordered_set import FrozenOrderedSet
BEGIN_LOCKFILE_HEADER = b"# --- BEGIN PANTS LOCKFILE METADATA: DO NOT EDIT OR REMOVE ---"
END_LOCKFILE_HEADER = b"# --- END PANTS LOCKFILE METADATA ---"
_concrete_metadata_classes: dict[int, type[LockfileMetadata]] = {}
def _lockfile_metadata_version(
version: int,
) -> Callable[[type[LockfileMetadata]], type[LockfileMetadata]]:
"""Decorator to register a Lockfile metadata version subclass with a given version number.
The class must be a frozen dataclass
"""
def _dec(cls: type[LockfileMetadata]) -> type[LockfileMetadata]:
# Only frozen dataclasses may be registered as lockfile metadata:
cls_dataclass_params = getattr(cls, "__dataclass_params__", None)
if not cls_dataclass_params or not cls_dataclass_params.frozen:
raise ValueError(
"Classes registered with `_lockfile_metadata_version` may only be "
"frozen dataclasses"
)
_concrete_metadata_classes[version] = cls
return cls
return _dec
class InvalidLockfileError(Exception):
pass
@dataclass(frozen=True)
class LockfileMetadata:
"""Base class for metadata that is attached to a given lockfiles.
This class, and provides the external API for serializing, deserializing, and validating the
contents of individual lockfiles. New versions of metadata implement a concrete subclass and
provide deserialization and validation logic, along with specialist serialization logic.
To construct an instance of the most recent concrete subclass, call `LockfileMetadata.new()`.
"""
_LockfileMetadataSubclass = TypeVar("_LockfileMetadataSubclass", bound="LockfileMetadata")
valid_for_interpreter_constraints: InterpreterConstraints
@staticmethod
def new(
valid_for_interpreter_constraints: InterpreterConstraints,
requirements: set[Requirement],
) -> LockfileMetadata:
"""Call the most recent version of the `LockfileMetadata` class to construct a concrete
instance.
This static method should be used in place of the `LockfileMetadata` constructor. This gives
calling sites a predictable method to call to construct a new `LockfileMetadata` for
writing, while still allowing us to support _reading_ older, deprecated metadata versions.
"""
return LockfileMetadataV2(valid_for_interpreter_constraints, requirements)
@staticmethod
def from_lockfile(
lockfile: bytes, lockfile_path: str | None = None, resolve_name: str | None = None
) -> LockfileMetadata:
"""Parse all relevant metadata from the lockfile's header."""
in_metadata_block = False
metadata_lines = []
for line in lockfile.splitlines():
if line == BEGIN_LOCKFILE_HEADER:
in_metadata_block = True
elif line == END_LOCKFILE_HEADER:
break
elif in_metadata_block:
metadata_lines.append(line[2:])
error_suffix = (
"To resolve this error, you will need to regenerate the lockfile by running "
"`./pants generate-lockfiles"
)
if resolve_name:
error_suffix += " --resolve={tool_name}"
error_suffix += "`."
if lockfile_path is not None and resolve_name is not None:
lockfile_description = f"the lockfile `{lockfile_path}` for `{resolve_name}`"
elif lockfile_path is not None:
lockfile_description = f"the lockfile `{lockfile_path}`"
elif resolve_name is not None:
lockfile_description = f"the lockfile for `{resolve_name}`"
else:
lockfile_description = "this lockfile"
if not metadata_lines:
raise InvalidLockfileError(
f"Could not find a Pants metadata block in {lockfile_description}. {error_suffix}"
)
try:
metadata = json.loads(b"\n".join(metadata_lines))
except json.decoder.JSONDecodeError:
raise InvalidLockfileError(
f"Metadata header in {lockfile_description} is not a valid JSON string and can't "
"be decoded. " + error_suffix
)
concrete_class = _concrete_metadata_classes[metadata["version"]]
return concrete_class._from_json_dict(metadata, lockfile_description, error_suffix)
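    # Round-trip sketch (illustrative only; names are placeholders):
    #
    #   metadata = LockfileMetadata.new(interpreter_constraints, requirements)
    #   lockfile = metadata.add_header_to_lockfile(raw, regenerate_command="./pants ...")
    #   assert LockfileMetadata.from_lockfile(lockfile) == metadata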
@classmethod
def _from_json_dict(
cls: type[_LockfileMetadataSubclass],
json_dict: dict[Any, Any],
lockfile_description: str,
error_suffix: str,
) -> _LockfileMetadataSubclass:
"""Construct a `LockfileMetadata` subclass from the supplied JSON dict.
*** Not implemented. Subclasses should override. ***
`lockfile_description` is a detailed, human-readable description of the lockfile, which can
be read by the user to figure out which lockfile is broken in case of an error.
`error_suffix` is a string describing how to fix the lockfile.
"""
raise NotImplementedError(
"`LockfileMetadata._from_json_dict` should not be directly " "called."
)
def add_header_to_lockfile(self, lockfile: bytes, *, regenerate_command: str) -> bytes:
metadata_dict = self._header_dict()
metadata_json = json.dumps(metadata_dict, ensure_ascii=True, indent=2).splitlines()
metadata_as_a_comment = "\n".join(f"# {l}" for l in metadata_json).encode("ascii")
header = b"%b\n%b\n%b" % (BEGIN_LOCKFILE_HEADER, metadata_as_a_comment, END_LOCKFILE_HEADER)
regenerate_command_bytes = (
f"# This lockfile was autogenerated by Pants. To regenerate, run:\n#\n"
f"# {regenerate_command}"
).encode()
return b"%b\n#\n%b\n\n%b" % (regenerate_command_bytes, header, lockfile)
def _header_dict(self) -> dict[Any, Any]:
"""Produce a dictionary to be serialized into the lockfile header.
Subclasses should call `super` and update the resulting dictionary.
"""
version: int
for ver, cls in _concrete_metadata_classes.items():
if isinstance(self, cls):
version = ver
break
else:
raise ValueError("Trying to serialize an unregistered `LockfileMetadata` subclass.")
return {
"version": version,
"valid_for_interpreter_constraints": [
str(ic) for ic in self.valid_for_interpreter_constraints
],
}
def is_valid_for(
self,
expected_invalidation_digest: str | None,
user_interpreter_constraints: InterpreterConstraints,
interpreter_universe: Iterable[str],
user_requirements: Iterable[Requirement] | None,
) -> LockfileMetadataValidation:
"""Returns Truthy if this `LockfileMetadata` can be used in the current execution
context."""
raise NotImplementedError("call `is_valid_for` on subclasses only")
@_lockfile_metadata_version(1)
@dataclass(frozen=True)
class LockfileMetadataV1(LockfileMetadata):
requirements_invalidation_digest: str
<|fim▁hole|> json_dict: dict[Any, Any],
lockfile_description: str,
error_suffix: str,
) -> LockfileMetadataV1:
metadata = _get_metadata(json_dict, lockfile_description, error_suffix)
interpreter_constraints = metadata(
"valid_for_interpreter_constraints", InterpreterConstraints, InterpreterConstraints
)
requirements_digest = metadata("requirements_invalidation_digest", str, None)
return LockfileMetadataV1(interpreter_constraints, requirements_digest)
def _header_dict(self) -> dict[Any, Any]:
d = super()._header_dict()
d["requirements_invalidation_digest"] = self.requirements_invalidation_digest
return d
def is_valid_for(
self,
expected_invalidation_digest: str | None,
user_interpreter_constraints: InterpreterConstraints,
interpreter_universe: Iterable[str],
_: Iterable[Requirement] | None, # User requirements are not used by V1
) -> LockfileMetadataValidation:
failure_reasons: set[InvalidLockfileReason] = set()
if expected_invalidation_digest is None:
return LockfileMetadataValidation(failure_reasons)
if self.requirements_invalidation_digest != expected_invalidation_digest:
failure_reasons.add(InvalidLockfileReason.INVALIDATION_DIGEST_MISMATCH)
if not self.valid_for_interpreter_constraints.contains(
user_interpreter_constraints, interpreter_universe
):
failure_reasons.add(InvalidLockfileReason.INTERPRETER_CONSTRAINTS_MISMATCH)
return LockfileMetadataValidation(failure_reasons)
@_lockfile_metadata_version(2)
@dataclass(frozen=True)
class LockfileMetadataV2(LockfileMetadata):
"""Lockfile version that permits specifying a requirements as a set rather than a digest.
Validity is tested by the set of requirements strings being the same in the user requirements as
those in the stored requirements.
"""
requirements: set[Requirement]
@classmethod
def _from_json_dict(
cls: type[LockfileMetadataV2],
json_dict: dict[Any, Any],
lockfile_description: str,
error_suffix: str,
) -> LockfileMetadataV2:
metadata = _get_metadata(json_dict, lockfile_description, error_suffix)
requirements = metadata(
"generated_with_requirements",
Set[Requirement],
lambda l: {Requirement.parse(i) for i in l},
)
interpreter_constraints = metadata(
"valid_for_interpreter_constraints", InterpreterConstraints, InterpreterConstraints
)
return LockfileMetadataV2(interpreter_constraints, requirements)
def _header_dict(self) -> dict[Any, Any]:
out = super()._header_dict()
# Requirements need to be stringified then sorted so that tests are deterministic. Sorting
# followed by stringifying does not produce a meaningful result.
out["generated_with_requirements"] = (
sorted(str(i) for i in self.requirements) if self.requirements is not None else None
)
return out
def is_valid_for(
self,
_: str | None, # Validation digests are not used by V2; this param will be deprecated
user_interpreter_constraints: InterpreterConstraints,
interpreter_universe: Iterable[str],
user_requirements: Iterable[Requirement] | None,
) -> LockfileMetadataValidation:
failure_reasons: set[InvalidLockfileReason] = set()
if user_requirements is None:
return LockfileMetadataValidation(failure_reasons)
if self.requirements != set(user_requirements):
failure_reasons.add(InvalidLockfileReason.REQUIREMENTS_MISMATCH)
if not self.valid_for_interpreter_constraints.contains(
user_interpreter_constraints, interpreter_universe
):
failure_reasons.add(InvalidLockfileReason.INTERPRETER_CONSTRAINTS_MISMATCH)
return LockfileMetadataValidation(failure_reasons)
def calculate_invalidation_digest(requirements: Iterable[str]) -> str:
"""Returns an invalidation digest for the given requirements."""
m = hashlib.sha256()
inputs = {
# `FrozenOrderedSet` deduplicates while keeping ordering, which speeds up the sorting if
# the input was already sorted.
"requirements": sorted(FrozenOrderedSet(requirements)),
}
m.update(json.dumps(inputs).encode("utf-8"))
return m.hexdigest()
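# Example (editorial): the digest depends only on the de-duplicated, sorted
# requirement strings, so input order and duplicates do not change it:
#
#   calculate_invalidation_digest(["b", "a"]) == calculate_invalidation_digest(["a", "a", "b"])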
class InvalidLockfileReason(Enum):
INVALIDATION_DIGEST_MISMATCH = "invalidation_digest_mismatch"
INTERPRETER_CONSTRAINTS_MISMATCH = "interpreter_constraints_mismatch"
REQUIREMENTS_MISMATCH = "requirements_mismatch"
class LockfileMetadataValidation:
"""Boolean-like value which additionally carries reasons why a validation failed."""
failure_reasons: set[InvalidLockfileReason]
def __init__(self, failure_reasons: Iterable[InvalidLockfileReason] = ()):
self.failure_reasons = set(failure_reasons)
def __bool__(self):
return not self.failure_reasons
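# Usage sketch (editorial, not from the original): the result is truthy only when
# no failure reasons were recorded, so call sites can branch and then report details.
#
#   result = metadata.is_valid_for(digest, constraints, universe, requirements)
#   if not result:
#       raise InvalidLockfileError(f"stale lockfile: {result.failure_reasons}")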
T = TypeVar("T")
def _get_metadata(
metadata: dict[Any, Any],
lockfile_description: str,
error_suffix: str,
) -> Callable[[str, type[T], Callable[[Any], T] | None], T]:
"""Returns a function that will get a given key from the `metadata` dict, and optionally do some
verification and post-processing to return a value of the correct type."""
def get_metadata(key: str, type_: type[T], coerce: Callable[[Any], T] | None) -> T:
val: Any
try:
val = metadata[key]
except KeyError:
raise InvalidLockfileError(
f"Required key `{key}` is not present in metadata header for "
f"{lockfile_description}. {error_suffix}"
)
if not coerce:
if isinstance(val, type_):
return val
raise InvalidLockfileError(
f"Metadata value `{key}` in {lockfile_description} must "
f"be a {type(type_).__name__}. {error_suffix}"
)
else:
try:
return coerce(val)
except Exception:
raise InvalidLockfileError(
f"Metadata value `{key}` in {lockfile_description} must be able to "
f"be converted to a {type(type_).__name__}. {error_suffix}"
)
return get_metadata<|fim▁end|> | @classmethod
def _from_json_dict(
cls: type[LockfileMetadataV1], |
<|file_name|>dom_wrapper.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! A safe wrapper for DOM nodes that prevents layout from mutating the DOM, from letting DOM nodes
//! escape, and from generally doing anything that it isn't supposed to. This is accomplished via
//! a simple whitelist of allowed operations, along with some lifetime magic to prevent nodes from
//! escaping.
//!
//! As a security wrapper is only as good as its whitelist, be careful when adding operations to
//! this list. The cardinal rules are:
//!
//! 1. Layout is not allowed to mutate the DOM.
//!
//! 2. Layout is not allowed to see anything with `LayoutDom` in the name, because it could hang
//! onto these objects and cause use-after-free.
//!
//! When implementing wrapper functions, be careful that you do not touch the borrow flags, or you
//! will race and cause spurious thread failure. (Note that I do not believe these races are
//! exploitable, but they'll result in brokenness nonetheless.)
//!
//! Rules of the road for this file:
//!
//! * Do not call any methods on DOM nodes without checking to see whether they use borrow flags.
//!
//! o Instead of `get_attr()`, use `.get_attr_val_for_layout()`.
//!
//! o Instead of `html_element_in_html_document()`, use
//! `html_element_in_html_document_for_layout()`.
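//!
//! Illustrative sketch (editorial, not from the original): layout code holds the
//! safe wrappers rather than raw `LayoutDom` values, e.g.
//!
//! ```ignore
//! fn first_element_child<'dom>(node: ServoLayoutNode<'dom>) -> Option<ServoLayoutElement<'dom>> {
//!     node.dom_children().find_map(|child| child.as_element())
//! }
//! ```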
#![allow(unsafe_code)]
use atomic_refcell::{AtomicRef, AtomicRefCell, AtomicRefMut};
use gfx_traits::ByteIndex;
use html5ever::{LocalName, Namespace};
use layout::element_data::LayoutDataForElement;
use layout::wrapper::GetStyleAndLayoutData;
use msg::constellation_msg::{BrowsingContextId, PipelineId};
use net_traits::image::base::{Image, ImageMetadata};
use range::Range;
use script::layout_exports::NodeFlags;
use script::layout_exports::ShadowRoot;
use script::layout_exports::{
CharacterDataTypeId, DocumentFragmentTypeId, ElementTypeId, HTMLElementTypeId, NodeTypeId,
TextTypeId,
};
use script::layout_exports::{Document, Element, Node, Text};
use script::layout_exports::{LayoutCharacterDataHelpers, LayoutDocumentHelpers};
use script::layout_exports::{
LayoutDom, LayoutElementHelpers, LayoutNodeHelpers, LayoutShadowRootHelpers,
};
use script_layout_interface::wrapper_traits::{
DangerousThreadSafeLayoutNode, GetStyleAndOpaqueLayoutData, LayoutNode,
};
use script_layout_interface::wrapper_traits::{
PseudoElementType, ThreadSafeLayoutElement, ThreadSafeLayoutNode,
};
use script_layout_interface::{
HTMLCanvasData, HTMLMediaData, LayoutNodeType, StyleAndOpaqueLayoutData,
};
use script_layout_interface::{SVGSVGData, StyleData, TrustedNodeAddress};
use selectors::attr::{AttrSelectorOperation, CaseSensitivity, NamespaceConstraint};
use selectors::matching::VisitedHandlingMode;
use selectors::matching::{ElementSelectorFlags, MatchingContext, QuirksMode};
use selectors::sink::Push;
use servo_arc::{Arc, ArcBorrow};
use servo_atoms::Atom;
use servo_url::ServoUrl;
use std::borrow::Cow;
use std::fmt;
use std::fmt::Debug;
use std::hash::{Hash, Hasher};
use std::sync::atomic::Ordering;
use std::sync::Arc as StdArc;
use style::animation::AnimationSetKey;
use style::applicable_declarations::ApplicableDeclarationBlock;
use style::attr::AttrValue;
use style::context::SharedStyleContext;
use style::data::ElementData;
use style::dom::{DomChildren, LayoutIterator, NodeInfo, OpaqueNode};
use style::dom::{TDocument, TElement, TNode, TShadowRoot};
use style::element_state::*;
use style::font_metrics::ServoMetricsProvider;
use style::media_queries::Device;
use style::properties::{ComputedValues, PropertyDeclarationBlock};
use style::selector_parser::{extended_filtering, PseudoElement, SelectorImpl};
use style::selector_parser::{AttrValue as SelectorAttrValue, Lang, NonTSPseudoClass};
use style::shared_lock::{
Locked as StyleLocked, SharedRwLock as StyleSharedRwLock, SharedRwLockReadGuard,
};
use style::str::is_whitespace;
use style::stylist::CascadeData;
use style::CaseSensitivityExt;
#[derive(Clone, Copy)]
pub struct ServoLayoutNode<'dom> {
/// The wrapped node.
node: LayoutDom<'dom, Node>,
}
// Those are supposed to be sound, but they aren't because the entire system
// between script and layout so far has been designed to work around their
// absence. Switching the entire thing to the inert crate infra will help.
unsafe impl Send for ServoLayoutNode<'_> {}
unsafe impl Sync for ServoLayoutNode<'_> {}
impl<'ln> Debug for ServoLayoutNode<'ln> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if let Some(el) = self.as_element() {
el.fmt(f)
        } else if self.is_text_node() {
            write!(f, "<text node> ({:#x})", self.opaque().0)
        } else {
            write!(f, "<non-text node> ({:#x})", self.opaque().0)
        }
}
}
impl<'a> PartialEq for ServoLayoutNode<'a> {
#[inline]
fn eq(&self, other: &ServoLayoutNode) -> bool {
self.node == other.node
}
}
impl<'ln> ServoLayoutNode<'ln> {
fn from_layout_js(n: LayoutDom<'ln, Node>) -> Self {
ServoLayoutNode { node: n }
}
pub unsafe fn new(address: &TrustedNodeAddress) -> Self {
ServoLayoutNode::from_layout_js(LayoutDom::from_trusted_node_address(*address))
}
fn script_type_id(&self) -> NodeTypeId {
self.node.type_id_for_layout()
}
}
impl<'ln> NodeInfo for ServoLayoutNode<'ln> {
fn is_element(&self) -> bool {
self.node.is_element_for_layout()
}
fn is_text_node(&self) -> bool {
self.script_type_id() ==
NodeTypeId::CharacterData(CharacterDataTypeId::Text(TextTypeId::Text))
}
}
#[derive(Clone, Copy, PartialEq)]
pub struct ServoShadowRoot<'dom> {
/// The wrapped shadow root.
shadow_root: LayoutDom<'dom, ShadowRoot>,
}
impl<'lr> Debug for ServoShadowRoot<'lr> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.as_node().fmt(f)
}
}
impl<'lr> TShadowRoot for ServoShadowRoot<'lr> {
type ConcreteNode = ServoLayoutNode<'lr>;
fn as_node(&self) -> Self::ConcreteNode {
ServoLayoutNode::from_layout_js(self.shadow_root.upcast())
}
fn host(&self) -> ServoLayoutElement<'lr> {
ServoLayoutElement::from_layout_js(self.shadow_root.get_host_for_layout())
}
fn style_data<'a>(&self) -> Option<&'a CascadeData>
where
Self: 'a,
{
Some(&self.shadow_root.get_style_data_for_layout())
}
}
impl<'lr> ServoShadowRoot<'lr> {
fn from_layout_js(shadow_root: LayoutDom<'lr, ShadowRoot>) -> Self {
ServoShadowRoot { shadow_root }
}
pub unsafe fn flush_stylesheets(
&self,
device: &Device,
quirks_mode: QuirksMode,
guard: &SharedRwLockReadGuard,
) {
self.shadow_root
.flush_stylesheets::<ServoLayoutElement>(device, quirks_mode, guard)
}
}
impl<'ln> TNode for ServoLayoutNode<'ln> {
type ConcreteDocument = ServoLayoutDocument<'ln>;
type ConcreteElement = ServoLayoutElement<'ln>;
type ConcreteShadowRoot = ServoShadowRoot<'ln>;
fn parent_node(&self) -> Option<Self> {
self.node
.composed_parent_node_ref()
.map(Self::from_layout_js)
}
fn first_child(&self) -> Option<Self> {
self.node.first_child_ref().map(Self::from_layout_js)
}
fn last_child(&self) -> Option<Self> {
self.node.last_child_ref().map(Self::from_layout_js)
}
fn prev_sibling(&self) -> Option<Self> {
self.node.prev_sibling_ref().map(Self::from_layout_js)
}
fn next_sibling(&self) -> Option<Self> {
self.node.next_sibling_ref().map(Self::from_layout_js)
}
fn owner_doc(&self) -> Self::ConcreteDocument {
ServoLayoutDocument::from_layout_js(self.node.owner_doc_for_layout())
}
fn traversal_parent(&self) -> Option<ServoLayoutElement<'ln>> {
let parent = self.parent_node()?;
if let Some(shadow) = parent.as_shadow_root() {
return Some(shadow.host());
};
parent.as_element()
}
fn opaque(&self) -> OpaqueNode {
unsafe { self.get_jsmanaged().opaque() }
}
fn debug_id(self) -> usize {
self.opaque().0
}
fn as_element(&self) -> Option<ServoLayoutElement<'ln>> {
as_element(self.node)
}
fn as_document(&self) -> Option<ServoLayoutDocument<'ln>> {
self.node
.downcast()
.map(ServoLayoutDocument::from_layout_js)
}
fn as_shadow_root(&self) -> Option<ServoShadowRoot<'ln>> {
self.node.downcast().map(ServoShadowRoot::from_layout_js)
}
fn is_in_document(&self) -> bool {
unsafe { self.node.get_flag(NodeFlags::IS_IN_DOC) }
}
}
impl<'ln> LayoutNode<'ln> for ServoLayoutNode<'ln> {
type ConcreteThreadSafeLayoutNode = ServoThreadSafeLayoutNode<'ln>;
fn to_threadsafe(&self) -> Self::ConcreteThreadSafeLayoutNode {
ServoThreadSafeLayoutNode::new(*self)
}
fn type_id(&self) -> LayoutNodeType {
self.script_type_id().into()
}
unsafe fn initialize_data(&self) {
if self.get_style_and_layout_data().is_none() {
let opaque = StyleAndOpaqueLayoutData::new(
StyleData::new(),
AtomicRefCell::new(LayoutDataForElement::default()),
);
self.init_style_and_opaque_layout_data(opaque);
};
}
unsafe fn init_style_and_opaque_layout_data(&self, data: Box<StyleAndOpaqueLayoutData>) {
self.get_jsmanaged().init_style_and_opaque_layout_data(data);
}
unsafe fn take_style_and_opaque_layout_data(&self) -> Box<StyleAndOpaqueLayoutData> {
self.get_jsmanaged().take_style_and_opaque_layout_data()
}
fn is_connected(&self) -> bool {
unsafe { self.node.get_flag(NodeFlags::IS_CONNECTED) }
}
}
impl<'dom> GetStyleAndOpaqueLayoutData<'dom> for ServoLayoutNode<'dom> {
fn get_style_and_opaque_layout_data(self) -> Option<&'dom StyleAndOpaqueLayoutData> {
unsafe { self.get_jsmanaged().get_style_and_opaque_layout_data() }
}
}
impl<'dom> GetStyleAndOpaqueLayoutData<'dom> for ServoLayoutElement<'dom> {
fn get_style_and_opaque_layout_data(self) -> Option<&'dom StyleAndOpaqueLayoutData> {
self.as_node().get_style_and_opaque_layout_data()
}
}
impl<'dom> GetStyleAndOpaqueLayoutData<'dom> for ServoThreadSafeLayoutNode<'dom> {
fn get_style_and_opaque_layout_data(self) -> Option<&'dom StyleAndOpaqueLayoutData> {
self.node.get_style_and_opaque_layout_data()
}
}
impl<'dom> GetStyleAndOpaqueLayoutData<'dom> for ServoThreadSafeLayoutElement<'dom> {
fn get_style_and_opaque_layout_data(self) -> Option<&'dom StyleAndOpaqueLayoutData> {
self.element.as_node().get_style_and_opaque_layout_data()
}
}
impl<'ln> ServoLayoutNode<'ln> {
/// Returns the interior of this node as a `LayoutDom`. This is highly unsafe for layout to
/// call and as such is marked `unsafe`.
pub unsafe fn get_jsmanaged(&self) -> LayoutDom<'ln, Node> {
self.node
}
}
/// A wrapper around documents that ensures layout can only ever access safe properties.
#[derive(Clone, Copy)]
pub struct ServoLayoutDocument<'dom> {
document: LayoutDom<'dom, Document>,
}
impl<'ld> TDocument for ServoLayoutDocument<'ld> {
type ConcreteNode = ServoLayoutNode<'ld>;
fn as_node(&self) -> Self::ConcreteNode {
ServoLayoutNode::from_layout_js(self.document.upcast())
}
fn quirks_mode(&self) -> QuirksMode {
self.document.quirks_mode()
}
fn is_html_document(&self) -> bool {
self.document.is_html_document_for_layout()
}
fn shared_lock(&self) -> &StyleSharedRwLock {
self.document.style_shared_lock()
}
}
impl<'ld> ServoLayoutDocument<'ld> {
pub fn root_element(&self) -> Option<ServoLayoutElement<'ld>> {
self.as_node()
.dom_children()
.flat_map(|n| n.as_element())
.next()
}
pub fn needs_paint_from_layout(&self) {
unsafe { self.document.needs_paint_from_layout() }
}
pub fn will_paint(&self) {
unsafe { self.document.will_paint() }
}
pub fn style_shared_lock(&self) -> &StyleSharedRwLock {
self.document.style_shared_lock()
}
pub fn shadow_roots(&self) -> Vec<ServoShadowRoot> {
unsafe {
self.document
.shadow_roots()
.iter()
.map(|sr| {
debug_assert!(sr.upcast::<Node>().get_flag(NodeFlags::IS_CONNECTED));
ServoShadowRoot::from_layout_js(*sr)
})
.collect()
}
}
pub fn flush_shadow_roots_stylesheets(
&self,
device: &Device,
quirks_mode: QuirksMode,
guard: &SharedRwLockReadGuard,
) {
unsafe {
if !self.document.shadow_roots_styles_changed() {
return;
}
self.document.flush_shadow_roots_stylesheets();
for shadow_root in self.shadow_roots() {
shadow_root.flush_stylesheets(device, quirks_mode, guard);
}
}
}
pub fn from_layout_js(doc: LayoutDom<'ld, Document>) -> Self {
ServoLayoutDocument { document: doc }
}
}
/// A wrapper around elements that ensures layout can only ever access safe properties.
#[derive(Clone, Copy)]
pub struct ServoLayoutElement<'dom> {
element: LayoutDom<'dom, Element>,
}
impl<'le> fmt::Debug for ServoLayoutElement<'le> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "<{}", self.element.local_name())?;
if let Some(id) = self.id() {
write!(f, " id={}", id)?;
}
write!(f, "> ({:#x})", self.as_node().opaque().0)
}
}
impl<'le> TElement for ServoLayoutElement<'le> {
type ConcreteNode = ServoLayoutNode<'le>;
type TraversalChildrenIterator = DomChildren<Self::ConcreteNode>;
type FontMetricsProvider = ServoMetricsProvider;
fn as_node(&self) -> ServoLayoutNode<'le> {
ServoLayoutNode::from_layout_js(self.element.upcast())
}
fn traversal_children(&self) -> LayoutIterator<Self::TraversalChildrenIterator> {
LayoutIterator(if let Some(shadow) = self.shadow_root() {
shadow.as_node().dom_children()
} else {
self.as_node().dom_children()
})
}
fn is_html_element(&self) -> bool {
self.element.is_html_element()
}
fn is_mathml_element(&self) -> bool {
*self.element.namespace() == ns!(mathml)
}
fn is_svg_element(&self) -> bool {
*self.element.namespace() == ns!(svg)
}
fn has_part_attr(&self) -> bool {
false
}
fn exports_any_part(&self) -> bool {
false
}
fn style_attribute(&self) -> Option<ArcBorrow<StyleLocked<PropertyDeclarationBlock>>> {
unsafe {
(*self.element.style_attribute())
.as_ref()
.map(|x| x.borrow_arc())
}
}
fn may_have_animations(&self) -> bool {
true
}
fn animation_rule(
&self,
context: &SharedStyleContext,
) -> Option<Arc<StyleLocked<PropertyDeclarationBlock>>> {
let node = self.as_node();
let document = node.owner_doc();
context.animations.get_animation_declarations(
&AnimationSetKey::new_for_non_pseudo(node.opaque()),
context.current_time_for_animations,
document.style_shared_lock(),
)
}
fn transition_rule(
&self,
context: &SharedStyleContext,
) -> Option<Arc<StyleLocked<PropertyDeclarationBlock>>> {
let node = self.as_node();
let document = node.owner_doc();
context.animations.get_transition_declarations(
&AnimationSetKey::new_for_non_pseudo(node.opaque()),
context.current_time_for_animations,
document.style_shared_lock(),
)
}
fn state(&self) -> ElementState {
self.element.get_state_for_layout()
}
#[inline]
fn has_attr(&self, namespace: &Namespace, attr: &LocalName) -> bool {
self.get_attr(namespace, attr).is_some()
}
#[inline]
fn id(&self) -> Option<&Atom> {
unsafe { (*self.element.id_attribute()).as_ref() }
}
#[inline(always)]
fn each_class<F>(&self, mut callback: F)
where
F: FnMut(&Atom),
{
if let Some(ref classes) = self.element.get_classes_for_layout() {
for class in *classes {
callback(class)
}
}
}
fn has_dirty_descendants(&self) -> bool {
unsafe {
self.as_node()
.node
.get_flag(NodeFlags::HAS_DIRTY_DESCENDANTS)
}
}
fn has_snapshot(&self) -> bool {
unsafe { self.as_node().node.get_flag(NodeFlags::HAS_SNAPSHOT) }
}
fn handled_snapshot(&self) -> bool {
unsafe { self.as_node().node.get_flag(NodeFlags::HANDLED_SNAPSHOT) }
}
unsafe fn set_handled_snapshot(&self) {
self.as_node()
.node
.set_flag(NodeFlags::HANDLED_SNAPSHOT, true);
}
unsafe fn set_dirty_descendants(&self) {
debug_assert!(self.as_node().is_connected());
self.as_node()
.node
.set_flag(NodeFlags::HAS_DIRTY_DESCENDANTS, true)
}
unsafe fn unset_dirty_descendants(&self) {
self.as_node()
.node
.set_flag(NodeFlags::HAS_DIRTY_DESCENDANTS, false)
}
fn store_children_to_process(&self, n: isize) {
let data = self.get_style_data().unwrap();
data.parallel
.children_to_process
.store(n, Ordering::Relaxed);
}
fn did_process_child(&self) -> isize {
let data = self.get_style_data().unwrap();
let old_value = data
.parallel
.children_to_process
.fetch_sub(1, Ordering::Relaxed);
debug_assert!(old_value >= 1);
old_value - 1
}
unsafe fn clear_data(&self) {
if self.get_style_and_layout_data().is_some() {
drop(self.as_node().take_style_and_opaque_layout_data());
}
}
unsafe fn ensure_data(&self) -> AtomicRefMut<ElementData> {
self.as_node().initialize_data();
self.mutate_data().unwrap()
}
/// Whether there is an ElementData container.
fn has_data(&self) -> bool {
self.get_style_data().is_some()
}
/// Immutably borrows the ElementData.
fn borrow_data(&self) -> Option<AtomicRef<ElementData>> {
self.get_style_data().map(|data| data.element_data.borrow())
}
/// Mutably borrows the ElementData.
fn mutate_data(&self) -> Option<AtomicRefMut<ElementData>> {
self.get_style_data()
.map(|data| data.element_data.borrow_mut())
}
fn skip_item_display_fixup(&self) -> bool {
false
}
unsafe fn set_selector_flags(&self, flags: ElementSelectorFlags) {
self.element.insert_selector_flags(flags);
}
fn has_selector_flags(&self, flags: ElementSelectorFlags) -> bool {
self.element.has_selector_flags(flags)
}
fn has_animations(&self, context: &SharedStyleContext) -> bool {
// This is not used for pseudo elements currently so we can pass None.
return self.has_css_animations(context, /* pseudo_element = */ None) ||
self.has_css_transitions(context, /* pseudo_element = */ None);
}
fn has_css_animations(
&self,
context: &SharedStyleContext,
pseudo_element: Option<PseudoElement>,
) -> bool {
let key = AnimationSetKey::new(self.as_node().opaque(), pseudo_element);
context.animations.has_active_animations(&key)
}
fn has_css_transitions(
&self,
context: &SharedStyleContext,
pseudo_element: Option<PseudoElement>,
) -> bool {
let key = AnimationSetKey::new(self.as_node().opaque(), pseudo_element);
context.animations.has_active_transitions(&key)
}
#[inline]
fn lang_attr(&self) -> Option<SelectorAttrValue> {
self.get_attr(&ns!(xml), &local_name!("lang"))
.or_else(|| self.get_attr(&ns!(), &local_name!("lang")))
.map(|v| String::from(v as &str))
}<|fim▁hole|> override_lang: Option<Option<SelectorAttrValue>>,
value: &Lang,
) -> bool {
// Servo supports :lang() from CSS Selectors 4, which can take a comma-
// separated list of language tags in the pseudo-class, and which
// performs RFC 4647 extended filtering matching on them.
//
// FIXME(heycam): This is wrong, since extended_filtering accepts
// a string containing commas (separating each language tag in
// a list) but the pseudo-class instead should be parsing and
// storing separate <ident> or <string>s for each language tag.
//
// FIXME(heycam): Look at `element`'s document's Content-Language
// HTTP header for language tags to match `value` against. To
// do this, we should make `get_lang_for_layout` return an Option,
// so we can decide when to fall back to the Content-Language check.
let element_lang = match override_lang {
Some(Some(lang)) => lang,
Some(None) => String::new(),
None => self.element.get_lang_for_layout(),
};
extended_filtering(&element_lang, &*value)
}
fn is_html_document_body_element(&self) -> bool {
// This is only used for the "tables inherit from body" quirk, which we
// don't implement.
//
// FIXME(emilio): We should be able to give the right answer though!
false
}
fn synthesize_presentational_hints_for_legacy_attributes<V>(
&self,
_visited_handling: VisitedHandlingMode,
hints: &mut V,
) where
V: Push<ApplicableDeclarationBlock>,
{
self.element
.synthesize_presentational_hints_for_legacy_attributes(hints);
}
/// The shadow root this element is a host of.
fn shadow_root(&self) -> Option<ServoShadowRoot<'le>> {
self.element
.get_shadow_root_for_layout()
.map(ServoShadowRoot::from_layout_js)
}
/// The shadow root which roots the subtree this element is contained in.
fn containing_shadow(&self) -> Option<ServoShadowRoot<'le>> {
self.element
.upcast()
.containing_shadow_root_for_layout()
.map(ServoShadowRoot::from_layout_js)
}
fn local_name(&self) -> &LocalName {
self.element.local_name()
}
fn namespace(&self) -> &Namespace {
self.element.namespace()
}
}
impl<'le> PartialEq for ServoLayoutElement<'le> {
fn eq(&self, other: &Self) -> bool {
self.as_node() == other.as_node()
}
}
impl<'le> Hash for ServoLayoutElement<'le> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.element.hash(state);
}
}
impl<'le> Eq for ServoLayoutElement<'le> {}
impl<'le> ServoLayoutElement<'le> {
fn from_layout_js(el: LayoutDom<'le, Element>) -> Self {
ServoLayoutElement { element: el }
}
#[inline]
fn get_attr_enum(&self, namespace: &Namespace, name: &LocalName) -> Option<&AttrValue> {
self.element.get_attr_for_layout(namespace, name)
}
#[inline]
fn get_attr(&self, namespace: &Namespace, name: &LocalName) -> Option<&str> {
self.element.get_attr_val_for_layout(namespace, name)
}
fn get_style_data(&self) -> Option<&StyleData> {
self.get_style_and_opaque_layout_data()
.map(|data| &data.style_data)
}
pub unsafe fn unset_snapshot_flags(&self) {
self.as_node()
.node
.set_flag(NodeFlags::HAS_SNAPSHOT | NodeFlags::HANDLED_SNAPSHOT, false);
}
pub unsafe fn set_has_snapshot(&self) {
self.as_node().node.set_flag(NodeFlags::HAS_SNAPSHOT, true);
}
}
fn as_element<'dom>(node: LayoutDom<'dom, Node>) -> Option<ServoLayoutElement<'dom>> {
node.downcast().map(ServoLayoutElement::from_layout_js)
}
impl<'le> ::selectors::Element for ServoLayoutElement<'le> {
type Impl = SelectorImpl;
fn opaque(&self) -> ::selectors::OpaqueElement {
::selectors::OpaqueElement::new(unsafe { &*(self.as_node().opaque().0 as *const ()) })
}
fn parent_element(&self) -> Option<ServoLayoutElement<'le>> {
self.element
.upcast()
.composed_parent_node_ref()
.and_then(as_element)
}
fn parent_node_is_shadow_root(&self) -> bool {
match self.as_node().parent_node() {
None => false,
Some(node) => {
node.script_type_id() ==
NodeTypeId::DocumentFragment(DocumentFragmentTypeId::ShadowRoot)
},
}
}
fn containing_shadow_host(&self) -> Option<Self> {
self.containing_shadow().map(|s| s.host())
}
fn prev_sibling_element(&self) -> Option<ServoLayoutElement<'le>> {
let mut node = self.as_node();
while let Some(sibling) = node.prev_sibling() {
if let Some(element) = sibling.as_element() {
return Some(element);
}
node = sibling;
}
None
}
fn next_sibling_element(&self) -> Option<ServoLayoutElement<'le>> {
let mut node = self.as_node();
while let Some(sibling) = node.next_sibling() {
if let Some(element) = sibling.as_element() {
return Some(element);
}
node = sibling;
}
None
}
fn attr_matches(
&self,
ns: &NamespaceConstraint<&Namespace>,
local_name: &LocalName,
operation: &AttrSelectorOperation<&String>,
) -> bool {
match *ns {
NamespaceConstraint::Specific(ref ns) => self
.get_attr_enum(ns, local_name)
.map_or(false, |value| value.eval_selector(operation)),
NamespaceConstraint::Any => self
.element
.get_attr_vals_for_layout(local_name)
.iter()
.any(|value| value.eval_selector(operation)),
}
}
fn is_root(&self) -> bool {
match self.as_node().parent_node() {
None => false,
Some(node) => match node.script_type_id() {
NodeTypeId::Document(_) => true,
_ => false,
},
}
}
fn is_empty(&self) -> bool {
self.as_node()
.dom_children()
.all(|node| match node.script_type_id() {
NodeTypeId::Element(..) => false,
NodeTypeId::CharacterData(CharacterDataTypeId::Text(TextTypeId::Text)) => {
node.node.downcast().unwrap().data_for_layout().is_empty()
},
_ => true,
})
}
#[inline]
fn has_local_name(&self, name: &LocalName) -> bool {
self.element.local_name() == name
}
#[inline]
fn has_namespace(&self, ns: &Namespace) -> bool {
self.element.namespace() == ns
}
#[inline]
fn is_same_type(&self, other: &Self) -> bool {
self.element.local_name() == other.element.local_name() &&
self.element.namespace() == other.element.namespace()
}
fn is_pseudo_element(&self) -> bool {
false
}
fn match_pseudo_element(
&self,
_pseudo: &PseudoElement,
_context: &mut MatchingContext<Self::Impl>,
) -> bool {
false
}
fn match_non_ts_pseudo_class<F>(
&self,
pseudo_class: &NonTSPseudoClass,
_: &mut MatchingContext<Self::Impl>,
_: &mut F,
) -> bool
where
F: FnMut(&Self, ElementSelectorFlags),
{
match *pseudo_class {
// https://github.com/servo/servo/issues/8718
NonTSPseudoClass::Link | NonTSPseudoClass::AnyLink => self.is_link(),
NonTSPseudoClass::Visited => false,
NonTSPseudoClass::Lang(ref lang) => self.match_element_lang(None, &*lang),
NonTSPseudoClass::ServoNonZeroBorder => {
match self
.element
.get_attr_for_layout(&ns!(), &local_name!("border"))
{
None | Some(&AttrValue::UInt(_, 0)) => false,
_ => true,
}
},
NonTSPseudoClass::ReadOnly => !self
.element
.get_state_for_layout()
.contains(pseudo_class.state_flag()),
NonTSPseudoClass::Active |
NonTSPseudoClass::Focus |
NonTSPseudoClass::Fullscreen |
NonTSPseudoClass::Hover |
NonTSPseudoClass::Defined |
NonTSPseudoClass::Enabled |
NonTSPseudoClass::Disabled |
NonTSPseudoClass::Checked |
NonTSPseudoClass::Indeterminate |
NonTSPseudoClass::ReadWrite |
NonTSPseudoClass::PlaceholderShown |
NonTSPseudoClass::Target => self
.element
.get_state_for_layout()
.contains(pseudo_class.state_flag()),
}
}
#[inline]
fn is_link(&self) -> bool {
match self.as_node().script_type_id() {
// https://html.spec.whatwg.org/multipage/#selector-link
NodeTypeId::Element(ElementTypeId::HTMLElement(
HTMLElementTypeId::HTMLAnchorElement,
)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLAreaElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLLinkElement)) => {
self.element
.get_attr_val_for_layout(&ns!(), &local_name!("href"))
.is_some()
},
_ => false,
}
}
#[inline]
fn has_id(&self, id: &Atom, case_sensitivity: CaseSensitivity) -> bool {
unsafe {
(*self.element.id_attribute())
.as_ref()
.map_or(false, |atom| case_sensitivity.eq_atom(atom, id))
}
}
#[inline]
fn is_part(&self, _name: &Atom) -> bool {
false
}
fn imported_part(&self, _: &Atom) -> Option<Atom> {
None
}
#[inline]
fn has_class(&self, name: &Atom, case_sensitivity: CaseSensitivity) -> bool {
self.element.has_class_for_layout(name, case_sensitivity)
}
fn is_html_slot_element(&self) -> bool {
self.element.is_html_element() && self.local_name() == &local_name!("slot")
}
fn is_html_element_in_html_document(&self) -> bool {
if !self.element.is_html_element() {
return false;
}
self.as_node().owner_doc().is_html_document()
}
}
#[derive(Clone, Copy, Debug)]
pub struct ServoThreadSafeLayoutNode<'ln> {
/// The wrapped node.
node: ServoLayoutNode<'ln>,
/// The pseudo-element type, with (optionally)
/// a specified display value to override the stylesheet.
pseudo: PseudoElementType,
}
impl<'a> PartialEq for ServoThreadSafeLayoutNode<'a> {
#[inline]
fn eq(&self, other: &ServoThreadSafeLayoutNode<'a>) -> bool {
self.node == other.node
}
}
impl<'ln> DangerousThreadSafeLayoutNode<'ln> for ServoThreadSafeLayoutNode<'ln> {
unsafe fn dangerous_first_child(&self) -> Option<Self> {
self.get_jsmanaged()
.first_child_ref()
.map(ServoLayoutNode::from_layout_js)
.map(Self::new)
}
unsafe fn dangerous_next_sibling(&self) -> Option<Self> {
self.get_jsmanaged()
.next_sibling_ref()
.map(ServoLayoutNode::from_layout_js)
.map(Self::new)
}
}
impl<'ln> ServoThreadSafeLayoutNode<'ln> {
/// Creates a new `ServoThreadSafeLayoutNode` from the given `ServoLayoutNode`.
pub fn new(node: ServoLayoutNode<'ln>) -> Self {
ServoThreadSafeLayoutNode {
node: node.clone(),
pseudo: PseudoElementType::Normal,
}
}
/// Returns the interior of this node as a `LayoutDom`. This is highly unsafe for layout to
/// call and as such is marked `unsafe`.
unsafe fn get_jsmanaged(&self) -> LayoutDom<'ln, Node> {
self.node.get_jsmanaged()
}
}
impl<'ln> NodeInfo for ServoThreadSafeLayoutNode<'ln> {
fn is_element(&self) -> bool {
self.node.is_element()
}
fn is_text_node(&self) -> bool {
self.node.is_text_node()
}
}
impl<'ln> ThreadSafeLayoutNode<'ln> for ServoThreadSafeLayoutNode<'ln> {
type ConcreteNode = ServoLayoutNode<'ln>;
type ConcreteThreadSafeLayoutElement = ServoThreadSafeLayoutElement<'ln>;
type ConcreteElement = ServoLayoutElement<'ln>;
type ChildrenIterator = ThreadSafeLayoutNodeChildrenIterator<Self>;
fn opaque(&self) -> OpaqueNode {
unsafe { self.get_jsmanaged().opaque() }
}
fn type_id(&self) -> Option<LayoutNodeType> {
if self.pseudo == PseudoElementType::Normal {
Some(self.node.type_id())
} else {
None
}
}
fn parent_style(&self) -> Arc<ComputedValues> {
let parent = self.node.parent_node().unwrap().as_element().unwrap();
let parent_data = parent.borrow_data().unwrap();
parent_data.styles.primary().clone()
}
fn debug_id(self) -> usize {
self.node.debug_id()
}
fn children(&self) -> LayoutIterator<Self::ChildrenIterator> {
if let Some(shadow) = self.node.as_element().and_then(|e| e.shadow_root()) {
return LayoutIterator(ThreadSafeLayoutNodeChildrenIterator::new(
shadow.as_node().to_threadsafe(),
));
}
LayoutIterator(ThreadSafeLayoutNodeChildrenIterator::new(*self))
}
fn as_element(&self) -> Option<ServoThreadSafeLayoutElement<'ln>> {
self.node
.as_element()
.map(|el| ServoThreadSafeLayoutElement {
element: el,
pseudo: self.pseudo,
})
}
fn get_style_and_opaque_layout_data(self) -> Option<&'ln StyleAndOpaqueLayoutData> {
self.node.get_style_and_opaque_layout_data()
}
fn is_ignorable_whitespace(&self, context: &SharedStyleContext) -> bool {
unsafe {
let text: LayoutDom<Text> = match self.get_jsmanaged().downcast() {
Some(text) => text,
None => return false,
};
if !is_whitespace(text.upcast().data_for_layout()) {
return false;
}
// NB: See the rules for `white-space` here:
//
// http://www.w3.org/TR/CSS21/text.html#propdef-white-space
//
// If you implement other values for this property, you will almost certainly
// want to update this check.
!self
.style(context)
.get_inherited_text()
.white_space
.preserve_newlines()
}
}
unsafe fn unsafe_get(self) -> Self::ConcreteNode {
self.node
}
fn node_text_content(self) -> Cow<'ln, str> {
unsafe { self.get_jsmanaged().text_content() }
}
fn selection(&self) -> Option<Range<ByteIndex>> {
let this = unsafe { self.get_jsmanaged() };
this.selection().map(|range| {
Range::new(
ByteIndex(range.start as isize),
ByteIndex(range.len() as isize),
)
})
}
fn image_url(&self) -> Option<ServoUrl> {
let this = unsafe { self.get_jsmanaged() };
this.image_url()
}
fn image_density(&self) -> Option<f64> {
let this = unsafe { self.get_jsmanaged() };
this.image_density()
}
fn image_data(&self) -> Option<(Option<StdArc<Image>>, Option<ImageMetadata>)> {
let this = unsafe { self.get_jsmanaged() };
this.image_data()
}
fn canvas_data(&self) -> Option<HTMLCanvasData> {
let this = unsafe { self.get_jsmanaged() };
this.canvas_data()
}
fn media_data(&self) -> Option<HTMLMediaData> {
let this = unsafe { self.get_jsmanaged() };
this.media_data()
}
fn svg_data(&self) -> Option<SVGSVGData> {
let this = unsafe { self.get_jsmanaged() };
this.svg_data()
}
// Can return None if the iframe has no nested browsing context
fn iframe_browsing_context_id(&self) -> Option<BrowsingContextId> {
let this = unsafe { self.get_jsmanaged() };
this.iframe_browsing_context_id()
}
// Can return None if the iframe has no nested browsing context
fn iframe_pipeline_id(&self) -> Option<PipelineId> {
let this = unsafe { self.get_jsmanaged() };
this.iframe_pipeline_id()
}
fn get_colspan(&self) -> u32 {
unsafe {
self.get_jsmanaged()
.downcast::<Element>()
.unwrap()
.get_colspan()
}
}
fn get_rowspan(&self) -> u32 {
unsafe {
self.get_jsmanaged()
.downcast::<Element>()
.unwrap()
.get_rowspan()
}
}
}
pub struct ThreadSafeLayoutNodeChildrenIterator<ConcreteNode> {
current_node: Option<ConcreteNode>,
parent_node: ConcreteNode,
}
impl<'dom, ConcreteNode> ThreadSafeLayoutNodeChildrenIterator<ConcreteNode>
where
ConcreteNode: DangerousThreadSafeLayoutNode<'dom>,
{
pub fn new(parent: ConcreteNode) -> Self {
let first_child: Option<ConcreteNode> = match parent.get_pseudo_element_type() {
PseudoElementType::Normal => parent
.get_before_pseudo()
.or_else(|| parent.get_details_summary_pseudo())
.or_else(|| unsafe { parent.dangerous_first_child() }),
PseudoElementType::DetailsContent | PseudoElementType::DetailsSummary => unsafe {
parent.dangerous_first_child()
},
_ => None,
};
ThreadSafeLayoutNodeChildrenIterator {
current_node: first_child,
parent_node: parent,
}
}
}
impl<'dom, ConcreteNode> Iterator for ThreadSafeLayoutNodeChildrenIterator<ConcreteNode>
where
ConcreteNode: DangerousThreadSafeLayoutNode<'dom>,
{
type Item = ConcreteNode;
fn next(&mut self) -> Option<ConcreteNode> {
use selectors::Element;
match self.parent_node.get_pseudo_element_type() {
PseudoElementType::Before | PseudoElementType::After => None,
PseudoElementType::DetailsSummary => {
let mut current_node = self.current_node.clone();
loop {
let next_node = if let Some(ref node) = current_node {
if let Some(element) = node.as_element() {
if element.has_local_name(&local_name!("summary")) &&
element.has_namespace(&ns!(html))
{
self.current_node = None;
return Some(node.clone());
}
}
unsafe { node.dangerous_next_sibling() }
} else {
self.current_node = None;
return None;
};
current_node = next_node;
}
},
PseudoElementType::DetailsContent => {
let node = self.current_node.clone();
let node = node.and_then(|node| {
if node.is_element() &&
node.as_element()
.unwrap()
.has_local_name(&local_name!("summary")) &&
node.as_element().unwrap().has_namespace(&ns!(html))
{
unsafe { node.dangerous_next_sibling() }
} else {
Some(node)
}
});
self.current_node = node.and_then(|node| unsafe { node.dangerous_next_sibling() });
node
},
PseudoElementType::Normal => {
let node = self.current_node.clone();
if let Some(ref node) = node {
self.current_node = match node.get_pseudo_element_type() {
PseudoElementType::Before => self
.parent_node
.get_details_summary_pseudo()
.or_else(|| unsafe { self.parent_node.dangerous_first_child() })
.or_else(|| self.parent_node.get_after_pseudo()),
PseudoElementType::Normal => unsafe { node.dangerous_next_sibling() }
.or_else(|| self.parent_node.get_after_pseudo()),
PseudoElementType::DetailsSummary => {
self.parent_node.get_details_content_pseudo()
},
PseudoElementType::DetailsContent => self.parent_node.get_after_pseudo(),
PseudoElementType::After => None,
};
}
node
},
}
}
}
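// Editorial note (not in the original): for a `Normal` parent this iterator yields
// ::before, then the <details> summary/content pseudos when present, then the real
// DOM children, and finally ::after, per the state machine above.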
/// A wrapper around elements that ensures layout can only
/// ever access safe properties and cannot race on elements.
#[derive(Clone, Copy, Debug)]
pub struct ServoThreadSafeLayoutElement<'le> {
element: ServoLayoutElement<'le>,
/// The pseudo-element type, with (optionally)
/// a specified display value to override the stylesheet.
pseudo: PseudoElementType,
}
impl<'le> ThreadSafeLayoutElement<'le> for ServoThreadSafeLayoutElement<'le> {
type ConcreteThreadSafeLayoutNode = ServoThreadSafeLayoutNode<'le>;
type ConcreteElement = ServoLayoutElement<'le>;
fn as_node(&self) -> ServoThreadSafeLayoutNode<'le> {
ServoThreadSafeLayoutNode {
node: self.element.as_node(),
pseudo: self.pseudo.clone(),
}
}
fn get_pseudo_element_type(&self) -> PseudoElementType {
self.pseudo
}
fn with_pseudo(&self, pseudo: PseudoElementType) -> Self {
ServoThreadSafeLayoutElement {
element: self.element.clone(),
pseudo,
}
}
fn type_id(&self) -> Option<LayoutNodeType> {
self.as_node().type_id()
}
unsafe fn unsafe_get(self) -> ServoLayoutElement<'le> {
self.element
}
fn get_attr_enum(&self, namespace: &Namespace, name: &LocalName) -> Option<&AttrValue> {
self.element.get_attr_enum(namespace, name)
}
fn get_attr<'a>(&'a self, namespace: &Namespace, name: &LocalName) -> Option<&'a str> {
self.element.get_attr(namespace, name)
}
fn style_data(&self) -> AtomicRef<ElementData> {
match self.element.borrow_data() {
Some(data) => data,
None => panic!("could not find styles for <{}>", self.element.local_name()),
}
}
fn is_shadow_host(&self) -> bool {
self.element.shadow_root().is_some()
}
}
/// This implementation of `::selectors::Element` is used for implementing lazy
/// pseudo-elements.
///
/// Lazy pseudo-elements in Servo only allow selectors using safe properties,
/// i.e., local_name, attributes, so they can only be used for **private**
/// pseudo-elements (like `::-servo-details-content`).
///
/// Probably a few more of these functions can be implemented (like `has_class`, etc.),
/// but they have no use right now.
///
/// Note that the element implementation is needed only for selector matching,
/// not for inheritance (styles are inherited appropriately).
impl<'le> ::selectors::Element for ServoThreadSafeLayoutElement<'le> {
type Impl = SelectorImpl;
fn opaque(&self) -> ::selectors::OpaqueElement {
::selectors::OpaqueElement::new(unsafe { &*(self.as_node().opaque().0 as *const ()) })
}
fn is_pseudo_element(&self) -> bool {
false
}
fn parent_element(&self) -> Option<Self> {
warn!("ServoThreadSafeLayoutElement::parent_element called");
None
}
fn parent_node_is_shadow_root(&self) -> bool {
false
}
fn containing_shadow_host(&self) -> Option<Self> {
None
}
// Skips non-element nodes
fn prev_sibling_element(&self) -> Option<Self> {
warn!("ServoThreadSafeLayoutElement::prev_sibling_element called");
None
}
// Skips non-element nodes
fn next_sibling_element(&self) -> Option<Self> {
warn!("ServoThreadSafeLayoutElement::next_sibling_element called");
None
}
fn is_html_slot_element(&self) -> bool {
self.element.is_html_slot_element()
}
fn is_html_element_in_html_document(&self) -> bool {
debug!("ServoThreadSafeLayoutElement::is_html_element_in_html_document called");
true
}
#[inline]
fn has_local_name(&self, name: &LocalName) -> bool {
self.element.local_name() == name
}
#[inline]
fn has_namespace(&self, ns: &Namespace) -> bool {
self.element.namespace() == ns
}
#[inline]
fn is_same_type(&self, other: &Self) -> bool {
self.element.local_name() == other.element.local_name() &&
self.element.namespace() == other.element.namespace()
}
fn match_pseudo_element(
&self,
_pseudo: &PseudoElement,
_context: &mut MatchingContext<Self::Impl>,
) -> bool {
false
}
fn attr_matches(
&self,
ns: &NamespaceConstraint<&Namespace>,
local_name: &LocalName,
operation: &AttrSelectorOperation<&String>,
) -> bool {
match *ns {
NamespaceConstraint::Specific(ref ns) => self
.get_attr_enum(ns, local_name)
.map_or(false, |value| value.eval_selector(operation)),
NamespaceConstraint::Any => {
let values = self.element.element.get_attr_vals_for_layout(local_name);
values.iter().any(|v| v.eval_selector(operation))
},
}
}
fn match_non_ts_pseudo_class<F>(
&self,
_: &NonTSPseudoClass,
_: &mut MatchingContext<Self::Impl>,
_: &mut F,
) -> bool
where
F: FnMut(&Self, ElementSelectorFlags),
{
// NB: This could maybe be implemented
warn!("ServoThreadSafeLayoutElement::match_non_ts_pseudo_class called");
false
}
fn is_link(&self) -> bool {
warn!("ServoThreadSafeLayoutElement::is_link called");
false
}
fn has_id(&self, _id: &Atom, _case_sensitivity: CaseSensitivity) -> bool {
debug!("ServoThreadSafeLayoutElement::has_id called");
false
}
#[inline]
fn is_part(&self, _name: &Atom) -> bool {
debug!("ServoThreadSafeLayoutElement::is_part called");
false
}
fn imported_part(&self, _: &Atom) -> Option<Atom> {
debug!("ServoThreadSafeLayoutElement::imported_part called");
None
}
fn has_class(&self, _name: &Atom, _case_sensitivity: CaseSensitivity) -> bool {
debug!("ServoThreadSafeLayoutElement::has_class called");
false
}
fn is_empty(&self) -> bool {
warn!("ServoThreadSafeLayoutElement::is_empty called");
false
}
fn is_root(&self) -> bool {
warn!("ServoThreadSafeLayoutElement::is_root called");
false
}
}<|fim▁end|> |
fn match_element_lang(
&self, |
<|file_name|>common.module.ts<|end_file_name|><|fim▁begin|>import {NgModule} from "@angular/core";
import {MatButtonModule} from "@angular/material/button";
import {MatIconModule} from "@angular/material/icon";
import {MatListModule} from "@angular/material/list";
import {MatMenuModule} from "@angular/material/menu";
import {MatProgressSpinnerModule} from "@angular/material/progress-spinner";
import {BrowserModule} from "@angular/platform-browser";
import {CovalentLoadingModule, CovalentMenuModule, CovalentNotificationsModule} from "@covalent/core";
import {KyloServicesModule} from "../services/services.module";
import {NotificationMenuComponent} from "./notifications/notification-menu.component";
@NgModule({
declarations: [
NotificationMenuComponent<|fim▁hole|> ],
imports: [
BrowserModule,
CovalentLoadingModule,
CovalentMenuModule,
CovalentNotificationsModule,
KyloServicesModule,
MatButtonModule,
MatIconModule,
MatListModule,
MatMenuModule,
MatProgressSpinnerModule
]
})
export class KyloCommonModule {
}<|fim▁end|> | ],
entryComponents: [
NotificationMenuComponent |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>__all__ = []<|fim▁hole|>import inspect
# http://stackoverflow.com/questions/22209564/python-qualified-import-all-in-package
for loader, name, is_pkg in pkgutil.walk_packages(__path__):
module = loader.find_module(name).load_module(name)
for name, value in inspect.getmembers(module):
if name.startswith('__'):
continue
globals()[name] = value
__all__.append(name)<|fim▁end|> |
import pkgutil |
<|file_name|>ca.js<|end_file_name|><|fim▁begin|>FD40.ready(function($) {
var jQuery = $;// Catalan
jQuery.timeago.settings.strings = {
prefixAgo: "fa",
prefixFromNow: "d'aqui a",
suffixAgo: null,
suffixFromNow: null,
seconds: "menys d'1 minut",
minute: "1 minut",
minutes: "uns %d minuts",
hour: "1 hora",
hours: "unes %d hores",
day: "1 dia",
days: "%d dies",
month: "aproximadament un mes",
months: "%d mesos",
year: "aproximadament un any",<|fim▁hole|><|fim▁end|> | years: "%d anys"
};
}); |
<|file_name|>utils.ts<|end_file_name|><|fim▁begin|>import {Color, DeliveryData, DialogAlignment, FclElements, Position, StationData, TableMode} from './datatypes';
import {DialogPosition, MdDialog, MdDialogRef, MdMenuTrigger} from '@angular/material';
import * as ol from 'openlayers';
import {DialogAlertComponent, DialogAlertData} from '../dialog/dialog-alert/dialog-alert.component';
import {Constants} from './constants';
import {ElementRef} from '@angular/core';
export class Utils {
private static CY_TO_OL_FACTOR = 10000;
static latLonToPosition(lat: number, lon: number, zoom: number): Position {
const p = ol.proj.fromLonLat([lon, lat]);
return {
x: p[0] / Utils.CY_TO_OL_FACTOR * zoom,
y: -p[1] / Utils.CY_TO_OL_FACTOR * zoom
};
}
static panZoomToView(pan: Position, zoom: number, width: number, height: number): ol.View {
return new ol.View({
center: [(width / 2 - pan.x) / zoom * Utils.CY_TO_OL_FACTOR, -(height / 2 - pan.y) / zoom * Utils.CY_TO_OL_FACTOR],
resolution: Utils.CY_TO_OL_FACTOR / zoom
});
}
static getDialogPosition(alignment: DialogAlignment): DialogPosition {
switch (alignment) {
case DialogAlignment.LEFT:
return {left: '0px'};
case DialogAlignment.CENTER:
return {};
case DialogAlignment.RIGHT:
return {right: '0px'};
}
return null;
}
static getTableElements(mode: TableMode, data: FclElements): (StationData | DeliveryData)[] {
if (mode === TableMode.STATIONS) {
return data.stations;
} else if (mode === TableMode.DELIVERIES) {
return data.deliveries;
}
return null;
}
static getTableProperties(mode: TableMode, stationColumns: string[], deliveryColumns: string[]): string[] {
if (mode === TableMode.STATIONS) {
return stationColumns;
} else if (mode === TableMode.DELIVERIES) {
return deliveryColumns;
}
return null;
}
static getAllTableProperties(mode: TableMode, data: FclElements): string[] {
let properties: string[];
if (mode === TableMode.STATIONS) {
properties = Constants.STATION_PROPERTIES.toArray();
} else if (mode === TableMode.DELIVERIES) {
properties = Constants.DELIVERY_PROPERTIES.toArray();
}
const additionalProps: Set<string> = new Set();
for (const element of Utils.getTableElements(mode, data)) {
for (const p of element.properties) {
additionalProps.add(p.name);
}
}
return properties.filter(prop => Constants.PROPERTIES.has(prop)).concat(Array.from(additionalProps));
}
static openSaveDialog(url: string, fileName: string) {
const a = document.createElement('a');
a.style.display = 'none';
a.target = '_blank';
a.href = url;
a.download = fileName;
document.body.appendChild(a);
a.click();
a.remove();
}
static showErrorMessage(dialogService: MdDialog, message: string): MdDialogRef<any> {
const dialogData: DialogAlertData = {
title: 'Error',
message: message
};
return dialogService.open(DialogAlertComponent, {role: 'alertdialog', data: dialogData});
}
static openMenu(trigger: MdMenuTrigger, triggerElement: ElementRef, pos: Position) {
const style = (<HTMLElement>triggerElement.nativeElement).style;
style.position = 'fixed';
style.left = pos.x + 'px';
style.top = pos.y + 'px';
trigger.openMenu();
}
static colorToCss(color: Color): string {
return 'rgb(' + color.r + ', ' + color.g + ', ' + color.b + ')';
}
static mixColors(color1: Color, color2: Color): Color {
return {
r: Math.round((color1.r + color2.r) / 2),
g: Math.round((color1.g + color2.g) / 2),
b: Math.round((color1.b + color2.b) / 2)
};
}
static getAllCombinations(values: any[]): any[][] {
const n = Math.pow(2, values.length);
const combinations = [];
for (let i = 1; i < n; i++) {
const bits = i.toString(2).split('').reverse().join('');
const combination = [];
for (let j = 0; j < values.length; j++) {
if (bits[j] === '1') {
combination.push(values[j]);
}
}
combinations.push(combination);
}
combinations.sort((c1, c2) => c1.length - c2.length);
return combinations;
}
static getCenter(positions: Position[]): Position {
let xSum = 0;
let ySum = 0;
for (const pos of positions) {
xSum += pos.x;
ySum += pos.y;
}
return {
x: xSum / positions.length,
y: ySum / positions.length
};
}
static sum(position1: Position, position2: Position): Position {
return {
x: position1.x + position2.x,
y: position1.y + position2.y
};
}
static difference(position1: Position, position2: Position): Position {
return {
x: position1.x - position2.x,
y: position1.y - position2.y
};
}
static stringToDate(dateString: string): Date {
if (dateString != null) {
const date = new Date(dateString);
if (isNaN(date.getTime())) {
throw new SyntaxError('Invalid date: ' + dateString);
} else {
return date;
}
} else {
return null;
}
}
static dateToString(date: Date): string {
if (date != null) {
const isoString = date.toISOString();
return isoString.substring(0, isoString.indexOf('T'));
} else {
return null;
}<|fim▁hole|> }
}<|fim▁end|> | |
<|file_name|>modules.d.ts<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2017-2019 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|> * See the License for the specific language governing permissions and
* limitations under the License.
*/
declare module '*.less';
declare function la(textToLocalize: string): string;<|fim▁end|> | |
<|file_name|>Cert_5_6_09_NetworkDataForwarding.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import time
import unittest
import node
LEADER = 1
ROUTER1 = 2
ROUTER2 = 3
ED = 4
SED = 5
MTDS = [ED, SED]
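# Topology used by this test (derived from the whitelists in setUp):
#
#   LEADER ---- ROUTER2
#      |
#   ROUTER1 ---- ED
#      |
#     SED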
class Cert_5_6_9_NetworkDataForwarding(unittest.TestCase):
def setUp(self):
self.nodes = {}
for i in range(1,6):
self.nodes[i] = node.Node(i, (i in MTDS))
self.nodes[LEADER].set_panid(0xface)
self.nodes[LEADER].set_mode('rsdn')
self.nodes[LEADER].add_whitelist(self.nodes[ROUTER1].get_addr64())
self.nodes[LEADER].add_whitelist(self.nodes[ROUTER2].get_addr64())
self.nodes[LEADER].enable_whitelist()
self.nodes[ROUTER1].set_panid(0xface)
self.nodes[ROUTER1].set_mode('rsdn')
self.nodes[ROUTER1].add_whitelist(self.nodes[LEADER].get_addr64())
self.nodes[ROUTER1].add_whitelist(self.nodes[ED].get_addr64())
self.nodes[ROUTER1].add_whitelist(self.nodes[SED].get_addr64())
self.nodes[ROUTER1].enable_whitelist()
self.nodes[ROUTER1].set_router_selection_jitter(1)
self.nodes[ROUTER2].set_panid(0xface)
self.nodes[ROUTER2].set_mode('rsdn')
self.nodes[ROUTER2].add_whitelist(self.nodes[LEADER].get_addr64())
self.nodes[ROUTER2].enable_whitelist()
self.nodes[ROUTER2].set_router_selection_jitter(1)
self.nodes[ED].set_panid(0xface)
self.nodes[ED].set_mode('rsn')
self.nodes[ED].add_whitelist(self.nodes[ROUTER1].get_addr64())
self.nodes[ED].enable_whitelist()
self.nodes[SED].set_panid(0xface)
self.nodes[SED].set_mode('s')
self.nodes[SED].add_whitelist(self.nodes[ROUTER1].get_addr64())
self.nodes[SED].enable_whitelist()
self.nodes[SED].set_timeout(3)
def tearDown(self):
for node in list(self.nodes.values()):
node.stop()
del self.nodes
def test(self):
self.nodes[LEADER].start()
self.nodes[LEADER].set_state('leader')
self.assertEqual(self.nodes[LEADER].get_state(), 'leader')
self.nodes[ROUTER1].start()
time.sleep(5)
self.assertEqual(self.nodes[ROUTER1].get_state(), 'router')
self.nodes[ROUTER2].start()
time.sleep(5)
self.assertEqual(self.nodes[ROUTER2].get_state(), 'router')
self.nodes[ED].start()
time.sleep(5)
self.assertEqual(self.nodes[ED].get_state(), 'child')<|fim▁hole|>
self.nodes[LEADER].add_prefix('2001:2:0:1::/64', 'paros', 'med')
self.nodes[LEADER].add_route('2001:2:0:2::/64', 'med')
self.nodes[LEADER].register_netdata()
time.sleep(10)
self.nodes[ROUTER2].add_prefix('2001:2:0:1::/64', 'paros', 'low')
self.nodes[ROUTER2].add_route('2001:2:0:2::/64', 'high')
self.nodes[ROUTER2].register_netdata()
time.sleep(10)
self.assertFalse(self.nodes[SED].ping('2001:2:0:2::1'))
self.assertFalse(self.nodes[SED].ping('2007::1'))
self.nodes[ROUTER2].remove_prefix('2001:2:0:1::/64')
self.nodes[ROUTER2].add_prefix('2001:2:0:1::/64', 'paros', 'high')
self.nodes[ROUTER2].register_netdata()
time.sleep(10)
self.assertFalse(self.nodes[SED].ping('2007::1'))
self.nodes[ROUTER2].remove_prefix('2001:2:0:1::/64')
self.nodes[ROUTER2].add_prefix('2001:2:0:1::/64', 'paros', 'med')
self.nodes[ROUTER2].register_netdata()
time.sleep(10)
self.assertFalse(self.nodes[SED].ping('2007::1'))
if __name__ == '__main__':
unittest.main()<|fim▁end|> |
self.nodes[SED].start()
time.sleep(5)
self.assertEqual(self.nodes[SED].get_state(), 'child') |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from django.test import TestCase
from .models import Person
class SaveDeleteHookTests(TestCase):
def test_basic(self):
p = Person(first_name="John", last_name="Smith")
self.assertEqual(p.data, [])
p.save()
self.assertEqual(p.data, [
"Before save",
"After save",
])
self.assertQuerysetEqual(
Person.objects.all(), [
"John Smith",
],
unicode
)
p.delete()
self.assertEqual(p.data, [<|fim▁hole|> "Before deletion",
"After deletion",
])
self.assertQuerysetEqual(Person.objects.all(), [])<|fim▁end|> | "Before save",
"After save", |
<|file_name|>serialize.rs<|end_file_name|><|fim▁begin|>use std::error::Error;
use std::fmt;
use std::io::prelude::*;
use std::io::{self, Cursor};
use rustc_serialize::{Encodable, Encoder};
use byteorder::{WriteBytesExt, BigEndian};
use super::marker as m;
use super::STRUCTURE_PREFIX;
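/// Serializes any `Encodable` value into a packstream byte buffer.
///
/// Illustrative usage (the exact bytes follow the marker rules below):
///
/// ```ignore
/// let bytes = encode(&42u8).unwrap(); // == vec![0x2A], a tiny int
/// ```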
pub fn encode<T: Encodable>(object: &T) -> EncodeResult<Vec<u8>> {
let mut buf = Cursor::new(Vec::new());
{
let mut encoder = PackstreamEncoder::new(&mut buf);
try!(object.encode(&mut encoder));
}
Ok(buf.into_inner())
}
#[derive(Debug)]
pub enum EncoderError {
IoError(io::Error),
InvalidStructureLength,
}
impl Error for EncoderError {
fn description(&self) -> &str { "encoder error" }
}
impl fmt::Display for EncoderError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self, f)
}
}
impl From<io::Error> for EncoderError {
fn from(error: io::Error) -> Self {
EncoderError::IoError(error)
}
}
pub type EncodeResult<T> = Result<T, EncoderError>;
struct PackstreamEncoder<'a, W: Write + 'a> {
writer: &'a mut W,
}
impl<'a, W: Write> PackstreamEncoder<'a, W> {
pub fn new(writer: &'a mut W) -> Self {
PackstreamEncoder {
writer: writer,
}
}
}
impl<'a, W: Write> Encoder for PackstreamEncoder<'a, W> {
type Error = EncoderError;
// Primitive types:
fn emit_nil(&mut self) -> Result<(), Self::Error> {
try!(self.writer.write_u8(m::NULL));
Ok(())
}
fn emit_usize(&mut self, v: usize) -> Result<(), Self::Error> {
self.emit_u64(v as u64)
}
fn emit_u64(&mut self, v: u64) -> Result<(), Self::Error> {
if v >= m::RANGE_POS_INT_64.0 as u64 && v <= m::RANGE_POS_INT_64.1 as u64 {
try!(self.writer.write_u8(m::INT_64));
try!(self.writer.write_u64::<BigEndian>(v));
} else if v >= m::RANGE_POS_INT_32.0 as u64 && v <= m::RANGE_POS_INT_32.1 as u64 {
try!(self.writer.write_u8(m::INT_32));
try!(self.writer.write_u32::<BigEndian>(v as u32));
} else if v >= m::RANGE_POS_INT_16.0 as u64 && v <= m::RANGE_POS_INT_16.1 as u64 {
try!(self.writer.write_u8(m::INT_16));
try!(self.writer.write_u16::<BigEndian>(v as u16));
} else if v <= m::RANGE_TINY_INT.1 as u64 {
try!(self.writer.write_u8(v as u8));
}
Ok(())
}
fn emit_u32(&mut self, v: u32) -> Result<(), Self::Error> {
self.emit_u64(v as u64)
}
fn emit_u16(&mut self, v: u16) -> Result<(), Self::Error> {
self.emit_u64(v as u64)
}
fn emit_u8(&mut self, v: u8) -> Result<(), Self::Error> {
self.emit_u64(v as u64)
}
fn emit_isize(&mut self, v: isize) -> Result<(), Self::Error> {
self.emit_i64(v as i64)
}
fn emit_i64(&mut self, v: i64) -> Result<(), Self::Error> {
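        // Choose the smallest encoding that fits `v`: 64/32/16-bit markers,
        // the single-byte tiny-int range, or INT_8 for small negatives.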
if (v >= m::RANGE_POS_INT_64.0 && v <= m::RANGE_POS_INT_64.1)
|| (v >= m::RANGE_NEG_INT_64.0 && v <= m::RANGE_NEG_INT_64.1)
{
try!(self.writer.write_u8(m::INT_64));
try!(self.writer.write_i64::<BigEndian>(v));
} else if (v >= m::RANGE_POS_INT_32.0 && v <= m::RANGE_POS_INT_32.1)
|| (v >= m::RANGE_NEG_INT_32.0 && v <= m::RANGE_NEG_INT_32.1)
{
try!(self.writer.write_u8(m::INT_32));
try!(self.writer.write_i32::<BigEndian>(v as i32));
} else if (v >= m::RANGE_POS_INT_16.0 && v <= m::RANGE_POS_INT_16.1)
|| (v >= m::RANGE_NEG_INT_16.0 && v <= m::RANGE_NEG_INT_16.1)
{
try!(self.writer.write_u8(m::INT_16));
try!(self.writer.write_i16::<BigEndian>(v as i16));
} else if v >= m::RANGE_TINY_INT.0 && v <= m::RANGE_TINY_INT.1 {
try!(self.writer.write_i8(v as i8));
} else if v >= m::RANGE_NEG_INT_8.0 && v <= m::RANGE_NEG_INT_8.1 {
try!(self.writer.write_u8(m::INT_8));
try!(self.writer.write_i8(v as i8));
}
Ok(())
}
fn emit_i32(&mut self, v: i32) -> Result<(), Self::Error> {
self.emit_i64(v as i64)
}
fn emit_i16(&mut self, v: i16) -> Result<(), Self::Error> {
self.emit_i64(v as i64)
}
fn emit_i8(&mut self, v: i8) -> Result<(), Self::Error> {
self.emit_i64(v as i64)
}
fn emit_bool(&mut self, v: bool) -> Result<(), Self::Error> {
if v {
try!(self.writer.write_u8(m::TRUE));
} else {
try!(self.writer.write_u8(m::FALSE));
}
Ok(())
}
fn emit_f64(&mut self, v: f64) -> Result<(), Self::Error> {
try!(self.writer.write_u8(m::FLOAT));
try!(self.writer.write_f64::<BigEndian>(v));
Ok(())
}
fn emit_f32(&mut self, v: f32) -> Result<(), Self::Error> {
self.emit_f64(v as f64)
}
fn emit_char(&mut self, v: char) -> Result<(), Self::Error> {
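        // Emits the char as a one-byte tiny string; this assumes an ASCII
        // (single-byte) code point, as exercised by the tests below.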
try!(self.writer.write_u8(m::TINY_STRING_NIBBLE | 0x01));
try!(self.writer.write_u8(v as u8));
Ok(())
}
fn emit_str(&mut self, v: &str) -> Result<(), Self::Error> {
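        // Marker choice depends on the UTF-8 byte length: a tiny-string
        // nibble, then 8-, 16- or 32-bit length-prefixed string markers.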
let bytes = v.as_bytes();
let size = bytes.len();
if size <= m::USE_TINY_STRING {
try!(self.writer.write_u8(m::TINY_STRING_NIBBLE | size as u8));
} else if size <= m::USE_STRING_8 {
try!(self.writer.write_u8(m::STRING_8));
try!(self.writer.write_u8(size as u8));
} else if size <= m::USE_STRING_16 {
try!(self.writer.write_u8(m::STRING_16));
try!(self.writer.write_u16::<BigEndian>(size as u16));
} else if size <= m::USE_STRING_32 {
try!(self.writer.write_u8(m::STRING_32));
try!(self.writer.write_u32::<BigEndian>(size as u32));
}
try!(self.writer.write_all(bytes));
Ok(())
}
// Compound types:
fn emit_enum<F>(&mut self, _: &str, f: F) -> Result<(), Self::Error>
where F: FnOnce(&mut Self) -> Result<(), Self::Error> {
f(self)
}
fn emit_enum_variant<F>(&mut self, v_name: &str,
_: usize,
len: usize,
f: F) -> Result<(), Self::Error>
where F: FnOnce(&mut Self) -> Result<(), Self::Error> {
if len == 0 {
self.emit_str(v_name)
} else {
try!(self.writer.write_u8(m::TINY_MAP_NIBBLE | 0x01));
try!(self.emit_str(v_name));
self.emit_seq(len, f)
}
}
fn emit_enum_variant_arg<F>(&mut self, _: usize, f: F)
-> Result<(), Self::Error>
where F: FnOnce(&mut Self) -> Result<(), Self::Error> {
f(self)
}
fn emit_enum_struct_variant<F>(&mut self, v_name: &str,
_: usize,
len: usize,
f: F) -> Result<(), Self::Error>
where F: FnOnce(&mut Self) -> Result<(), Self::Error> {
if len == 0 {
self.emit_str(v_name)
} else {
self.emit_map(len, f)
}
}
fn emit_enum_struct_variant_field<F>(&mut self,
f_name: &str,
f_idx: usize,
f: F) -> Result<(), Self::Error>
where F: FnOnce(&mut Self) -> Result<(), Self::Error> {
try!(self.emit_str(f_name));
self.emit_map_elt_val(f_idx, f)
}
fn emit_struct<F>(&mut self, name: &str, len: usize, f: F)
-> Result<(), Self::Error>
where F: FnOnce(&mut Self) -> Result<(), Self::Error> {
if name.starts_with(STRUCTURE_PREFIX) {
debug_assert!(name.len() == STRUCTURE_PREFIX.len() + 1, "Invalid structure name: '{}'", name);
            // it is guaranteed that the name is not empty
let signature = *name.as_bytes().last().unwrap();
if len <= m::USE_TINY_STRUCT {
try!(self.writer.write_u8(m::TINY_STRUCT_NIBBLE | len as u8));
try!(self.writer.write_u8(signature));
} else if len <= m::USE_STRUCT_8 {
try!(self.writer.write_u8(m::STRUCT_8));
try!(self.writer.write_u8(signature));
try!(self.writer.write_u8(len as u8));
} else if len <= m::USE_STRUCT_16 {
try!(self.writer.write_u8(m::STRUCT_16));
try!(self.writer.write_u8(signature));
try!(self.writer.write_u16::<BigEndian>(len as u16));
} else {
return Err(EncoderError::InvalidStructureLength)
}
f(self)
} else {
self.emit_map(len, f)
}
}
fn emit_struct_field<F>(&mut self, f_name: &str, _: usize, f: F)
-> Result<(), Self::Error>
where F: FnOnce(&mut Self) -> Result<(), Self::Error> {
try!(self.emit_str(f_name));
f(self)
}
fn emit_tuple<F>(&mut self, len: usize, f: F) -> Result<(), Self::Error>
where F: FnOnce(&mut Self) -> Result<(), Self::Error> {
self.emit_seq(len, f)
}
fn emit_tuple_arg<F>(&mut self, idx: usize, f: F) -> Result<(), Self::Error>
where F: FnOnce(&mut Self) -> Result<(), Self::Error> {
self.emit_seq_elt(idx, f)
}
fn emit_tuple_struct<F>(&mut self, _: &str, len: usize, f: F)
-> Result<(), Self::Error>
where F: FnOnce(&mut Self) -> Result<(), Self::Error> {
self.emit_seq(len, f)
}
fn emit_tuple_struct_arg<F>(&mut self, f_idx: usize, f: F)
-> Result<(), Self::Error>
where F: FnOnce(&mut Self) -> Result<(), Self::Error> {
self.emit_seq_elt(f_idx, f)
}
// Specialized types:
fn emit_option<F>(&mut self, f: F) -> Result<(), Self::Error>
where F: FnOnce(&mut Self) -> Result<(), Self::Error> {
f(self)
}
fn emit_option_none(&mut self) -> Result<(), Self::Error> {
self.emit_nil()
}
fn emit_option_some<F>(&mut self, f: F) -> Result<(), Self::Error>
where F: FnOnce(&mut Self) -> Result<(), Self::Error> {
f(self)
}
fn emit_seq<F>(&mut self, len: usize, f: F) -> Result<(), Self::Error>
where F: FnOnce(&mut Self) -> Result<(), Self::Error> {
if len <= m::USE_TINY_LIST as usize {
try!(self.writer.write_u8(m::TINY_LIST_NIBBLE | len as u8));
} else if len <= m::USE_LIST_8 as usize {
try!(self.writer.write_u8(m::LIST_8));
try!(self.writer.write_u8(len as u8));
} else if len <= m::USE_LIST_16 as usize {
try!(self.writer.write_u8(m::LIST_16));
try!(self.writer.write_u16::<BigEndian>(len as u16));
} else if len <= m::USE_LIST_32 as usize {
try!(self.writer.write_u8(m::LIST_32));
try!(self.writer.write_u32::<BigEndian>(len as u32));
}
f(self)
}
fn emit_seq_elt<F>(&mut self, _: usize, f: F) -> Result<(), Self::Error>
where F: FnOnce(&mut Self) -> Result<(), Self::Error> {
f(self)
}
fn emit_map<F>(&mut self, len: usize, f: F) -> Result<(), Self::Error>
where F: FnOnce(&mut Self) -> Result<(), Self::Error> {
if len <= m::USE_TINY_MAP as usize {
try!(self.writer.write_u8(m::TINY_MAP_NIBBLE | len as u8));
} else if len <= m::USE_MAP_8 as usize {
try!(self.writer.write_u8(m::MAP_8));
try!(self.writer.write_u8(len as u8));
} else if len <= m::USE_MAP_16 as usize {
try!(self.writer.write_u8(m::MAP_16));
try!(self.writer.write_u16::<BigEndian>(len as u16));
} else if len <= m::USE_MAP_32 as usize {
try!(self.writer.write_u8(m::MAP_32));
try!(self.writer.write_u32::<BigEndian>(len as u32));
}
f(self)
}
fn emit_map_elt_key<F>(&mut self, _: usize, f: F) -> Result<(), Self::Error>
where F: FnOnce(&mut Self) -> Result<(), Self::Error> {
f(self)
}
fn emit_map_elt_val<F>(&mut self, _: usize, f: F) -> Result<(), Self::Error>
where F: FnOnce(&mut Self) -> Result<(), Self::Error> {
f(self)
}
}
#[cfg(test)]
mod tests {
use std::collections::BTreeMap;
use std::string::String;
use super::encode;
use ::v1::packstream::marker as m;
#[test]
fn serialize_nil() {
let input = ();
assert_eq!(vec![m::NULL], encode(&input).unwrap());
let input: Option<()> = None;
assert_eq!(vec![m::NULL], encode(&input).unwrap());
}
#[test]
fn serialize_bool() {
assert_eq!(vec![m::TRUE], encode(&true).unwrap());
assert_eq!(vec![m::FALSE], encode(&false).unwrap());
}
#[test]
fn serialize_int64_positive() {
let result = encode(&m::RANGE_POS_INT_64.1).unwrap();
let expected = vec![m::INT_64, 0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF];
assert_eq!(expected, result);
}
#[test]
fn serialize_int64_negative() {
let result = encode(&m::RANGE_NEG_INT_64.0).unwrap();
let expected = vec![m::INT_64, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00];
assert_eq!(expected, result);
}
#[test]
fn serialize_int32_positive() {
let result = encode(&m::RANGE_POS_INT_32.1).unwrap();
let expected = vec![m::INT_32, 0x7F, 0xFF, 0xFF, 0xFF];
assert_eq!(expected, result);
}
#[test]
fn serialize_int32_negative() {
let result = encode(&m::RANGE_NEG_INT_32.0).unwrap();
let expected = vec![m::INT_32, 0x80, 0x00, 0x00, 0x00];
assert_eq!(expected, result);
}
#[test]
fn serialize_int16_positive() {
let result = encode(&m::RANGE_POS_INT_16.1).unwrap();
let expected = vec![m::INT_16, 0x7F, 0xFF];
assert_eq!(expected, result);
}
#[test]
fn serialize_int16_negative() {
let result = encode(&m::RANGE_NEG_INT_16.0).unwrap();
let expected = vec![m::INT_16, 0x80, 0x00];
assert_eq!(expected, result);
}
#[test]
fn serialize_int8_min() {
let result = encode(&m::RANGE_NEG_INT_8.0).unwrap();
let expected = vec![m::INT_8, 0x80];
assert_eq!(expected, result);
}
#[test]
fn serialize_int8_max() {
let result = encode(&m::RANGE_NEG_INT_8.1).unwrap();
let expected = vec![m::INT_8, 0xEF];
assert_eq!(expected, result);
}
#[test]
fn serialize_tiny_int_min() {
let result = encode(&m::RANGE_TINY_INT.0).unwrap();
let expected = vec![0xF0];
assert_eq!(expected, result);
}
#[test]
fn serialize_tiny_int_max() {
let result = encode(&m::RANGE_TINY_INT.1).unwrap();
let expected = vec![0x7F];
assert_eq!(expected, result);
}
#[test]
fn serialize_float_positive() {
let result = encode(&1.1).unwrap();
let expected = vec![m::FLOAT, 0x3F, 0xF1, 0x99, 0x99, 0x99, 0x99, 0x99, 0x9A];
assert_eq!(expected, result);
}
#[test]
fn serialize_float_negative() {
let result = encode(&-1.1).unwrap();
let expected = vec![m::FLOAT, 0xBF, 0xF1, 0x99, 0x99, 0x99, 0x99, 0x99, 0x9A];
assert_eq!(expected, result);
}
#[test]
fn serialize_string32() {
let size = 70_000;
let input = (0..size).fold(String::new(), |mut acc, _| { acc.push('A'); acc });
let result = encode(&input).unwrap();
let expected = (0..size).fold(
vec![m::STRING_32, 0x00, 0x01, 0x11, 0x70],
|mut acc, _| { acc.push(b'A'); acc }
);<|fim▁hole|>
assert_eq!(expected, result);
}
#[test]
fn serialize_string16() {
let size = 5_000;
let input = (0..size).fold(String::new(), |mut acc, _| { acc.push('A'); acc });
let result = encode(&input).unwrap();
let expected = (0..size).fold(
vec![m::STRING_16, 0x13, 0x88],
|mut acc, _| { acc.push(b'A'); acc }
);
assert_eq!(expected, result);
}
#[test]
fn serialize_string8() {
let size = 200;
let input = (0..size).fold(String::new(), |mut acc, _| { acc.push('A'); acc });
let result = encode(&input).unwrap();
let expected = (0..size).fold(
vec![m::STRING_8, 0xC8],
|mut acc, _| { acc.push(b'A'); acc }
);
assert_eq!(expected, result);
}
#[test]
fn serialize_tiny_string() {
for marker in 0x80..0x8F {
let size = marker - m::TINY_STRING_NIBBLE;
let input = (0..size).fold(String::new(), |mut acc, _| { acc.push('A'); acc });
let result = encode(&input).unwrap();
let expected = (0..size).fold(
vec![marker],
|mut acc, _| { acc.push(b'A'); acc }
);
assert_eq!(expected, result);
}
}
#[test]
fn serialize_char() {
for c in b'A'..b'Z' {
let result: Vec<u8> = encode(&(c as char)).unwrap();
let expected = vec![0x81, c];
assert_eq!(expected, result);
}
}
#[test]
fn serialize_list32() {
let size = 70_000;
let input = vec![1; size];
let result = encode(&input).unwrap();
let expected = (0..size).fold(
vec![m::LIST_32, 0x00, 0x01, 0x11, 0x70],
|mut acc, _| { acc.push(0x01); acc }
);
assert_eq!(expected, result);
}
#[test]
fn serialize_list16() {
let size = 5_000;
let input = vec![1; size];
let result = encode(&input).unwrap();
let expected = (0..size).fold(
vec![m::LIST_16, 0x13, 0x88],
|mut acc, _| { acc.push(0x01); acc }
);
assert_eq!(expected, result);
}
#[test]
fn serialize_list8() {
let size = 200;
let input = vec![1; size];
let result = encode(&input).unwrap();
let expected = (0..size).fold(
vec![m::LIST_8, 0xC8],
|mut acc, _| { acc.push(0x01); acc }
);
assert_eq!(expected, result);
}
#[test]
fn serialize_tiny_list() {
for marker in 0x90..0x9F {
let size = (marker - m::TINY_LIST_NIBBLE) as usize;
let input = vec![1; size];
let result = encode(&input).unwrap();
let expected = (0..size).fold(
vec![marker],
|mut acc, _| { acc.push(0x01); acc }
);
assert_eq!(expected, result);
}
}
#[test]
fn serialize_list_of_string() {
let size = 3;
let input = vec!["abcdefghijklmnopqrstuvwxyz"; size];
let result = encode(&input).unwrap();
let expected = vec![m::TINY_LIST_NIBBLE + size as u8,
m::STRING_8, 0x1A, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66,
0x67, 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E,
0x6F, 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76,
0x77, 0x78, 0x79, 0x7A,
m::STRING_8, 0x1A, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66,
0x67, 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E,
0x6F, 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76,
0x77, 0x78, 0x79, 0x7A,
m::STRING_8, 0x1A, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66,
0x67, 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E,
0x6F, 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76,
0x77, 0x78, 0x79, 0x7A];
assert_eq!(expected, result);
}
#[test]
fn serialize_list_of_int() {
let size = 3;
let input = vec![32_000; size];
let result = encode(&input).unwrap();
let expected = vec![m::TINY_LIST_NIBBLE + size as u8,
m::INT_16, 0x7D, 0x00,
m::INT_16, 0x7D, 0x00,
m::INT_16, 0x7D, 0x00];
assert_eq!(expected, result);
}
#[test]
fn serialize_list_of_float() {
let size = 3;
let input = vec![1.1; size];
let result = encode(&input).unwrap();
let expected = vec![m::TINY_LIST_NIBBLE + size as u8,
m::FLOAT, 0x3F, 0xF1, 0x99, 0x99, 0x99, 0x99, 0x99, 0x9A,
m::FLOAT, 0x3F, 0xF1, 0x99, 0x99, 0x99, 0x99, 0x99, 0x9A,
m::FLOAT, 0x3F, 0xF1, 0x99, 0x99, 0x99, 0x99, 0x99, 0x9A];
assert_eq!(expected, result);
}
#[test]
fn serialize_list_of_bool() {
let size = 4;
let input = vec![true, false, true, false];
let result = encode(&input).unwrap();
let expected = vec![m::TINY_LIST_NIBBLE + size as u8,
m::TRUE, m::FALSE, m::TRUE, m::FALSE];
assert_eq!(expected, result);
}
#[test]
fn serialize_tuple() {
let size = 3;
let input = (1, 1.1, "A");
let result = encode(&input).unwrap();
let expected = vec![m::TINY_LIST_NIBBLE + size as u8,
0x01,
m::FLOAT, 0x3F, 0xF1, 0x99, 0x99, 0x99, 0x99, 0x99, 0x9A,
m::TINY_STRING_NIBBLE + 1, 0x41];
assert_eq!(expected, result);
}
#[test]
fn serialize_map32() {
let size = 70_000;
let input = (0..size).fold(
BTreeMap::<String, u32>::new(),
|mut acc, i| { acc.insert(format!("{:05}", i), 1); acc }
);
let result = encode(&input).unwrap();
let expected = (0..size).fold(
vec![m::MAP_32, 0x00, 0x01, 0x11, 0x70],
|mut acc, i| {
let b1 = 48 + ((i % 100000) / 10000) as u8;
let b2 = 48 + ((i % 10000) / 1000) as u8;
let b3 = 48 + ((i % 1000) / 100) as u8;
let b4 = 48 + ((i % 100) / 10) as u8;
let b5 = 48 + (i % 10) as u8;
acc.extend([0x85, b1, b2, b3, b4, b5, 0x01].iter());
acc
}
);
assert_eq!(expected, result);
}
#[test]
fn serialize_map16() {
let size = 5_000;
let input = (0..size).fold(
BTreeMap::<String, u32>::new(),
|mut acc, i| { acc.insert(format!("{:04}", i), 1); acc }
);
let result = encode(&input).unwrap();
let expected = (0..size).fold(
vec![m::MAP_16, 0x13, 0x88],
|mut acc, i| {
let b1 = 48 + ((i % 10000) / 1000) as u8;
let b2 = 48 + ((i % 1000) / 100) as u8;
let b3 = 48 + ((i % 100) / 10) as u8;
let b4 = 48 + (i % 10) as u8;
acc.extend([0x84, b1, b2, b3, b4, 0x01].iter());
acc
}
);
assert_eq!(expected, result);
}
#[test]
fn serialize_map8() {
let size = 200;
let input = (0..size).fold(
BTreeMap::<String, u32>::new(),
|mut acc, i| { acc.insert(format!("{:03}", i), 1); acc }
);
let result = encode(&input).unwrap();
let expected = (0..size).fold(
vec![m::MAP_8, 0xC8],
|mut acc, i| {
let b1 = 48 + ((i % 1000) / 100) as u8;
let b2 = 48 + ((i % 100) / 10) as u8;
let b3 = 48 + (i % 10) as u8;
acc.extend([0x83, b1, b2, b3, 0x01].iter());
acc
}
);
assert_eq!(expected, result);
}
#[test]
fn serialize_tiny_map() {
let size = 3;
let input = (0..size).fold(
BTreeMap::<String, u32>::new(),
|mut acc, i| { acc.insert(format!("{}", i), 1); acc }
);
let result = encode(&input).unwrap();
let expected = (0..size).fold(
vec![m::TINY_MAP_NIBBLE + size],
|mut acc, i| {
acc.extend([0x81, 0x30 + i].iter());
acc.push(0x01);
acc
}
);
assert_eq!(expected, result);
}
#[test]
fn serialize_map_of_string() {
let size = 3;
let input = {
let mut input: BTreeMap<&'static str, &'static str> = BTreeMap::new();
input.insert("A", "abcdefghijklmnopqrstuvwxyz");
input.insert("B", "abcdefghijklmnopqrstuvwxyz");
input.insert("C", "abcdefghijklmnopqrstuvwxyz");
input
};
let result = encode(&input).unwrap();
let expected = vec![m::TINY_MAP_NIBBLE + size,
0x81, 0x41,
m::STRING_8, 0x1A, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66,
0x67, 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E,
0x6F, 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76,
0x77, 0x78, 0x79, 0x7A,
0x81, 0x42,
m::STRING_8, 0x1A, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66,
0x67, 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E,
0x6F, 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76,
0x77, 0x78, 0x79, 0x7A,
0x81, 0x43,
m::STRING_8, 0x1A, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66,
0x67, 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E,
0x6F, 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76,
0x77, 0x78, 0x79, 0x7A];
assert_eq!(expected, result);
}
#[test]
fn serialize_map_of_int() {
let size = 3;
let input = {
let mut input: BTreeMap<&'static str, u32> = BTreeMap::new();
input.insert("A", 32_000);
input.insert("B", 32_000);
input.insert("C", 32_000);
input
};
let result = encode(&input).unwrap();
let expected = vec![m::TINY_MAP_NIBBLE + size,
0x81, 0x41, m::INT_16, 0x7D, 0x00,
0x81, 0x42, m::INT_16, 0x7D, 0x00,
0x81, 0x43, m::INT_16, 0x7D, 0x00];
assert_eq!(expected, result);
}
#[test]
fn serialize_map_of_float() {
let size = 3;
let input = {
let mut input: BTreeMap<&'static str, f64> = BTreeMap::new();
input.insert("A", 1.1);
input.insert("B", 1.1);
input.insert("C", 1.1);
input
};
let result = encode(&input).unwrap();
let expected = vec![m::TINY_MAP_NIBBLE + size,
0x81, 0x41, m::FLOAT, 0x3F, 0xF1, 0x99, 0x99, 0x99, 0x99, 0x99, 0x9A,
0x81, 0x42, m::FLOAT, 0x3F, 0xF1, 0x99, 0x99, 0x99, 0x99, 0x99, 0x9A,
0x81, 0x43, m::FLOAT, 0x3F, 0xF1, 0x99, 0x99, 0x99, 0x99, 0x99, 0x9A];
assert_eq!(expected, result);
}
#[test]
fn serialize_map_of_bool() {
let size = 4;
let input = {
let mut input: BTreeMap<&'static str, bool> = BTreeMap::new();
input.insert("A", true);
input.insert("B", false);
input.insert("C", true);
input.insert("D", false);
input
};
let result = encode(&input).unwrap();
let expected = vec![m::TINY_MAP_NIBBLE + size,
0x81, 0x41, m::TRUE,
0x81, 0x42, m::FALSE,
0x81, 0x43, m::TRUE,
0x81, 0x44, m::FALSE];
assert_eq!(expected, result);
}
#[test]
fn serialize_struct16() {
#[derive(RustcEncodable)]
#[allow(non_snake_case)]
struct MyStruct {
A001: u16, A002: u16, A003: u16, A004: u16, A005: u16, A006: u16, A007: u16, A008: u16,
A009: u16, A010: u16, A011: u16, A012: u16, A013: u16, A014: u16, A015: u16, A016: u16,
A017: u16, A018: u16, A019: u16, A020: u16, A021: u16, A022: u16, A023: u16, A024: u16,
A025: u16, A026: u16, A027: u16, A028: u16, A029: u16, A030: u16, A031: u16, A032: u16,
A033: u16, A034: u16, A035: u16, A036: u16, A037: u16, A038: u16, A039: u16, A040: u16,
A041: u16, A042: u16, A043: u16, A044: u16, A045: u16, A046: u16, A047: u16, A048: u16,
A049: u16, A050: u16, A051: u16, A052: u16, A053: u16, A054: u16, A055: u16, A056: u16,
A057: u16, A058: u16, A059: u16, A060: u16, A061: u16, A062: u16, A063: u16, A064: u16,
A065: u16, A066: u16, A067: u16, A068: u16, A069: u16, A070: u16, A071: u16, A072: u16,
A073: u16, A074: u16, A075: u16, A076: u16, A077: u16, A078: u16, A079: u16, A080: u16,
A081: u16, A082: u16, A083: u16, A084: u16, A085: u16, A086: u16, A087: u16, A088: u16,
A089: u16, A090: u16, A091: u16, A092: u16, A093: u16, A094: u16, A095: u16, A096: u16,
A097: u16, A098: u16, A099: u16, A100: u16, A101: u16, A102: u16, A103: u16, A104: u16,
A105: u16, A106: u16, A107: u16, A108: u16, A109: u16, A110: u16, A111: u16, A112: u16,
A113: u16, A114: u16, A115: u16, A116: u16, A117: u16, A118: u16, A119: u16, A120: u16,
A121: u16, A122: u16, A123: u16, A124: u16, A125: u16, A126: u16, A127: u16, A128: u16,
A129: u16, A130: u16, A131: u16, A132: u16, A133: u16, A134: u16, A135: u16, A136: u16,
A137: u16, A138: u16, A139: u16, A140: u16, A141: u16, A142: u16, A143: u16, A144: u16,
A145: u16, A146: u16, A147: u16, A148: u16, A149: u16, A150: u16, A151: u16, A152: u16,
A153: u16, A154: u16, A155: u16, A156: u16, A157: u16, A158: u16, A159: u16, A160: u16,
A161: u16, A162: u16, A163: u16, A164: u16, A165: u16, A166: u16, A167: u16, A168: u16,
A169: u16, A170: u16, A171: u16, A172: u16, A173: u16, A174: u16, A175: u16, A176: u16,
A177: u16, A178: u16, A179: u16, A180: u16, A181: u16, A182: u16, A183: u16, A184: u16,
A185: u16, A186: u16, A187: u16, A188: u16, A189: u16, A190: u16, A191: u16, A192: u16,
A193: u16, A194: u16, A195: u16, A196: u16, A197: u16, A198: u16, A199: u16, A200: u16,
A201: u16, A202: u16, A203: u16, A204: u16, A205: u16, A206: u16, A207: u16, A208: u16,
A209: u16, A210: u16, A211: u16, A212: u16, A213: u16, A214: u16, A215: u16, A216: u16,
A217: u16, A218: u16, A219: u16, A220: u16, A221: u16, A222: u16, A223: u16, A224: u16,
A225: u16, A226: u16, A227: u16, A228: u16, A229: u16, A230: u16, A231: u16, A232: u16,
A233: u16, A234: u16, A235: u16, A236: u16, A237: u16, A238: u16, A239: u16, A240: u16,
A241: u16, A242: u16, A243: u16, A244: u16, A245: u16, A246: u16, A247: u16, A248: u16,
A249: u16, A250: u16, A251: u16, A252: u16, A253: u16, A254: u16, A255: u16, A256: u16,
}
let input = MyStruct {
A001: 1, A002: 1, A003: 1, A004: 1, A005: 1, A006: 1, A007: 1, A008: 1,
A009: 1, A010: 1, A011: 1, A012: 1, A013: 1, A014: 1, A015: 1, A016: 1,
A017: 1, A018: 1, A019: 1, A020: 1, A021: 1, A022: 1, A023: 1, A024: 1,
A025: 1, A026: 1, A027: 1, A028: 1, A029: 1, A030: 1, A031: 1, A032: 1,
A033: 1, A034: 1, A035: 1, A036: 1, A037: 1, A038: 1, A039: 1, A040: 1,
A041: 1, A042: 1, A043: 1, A044: 1, A045: 1, A046: 1, A047: 1, A048: 1,
A049: 1, A050: 1, A051: 1, A052: 1, A053: 1, A054: 1, A055: 1, A056: 1,
A057: 1, A058: 1, A059: 1, A060: 1, A061: 1, A062: 1, A063: 1, A064: 1,
A065: 1, A066: 1, A067: 1, A068: 1, A069: 1, A070: 1, A071: 1, A072: 1,
A073: 1, A074: 1, A075: 1, A076: 1, A077: 1, A078: 1, A079: 1, A080: 1,
A081: 1, A082: 1, A083: 1, A084: 1, A085: 1, A086: 1, A087: 1, A088: 1,
A089: 1, A090: 1, A091: 1, A092: 1, A093: 1, A094: 1, A095: 1, A096: 1,
A097: 1, A098: 1, A099: 1, A100: 1, A101: 1, A102: 1, A103: 1, A104: 1,
A105: 1, A106: 1, A107: 1, A108: 1, A109: 1, A110: 1, A111: 1, A112: 1,
A113: 1, A114: 1, A115: 1, A116: 1, A117: 1, A118: 1, A119: 1, A120: 1,
A121: 1, A122: 1, A123: 1, A124: 1, A125: 1, A126: 1, A127: 1, A128: 1,
A129: 1, A130: 1, A131: 1, A132: 1, A133: 1, A134: 1, A135: 1, A136: 1,
A137: 1, A138: 1, A139: 1, A140: 1, A141: 1, A142: 1, A143: 1, A144: 1,
A145: 1, A146: 1, A147: 1, A148: 1, A149: 1, A150: 1, A151: 1, A152: 1,
A153: 1, A154: 1, A155: 1, A156: 1, A157: 1, A158: 1, A159: 1, A160: 1,
A161: 1, A162: 1, A163: 1, A164: 1, A165: 1, A166: 1, A167: 1, A168: 1,
A169: 1, A170: 1, A171: 1, A172: 1, A173: 1, A174: 1, A175: 1, A176: 1,
A177: 1, A178: 1, A179: 1, A180: 1, A181: 1, A182: 1, A183: 1, A184: 1,
A185: 1, A186: 1, A187: 1, A188: 1, A189: 1, A190: 1, A191: 1, A192: 1,
A193: 1, A194: 1, A195: 1, A196: 1, A197: 1, A198: 1, A199: 1, A200: 1,
A201: 1, A202: 1, A203: 1, A204: 1, A205: 1, A206: 1, A207: 1, A208: 1,
A209: 1, A210: 1, A211: 1, A212: 1, A213: 1, A214: 1, A215: 1, A216: 1,
A217: 1, A218: 1, A219: 1, A220: 1, A221: 1, A222: 1, A223: 1, A224: 1,
A225: 1, A226: 1, A227: 1, A228: 1, A229: 1, A230: 1, A231: 1, A232: 1,
A233: 1, A234: 1, A235: 1, A236: 1, A237: 1, A238: 1, A239: 1, A240: 1,
A241: 1, A242: 1, A243: 1, A244: 1, A245: 1, A246: 1, A247: 1, A248: 1,
A249: 1, A250: 1, A251: 1, A252: 1, A253: 1, A254: 1, A255: 1, A256: 1,
};
let result = encode(&input).unwrap();
let expected = vec![m::MAP_16, 0x01, 0x00,
0x84, 0x41, 0x30, 0x30, 0x31, 0x01, 0x84, 0x41, 0x30, 0x30, 0x32, 0x01, 0x84, 0x41, 0x30, 0x30, 0x33, 0x01, 0x84, 0x41, 0x30, 0x30, 0x34, 0x01, 0x84, 0x41, 0x30, 0x30, 0x35, 0x01, 0x84, 0x41, 0x30, 0x30, 0x36, 0x01, 0x84, 0x41, 0x30, 0x30, 0x37, 0x01, 0x84, 0x41, 0x30, 0x30, 0x38, 0x01,
0x84, 0x41, 0x30, 0x30, 0x39, 0x01, 0x84, 0x41, 0x30, 0x31, 0x30, 0x01, 0x84, 0x41, 0x30, 0x31, 0x31, 0x01, 0x84, 0x41, 0x30, 0x31, 0x32, 0x01, 0x84, 0x41, 0x30, 0x31, 0x33, 0x01, 0x84, 0x41, 0x30, 0x31, 0x34, 0x01, 0x84, 0x41, 0x30, 0x31, 0x35, 0x01, 0x84, 0x41, 0x30, 0x31, 0x36, 0x01,
0x84, 0x41, 0x30, 0x31, 0x37, 0x01, 0x84, 0x41, 0x30, 0x31, 0x38, 0x01, 0x84, 0x41, 0x30, 0x31, 0x39, 0x01, 0x84, 0x41, 0x30, 0x32, 0x30, 0x01, 0x84, 0x41, 0x30, 0x32, 0x31, 0x01, 0x84, 0x41, 0x30, 0x32, 0x32, 0x01, 0x84, 0x41, 0x30, 0x32, 0x33, 0x01, 0x84, 0x41, 0x30, 0x32, 0x34, 0x01,
0x84, 0x41, 0x30, 0x32, 0x35, 0x01, 0x84, 0x41, 0x30, 0x32, 0x36, 0x01, 0x84, 0x41, 0x30, 0x32, 0x37, 0x01, 0x84, 0x41, 0x30, 0x32, 0x38, 0x01, 0x84, 0x41, 0x30, 0x32, 0x39, 0x01, 0x84, 0x41, 0x30, 0x33, 0x30, 0x01, 0x84, 0x41, 0x30, 0x33, 0x31, 0x01, 0x84, 0x41, 0x30, 0x33, 0x32, 0x01,
0x84, 0x41, 0x30, 0x33, 0x33, 0x01, 0x84, 0x41, 0x30, 0x33, 0x34, 0x01, 0x84, 0x41, 0x30, 0x33, 0x35, 0x01, 0x84, 0x41, 0x30, 0x33, 0x36, 0x01, 0x84, 0x41, 0x30, 0x33, 0x37, 0x01, 0x84, 0x41, 0x30, 0x33, 0x38, 0x01, 0x84, 0x41, 0x30, 0x33, 0x39, 0x01, 0x84, 0x41, 0x30, 0x34, 0x30, 0x01,
0x84, 0x41, 0x30, 0x34, 0x31, 0x01, 0x84, 0x41, 0x30, 0x34, 0x32, 0x01, 0x84, 0x41, 0x30, 0x34, 0x33, 0x01, 0x84, 0x41, 0x30, 0x34, 0x34, 0x01, 0x84, 0x41, 0x30, 0x34, 0x35, 0x01, 0x84, 0x41, 0x30, 0x34, 0x36, 0x01, 0x84, 0x41, 0x30, 0x34, 0x37, 0x01, 0x84, 0x41, 0x30, 0x34, 0x38, 0x01,
0x84, 0x41, 0x30, 0x34, 0x39, 0x01, 0x84, 0x41, 0x30, 0x35, 0x30, 0x01, 0x84, 0x41, 0x30, 0x35, 0x31, 0x01, 0x84, 0x41, 0x30, 0x35, 0x32, 0x01, 0x84, 0x41, 0x30, 0x35, 0x33, 0x01, 0x84, 0x41, 0x30, 0x35, 0x34, 0x01, 0x84, 0x41, 0x30, 0x35, 0x35, 0x01, 0x84, 0x41, 0x30, 0x35, 0x36, 0x01,
0x84, 0x41, 0x30, 0x35, 0x37, 0x01, 0x84, 0x41, 0x30, 0x35, 0x38, 0x01, 0x84, 0x41, 0x30, 0x35, 0x39, 0x01, 0x84, 0x41, 0x30, 0x36, 0x30, 0x01, 0x84, 0x41, 0x30, 0x36, 0x31, 0x01, 0x84, 0x41, 0x30, 0x36, 0x32, 0x01, 0x84, 0x41, 0x30, 0x36, 0x33, 0x01, 0x84, 0x41, 0x30, 0x36, 0x34, 0x01,
0x84, 0x41, 0x30, 0x36, 0x35, 0x01, 0x84, 0x41, 0x30, 0x36, 0x36, 0x01, 0x84, 0x41, 0x30, 0x36, 0x37, 0x01, 0x84, 0x41, 0x30, 0x36, 0x38, 0x01, 0x84, 0x41, 0x30, 0x36, 0x39, 0x01, 0x84, 0x41, 0x30, 0x37, 0x30, 0x01, 0x84, 0x41, 0x30, 0x37, 0x31, 0x01, 0x84, 0x41, 0x30, 0x37, 0x32, 0x01,
0x84, 0x41, 0x30, 0x37, 0x33, 0x01, 0x84, 0x41, 0x30, 0x37, 0x34, 0x01, 0x84, 0x41, 0x30, 0x37, 0x35, 0x01, 0x84, 0x41, 0x30, 0x37, 0x36, 0x01, 0x84, 0x41, 0x30, 0x37, 0x37, 0x01, 0x84, 0x41, 0x30, 0x37, 0x38, 0x01, 0x84, 0x41, 0x30, 0x37, 0x39, 0x01, 0x84, 0x41, 0x30, 0x38, 0x30, 0x01,
0x84, 0x41, 0x30, 0x38, 0x31, 0x01, 0x84, 0x41, 0x30, 0x38, 0x32, 0x01, 0x84, 0x41, 0x30, 0x38, 0x33, 0x01, 0x84, 0x41, 0x30, 0x38, 0x34, 0x01, 0x84, 0x41, 0x30, 0x38, 0x35, 0x01, 0x84, 0x41, 0x30, 0x38, 0x36, 0x01, 0x84, 0x41, 0x30, 0x38, 0x37, 0x01, 0x84, 0x41, 0x30, 0x38, 0x38, 0x01,
0x84, 0x41, 0x30, 0x38, 0x39, 0x01, 0x84, 0x41, 0x30, 0x39, 0x30, 0x01, 0x84, 0x41, 0x30, 0x39, 0x31, 0x01, 0x84, 0x41, 0x30, 0x39, 0x32, 0x01, 0x84, 0x41, 0x30, 0x39, 0x33, 0x01, 0x84, 0x41, 0x30, 0x39, 0x34, 0x01, 0x84, 0x41, 0x30, 0x39, 0x35, 0x01, 0x84, 0x41, 0x30, 0x39, 0x36, 0x01,
0x84, 0x41, 0x30, 0x39, 0x37, 0x01, 0x84, 0x41, 0x30, 0x39, 0x38, 0x01, 0x84, 0x41, 0x30, 0x39, 0x39, 0x01, 0x84, 0x41, 0x31, 0x30, 0x30, 0x01, 0x84, 0x41, 0x31, 0x30, 0x31, 0x01, 0x84, 0x41, 0x31, 0x30, 0x32, 0x01, 0x84, 0x41, 0x31, 0x30, 0x33, 0x01, 0x84, 0x41, 0x31, 0x30, 0x34, 0x01,
0x84, 0x41, 0x31, 0x30, 0x35, 0x01, 0x84, 0x41, 0x31, 0x30, 0x36, 0x01, 0x84, 0x41, 0x31, 0x30, 0x37, 0x01, 0x84, 0x41, 0x31, 0x30, 0x38, 0x01, 0x84, 0x41, 0x31, 0x30, 0x39, 0x01, 0x84, 0x41, 0x31, 0x31, 0x30, 0x01, 0x84, 0x41, 0x31, 0x31, 0x31, 0x01, 0x84, 0x41, 0x31, 0x31, 0x32, 0x01,
0x84, 0x41, 0x31, 0x31, 0x33, 0x01, 0x84, 0x41, 0x31, 0x31, 0x34, 0x01, 0x84, 0x41, 0x31, 0x31, 0x35, 0x01, 0x84, 0x41, 0x31, 0x31, 0x36, 0x01, 0x84, 0x41, 0x31, 0x31, 0x37, 0x01, 0x84, 0x41, 0x31, 0x31, 0x38, 0x01, 0x84, 0x41, 0x31, 0x31, 0x39, 0x01, 0x84, 0x41, 0x31, 0x32, 0x30, 0x01,
0x84, 0x41, 0x31, 0x32, 0x31, 0x01, 0x84, 0x41, 0x31, 0x32, 0x32, 0x01, 0x84, 0x41, 0x31, 0x32, 0x33, 0x01, 0x84, 0x41, 0x31, 0x32, 0x34, 0x01, 0x84, 0x41, 0x31, 0x32, 0x35, 0x01, 0x84, 0x41, 0x31, 0x32, 0x36, 0x01, 0x84, 0x41, 0x31, 0x32, 0x37, 0x01, 0x84, 0x41, 0x31, 0x32, 0x38, 0x01,
0x84, 0x41, 0x31, 0x32, 0x39, 0x01, 0x84, 0x41, 0x31, 0x33, 0x30, 0x01, 0x84, 0x41, 0x31, 0x33, 0x31, 0x01, 0x84, 0x41, 0x31, 0x33, 0x32, 0x01, 0x84, 0x41, 0x31, 0x33, 0x33, 0x01, 0x84, 0x41, 0x31, 0x33, 0x34, 0x01, 0x84, 0x41, 0x31, 0x33, 0x35, 0x01, 0x84, 0x41, 0x31, 0x33, 0x36, 0x01,
0x84, 0x41, 0x31, 0x33, 0x37, 0x01, 0x84, 0x41, 0x31, 0x33, 0x38, 0x01, 0x84, 0x41, 0x31, 0x33, 0x39, 0x01, 0x84, 0x41, 0x31, 0x34, 0x30, 0x01, 0x84, 0x41, 0x31, 0x34, 0x31, 0x01, 0x84, 0x41, 0x31, 0x34, 0x32, 0x01, 0x84, 0x41, 0x31, 0x34, 0x33, 0x01, 0x84, 0x41, 0x31, 0x34, 0x34, 0x01,
0x84, 0x41, 0x31, 0x34, 0x35, 0x01, 0x84, 0x41, 0x31, 0x34, 0x36, 0x01, 0x84, 0x41, 0x31, 0x34, 0x37, 0x01, 0x84, 0x41, 0x31, 0x34, 0x38, 0x01, 0x84, 0x41, 0x31, 0x34, 0x39, 0x01, 0x84, 0x41, 0x31, 0x35, 0x30, 0x01, 0x84, 0x41, 0x31, 0x35, 0x31, 0x01, 0x84, 0x41, 0x31, 0x35, 0x32, 0x01,
0x84, 0x41, 0x31, 0x35, 0x33, 0x01, 0x84, 0x41, 0x31, 0x35, 0x34, 0x01, 0x84, 0x41, 0x31, 0x35, 0x35, 0x01, 0x84, 0x41, 0x31, 0x35, 0x36, 0x01, 0x84, 0x41, 0x31, 0x35, 0x37, 0x01, 0x84, 0x41, 0x31, 0x35, 0x38, 0x01, 0x84, 0x41, 0x31, 0x35, 0x39, 0x01, 0x84, 0x41, 0x31, 0x36, 0x30, 0x01,
0x84, 0x41, 0x31, 0x36, 0x31, 0x01, 0x84, 0x41, 0x31, 0x36, 0x32, 0x01, 0x84, 0x41, 0x31, 0x36, 0x33, 0x01, 0x84, 0x41, 0x31, 0x36, 0x34, 0x01, 0x84, 0x41, 0x31, 0x36, 0x35, 0x01, 0x84, 0x41, 0x31, 0x36, 0x36, 0x01, 0x84, 0x41, 0x31, 0x36, 0x37, 0x01, 0x84, 0x41, 0x31, 0x36, 0x38, 0x01,
0x84, 0x41, 0x31, 0x36, 0x39, 0x01, 0x84, 0x41, 0x31, 0x37, 0x30, 0x01, 0x84, 0x41, 0x31, 0x37, 0x31, 0x01, 0x84, 0x41, 0x31, 0x37, 0x32, 0x01, 0x84, 0x41, 0x31, 0x37, 0x33, 0x01, 0x84, 0x41, 0x31, 0x37, 0x34, 0x01, 0x84, 0x41, 0x31, 0x37, 0x35, 0x01, 0x84, 0x41, 0x31, 0x37, 0x36, 0x01,
0x84, 0x41, 0x31, 0x37, 0x37, 0x01, 0x84, 0x41, 0x31, 0x37, 0x38, 0x01, 0x84, 0x41, 0x31, 0x37, 0x39, 0x01, 0x84, 0x41, 0x31, 0x38, 0x30, 0x01, 0x84, 0x41, 0x31, 0x38, 0x31, 0x01, 0x84, 0x41, 0x31, 0x38, 0x32, 0x01, 0x84, 0x41, 0x31, 0x38, 0x33, 0x01, 0x84, 0x41, 0x31, 0x38, 0x34, 0x01,
0x84, 0x41, 0x31, 0x38, 0x35, 0x01, 0x84, 0x41, 0x31, 0x38, 0x36, 0x01, 0x84, 0x41, 0x31, 0x38, 0x37, 0x01, 0x84, 0x41, 0x31, 0x38, 0x38, 0x01, 0x84, 0x41, 0x31, 0x38, 0x39, 0x01, 0x84, 0x41, 0x31, 0x39, 0x30, 0x01, 0x84, 0x41, 0x31, 0x39, 0x31, 0x01, 0x84, 0x41, 0x31, 0x39, 0x32, 0x01,
0x84, 0x41, 0x31, 0x39, 0x33, 0x01, 0x84, 0x41, 0x31, 0x39, 0x34, 0x01, 0x84, 0x41, 0x31, 0x39, 0x35, 0x01, 0x84, 0x41, 0x31, 0x39, 0x36, 0x01, 0x84, 0x41, 0x31, 0x39, 0x37, 0x01, 0x84, 0x41, 0x31, 0x39, 0x38, 0x01, 0x84, 0x41, 0x31, 0x39, 0x39, 0x01, 0x84, 0x41, 0x32, 0x30, 0x30, 0x01,
0x84, 0x41, 0x32, 0x30, 0x31, 0x01, 0x84, 0x41, 0x32, 0x30, 0x32, 0x01, 0x84, 0x41, 0x32, 0x30, 0x33, 0x01, 0x84, 0x41, 0x32, 0x30, 0x34, 0x01, 0x84, 0x41, 0x32, 0x30, 0x35, 0x01, 0x84, 0x41, 0x32, 0x30, 0x36, 0x01, 0x84, 0x41, 0x32, 0x30, 0x37, 0x01, 0x84, 0x41, 0x32, 0x30, 0x38, 0x01,
0x84, 0x41, 0x32, 0x30, 0x39, 0x01, 0x84, 0x41, 0x32, 0x31, 0x30, 0x01, 0x84, 0x41, 0x32, 0x31, 0x31, 0x01, 0x84, 0x41, 0x32, 0x31, 0x32, 0x01, 0x84, 0x41, 0x32, 0x31, 0x33, 0x01, 0x84, 0x41, 0x32, 0x31, 0x34, 0x01, 0x84, 0x41, 0x32, 0x31, 0x35, 0x01, 0x84, 0x41, 0x32, 0x31, 0x36, 0x01,
0x84, 0x41, 0x32, 0x31, 0x37, 0x01, 0x84, 0x41, 0x32, 0x31, 0x38, 0x01, 0x84, 0x41, 0x32, 0x31, 0x39, 0x01, 0x84, 0x41, 0x32, 0x32, 0x30, 0x01, 0x84, 0x41, 0x32, 0x32, 0x31, 0x01, 0x84, 0x41, 0x32, 0x32, 0x32, 0x01, 0x84, 0x41, 0x32, 0x32, 0x33, 0x01, 0x84, 0x41, 0x32, 0x32, 0x34, 0x01,
0x84, 0x41, 0x32, 0x32, 0x35, 0x01, 0x84, 0x41, 0x32, 0x32, 0x36, 0x01, 0x84, 0x41, 0x32, 0x32, 0x37, 0x01, 0x84, 0x41, 0x32, 0x32, 0x38, 0x01, 0x84, 0x41, 0x32, 0x32, 0x39, 0x01, 0x84, 0x41, 0x32, 0x33, 0x30, 0x01, 0x84, 0x41, 0x32, 0x33, 0x31, 0x01, 0x84, 0x41, 0x32, 0x33, 0x32, 0x01,
0x84, 0x41, 0x32, 0x33, 0x33, 0x01, 0x84, 0x41, 0x32, 0x33, 0x34, 0x01, 0x84, 0x41, 0x32, 0x33, 0x35, 0x01, 0x84, 0x41, 0x32, 0x33, 0x36, 0x01, 0x84, 0x41, 0x32, 0x33, 0x37, 0x01, 0x84, 0x41, 0x32, 0x33, 0x38, 0x01, 0x84, 0x41, 0x32, 0x33, 0x39, 0x01, 0x84, 0x41, 0x32, 0x34, 0x30, 0x01,
0x84, 0x41, 0x32, 0x34, 0x31, 0x01, 0x84, 0x41, 0x32, 0x34, 0x32, 0x01, 0x84, 0x41, 0x32, 0x34, 0x33, 0x01, 0x84, 0x41, 0x32, 0x34, 0x34, 0x01, 0x84, 0x41, 0x32, 0x34, 0x35, 0x01, 0x84, 0x41, 0x32, 0x34, 0x36, 0x01, 0x84, 0x41, 0x32, 0x34, 0x37, 0x01, 0x84, 0x41, 0x32, 0x34, 0x38, 0x01,
0x84, 0x41, 0x32, 0x34, 0x39, 0x01, 0x84, 0x41, 0x32, 0x35, 0x30, 0x01, 0x84, 0x41, 0x32, 0x35, 0x31, 0x01, 0x84, 0x41, 0x32, 0x35, 0x32, 0x01, 0x84, 0x41, 0x32, 0x35, 0x33, 0x01, 0x84, 0x41, 0x32, 0x35, 0x34, 0x01, 0x84, 0x41, 0x32, 0x35, 0x35, 0x01, 0x84, 0x41, 0x32, 0x35, 0x36, 0x01,
];
assert_eq!(expected, result);
}
#[test]
fn serialize_struct8() {
let size = 16;
#[derive(RustcEncodable)]
#[allow(non_snake_case)]
struct MyStruct {
A: u16, B: u16, C: u16, D: u16,
E: u16, F: u16, G: u16, H: u16,
I: u16, J: u16, K: u16, L: u16,
M: u16, N: u16, O: u16, P: u16,
}
let input = MyStruct {
A: 1, B: 1, C: 1, D: 1,
E: 1, F: 1, G: 1, H: 1,
I: 1, J: 1, K: 1, L: 1,
M: 1, N: 1, O: 1, P: 1,
};
let result = encode(&input).unwrap();
let expected = vec![m::MAP_8, size,
0x81, 0x41, 0x01, 0x81, 0x42, 0x01, 0x81, 0x43, 0x01, 0x81, 0x44, 0x01,
0x81, 0x45, 0x01, 0x81, 0x46, 0x01, 0x81, 0x47, 0x01, 0x81, 0x48, 0x01,
0x81, 0x49, 0x01, 0x81, 0x4A, 0x01, 0x81, 0x4B, 0x01, 0x81, 0x4C, 0x01,
0x81, 0x4D, 0x01, 0x81, 0x4E, 0x01, 0x81, 0x4F, 0x01, 0x81, 0x50, 0x01];
assert_eq!(expected, result);
}
#[test]
fn serialize_tiny_struct() {
let size = 3;
#[derive(RustcEncodable)]
#[allow(non_snake_case)]
struct MyStruct {
A: u32,
B: f64,
C: &'static str,
}
let input = MyStruct {
A: 1,
B: 1.1,
C: "C",
};
let result = encode(&input).unwrap();
let expected = vec![m::TINY_MAP_NIBBLE + size,
0x81, 0x41, 0x01,
0x81, 0x42, m::FLOAT, 0x3F, 0xF1, 0x99, 0x99, 0x99, 0x99, 0x99, 0x9A,
0x81, 0x43, 0x81, 0x43];
assert_eq!(expected, result);
}
#[test]
fn serialize_enum() {
#[derive(RustcEncodable)]
enum MyEnum {
A,
}
let input = MyEnum::A;
let result = encode(&input).unwrap();
let expected = vec![0x81, 0x41];
assert_eq!(expected, result);
}
#[test]
fn serialize_enum_tuple_variant() {
#[derive(RustcEncodable)]
enum MyEnum {
A(u16, u16),
}
let input = MyEnum::A(1, 2);
let result = encode(&input).unwrap();
let expected = vec![m::TINY_MAP_NIBBLE + 0x01,
0x81, 0x41,
0x92, 0x01, 0x02];
assert_eq!(expected, result);
}
// #[test]
// fn serialize_enum_struct_variant() {
// let size = 2;
//
// #[derive(RustcEncodable)]
// #[allow(non_snake_case)]
// enum MyEnum {
// A { A: u16, B: u16 },
// }
//
// let input = MyEnum::A { A: 1, B: 2 };
//
// let result = encode(&input).unwrap();
// let expected = vec![m::TINY_MAP_NIBBLE + size,
// 0x81, 0x41, 0x01,
// 0x81, 0x42, 0x02];
//
// assert_eq!(expected, result);
// }
}<|fim▁end|> | |
<|file_name|>pipe.rs<|end_file_name|><|fim▁begin|>use alloc::arc::{Arc, Weak};
use collections::{BTreeMap, VecDeque};
use core::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering};
use spin::{Mutex, Once, RwLock, RwLockReadGuard, RwLockWriteGuard};
use sync::WaitCondition;
use syscall::error::{Error, Result, EBADF, EPIPE};
use syscall::flag::O_NONBLOCK;
use syscall::scheme::Scheme;
/// Pipes list
pub static PIPE_SCHEME_ID: AtomicUsize = ATOMIC_USIZE_INIT;
static PIPE_NEXT_ID: AtomicUsize = ATOMIC_USIZE_INIT;
static PIPES: Once<RwLock<(BTreeMap<usize, PipeRead>, BTreeMap<usize, PipeWrite>)>> = Once::new();
/// Initialize pipes, called if needed
fn init_pipes() -> RwLock<(BTreeMap<usize, PipeRead>, BTreeMap<usize, PipeWrite>)> {
RwLock::new((BTreeMap::new(), BTreeMap::new()))
}
/// Get the global pipes list, const
fn pipes() -> RwLockReadGuard<'static, (BTreeMap<usize, PipeRead>, BTreeMap<usize, PipeWrite>)> {
PIPES.call_once(init_pipes).read()<|fim▁hole|>fn pipes_mut() -> RwLockWriteGuard<'static, (BTreeMap<usize, PipeRead>, BTreeMap<usize, PipeWrite>)> {
PIPES.call_once(init_pipes).write()
}
pub fn pipe(flags: usize) -> (usize, usize) {
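    // Creates a connected pair sharing one buffer and wait condition;
    // illustrative use: `let (read_id, write_id) = pipe(0);`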
let mut pipes = pipes_mut();
let read_id = PIPE_NEXT_ID.fetch_add(1, Ordering::SeqCst);
let write_id = PIPE_NEXT_ID.fetch_add(1, Ordering::SeqCst);
let read = PipeRead::new(flags);
let write = PipeWrite::new(&read);
pipes.0.insert(read_id, read);
pipes.1.insert(write_id, write);
(read_id, write_id)
}
pub struct PipeScheme;
impl Scheme for PipeScheme {
fn dup(&self, id: usize, _buf: &[u8]) -> Result<usize> {
let mut pipes = pipes_mut();
let read_option = pipes.0.get(&id).map(|pipe| pipe.clone());
if let Some(pipe) = read_option {
let pipe_id = PIPE_NEXT_ID.fetch_add(1, Ordering::SeqCst);
pipes.0.insert(pipe_id, pipe);
return Ok(pipe_id);
}
let write_option = pipes.1.get(&id).map(|pipe| pipe.clone());
if let Some(pipe) = write_option {
let pipe_id = PIPE_NEXT_ID.fetch_add(1, Ordering::SeqCst);
pipes.1.insert(pipe_id, pipe);
return Ok(pipe_id);
}
Err(Error::new(EBADF))
}
fn read(&self, id: usize, buf: &mut [u8]) -> Result<usize> {
let pipe_option = {
let pipes = pipes();
pipes.0.get(&id).map(|pipe| pipe.clone())
};
if let Some(pipe) = pipe_option {
pipe.read(buf)
} else {
Err(Error::new(EBADF))
}
}
fn write(&self, id: usize, buf: &[u8]) -> Result<usize> {
let pipe_option = {
let pipes = pipes();
pipes.1.get(&id).map(|pipe| pipe.clone())
};
if let Some(pipe) = pipe_option {
pipe.write(buf)
} else {
Err(Error::new(EBADF))
}
}
fn fsync(&self, _id: usize) -> Result<usize> {
Ok(0)
}
fn close(&self, id: usize) -> Result<usize> {
let mut pipes = pipes_mut();
drop(pipes.0.remove(&id));
drop(pipes.1.remove(&id));
Ok(0)
}
}
/// Read side of a pipe
#[derive(Clone)]
pub struct PipeRead {
flags: usize,
condition: Arc<WaitCondition>,
vec: Arc<Mutex<VecDeque<u8>>>
}
impl PipeRead {
pub fn new(flags: usize) -> Self {
PipeRead {
flags: flags,
condition: Arc::new(WaitCondition::new()),
vec: Arc::new(Mutex::new(VecDeque::new())),
}
}
fn read(&self, buf: &mut [u8]) -> Result<usize> {
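        // Drain buffered bytes if any are available; otherwise return 0 (EOF)
        // when O_NONBLOCK is set or all writers are gone (weak count 0), else
        // block on the wait condition until a writer notifies.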
loop {
{
let mut vec = self.vec.lock();
let mut i = 0;
while i < buf.len() {
if let Some(b) = vec.pop_front() {
buf[i] = b;
i += 1;
} else {
break;
}
}
if i > 0 {
return Ok(i);
}
}
if self.flags & O_NONBLOCK == O_NONBLOCK || Arc::weak_count(&self.vec) == 0 {
return Ok(0);
} else {
self.condition.wait();
}
}
}
}
/// Write side of a pipe
#[derive(Clone)]
pub struct PipeWrite {
condition: Arc<WaitCondition>,
vec: Weak<Mutex<VecDeque<u8>>>
}
impl PipeWrite {
pub fn new(read: &PipeRead) -> Self {
PipeWrite {
condition: read.condition.clone(),
vec: Arc::downgrade(&read.vec),
}
}
fn write(&self, buf: &[u8]) -> Result<usize> {
if let Some(vec_lock) = self.vec.upgrade() {
let mut vec = vec_lock.lock();
for &b in buf.iter() {
vec.push_back(b);
}
self.condition.notify();
Ok(buf.len())
} else {
Err(Error::new(EPIPE))
}
}
}
impl Drop for PipeWrite {
fn drop(&mut self) {
self.condition.notify();
}
}<|fim▁end|> | }
/// Get the global pipes list, mutable
<|file_name|>dyn_form.py<|end_file_name|><|fim▁begin|>from django import template<|fim▁hole|>@register.filter
def dyn_form(forms, pk):
return forms[pk]<|fim▁end|> | from .. import forms
register = template.Library()
|
<|file_name|>context_processors.py<|end_file_name|><|fim▁begin|>"""Context processors, these get called and add things to template contexts"""
from django.conf import settings
def analytics_and_ads(request):
""" Adds the google analytics code to the context """
out = {}
if request.user.is_authenticated() and request.user.settings.no_analytics:
out["analytics_code"] = ""
else:<|fim▁hole|> out["ad_client"] = ""
else:
out["ad_client"] = settings.AD_CLIENT
out["ad_slot_top"] = settings.AD_SLOT_TOP
out["ad_slot_bottom"] = settings.AD_SLOT_BOTTOM
return out
def add_webstore_url(request):
return {"webstore_url":settings.CHROME_EXTENSION_WEBSTORE}<|fim▁end|> | out["analytics_code"] = settings.ANALYTICS_CODE
if request.user.is_authenticated() and request.user.settings.no_ads: |
<|file_name|>color.rs<|end_file_name|><|fim▁begin|>use std::f32::*;
use math::*;
#[derive(Debug,PartialEq)]
pub struct Color {
pub r: f32,
pub g: f32,
pub b: f32,
}
#[derive(Debug,PartialEq)]
pub struct HSL {
pub hue: f32,
pub saturation: f32,
pub lightness: f32,
}
impl Color {
pub fn from_floats(r: f32, g: f32, b: f32) -> Color {
Color {
r: clamp(r, 0.0, 1.0),
g: clamp(g, 0.0, 1.0),
b: clamp(b, 0.0, 1.0),
}
}
pub fn from_ints(r: u32, g: u32, b: u32) -> Color {
Color::from_floats((r as f32) / 255.0, (g as f32) / 255.0, (b as f32) / 255.0)
}
pub fn from_hex(hex: u32) -> Color {
Color::from_ints(hex >> 16 & 255, hex >> 8 & 255, hex & 255)
}
pub fn from_scalar(scalar: f32) -> Color {
Color::from_floats(scalar, scalar, scalar)
}
pub fn from_hsl(hsl: &HSL) -> Color {
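        // Standard HSL -> RGB conversion; e.g. (hue 0.0, saturation 1.0,
        // lightness 0.5) maps to pure red (illustrative values).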
let hue2rgb = |p: f32, q: f32, t: f32| -> f32 {
let mut mt = t;
if mt < 0.0 {
mt += 1.0;
}
if mt > 1.0 {
mt -= 1.0;
}
if mt < 1.0 / 6.0 {
return p + (q - p) * 6.0 * mt;
}
if mt < 1.0 / 2.0 {
return q;
}
if mt < 2.0 / 3.0 {
return p + (q - p) * 6.0 * (2.0 / 3.0 - mt);
}
p
};
// h,s,l ranges are in 0.0 - 1.0
let h_clamped = euclidean_modulo(hsl.hue, 1.0);
let s_clamped = clamp(hsl.saturation, 0.0, 1.0);
let l = clamp(hsl.lightness, 0.0, 1.0);
if s_clamped == 0.0 {
            Color::from_scalar(l) // achromatic: gray at the given lightness
} else {
let p = if l <= 0.5 {
l * (1.0 + s_clamped)
} else {
l + s_clamped - (l * s_clamped)
};
let q = (2.0 * l) - p;
let one_third = 1.0 / 3.0;
Color {
r: hue2rgb(q, p, h_clamped + one_third),
g: hue2rgb(q, p, h_clamped),
b: hue2rgb(q, p, h_clamped - one_third),
}
}
}
pub fn gamma_to_linear(&self, gamma_factor: Option<f32>) -> Color {
let g = match gamma_factor {
Some(x) => x,
None => 2.0,
};
Color {
r: self.r.powf(g),
g: self.g.powf(g),
b: self.b.powf(g),
}
}
pub fn linear_to_gamma(&self, gamma_factor: Option<f32>) -> Color {
let g = match gamma_factor {
Some(x) => x,
None => 2.0,
};
let safe_inverse = if g > 0.0 { 1.0 / g } else { 1.0 };
Color {
r: self.r.powf(safe_inverse),
g: self.g.powf(safe_inverse),
b: self.b.powf(safe_inverse),
}
}
pub fn convert_gamma_to_linear(&self) -> Color {
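        // Fast path for the default gamma factor of 2.0: squaring instead of powf.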
Color {
r: self.r * self.r,
g: self.g * self.g,
b: self.b * self.b,
}
}
pub fn convert_linear_to_gamma(&self) -> Color {
Color {
r: self.r.sqrt(),
g: self.g.sqrt(),
b: self.b.sqrt(),
}
}
pub fn hex(&self) -> u32 {
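        // Packs the channels as 0xRRGGBB; e.g. MAROON.hex() == 0x800000.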
let r = (self.r * 255.0) as u32;
let g = (self.g * 255.0) as u32;
let b = (self.b * 255.0) as u32;
r << 16 ^ g << 8 ^ b
}
pub fn hex_string(&self) -> String {
format!("{:x}", self.hex())
}
pub fn hsl(&self) -> HSL {
// h,s,l ranges are in 0.0 - 1.0
let &Color { r, g, b } = self;
let max = r.max(g).max(b);
let min = r.min(g).min(b);
let lightness = (min + max) / 2.0;
if (min - max).abs() < EPSILON {
HSL {
hue: 0.0,
saturation: 0.0,
lightness: lightness,
}
} else {
let delta = max - min;
let saturation = if lightness <= 0.5 {
delta / (max + min)
} else {
delta / (2.0 - max - min)
};
let hue = match max {
_ if (max - r).abs() < EPSILON => (g - b) / delta + (if g < b { 6.0 } else { 0.0 }),
_ if (max - g).abs() < EPSILON => (b - r) / delta + 2.0,
_ if (max - b).abs() < EPSILON => (r - g) / delta + 4.0,
_ => panic!("Oh noes"),
};
HSL {
hue: hue / 6.0,
saturation: saturation,
lightness: lightness,
}
}
}
pub fn offset_hsl(&self, hsl: &HSL) -> Color {
let mut _hsl = self.hsl();
_hsl.hue += hsl.hue;
_hsl.saturation += hsl.saturation;
_hsl.lightness += hsl.lightness;
Color::from_hsl(&_hsl)
}
pub fn add(&self, color: &Color) -> Color {
Color::from_floats(self.r + color.r, self.g + color.g, self.b + color.b)
}
pub fn add_scalar(&self, s: f32) -> Color {
Color::from_floats(self.r + s, self.g + s, self.b + s)
}
pub fn subtract(&self, color: &Color) -> Color {
Color::from_floats(self.r - color.r, self.g - color.g, self.b - color.b)
}
pub fn multiply(&self, color: &Color) -> Color {
Color::from_floats(self.r * color.r, self.g * color.g, self.b * color.b)
}
pub fn multiply_scalar(&self, s: f32) -> Color {
Color::from_floats(self.r * s, self.g * s, self.b * s)
}
pub fn lerp(&self, color: &Color, alpha: f32) -> Color {
Color {
r: self.r + ((color.r - self.r) * alpha),
g: self.g + ((color.g - self.g) * alpha),
b: self.b + ((color.b - self.b) * alpha),
}
}
}
pub const MAROON: Color = Color {
r: 0.50196,
g: 0.0,
b: 0.0,
};
pub const DARK_RED: Color = Color {
r: 0.54510,
g: 0.0,
b: 0.0,
};
pub const BROWN: Color = Color {
r: 0.64706,
g: 0.16471,
b: 0.16471,
};
pub const FIREBRICK: Color = Color {
r: 0.69804,
g: 0.13333,
b: 0.13333,
};
pub const CRIMSON: Color = Color {
r: 0.86275,
g: 0.07843,
b: 0.23529,
};
pub const RED: Color = Color {
r: 1.0,
g: 0.0,
b: 0.0,
};
pub const TOMATO: Color = Color {
r: 1.0,
g: 0.38824,
b: 0.27843,
};
pub const CORAL: Color = Color {
r: 1.0,
g: 0.49804,
b: 80.0 / 255.0,
};
pub const INDIAN_RED: Color = Color {
r: 205.0 / 255.0,
g: 92.0 / 255.0,
b: 92.0 / 255.0,
};
pub const LIGHT_CORAL: Color = Color {
r: 240.0 / 255.0,
g: 0.50196,
b: 0.50196,
};
pub const DARK_SALMON: Color = Color {
r: 233.0 / 255.0,
g: 150.0 / 255.0,
b: 122.0 / 255.0,
};
pub const SALMON: Color = Color {
r: 250.0 / 255.0,
g: 0.50196,
b: 114.0 / 255.0,
};
pub const LIGHT_SALMON: Color = Color {
r: 1.0,
g: 160.0 / 255.0,
b: 122.0 / 255.0,
};
pub const ORANGE_RED: Color = Color {
r: 1.0,
g: 69.0 / 255.0,
b: 0.0,
};
pub const DARK_ORANGE: Color = Color {
r: 1.0,
g: 140.0 / 255.0,
b: 0.0,
};
pub const ORANGE: Color = Color {
r: 1.0,
g: 0.64706,
b: 0.0,
};
pub const GOLD: Color = Color {
r: 1.0,
g: 215.0 / 255.0,
b: 0.0,
};
pub const DARK_GOLDEN_ROD: Color = Color {
r: 184.0 / 255.0,
g: 134.0 / 255.0,
b: 11.0 / 255.0,
};
pub const GOLDEN_ROD: Color = Color {
r: 218.0 / 255.0,
g: 0.64706,
b: 32.0 / 255.0,
};
pub const PALE_GOLDEN_ROD: Color = Color {
r: 238.0 / 255.0,
g: 232.0 / 255.0,
b: 170.0 / 255.0,
};
pub const DARK_KHAKI: Color = Color {
r: 189.0 / 255.0,
g: 183.0 / 255.0,
b: 107.0 / 255.0,
};
pub const KHAKI: Color = Color {
r: 240.0 / 255.0,
g: 230.0 / 255.0,
b: 140.0 / 255.0,
};
pub const OLIVE: Color = Color {
r: 0.50196,
g: 0.50196,
b: 0.0,
};
pub const YELLOW: Color = Color {
r: 1.0,
g: 1.0,
b: 0.0,
};
pub const YELLOW_GREEN: Color = Color {
r: 154.0 / 255.0,
g: 205.0 / 255.0,
b: 50.0 / 255.0,
};
pub const DARK_OLIVE_GREEN: Color = Color {
r: 85.0 / 255.0,
g: 107.0 / 255.0,
b: 47.0 / 255.0,
};
pub const OLIVE_DRAB: Color = Color {
r: 107.0 / 255.0,
g: 142.0 / 255.0,
b: 35.0 / 255.0,
};
pub const LAWN_GREEN: Color = Color {
r: 124.0 / 255.0,
g: 252.0 / 255.0,
b: 0.0,
};
pub const CHARTREUSE: Color = Color {
r: 0.49804,
g: 1.0,
b: 0.0,
};
pub const GREEN_YELLOW: Color = Color {
r: 173.0 / 255.0,
g: 1.0,
b: 47.0 / 255.0,
};
pub const DARK_GREEN: Color = Color {
r: 0.0,
g: 100.0 / 255.0,
b: 0.0,
};
pub const GREEN: Color = Color {
r: 0.0,
g: 0.50196,
b: 0.0,
};
pub const FOREST_GREEN: Color = Color {
r: 0.13333,
g: 0.54510,
b: 0.13333,
};
pub const LIME: Color = Color {
r: 0.0,
g: 1.0,
b: 0.0,
};
pub const LIME_GREEN: Color = Color {
r: 50.0 / 255.0,
g: 205.0 / 255.0,
b: 50.0 / 255.0,
};
pub const LIGHT_GREEN: Color = Color {
r: 144.0 / 255.0,
g: 238.0 / 255.0,
b: 144.0 / 255.0,
};
pub const PALE_GREEN: Color = Color {
r: 0.59608,
g: 251.0 / 255.0,
b: 0.59608,
};
pub const DARK_SEA_GREEN: Color = Color {
r: 143.0 / 255.0,
g: 188.0 / 255.0,
b: 143.0 / 255.0,
};
pub const MEDIUM_SPRING_GREEN: Color = Color {
r: 0.0,
g: 250.0 / 255.0,
b: 154.0 / 255.0,
};
pub const SPRING_GREEN: Color = Color {
r: 0.0,
g: 1.0,
b: 0.49804,
};
pub const SEA_GREEN: Color = Color {
r: 46.0 / 255.0,
g: 0.54510,
b: 87.0 / 255.0,
};
pub const MEDIUM_AQUA_MARINE: Color = Color {
r: 102.0 / 255.0,
g: 205.0 / 255.0,
b: 170.0 / 255.0,
};
pub const MEDIUM_SEA_GREEN: Color = Color {
r: 0.23529,
g: 179.0 / 255.0,
b: 113.0 / 255.0,
};
pub const LIGHT_SEA_GREEN: Color = Color {
r: 32.0 / 255.0,
g: 0.69804,
b: 170.0 / 255.0,
};
pub const DARK_SLATE_GRAY: Color = Color {
r: 47.0 / 255.0,
g: 79.0 / 255.0,
b: 79.0 / 255.0,
};
pub const TEAL: Color = Color {
r: 0.0,
g: 0.50196,
b: 0.50196,
};
pub const DARK_CYAN: Color = Color {
r: 0.0,
g: 0.54510,
b: 0.54510,
};
pub const AQUA: Color = Color {
r: 0.0,
g: 1.0,
b: 1.0,
};
pub const CYAN: Color = Color {
r: 0.0,
g: 1.0,
b: 1.0,
};
pub const LIGHT_CYAN: Color = Color {
r: 224.0 / 255.0,
g: 1.0,
b: 1.0,
};
pub const DARK_TURQUOISE: Color = Color {
r: 0.0,
g: 206.0 / 255.0,
b: 209.0 / 255.0,
};
pub const TURQUOISE: Color = Color {
r: 64.0 / 255.0,
g: 224.0 / 255.0,
b: 208.0 / 255.0,
};
pub const MEDIUM_TURQUOISE: Color = Color {
r: 72.0 / 255.0,
g: 209.0 / 255.0,
b: 204.0 / 255.0,
};
pub const PALE_TURQUOISE: Color = Color {
r: 175.0 / 255.0,
g: 238.0 / 255.0,
b: 238.0 / 255.0,
};
pub const AQUA_MARINE: Color = Color {
r: 0.49804,
g: 1.0,
b: 212.0 / 255.0,
};
pub const POWDER_BLUE: Color = Color {
r: 176.0 / 255.0,
g: 224.0 / 255.0,
b: 230.0 / 255.0,
};
pub const CADET_BLUE: Color = Color {
r: 95.0 / 255.0,
g: 158.0 / 255.0,
b: 160.0 / 255.0,
};
pub const STEEL_BLUE: Color = Color {
r: 70.0 / 255.0,
g: 130.0 / 255.0,
b: 180.0 / 255.0,
};
pub const CORNFLOWER_BLUE: Color = Color {
r: 100.0 / 255.0,
g: 149.0 / 255.0,
b: 237.0 / 255.0,
};
pub const DEEP_SKY_BLUE: Color = Color {
r: 0.0,
g: 191.0 / 255.0,
b: 1.0,
};
pub const DODGER_BLUE: Color = Color {
r: 30.0 / 255.0,
g: 144.0 / 255.0,
    b: 1.0,
};
pub const LIGHT_BLUE: Color = Color {
r: 173.0 / 255.0,
g: 216.0 / 255.0,
b: 230.0 / 255.0,
};
pub const SKY_BLUE: Color = Color {
r: 135.0 / 255.0,
g: 206.0 / 255.0,
b: 235.0 / 255.0,
};
pub const LIGHT_SKY_BLUE: Color = Color {
r: 135.0 / 255.0,
g: 206.0 / 255.0,
b: 250.0 / 255.0,
};
pub const MIDNIGHT_BLUE: Color = Color {
r: 25.0 / 255.0,
g: 25.0 / 255.0,
b: 112.0 / 255.0,
};
pub const NAVY: Color = Color {
r: 0.0,
g: 0.0,
b: 0.50196,
};
pub const DARK_BLUE: Color = Color {
r: 0.0,
g: 0.0,
b: 0.54510,
};
pub const MEDIUM_BLUE: Color = Color {
r: 0.0,
g: 0.0,
b: 205.0 / 255.0,
};
pub const BLUE: Color = Color {
r: 0.0,
g: 0.0,
b: 1.0,
};
pub const ROYAL_BLUE: Color = Color {
r: 65.0 / 255.0,
g: 105.0 / 255.0,
b: 225.0 / 255.0,
};
pub const BLUE_VIOLET: Color = Color {
r: 138.0 / 255.0,
g: 43.0 / 255.0,
b: 226.0 / 255.0,
};
pub const INDIGO: Color = Color {
r: 75.0 / 255.0,
g: 0.0,
b: 130.0 / 255.0,
};
pub const DARK_SLATE_BLUE: Color = Color {
r: 72.0 / 255.0,
g: 61.0 / 255.0,
b: 0.54510,
};
pub const SLATE_BLUE: Color = Color {
r: 106.0 / 255.0,
g: 90.0 / 255.0,
b: 205.0 / 255.0,
};
pub const MEDIUM_SLATE_BLUE: Color = Color {
r: 123.0 / 255.0,
g: 104.0 / 255.0,
b: 238.0 / 255.0,
};
pub const MEDIUM_PURPLE: Color = Color {
r: 147.0 / 255.0,
g: 112.0 / 255.0,
b: 219.0 / 255.0,
};
pub const DARK_MAGENTA: Color = Color {
r: 0.54510,
g: 0.0,
b: 0.54510,
};
pub const DARK_VIOLET: Color = Color {
r: 148.0 / 255.0,
g: 0.0,
b: 211.0 / 255.0,
};
pub const DARK_ORCHID: Color = Color {
r: 153.0 / 255.0,
g: 50.0 / 255.0,
b: 204.0 / 255.0,
};
pub const MEDIUM_ORCHID: Color = Color {
r: 186.0 / 255.0,
g: 85.0 / 255.0,
b: 211.0 / 255.0,
};
pub const PURPLE: Color = Color {
r: 0.50196,
g: 0.0,
b: 0.50196,
};
pub const THISTLE: Color = Color {
r: 216.0 / 255.0,
g: 191.0 / 255.0,
b: 216.0 / 255.0,
};
pub const PLUM: Color = Color {
r: 221.0 / 255.0,
g: 160.0 / 255.0,
b: 221.0 / 255.0,
};
pub const VIOLET: Color = Color {
r: 238.0 / 255.0,
g: 130.0 / 255.0,
b: 238.0 / 255.0,
};
pub const MAGENTA: Color = Color {
r: 1.0,
g: 0.0,
b: 1.0,
};
pub const ORCHID: Color = Color {
r: 218.0 / 255.0,
g: 112.0 / 255.0,
b: 214.0 / 255.0,
};
pub const MEDIUM_VIOLET_RED: Color = Color {
r: 199.0 / 255.0,
g: 21.0 / 255.0,
b: 133.0 / 255.0,
};
pub const PALE_VIOLET_RED: Color = Color {
r: 219.0 / 255.0,
g: 112.0 / 255.0,
b: 147.0 / 255.0,
};
pub const DEEP_PINK: Color = Color {
r: 1.0,
g: 0.07843,
b: 147.0 / 255.0,
};
pub const HOT_PINK: Color = Color {
r: 1.0,
g: 105.0 / 255.0,
b: 180.0 / 255.0,
};
pub const LIGHT_PINK: Color = Color {
r: 1.0,
g: 182.0 / 255.0,
b: 193.0 / 255.0,
};
pub const PINK: Color = Color {
r: 1.0,
g: 192.0 / 255.0,
b: 203.0 / 255.0,
};
pub const ANTIQUE_WHITE: Color = Color {
r: 250.0 / 255.0,
g: 235.0 / 255.0,
b: 215.0 / 255.0,
};
pub const BEIGE: Color = Color {
r: 245.0 / 255.0,
g: 245.0 / 255.0,
b: 0.86275,
};
pub const BISQUE: Color = Color {
r: 1.0,
g: 228.0 / 255.0,
b: 196.0 / 255.0,
};
pub const BLANCHED_ALMOND: Color = Color {
r: 1.0,
g: 235.0 / 255.0,
b: 205.0 / 255.0,
};
pub const WHEAT: Color = Color {
r: 245.0 / 255.0,
g: 222.0 / 255.0,
b: 179.0 / 255.0,
};
pub const CORN_SILK: Color = Color {
r: 1.0,
g: 248.0 / 255.0,
b: 0.86275,
};
pub const LEMON_CHIFFON: Color = Color {
r: 1.0,
g: 250.0 / 255.0,
b: 205.0 / 255.0,
};
pub const LIGHT_GOLDENROD_YELLOW: Color = Color {
r: 250.0 / 255.0,
g: 250.0 / 255.0,
b: 210.0 / 255.0,
};
pub const LIGHT_YELLOW: Color = Color {
r: 1.0,
g: 1.0,
b: 224.0 / 255.0,
};
pub const SADDLE_BROWN: Color = Color {
r: 0.54510,
g: 69.0 / 255.0,
b: 19.0 / 255.0,
};
pub const SIENNA: Color = Color {
r: 160.0 / 255.0,
g: 82.0 / 255.0,
b: 45.0 / 255.0,
};
pub const CHOCOLATE: Color = Color {
r: 210.0 / 255.0,
g: 105.0 / 255.0,
b: 30.0 / 255.0,
};
pub const PERU: Color = Color {
r: 205.0 / 255.0,
g: 133.0 / 255.0,
b: 63.0 / 255.0,
};
pub const SANDY_BROWN: Color = Color {
r: 244.0 / 255.0,
g: 164.0 / 255.0,
b: 96.0 / 255.0,
};
pub const BURLY_WOOD: Color = Color {
r: 222.0 / 255.0,
g: 184.0 / 255.0,
b: 135.0 / 255.0,
};
pub const TAN: Color = Color {
r: 210.0 / 255.0,
g: 180.0 / 255.0,
b: 140.0 / 255.0,
};
pub const ROSY_BROWN: Color = Color {
r: 188.0 / 255.0,
g: 143.0 / 255.0,
b: 143.0 / 255.0,
};
pub const MOCCASIN: Color = Color {
r: 1.0,
g: 228.0 / 255.0,
b: 181.0 / 255.0,
};
pub const NAVAJO_WHITE: Color = Color {
r: 1.0,
g: 222.0 / 255.0,
b: 173.0 / 255.0,
};
pub const PEACH_PUFF: Color = Color {
r: 1.0,
g: 218.0 / 255.0,
b: 185.0 / 255.0,
};
pub const MISTY_ROSE: Color = Color {
r: 1.0,
g: 228.0 / 255.0,
b: 225.0 / 255.0,
};
pub const LAVENDER_BLUSH: Color = Color {
r: 1.0,
g: 240.0 / 255.0,
b: 245.0 / 255.0,
};
pub const LINEN: Color = Color {
r: 250.0 / 255.0,
g: 240.0 / 255.0,
b: 230.0 / 255.0,
};
pub const OLD_LACE: Color = Color {
r: 253.0 / 255.0,
g: 245.0 / 255.0,
b: 230.0 / 255.0,
};
pub const PAPAYA_WHIP: Color = Color {
r: 1.0,
g: 239.0 / 255.0,
b: 213.0 / 255.0,
};
pub const SEA_SHELL: Color = Color {
r: 1.0,
g: 245.0 / 255.0,
b: 238.0 / 255.0,
};
pub const MINT_CREAM: Color = Color {
r: 245.0 / 255.0,
g: 1.0,
b: 250.0 / 255.0,
};
pub const SLATE_GRAY: Color = Color {
r: 112.0 / 255.0,
g: 0.50196,
b: 144.0 / 255.0,
};
pub const LIGHT_SLATE_GRAY: Color = Color {
r: 119.0 / 255.0,
g: 136.0 / 255.0,
b: 153.0 / 255.0,
};
pub const LIGHT_STEEL_BLUE: Color = Color {
r: 176.0 / 255.0,
g: 196.0 / 255.0,
b: 222.0 / 255.0,
};
pub const LAVENDER: Color = Color {
r: 230.0 / 255.0,
g: 230.0 / 255.0,
b: 250.0 / 255.0,
};
pub const FLORAL_WHITE: Color = Color {
r: 1.0,
g: 250.0 / 255.0,
b: 240.0 / 255.0,
};
pub const ALICE_BLUE: Color = Color {
r: 240.0 / 255.0,
g: 248.0 / 255.0,<|fim▁hole|>
pub const GHOST_WHITE: Color = Color {
r: 248.0 / 255.0,
g: 248.0 / 255.0,
b: 1.0,
};
pub const HONEYDEW: Color = Color {
r: 240.0 / 255.0,
g: 1.0,
b: 240.0 / 255.0,
};
pub const IVORY: Color = Color {
r: 1.0,
g: 1.0,
b: 240.0 / 255.0,
};
pub const AZURE: Color = Color {
r: 240.0 / 255.0,
g: 1.0,
b: 1.0,
};
pub const SNOW: Color = Color {
r: 1.0,
g: 250.0 / 255.0,
b: 250.0 / 255.0,
};
pub const BLACK: Color = Color {
r: 0.0,
g: 0.0,
b: 0.0,
};
pub const DIM_GRAY_DIM_GREY: Color = Color {
r: 105.0 / 255.0,
g: 105.0 / 255.0,
b: 105.0 / 255.0,
};
pub const GRAY_GREY: Color = Color {
r: 0.50196,
g: 0.50196,
b: 0.50196,
};
pub const DARK_GRAY_DARK_GREY: Color = Color {
r: 169.0 / 255.0,
g: 169.0 / 255.0,
b: 169.0 / 255.0,
};
pub const SILVER: Color = Color {
r: 192.0 / 255.0,
g: 192.0 / 255.0,
b: 192.0 / 255.0,
};
pub const LIGHT_GRAY_LIGHT_GREY: Color = Color {
r: 211.0 / 255.0,
g: 211.0 / 255.0,
b: 211.0 / 255.0,
};
pub const GAINSBORO: Color = Color {
r: 0.86275,
g: 0.86275,
b: 0.86275,
};
pub const WHITE_SMOKE: Color = Color {
r: 245.0 / 255.0,
g: 245.0 / 255.0,
b: 245.0 / 255.0,
};
pub const WHITE: Color = Color {
r: 1.0,
g: 1.0,
b: 1.0,
};
#[cfg(test)]
mod tests {
use math::*;
#[test]
fn set_rgb() {
let c = Color::from_floats(1.0, 0.2, 0.1);
assert_eq!(c.r, 1.0);
assert_eq!(c.g, 0.2);
assert_eq!(c.b, 0.1);
}
#[test]
fn copy_gamma_to_linear() {
let c2 = Color::from_floats(0.3, 0.5, 0.9);
let c = c2.gamma_to_linear(None);
assert_eq!(c.r, 0.09);
assert_eq!(c.g, 0.25);
assert_eq!(c.b, 0.80999994);
}
#[test]
fn copy_linear_to_gamma() {
let c2 = Color::from_floats(0.09, 0.25, 0.81);
let c = c2.linear_to_gamma(None);
assert_eq!(c.r, 0.3);
assert_eq!(c.g, 0.5);
assert_eq!(c.b, 0.9);
}
#[test]
fn convert_gamma_to_linear() {
let c = Color::from_floats(0.3, 0.5, 0.9).convert_gamma_to_linear();
assert_eq!(c.r, 0.09);
assert_eq!(c.g, 0.25);
assert_eq!(c.b, 0.80999994);
}
#[test]
fn convert_linear_to_gamma() {
let c = Color {
r: 4.0,
g: 9.0,
b: 16.0,
}
.convert_linear_to_gamma();
assert_eq!(c.r, 2.0);
assert_eq!(c.g, 3.0);
assert_eq!(c.b, 4.0);
}
#[test]
fn set_with_num() {
let c = Color::from_hex(0xFF0000);
assert_eq!(c.r, 1.0);
assert_eq!(c.g, 0.0);
assert_eq!(c.b, 0.0);
}
#[test]
fn lerp() {
let c = Color::from_ints(0, 0, 0);
let c2 = c.lerp(&WHITE, 0.2);
assert_eq!(c2.r, 0.2);
assert_eq!(c2.g, 0.2);
assert_eq!(c2.b, 0.2);
}
#[test]
fn get_hex() {
let res = RED.hex();
assert_eq!(res, 0xFF0000);
}
#[test]
fn set_hex() {
let c = Color::from_hex(0xFA8072);
assert_eq!(c.hex(), 0xFA8072);
}
#[test]
fn get_hex_string() {
let res = TOMATO.hex_string();
assert_eq!(res, "ff6346");
}
#[test]
fn get_hsl() {
let c = Color::from_hex(0x80ffff);
let hsl = c.hsl();
assert_eq!(hsl.hue, 0.5);
assert_eq!(hsl.saturation, 1.0);
assert_eq!((hsl.lightness * 100.0).round() / 100.0, 0.75);
}
#[test]
fn set_hsl() {
let c = Color::from_hsl(&HSL {
hue: 0.75,
saturation: 1.0,
lightness: 0.25,
});
let hsl = c.hsl();
assert_eq!(hsl.hue, 0.75);
assert_eq!(hsl.saturation, 1.00);
assert_eq!(hsl.lightness, 0.25);
}
}<|fim▁end|> | b: 1.0,
}; |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>/**
* Coder for Raspberry Pi
* A simple platform for experimenting with web stuff.
* http://goo.gl/coder
*
* Copyright 2013 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
exports.settings={};
//These are dynamically updated by the runtime
//settings.appname - the app id (folder) where your app is installed
//settings.viewpath - prefix to where your view html files are located
//settings.staticurl - base url path to static assets /static/apps/appname
//settings.appurl - base url path to this app /app/appname
exports.get_routes = [
{ path:'/', handler:'index_handler' },
];
exports.post_routes = [
];
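//No POST routes are registered yet. A hypothetical registration (the handler
//name below is illustrative, not part of this app) would look like:
//exports.post_routes = [
//  { path:'/save', handler:'save_handler' },
//];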
exports.index_handler = function( req, res ) {
var tmplvars = {};
tmplvars['static_url'] = exports.settings.staticurl;
tmplvars['app_name'] = exports.settings.appname;
tmplvars['app_url'] = exports.settings.appurl;
tmplvars['device_name'] = exports.settings.device_name;
res.render( exports.settings.viewpath + '/index', tmplvars );
};
exports.on_destroy = function() {<|fim▁hole|>};<|fim▁end|> | |
<|file_name|>datacatalog_v1_generated_data_catalog_update_tag_sync.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#<|fim▁hole|># See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for UpdateTag
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-datacatalog
# [START datacatalog_v1_generated_DataCatalog_UpdateTag_sync]
from google.cloud import datacatalog_v1
def sample_update_tag():
# Create a client
client = datacatalog_v1.DataCatalogClient()
# Initialize request argument(s)
tag = datacatalog_v1.Tag()
tag.column = "column_value"
tag.template = "template_value"
request = datacatalog_v1.UpdateTagRequest(
tag=tag,
)
# Make the request
response = client.update_tag(request=request)
# Handle the response
print(response)
# [END datacatalog_v1_generated_DataCatalog_UpdateTag_sync]<|fim▁end|> | # Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
<|file_name|>customers.go<|end_file_name|><|fim▁begin|>package billing
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// CustomersClient is the billing client that provides access to billing resources for Azure subscriptions.
type CustomersClient struct {
BaseClient
}
// NewCustomersClient creates an instance of the CustomersClient client.
func NewCustomersClient(subscriptionID string) CustomersClient {
return NewCustomersClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewCustomersClientWithBaseURI creates an instance of the CustomersClient client.
func NewCustomersClientWithBaseURI(baseURI string, subscriptionID string) CustomersClient {
return CustomersClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// Get get the customer by id.
// Parameters:
// billingAccountName - billing Account Id.
// customerName - customer Id.
// expand - may be used to expand enabledAzureSkus, resellers.
func (client CustomersClient) Get(ctx context.Context, billingAccountName string, customerName string, expand string) (result Customer, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/CustomersClient.Get")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.GetPreparer(ctx, billingAccountName, customerName, expand)
if err != nil {
err = autorest.NewErrorWithError(err, "billing.CustomersClient", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "billing.CustomersClient", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "billing.CustomersClient", "Get", resp, "Failure responding to request")
}
return
}
// GetPreparer prepares the Get request.
func (client CustomersClient) GetPreparer(ctx context.Context, billingAccountName string, customerName string, expand string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"billingAccountName": autorest.Encode("path", billingAccountName),
"customerName": autorest.Encode("path", customerName),
}
const APIVersion = "2018-11-01-preview"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
if len(expand) > 0 {
queryParameters["$expand"] = autorest.Encode("query", expand)
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/providers/Microsoft.Billing/billingAccounts/{billingAccountName}/customers/{customerName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client CustomersClient) GetSender(req *http.Request) (*http.Response, error) {
sd := autorest.GetSendDecorators(req.Context(), autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
return autorest.SendWithSender(client, req, sd...)
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client CustomersClient) GetResponder(resp *http.Response) (result Customer, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
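// Usage sketch for Get (illustrative account and customer IDs, error
// handling elided; not part of the generated SDK):
//
//	client := billing.NewCustomersClient("<subscription id>")
//	customer, err := client.Get(ctx, "myBillingAccount", "myCustomer", "")
//	if err == nil {
//		_ = customer
//	}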
// ListByBillingAccountName lists all customers that the current user can work with on behalf of a partner.
// Parameters:
// billingAccountName - billing Account Id.
// filter - may be used to filter using hasPermission('{permissionId}') to only return customers for which the
// caller has the specified permission.
// skiptoken - skiptoken is only used if a previous operation returned a partial result. If a previous response
// contains a nextLink element, the value of the nextLink element will include a skiptoken parameter that
// specifies a starting point to use for subsequent calls.
func (client CustomersClient) ListByBillingAccountName(ctx context.Context, billingAccountName string, filter string, skiptoken string) (result CustomerListResultPage, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/CustomersClient.ListByBillingAccountName")
defer func() {
sc := -1
if result.clr.Response.Response != nil {
sc = result.clr.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.fn = client.listByBillingAccountNameNextResults
req, err := client.ListByBillingAccountNamePreparer(ctx, billingAccountName, filter, skiptoken)
if err != nil {
err = autorest.NewErrorWithError(err, "billing.CustomersClient", "ListByBillingAccountName", nil, "Failure preparing request")
return
}
resp, err := client.ListByBillingAccountNameSender(req)
if err != nil {
result.clr.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "billing.CustomersClient", "ListByBillingAccountName", resp, "Failure sending request")
return
}
result.clr, err = client.ListByBillingAccountNameResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "billing.CustomersClient", "ListByBillingAccountName", resp, "Failure responding to request")
}
return
}
// ListByBillingAccountNamePreparer prepares the ListByBillingAccountName request.
func (client CustomersClient) ListByBillingAccountNamePreparer(ctx context.Context, billingAccountName string, filter string, skiptoken string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"billingAccountName": autorest.Encode("path", billingAccountName),
}
const APIVersion = "2018-11-01-preview"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
if len(filter) > 0 {
queryParameters["$filter"] = autorest.Encode("query", filter)
}
if len(skiptoken) > 0 {
queryParameters["$skiptoken"] = autorest.Encode("query", skiptoken)
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/providers/Microsoft.Billing/billingAccounts/{billingAccountName}/customers", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListByBillingAccountNameSender sends the ListByBillingAccountName request. The method will close the
// http.Response Body if it receives an error.
func (client CustomersClient) ListByBillingAccountNameSender(req *http.Request) (*http.Response, error) {
sd := autorest.GetSendDecorators(req.Context(), autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
return autorest.SendWithSender(client, req, sd...)
}
// ListByBillingAccountNameResponder handles the response to the ListByBillingAccountName request. The method always
// closes the http.Response Body.
func (client CustomersClient) ListByBillingAccountNameResponder(resp *http.Response) (result CustomerListResult, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// listByBillingAccountNameNextResults retrieves the next set of results, if any.
func (client CustomersClient) listByBillingAccountNameNextResults(ctx context.Context, lastResults CustomerListResult) (result CustomerListResult, err error) {
req, err := lastResults.customerListResultPreparer(ctx)
if err != nil {
return result, autorest.NewErrorWithError(err, "billing.CustomersClient", "listByBillingAccountNameNextResults", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListByBillingAccountNameSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}<|fim▁hole|> err = autorest.NewErrorWithError(err, "billing.CustomersClient", "listByBillingAccountNameNextResults", resp, "Failure responding to next results request")
}
return
}
// ListByBillingAccountNameComplete enumerates all values, automatically crossing page boundaries as required.
func (client CustomersClient) ListByBillingAccountNameComplete(ctx context.Context, billingAccountName string, filter string, skiptoken string) (result CustomerListResultIterator, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/CustomersClient.ListByBillingAccountName")
defer func() {
sc := -1
if result.Response().Response.Response != nil {
sc = result.page.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.page, err = client.ListByBillingAccountName(ctx, billingAccountName, filter, skiptoken)
return
}<|fim▁end|> | return result, autorest.NewErrorWithError(err, "billing.CustomersClient", "listByBillingAccountNameNextResults", resp, "Failure sending next results request")
}
result, err = client.ListByBillingAccountNameResponder(resp)
if err != nil { |
<|file_name|>carbon-upgrade.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env node
/**
* Copyright IBM Corp. 2019, 2019
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
/* eslint-disable no-console */
'use strict';
// Makes the script crash on unhandled rejections instead of silently
// ignoring them. In the future, promise rejections that are not handled will<|fim▁hole|>process.on('unhandledRejection', (error) => {
console.error(error);
});
var chalk = require('chalk');
var currentNodeVersion = process.versions.node;
var semver = currentNodeVersion.split('.');
var major = semver[0];
if (major < 14) {
console.error(
chalk.red(
`You are running Node ${currentNodeVersion}.\n` +
`carbon-upgrade requires Node 14 or higher, please update your ` +
`version of Node.`
)
);
process.exit(1);
}
var main = require('../src/cli');
main(process).catch((error) => {
console.error(error);
process.exit(1);
});<|fim▁end|> | // terminate the Node.js process with a non-zero exit code. |
<|file_name|>test_base_module.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Albert SHENOUDA <[email protected]>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.addons.recurring_contract.tests.test_base_contract \
import test_base_contract
import logging
logger = logging.getLogger(__name__)
class test_base_module(test_base_contract):
def setUp(self):
# Retrieval of the income account
super(test_base_module, self).setUp()
self.property_account_income = self.env['account.account'].search([
('type', '=', 'other'),
('name', '=', 'Property Account Income Test')]).ids[0]
self.property_account_expense = self.env['account.account'].search([
('type', '=', 'other'),
('name', '=', 'Property Account Expense Test')
]).ids[0]
# Retrieval and modification of the products
product_obj = self.env['product.product']
self.product_sp = product_obj.search(
[('name', '=', 'Sponsorship')])
self.product_gf = product_obj.search(
[('name', '=', 'General Fund')])
self.product_bf = product_obj.search(
[('name', '=', 'Birthday Gift')])
self.product_fg = product_obj.search(
[('name', '=', 'Family Gift')])
if self.product_sp:
self.product_sp[0].write({
'property_account_income': self.property_account_income,
'property_account_expense': self.property_account_expense,
})
if self.product_gf:
self.product_gf[0].write({
'property_account_income': self.property_account_income,
'property_account_expense': self.property_account_expense,
})
if self.product_bf:
self.product_bf[0].write({
'property_account_income': self.property_account_income,
'property_account_expense': self.property_account_expense,
})
if self.product_fg:
self.product_fg[0].write({
'property_account_income': self.property_account_income,
'property_account_expense': self.property_account_expense,
})
# Add an income account to the product with id 1
product = self.env['product.product'].browse(1)
product.property_account_income = self.property_account_income
def _pay_invoice(self, invoice):
bank_journal = self.env['account.journal'].search(
[('code', '=', 'TBNK')])[0]
move_obj = self.env['account.move']
move_line_obj = self.env['account.move.line']
account_id = invoice.partner_id.property_account_receivable.id
move = move_obj.create({
'journal_id': bank_journal.id
})
move_line_obj.create({
<|fim▁hole|> 'partner_id': invoice.partner_id.id,
'account_id': bank_journal.default_debit_account_id.id,
'debit': invoice.amount_total,
'journal_id': bank_journal.id,
'period_id': invoice.period_id.id,
'date': invoice.date_due
})
mv_line = move_line_obj.create({
'name': 'PAY-' + invoice.number,
'move_id': move.id,
'partner_id': invoice.partner_id.id,
'account_id': account_id,
'credit': invoice.amount_total,
'journal_id': invoice.journal_id.id,
'period_id': invoice.period_id.id,
'date': invoice.date_due
})
move.button_validate()
to_reconcile = move_line_obj.search([
('move_id', '=', invoice.move_id.id),
('account_id', '=', account_id)]) + mv_line
to_reconcile.reconcile()<|fim▁end|> | 'name': 'BNK-' + invoice.number,
'move_id': move.id,
|
<|file_name|>time_based_atom_grouper.py<|end_file_name|><|fim▁begin|>from collections import OrderedDict
from app.master.atom_grouper import AtomGrouper
class TimeBasedAtomGrouper(object):
"""
This class implements the algorithm to best split & group atoms based on historic time values. This algorithm is
somewhat complicated, so I'm going to give a summary here.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Let N be the number of concurrent executors allocated for this job.
Let T be the aggregate serial time to execute all atoms on a single executor.
Both N and T are known values at the beginning of this algorithm.
In the ideal subjob atom-grouping, we would have exactly N subjobs, each allocated with T/N amount of work that
would all end at the same time. However, in reality, there are a few factors that makes this solution unfeasible:
- There is a significant amount of variability in the times of running these atoms, so numbers are never exact.
- Certain builds will introduce new tests (for which we don't have historical time data for).
- Not all of the machines are exactly the same, so we can't expect identical performance.
We have two aims for this algorithm:
- Minimize the amount of framework overhead (time spent sending and retrieving subjobs) and maximize the amount of
time the slaves actually spend running the build.
- Don't overload any single executor with too much work--this will cause the whole build to wait on a single
executor. We want to try to get all of the executors to end as close to the same time as possible in order to
get rid of any inefficient use of slave machines.
In order to accomplish this, the algorithm implemented by this class tries to split up the majority of the atoms
into N buckets, and splits up the rest of the atoms into smaller buckets. Hopefully, the timeline graph of
executed subjobs for each of the executors would end up looking like this:
[========================================================================][===][==][==]
[===============================================================================][==]
[====================================================================][====][===][==][=]
[========================================================================][===][==][=]
[=====================================================================][====][==][==]
[==================================================================================][=]
[===================================================================][======][==][==]
The algorithm has two stages of subjob creation: the 'big chunk' stage and the 'small chunk' stage. The 'big chunk'
stage creates exactly N large subjob groupings that will consist of the majority of atoms (in terms of runtime).
The 'small chunk' stage creates ~2N short subjob groupings that will be used to fill in the gaps in order to aim for
having all of the executors end at similar times.
Notes:
- For new atoms that we don't have historic times for, we will assign it the highest atom time value in order to
avoid underestimating the length of unknown atoms.
- We will have to try tweaking the percentage of T that we want to be allocated for the initial large batch of
big subjobs. Same goes for the number and size of the smaller buckets.
"""
BIG_CHUNK_FRACTION = 0.8
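# A worked example of the bucket sizing performed in groupings() below
# (illustrative numbers, not taken from any real build): with N = 4
# executors and T = 100 seconds of aggregate serial runtime, the 'big
# chunk' stage targets (100 * 0.8) / 4 = 20 seconds per big subjob and the
# 'small chunk' stage targets (100 * 0.2) / (2 * 4) = 2.5 seconds per
# small subjob, i.e. roughly 4 big subjobs plus ~8 small gap fillers.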
def __init__(self, atoms, max_executors, atom_time_map, project_directory):
"""
:param atoms: the list of atoms for this build
:type atoms: list[app.master.atom.Atom]
:param max_executors: the maximum number of executors for this build
:type max_executors: int
:param atom_time_map: a dictionary containing the historic times for atoms for this particular job
:type atom_time_map: dict[str, float]
:type project_directory: str
"""
self._atoms = atoms
self._max_executors = max_executors
self._atom_time_map = atom_time_map
self._project_directory = project_directory
def groupings(self):
"""
Group the atoms into subjobs using historic timing data.
:return: a list of lists of atoms
:rtype: list[list[app.master.atom.Atom]]
"""
# 1). Coalesce the atoms with historic atom times, and also get total estimated runtime
try:
total_estimated_runtime = self._set_expected_atom_times(
self._atoms, self._atom_time_map, self._project_directory)
except _AtomTimingDataError:
grouper = AtomGrouper(self._atoms, self._max_executors)
return grouper.groupings()
# 2). Sort them by decreasing time, and add them to an OrderedDict
atoms_by_decreasing_time = sorted(self._atoms, key=lambda atom: atom.expected_time, reverse=True)
sorted_atom_times_left = OrderedDict([(atom, atom.expected_time) for atom in atoms_by_decreasing_time])
# 3). Group them!
# Calculate what the target 'big subjob' time is going to be for each executor's initial subjob
big_subjob_time = (total_estimated_runtime * self.BIG_CHUNK_FRACTION) / self._max_executors
# Calculate what the target 'small subjob' time is going to be
small_subjob_time = (total_estimated_runtime * (1.0 - self.BIG_CHUNK_FRACTION)) / (2 * self._max_executors)
# _group_atoms_into_sized_buckets() will remove elements from sorted_atom_times_left.
subjobs = self._group_atoms_into_sized_buckets(sorted_atom_times_left, big_subjob_time, self._max_executors)
small_subjobs = self._group_atoms_into_sized_buckets(sorted_atom_times_left, small_subjob_time, None)
subjobs.extend(small_subjobs)
return subjobs
def _set_expected_atom_times(self, new_atoms, old_atoms_with_times, project_directory):
"""
Set the expected runtime (new_atom.expected_time) of each atom in new_atoms using historic timing data.
Additionally, return the total estimated serial-runtime for this build. Although this seems like an odd thing
for this method to return, it is done here for efficiency. There can be thousands of atoms, and iterating
through them multiple times seems inefficient.
:param new_atoms: the list of atoms that will be run in this build
:type new_atoms: list[app.master.atom.Atom]
:param old_atoms_with_times: a dictionary containing the historic times for atoms for this particular job
:type old_atoms_with_times: dict[str, float]
:type project_directory: str
:return: the total estimated runtime in seconds
:rtype: float
"""
atoms_without_timing_data = []
total_time = 0
max_atom_time = 0
# Generate list for atoms that have timing data
for new_atom in new_atoms:
if new_atom.command_string not in old_atoms_with_times:
atoms_without_timing_data.append(new_atom)
continue
new_atom.expected_time = old_atoms_with_times[new_atom.command_string]
# Discover largest single atom time to use as conservative estimates for atoms with unknown times
if max_atom_time < new_atom.expected_time:
max_atom_time = new_atom.expected_time
# Note: atom command strings keep the project directory in them, as this data will be
# sent directly to the slave to be run.
total_time += new_atom.expected_time
# For the atoms without historic timing data, assign them the largest atom time we have
for new_atom in atoms_without_timing_data:
new_atom.expected_time = max_atom_time
if len(new_atoms) == len(atoms_without_timing_data):
raise _AtomTimingDataError
total_time += (max_atom_time * len(atoms_without_timing_data))
return total_time
def _group_atoms_into_sized_buckets(self, sorted_atom_time_dict, target_group_time, max_groups_to_create):
"""
Given a sorted dictionary (Python FTW) of [atom, time] pairs in variable sorted_atom_time_dict, return a list
of lists of atoms that each are estimated to take target_group_time seconds. This method will generate at most
max_groups_to_create groupings, and will return once this limit is reached or when sorted_atom_time_dict is
empty.
Note, this method will modify sorted_atom_time_dict's state by removing elements as needed (often from the
middle of the collection).
:param sorted_atom_time_dict: the sorted (longest first), double-ended queue containing [atom, time] pairs.
This OrderedDict will have elements removed from this method.
:type sorted_atom_time_dict: OrderedDict[app.master.atom.Atom, float]
:param target_group_time: how long each subjob should approximately take
:type target_group_time: float
:param max_groups_to_create: the maximum number of subjobs to create. Once max_groups_to_create limit is
reached, this method will return the subjobs that have already been grouped. If set to None, then there
is no limit.
:type max_groups_to_create: int|None
:return: the groups of grouped atoms, with each group taking an estimated target_group_time
:rtype: list[list[app.master.atom.Atom]]
"""
subjobs = []
subjob_time_so_far = 0
subjob_atoms = []<|fim▁hole|>
while (max_groups_to_create is None or len(subjobs) < max_groups_to_create) and len(sorted_atom_time_dict) > 0:
for atom, time in list(sorted_atom_time_dict.items()):
if len(subjob_atoms) == 0 or (time + subjob_time_so_far) <= target_group_time:
subjob_time_so_far += time
subjob_atoms.append(atom)
sorted_atom_time_dict.pop(atom)
# If (number of subjobs created so far + atoms left) is less than or equal to the total number of
# subjobs we need to create, then have each remaining atom be a subjob and return.
# The "+ 1" is here to account for the current subjob being generated, but that hasn't been
# appended to subjobs yet.
if max_groups_to_create is not None and (len(subjobs) + len(sorted_atom_time_dict) + 1) <= max_groups_to_create:
subjobs.append(subjob_atoms)
for atom, _ in list(sorted_atom_time_dict.items()):
sorted_atom_time_dict.pop(atom)
subjobs.append([atom])
return subjobs
subjobs.append(subjob_atoms)
subjob_atoms = []
subjob_time_so_far = 0
return subjobs
class _AtomTimingDataError(Exception):
"""
An exception to represent the case where the atom timing data is either not present or incorrect.
"""<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Simple, schema-based database abstraction layer for the datastore.
Modeled after Django's abstraction layer on top of SQL databases,
http://www.djangoproject.com/documentation/model_api/. Ours is a little simpler
and a lot less code because the datastore is so much simpler than SQL
databases.
The programming model is to declare Python subclasses of the Model class,
declaring datastore properties as class members of that class. So if you want to
publish a story with title, body, and created date, you would do it like this:
class Story(db.Model):
title = db.StringProperty()
body = db.TextProperty()
created = db.DateTimeProperty(auto_now_add=True)
You can create a new Story in the datastore with this usage pattern:
story = Story(title='My title')
story.body = 'My body'
story.put()
You query for Story entities using built in query interfaces that map directly
to the syntax and semantics of the datastore:
stories = Story.all().filter('date >=', yesterday).order('-date')
for story in stories:
print story.title
The Property declarations enforce types by performing validation on assignment.
For example, the DateTimeProperty enforces that you assign valid datetime
objects, and if you supply the "required" option for a property, you will not
be able to assign None to that property.
We also support references between models, so if a story has comments, you
would represent it like this:
class Comment(db.Model):
story = db.ReferenceProperty(Story)
body = db.TextProperty()
When you get a story out of the datastore, the story reference is resolved
automatically the first time it is referenced, which makes it easy to use
model instances without performing additional queries by hand:
comment = Comment.get(key)
print comment.story.title
Likewise, you can access the set of comments that refer to each story through
this property through a reverse reference called comment_set, which is a Query
preconfigured to return all matching comments:
story = Story.get(key)
for comment in story.comment_set:
print comment.body
"""
import base64
import copy
import datetime
import logging
import re
import time
import urlparse
import warnings
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.api import users
from google.appengine.datastore import datastore_pb
Error = datastore_errors.Error
BadValueError = datastore_errors.BadValueError
BadPropertyError = datastore_errors.BadPropertyError
BadRequestError = datastore_errors.BadRequestError
EntityNotFoundError = datastore_errors.EntityNotFoundError
BadArgumentError = datastore_errors.BadArgumentError
QueryNotFoundError = datastore_errors.QueryNotFoundError
TransactionNotFoundError = datastore_errors.TransactionNotFoundError
Rollback = datastore_errors.Rollback
TransactionFailedError = datastore_errors.TransactionFailedError
BadFilterError = datastore_errors.BadFilterError
BadQueryError = datastore_errors.BadQueryError
BadKeyError = datastore_errors.BadKeyError
InternalError = datastore_errors.InternalError
NeedIndexError = datastore_errors.NeedIndexError
Timeout = datastore_errors.Timeout
CommittedButStillApplying = datastore_errors.CommittedButStillApplying
ValidationError = BadValueError
Key = datastore_types.Key
Category = datastore_types.Category
Link = datastore_types.Link
Email = datastore_types.Email
GeoPt = datastore_types.GeoPt
IM = datastore_types.IM
PhoneNumber = datastore_types.PhoneNumber
PostalAddress = datastore_types.PostalAddress
Rating = datastore_types.Rating
Text = datastore_types.Text
Blob = datastore_types.Blob
ByteString = datastore_types.ByteString
BlobKey = datastore_types.BlobKey
READ_CAPABILITY = datastore.READ_CAPABILITY
WRITE_CAPABILITY = datastore.WRITE_CAPABILITY
STRONG_CONSISTENCY = datastore.STRONG_CONSISTENCY
EVENTUAL_CONSISTENCY = datastore.EVENTUAL_CONSISTENCY
_kind_map = {}
_SELF_REFERENCE = object()
_RESERVED_WORDS = set(['key_name'])
class NotSavedError(Error):
"""Raised when a saved-object action is performed on a non-saved object."""
class KindError(BadValueError):
"""Raised when an entity is used with incorrect Model."""
class PropertyError(Error):
"""Raised when non-existent property is referenced."""
class DuplicatePropertyError(Error):
"""Raised when a property is duplicated in a model definition."""
class ConfigurationError(Error):
"""Raised when a property or model is improperly configured."""
class ReservedWordError(Error):
"""Raised when a property is defined for a reserved word."""
class DerivedPropertyError(Error):
"""Raised when attempting to assign a value to a derived property."""
_ALLOWED_PROPERTY_TYPES = set([
basestring,
str,
unicode,
bool,
int,
long,
float,
Key,
datetime.datetime,
datetime.date,
datetime.time,
Blob,
ByteString,
Text,
users.User,
Category,
Link,
Email,
GeoPt,
IM,
PhoneNumber,
PostalAddress,
Rating,
BlobKey,
])
_ALLOWED_EXPANDO_PROPERTY_TYPES = set(_ALLOWED_PROPERTY_TYPES)
_ALLOWED_EXPANDO_PROPERTY_TYPES.update((list, tuple, type(None)))
_OPERATORS = ['<', '<=', '>', '>=', '=', '==', '!=', 'in']
_FILTER_REGEX = re.compile(
'^\s*([^\s]+)(\s+(%s)\s*)?$' % '|'.join(_OPERATORS),
re.IGNORECASE | re.UNICODE)
def class_for_kind(kind):
"""Return base-class responsible for implementing kind.
Necessary to recover the class responsible for implementing provided
kind.
Args:
kind: Entity kind string.
Returns:
Class implementation for kind.
Raises:
KindError when there is no implementation for kind.
"""
try:
return _kind_map[kind]
except KeyError:
raise KindError('No implementation for kind \'%s\'' % kind)
def check_reserved_word(attr_name):
"""Raise an exception if attribute name is a reserved word.
Args:
attr_name: Name to check to see if it is a reserved word.
Raises:
ReservedWordError when attr_name is determined to be a reserved word.
"""
if datastore_types.RESERVED_PROPERTY_NAME.match(attr_name):
raise ReservedWordError(
"Cannot define property. All names both beginning and "
"ending with '__' are reserved.")
if attr_name in _RESERVED_WORDS or attr_name in dir(Model):
raise ReservedWordError(
"Cannot define property using reserved word '%(attr_name)s'. "
"If you would like to use this name in the datastore consider "
"using a different name like %(attr_name)s_ and adding "
"name='%(attr_name)s' to the parameter list of the property "
"definition." % locals())
def query_descendants(model_instance):
"""Returns a query for all the descendants of a model instance.
Args:
model_instance: Model instance to find the descendants of.
Returns:
Query that will retrieve all entities that have the given model instance
as an ancestor. Unlike normal ancestor queries, this does not include the
ancestor itself.
"""
result = Query().ancestor(model_instance)
result.filter(datastore_types._KEY_SPECIAL_PROPERTY + ' >',
model_instance.key())
return result
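# Illustrative usage (assuming a saved Story instance as in the module
# docstring): iterates over every entity whose key has story.key() as an
# ancestor, excluding story itself.
#
#   for child in query_descendants(story):
#     print child.key()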
def model_to_protobuf(model_instance, _entity_class=datastore.Entity):
"""Encodes a model instance as a protocol buffer.
Args:
model_instance: Model instance to encode.
Returns:
entity_pb.EntityProto representation of the model instance
"""
return model_instance._populate_entity(_entity_class).ToPb()
def model_from_protobuf(pb, _entity_class=datastore.Entity):
"""Decodes a model instance from a protocol buffer.
Args:
pb: The protocol buffer representation of the model instance. Can be an
entity_pb.EntityProto or str encoding of an entity_bp.EntityProto
Returns:
Model instance resulting from decoding the protocol buffer
"""
entity = _entity_class.FromPb(pb)
return class_for_kind(entity.kind()).from_entity(entity)
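# Round-trip sketch for the two helpers above (assuming a Story model as in
# the module docstring):
#
#   pb = model_to_protobuf(story)         # entity_pb.EntityProto
#   story_copy = model_from_protobuf(pb)  # a new Story instance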
def _initialize_properties(model_class, name, bases, dct):
"""Initialize Property attributes for Model-class.
Args:
model_class: Model class to initialize properties for.
"""
model_class._properties = {}
property_source = {}
def get_attr_source(name, cls):
for src_cls in cls.mro():
if name in src_cls.__dict__:
return src_cls
defined = set()
for base in bases:
if hasattr(base, '_properties'):
property_keys = set(base._properties.keys())
duplicate_property_keys = defined & property_keys
for dupe_prop_name in duplicate_property_keys:
old_source = property_source[dupe_prop_name] = get_attr_source(
dupe_prop_name, property_source[dupe_prop_name])
new_source = get_attr_source(dupe_prop_name, base)
if old_source != new_source:
raise DuplicatePropertyError(
'Duplicate property, %s, is inherited from both %s and %s.' %
(dupe_prop_name, old_source.__name__, new_source.__name__))
property_keys -= duplicate_property_keys
if property_keys:
defined |= property_keys
property_source.update(dict.fromkeys(property_keys, base))
model_class._properties.update(base._properties)
for attr_name in dct.keys():
attr = dct[attr_name]
if isinstance(attr, Property):
check_reserved_word(attr_name)
if attr_name in defined:
raise DuplicatePropertyError('Duplicate property: %s' % attr_name)
defined.add(attr_name)
model_class._properties[attr_name] = attr
attr.__property_config__(model_class, attr_name)
model_class._unindexed_properties = frozenset(
name for name, prop in model_class._properties.items() if not prop.indexed)
def _coerce_to_key(value):
"""Returns the value's key.
Args:
value: a Model or Key instance or string encoded key or None
Returns:
The corresponding key, or None if value is None.
"""
if value is None:
return None
value, multiple = datastore.NormalizeAndTypeCheck(
value, (Model, Key, basestring))
if len(value) > 1:
raise datastore_errors.BadArgumentError('Expected only one model or key')
value = value[0]
if isinstance(value, Model):
return value.key()
elif isinstance(value, basestring):
return Key(value)
else:
return value
class PropertiedClass(type):
"""Meta-class for initializing Model classes properties.
Used for initializing Properties defined in the context of a model.
By using a meta-class much of the configuration of a Property
descriptor becomes implicit. By using this meta-class, descriptors
that are of class Model are notified about which class they
belong to and what attribute they are associated with and can
do appropriate initialization via __property_config__.
Duplicate properties are not permitted.
"""
def __init__(cls, name, bases, dct, map_kind=True):
"""Initializes a class that might have property definitions.
This method is called when a class is created with the PropertiedClass
meta-class.
Loads all properties for this model and its base classes in to a dictionary
for easy reflection via the 'properties' method.
Configures each property defined in the new class.
Duplicate properties, either defined in the new class or defined separately
in two base classes are not permitted.
Properties may not assigned to names which are in the list of
_RESERVED_WORDS. It is still possible to store a property using a reserved
word in the datastore by using the 'name' keyword argument to the Property
constructor.
Args:
cls: Class being initialized.
name: Name of new class.
bases: Base classes of new class.
dct: Dictionary of new definitions for class.
Raises:
DuplicatePropertyError when a property is duplicated either in the new
class or separately in two base classes.
ReservedWordError when a property is given a name that is in the list of
reserved words, attributes of Model and names of the form '__.*__'.
"""
super(PropertiedClass, cls).__init__(name, bases, dct)
_initialize_properties(cls, name, bases, dct)
if map_kind:
_kind_map[cls.kind()] = cls
class Property(object):
"""A Property is an attribute of a Model.
It defines the type of the attribute, which determines how it is stored
in the datastore and how the property values are validated. Different property
types support different options, which change validation rules, default
values, etc. The simplest example of a property is a StringProperty:
class Story(db.Model):
title = db.StringProperty()
"""
creation_counter = 0
def __init__(self,
verbose_name=None,
name=None,
default=None,
required=False,
validator=None,
choices=None,
indexed=True):
"""Initializes this Property with the given options.
Args:
verbose_name: User friendly name of property.
name: Storage name for property. By default, uses attribute name
as it is assigned in the Model sub-class.
default: Default value for property if none is assigned.
required: Whether property is required.
validator: User provided method used for validation.
choices: User provided set of valid property values.
indexed: Whether property is indexed.
"""
self.verbose_name = verbose_name
self.name = name
self.default = default
self.required = required
self.validator = validator
self.choices = choices
self.indexed = indexed
self.creation_counter = Property.creation_counter
Property.creation_counter += 1
def __property_config__(self, model_class, property_name):
"""Configure property, connecting it to its model.
Configure the property so that it knows its property name and what class
it belongs to.
Args:
model_class: Model class which Property will belong to.
property_name: Name of property within Model instance to store property
values in. By default this will be the property name preceded by
an underscore, but may change for different subclasses.
"""
self.model_class = model_class
if self.name is None:
self.name = property_name
def __get__(self, model_instance, model_class):
"""Returns the value for this property on the given model instance.
See http://docs.python.org/ref/descriptors.html for a description of
the arguments to this class and what they mean."""
if model_instance is None:
return self
try:
return getattr(model_instance, self._attr_name())
except AttributeError:
return None
def __set__(self, model_instance, value):
"""Sets the value for this property on the given model instance.
See http://docs.python.org/ref/descriptors.html for a description of
the arguments to this class and what they mean.
"""
value = self.validate(value)
setattr(model_instance, self._attr_name(), value)
def default_value(self):
"""Default value for unassigned values.
Returns:
Default value as provided by __init__(default).
"""
return self.default
def validate(self, value):
"""Assert that provided value is compatible with this property.
Args:
value: Value to validate against this Property.
Returns:
A valid value, either the input unchanged or adapted to the
required type.
Raises:
BadValueError if the value is not appropriate for this
property in any way.
"""
if self.empty(value):
if self.required:
raise BadValueError('Property %s is required' % self.name)
else:
if self.choices:
match = False
for choice in self.choices:
if choice == value:
match = True
if not match:
raise BadValueError('Property %s is %r; must be one of %r' %
(self.name, value, self.choices))
if self.validator is not None:
self.validator(value)
return value
def empty(self, value):
"""Determine if value is empty in the context of this property.
For most kinds, this is equivalent to "not value", but for kinds like
bool, the test is more subtle, so subclasses can override this method
if necessary.
Args:
value: Value to validate against this Property.
Returns:
True if this value is considered empty in the context of this Property
type, otherwise False.
"""
return not value
def get_value_for_datastore(self, model_instance):
"""Datastore representation of this property.
Looks for this property in the given model instance, and returns the proper
datastore representation of the value that can be stored in a datastore
entity. Most critically, it will fetch the datastore key value for
reference properties.
Args:
model_instance: Instance to fetch datastore value from.
Returns:
Datastore representation of the model value in a form that is
appropriate for storing in the datastore.
"""
return self.__get__(model_instance, model_instance.__class__)
def make_value_from_datastore(self, value):
"""Native representation of this property.
Given a value retrieved from a datastore entity, return a value,
possibly converted, to be stored on the model instance. Usually
this returns the value unchanged, but a property class may
override this when it uses a different datatype on the model
instance than on the entity.
This API is not quite symmetric with get_value_for_datastore(),
because the model instance on which to store the converted value
may not exist yet -- we may be collecting values to be passed to a
model constructor.
Args:
value: value retrieved from the datastore entity.
Returns:
The value converted for use as a model instance attribute.
"""
return value
def _require_parameter(self, kwds, parameter, value):
"""Sets kwds[parameter] to value.
If kwds[parameter] exists and is not value, raises ConfigurationError.
Args:
kwds: The parameter dict, which maps parameter names (strings) to values.
parameter: The name of the parameter to set.
value: The value to set it to.
"""
if parameter in kwds and kwds[parameter] != value:
raise ConfigurationError('%s must be %s.' % (parameter, value))
kwds[parameter] = value
def _attr_name(self):
"""Attribute name we use for this property in model instances.
DO NOT USE THIS METHOD.
"""
return '_' + self.name
data_type = str
def datastore_type(self):
"""Deprecated backwards-compatible accessor method for self.data_type."""
return self.data_type
class Model(object):
"""Model is the superclass of all object entities in the datastore.
The programming model is to declare Python subclasses of the Model class,
declaring datastore properties as class members of that class. So if you want
to publish a story with title, body, and created date, you would do it like
this:
class Story(db.Model):
title = db.StringProperty()
body = db.TextProperty()
created = db.DateTimeProperty(auto_now_add=True)
A model instance can have a single parent. Model instances without any
parent are root entities. It is possible to efficiently query for
instances by their shared parent. All descendants of a single root
instance also behave as a transaction group. This means that when you
work with one member of the group within a transaction all descendants of that
root join the transaction. All operations within a transaction on this
group are ACID.
"""
__metaclass__ = PropertiedClass
def __init__(self,
parent=None,
key_name=None,
_app=None,
_from_entity=False,
**kwds):
"""Creates a new instance of this model.
To create a new entity, you instantiate a model and then call put(),
which saves the entity to the datastore:
person = Person()
person.name = 'Bret'
person.put()
You can initialize properties in the model in the constructor with keyword
arguments:
person = Person(name='Bret')
We initialize all other properties to the default value (as defined by the
properties in the model definition) if they are not provided in the
constructor.
Args:
parent: Parent instance for this instance or None, indicating a top-
level instance.
key_name: Name for new model instance.
_from_entity: Intentionally undocumented.
kwds: Keyword arguments mapping to properties of model. Also:
key: Key instance for this instance, if provided makes parent and
key_name redundant (they do not need to be set but if they are
they must match the key).
"""
key = kwds.get('key', None)
if key is not None:
if isinstance(key, (tuple, list)):
key = Key.from_path(*key)
if isinstance(key, basestring):
key = Key(encoded=key)
if not isinstance(key, Key):
raise TypeError('Expected Key type; received %s (is %s)' %
(key, key.__class__.__name__))
if not key.has_id_or_name():
raise BadKeyError('Key must have an id or name')
if key.kind() != self.kind():
raise BadKeyError('Expected Key kind to be %s; received %s' %
(self.kind(), key.kind()))
if _app is not None and key.app() != _app:
raise BadKeyError('Expected Key app to be %s; received %s' %
(_app, key.app()))
if key_name and key_name != key.name():
raise BadArgumentError('Cannot use key and key_name at the same time'
' with different values')
if parent and parent != key.parent():
raise BadArgumentError('Cannot use key and parent at the same time'
' with different values')
self._key = key
self._key_name = None
self._parent = None
self._parent_key = None
else:
if key_name == '':
raise BadKeyError('Name cannot be empty.')
elif key_name is not None and not isinstance(key_name, basestring):
raise BadKeyError('Name must be string type, not %s' %
key_name.__class__.__name__)
if parent is not None:
if not isinstance(parent, (Model, Key)):
raise TypeError('Expected Model type; received %s (is %s)' %
(parent, parent.__class__.__name__))
if isinstance(parent, Model) and not parent.has_key():
raise BadValueError(
"%s instance must have a complete key before it can be used as a "
"parent." % parent.kind())
if isinstance(parent, Key):
self._parent_key = parent
self._parent = None
else:
self._parent_key = parent.key()
self._parent = parent
else:
self._parent_key = None
self._parent = None
self._key_name = key_name
self._key = None
self._entity = None
if _app is not None and isinstance(_app, Key):
raise BadArgumentError('_app should be a string; received Key(\'%s\'):\n'
' This may be the result of passing \'key\' as '
'a positional parameter in SDK 1.2.6. Please '
'only pass \'key\' as a keyword parameter.' % _app)
self._app = _app
for prop in self.properties().values():
if prop.name in kwds:
value = kwds[prop.name]
else:
value = prop.default_value()
try:
prop.__set__(self, value)
except DerivedPropertyError, e:
if prop.name in kwds and not _from_entity:
raise
def key(self):
"""Unique key for this entity.
This property is only available if this entity is already stored in the
datastore or if it has a full key, so it is available if this entity was
fetched returned from a query, or after put() is called the first time
for new entities, or if a complete key was given when constructed.
Returns:
Datastore key of persisted entity.
Raises:
NotSavedError when entity is not persistent.
"""
if self.is_saved():
return self._entity.key()
elif self._key:
return self._key
elif self._key_name:
parent = self._parent_key or (self._parent and self._parent.key())
self._key = Key.from_path(self.kind(), self._key_name, parent=parent)
return self._key
else:
raise NotSavedError()
def _to_entity(self, entity):
"""Copies information from this model to provided entity.
Args:
entity: Entity to save information on.
"""
for prop in self.properties().values():
datastore_value = prop.get_value_for_datastore(self)
if datastore_value == []:
try:
del entity[prop.name]
except KeyError:
pass
else:
entity[prop.name] = datastore_value
entity.set_unindexed_properties(self._unindexed_properties)
def _populate_internal_entity(self, _entity_class=datastore.Entity):
"""Populates self._entity, saving its state to the datastore.
After this method is called, calling is_saved() will return True.
Returns:
Populated self._entity
"""
self._entity = self._populate_entity(_entity_class=_entity_class)
for attr in ('_key_name', '_key'):
try:
delattr(self, attr)
except AttributeError:
pass
return self._entity
def put(self, **kwargs):
"""Writes this model instance to the datastore.
If this instance is new, we add an entity to the datastore.
Otherwise, we update this instance, and the key will remain the
same.
Returns:
The key of the instance (either the existing key or a new key).
Raises:
TransactionFailedError if the data could not be committed.
"""
rpc = datastore.GetRpcFromKwargs(kwargs)
self._populate_internal_entity()
return datastore.Put(self._entity, rpc=rpc)
save = put
def _populate_entity(self, _entity_class=datastore.Entity):
"""Internal helper -- Populate self._entity or create a new one
if that one does not exist. Does not change any state of the instance
other than the internal state of the entity.
This method is separate from _populate_internal_entity so that it is
possible to call to_xml without changing the state of an unsaved entity
to saved.
Returns:
self._entity or a new Entity which is not stored on the instance.
"""
if self.is_saved():
entity = self._entity
else:
kwds = {'_app': self._app,
'unindexed_properties': self._unindexed_properties}
if self._key is not None:
if self._key.id():
kwds['id'] = self._key.id()
else:
kwds['name'] = self._key.name()
if self._key.parent():
kwds['parent'] = self._key.parent()
else:
if self._key_name is not None:
kwds['name'] = self._key_name
if self._parent_key is not None:
kwds['parent'] = self._parent_key
elif self._parent is not None:
kwds['parent'] = self._parent._entity
entity = _entity_class(self.kind(), **kwds)
self._to_entity(entity)
return entity
def delete(self, **kwargs):
"""Deletes this entity from the datastore.
Raises:
TransactionFailedError if the data could not be committed.
"""
rpc = datastore.GetRpcFromKwargs(kwargs)
datastore.Delete(self.key(), rpc=rpc)
self._key = self.key()
self._key_name = None
self._parent_key = None
self._entity = None
def is_saved(self):
"""Determine if entity is persisted in the datastore.
    New instances of Model do not start out saved in the datastore. Objects which
are saved to or loaded from the Datastore will have a True saved state.
Returns:
True if object has been persisted to the datastore, otherwise False.
"""
return self._entity is not None
def has_key(self):
"""Determine if this model instance has a complete key.
When not using a fully self-assigned Key, ids are not assigned until the
data is saved to the Datastore, but instances with a key name always have
a full key.
Returns:
True if the object has been persisted to the datastore or has a key
or has a key_name, otherwise False.
"""
return self.is_saved() or self._key or self._key_name
def dynamic_properties(self):
"""Returns a list of all dynamic properties defined for instance."""
return []
def instance_properties(self):
"""Alias for dyanmic_properties."""
return self.dynamic_properties()
def parent(self):
"""Get the parent of the model instance.
Returns:
Parent of contained entity or parent provided in constructor, None if
instance has no parent.
"""
if self._parent is None:
parent_key = self.parent_key()
if parent_key is not None:
self._parent = get(parent_key)
return self._parent
def parent_key(self):
"""Get the parent's key.
    This method is useful for avoiding a potential fetch from the datastore
    while still getting information about the instance's parent.
Returns:
Parent key of entity, None if there is no parent.
"""
if self._parent_key is not None:
return self._parent_key
elif self._parent is not None:
return self._parent.key()
elif self._entity is not None:
return self._entity.parent()
elif self._key is not None:
return self._key.parent()
else:
return None
def to_xml(self, _entity_class=datastore.Entity):
"""Generate an XML representation of this model instance.
atom and gd:namespace properties are converted to XML according to their
respective schemas. For more information, see:
http://www.atomenabled.org/developers/syndication/
http://code.google.com/apis/gdata/common-elements.html
"""
entity = self._populate_entity(_entity_class)
return entity.ToXml()
@classmethod
def get(cls, keys, **kwargs):
"""Fetch instance from the datastore of a specific Model type using key.
We support Key objects and string keys (we convert them to Key objects
automatically).
Useful for ensuring that specific instance types are retrieved from the
datastore. It also helps that the source code clearly indicates what
    kind of object is being retrieved. Example:
story = Story.get(story_key)
Args:
keys: Key within datastore entity collection to find; or string key;
or list of Keys or string keys.
Returns:
      If a single key was given: a Model instance associated with the key
      for the provided class if it exists in the datastore, otherwise
None; if a list of keys was given: a list whose items are either
a Model instance or None.
Raises:
      KindError if any of the retrieved objects are not instances of the
type associated with call to 'get'.
"""
rpc = datastore.GetRpcFromKwargs(kwargs)
results = get(keys, rpc=rpc)
if results is None:
return None
if isinstance(results, Model):
instances = [results]
else:
instances = results
for instance in instances:
if not(instance is None or isinstance(instance, cls)):
raise KindError('Kind %r is not a subclass of kind %r' %
(instance.kind(), cls.kind()))
return results
@classmethod
def get_by_key_name(cls, key_names, parent=None, **kwargs):
"""Get instance of Model class by its key's name.
Args:
key_names: A single key-name or a list of key-names.
parent: Parent of instances to get. Can be a model or key.
"""
try:
parent = _coerce_to_key(parent)
except BadKeyError, e:
raise BadArgumentError(str(e))
rpc = datastore.GetRpcFromKwargs(kwargs)
key_names, multiple = datastore.NormalizeAndTypeCheck(key_names, basestring)
keys = [datastore.Key.from_path(cls.kind(), name, parent=parent)
for name in key_names]
if multiple:
return get(keys, rpc=rpc)
else:
return get(keys[0], rpc=rpc)
@classmethod
def get_by_id(cls, ids, parent=None, **kwargs):
"""Get instance of Model class by id.
Args:
      ids: A single id or a list of ids.
parent: Parent of instances to get. Can be a model or key.
"""
rpc = datastore.GetRpcFromKwargs(kwargs)
if isinstance(parent, Model):
parent = parent.key()
ids, multiple = datastore.NormalizeAndTypeCheck(ids, (int, long))
keys = [datastore.Key.from_path(cls.kind(), id, parent=parent)
for id in ids]
if multiple:
return get(keys, rpc=rpc)
else:
return get(keys[0], rpc=rpc)
@classmethod
def get_or_insert(cls, key_name, **kwds):
"""Transactionally retrieve or create an instance of Model class.
This acts much like the Python dictionary setdefault() method, where we
first try to retrieve a Model instance with the given key name and parent.
If it's not present, then we create a new instance (using the *kwds
supplied) and insert that with the supplied key name.
Subsequent calls to this method with the same key_name and parent will
always yield the same entity (though not the same actual object instance),
regardless of the *kwds supplied. If the specified entity has somehow
been deleted separately, then the next call will create a new entity and
return it.
If the 'parent' keyword argument is supplied, it must be a Model instance.
It will be used as the parent of the new instance of this Model class if
one is created.
This method is especially useful for having just one unique entity for
a specific identifier. Insertion/retrieval is done transactionally, which
guarantees uniqueness.
Example usage:
class WikiTopic(db.Model):
creation_date = db.DatetimeProperty(auto_now_add=True)
body = db.TextProperty(required=True)
# The first time through we'll create the new topic.
wiki_word = 'CommonIdioms'
topic = WikiTopic.get_or_insert(wiki_word,
body='This topic is totally new!')
assert topic.key().name() == 'CommonIdioms'
assert topic.body == 'This topic is totally new!'
# The second time through will just retrieve the entity.
overwrite_topic = WikiTopic.get_or_insert(wiki_word,
body='A totally different message!')
assert topic.key().name() == 'CommonIdioms'
assert topic.body == 'This topic is totally new!'
Args:
key_name: Key name to retrieve or create.
**kwds: Keyword arguments to pass to the constructor of the model class
if an instance for the specified key name does not already exist. If
an instance with the supplied key_name and parent already exists, the
rest of these arguments will be discarded.
Returns:
Existing instance of Model class with the specified key_name and parent
or a new one that has just been created.
Raises:
TransactionFailedError if the specified Model instance could not be
retrieved or created transactionally (due to high contention, etc).
"""
def txn():
entity = cls.get_by_key_name(key_name, parent=kwds.get('parent'))
if entity is None:
entity = cls(key_name=key_name, **kwds)
entity.put()
return entity
return run_in_transaction(txn)
@classmethod
def all(cls, **kwds):
"""Returns a query over all instances of this model from the datastore.
Returns:
Query that will retrieve all instances from entity collection.
"""
return Query(cls, **kwds)
@classmethod
def gql(cls, query_string, *args, **kwds):
"""Returns a query using GQL query string.
See appengine/ext/gql for more information about GQL.
Args:
query_string: properly formatted GQL query string with the
'SELECT * FROM <entity>' part omitted
*args: rest of the positional arguments used to bind numeric references
in the query.
**kwds: dictionary-based arguments (for named parameters).
"""
return GqlQuery('SELECT * FROM %s %s' % (cls.kind(), query_string),
*args, **kwds)
@classmethod
def _load_entity_values(cls, entity):
"""Load dynamic properties from entity.
    Loads attributes which are not defined as part of the entity into
    the model instance.
Args:
      entity: Entity which contains values to search dynamic properties for.
"""
entity_values = {}
for prop in cls.properties().values():
if prop.name in entity:
try:
value = prop.make_value_from_datastore(entity[prop.name])
entity_values[prop.name] = value
except KeyError:
entity_values[prop.name] = []
return entity_values
@classmethod
def from_entity(cls, entity):
"""Converts the entity representation of this model to an instance.
Converts datastore.Entity instance to an instance of cls.
Args:
entity: Entity loaded directly from datastore.
Raises:
KindError when cls is incorrect model for entity.
"""
if cls.kind() != entity.kind():
raise KindError('Class %s cannot handle kind \'%s\'' %
(repr(cls), entity.kind()))
entity_values = cls._load_entity_values(entity)
if entity.key().has_id_or_name():
entity_values['key'] = entity.key()
instance = cls(None, _from_entity=True, **entity_values)
if entity.is_saved():
instance._entity = entity
del instance._key_name
del instance._key
return instance
@classmethod
def kind(cls):
"""Returns the datastore kind we use for this model.
We just use the name of the model for now, ignoring potential collisions.
"""
return cls.__name__
@classmethod
def entity_type(cls):
"""Soon to be removed alias for kind."""
return cls.kind()
@classmethod
def properties(cls):
"""Returns a dictionary of all the properties defined for this model."""
return dict(cls._properties)
@classmethod
def fields(cls):
"""Soon to be removed alias for properties."""
return cls.properties()
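# --- Illustrative sketch, not part of the SDK API surface -------------------
# The helper below shows the Model lifecycle documented above: declare a kind,
# construct an instance (a key_name gives it a complete key up front), put()
# it, and fetch it back.  All names (GuestbookEntry, the helper itself) are
# hypothetical, the function is never called by library code, and running it
# requires a configured datastore (e.g. the SDK stub).
def _example_model_lifecycle():
  class GuestbookEntry(Model):
    author = StringProperty()
    content = TextProperty()
    created = DateTimeProperty(auto_now_add=True)
  # With a key_name the key is complete before put(); without one, the
  # numeric id is only assigned by the datastore when put() runs.
  entry = GuestbookEntry(key_name='first-post', author='alice',
                         content=Text('hello'))
  entry.put()
  assert entry.is_saved()
  # Fetch back either through the full key or by rebuilding it from the name.
  by_key = GuestbookEntry.get(entry.key())
  by_name = GuestbookEntry.get_by_key_name('first-post')
  return by_key, by_name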
def create_rpc(deadline=None, callback=None, read_policy=STRONG_CONSISTENCY):
"""Create an rpc for use in configuring datastore calls.
Args:
deadline: float, deadline for calls in seconds.
callback: callable, a callback triggered when this rpc completes,
accepts one argument: the returned rpc.
read_policy: flag, set to EVENTUAL_CONSISTENCY to enable eventually
consistent reads
Returns:
A datastore.DatastoreRPC instance.
"""
return datastore.CreateRPC(
deadline=deadline, callback=callback, read_policy=read_policy)
def get(keys, **kwargs):
"""Fetch the specific Model instance with the given key from the datastore.
We support Key objects and string keys (we convert them to Key objects
automatically).
Args:
keys: Key within datastore entity collection to find; or string key;
or list of Keys or string keys.
Returns:
    If a single key was given: a Model instance associated with the key
    if it exists in the datastore, otherwise None; if a list of keys was
    given: a list whose items are either a Model instance or None.
"""
rpc = datastore.GetRpcFromKwargs(kwargs)
keys, multiple = datastore.NormalizeAndTypeCheckKeys(keys)
try:
entities = datastore.Get(keys, rpc=rpc)
except datastore_errors.EntityNotFoundError:
assert not multiple
return None
models = []
for entity in entities:
if entity is None:
model = None
else:
cls1 = class_for_kind(entity.kind())
model = cls1.from_entity(entity)
models.append(model)
if multiple:
return models
assert len(models) == 1
return models[0]
def put(models, **kwargs):
"""Store one or more Model instances.
Args:
models: Model instance or list of Model instances.
Returns:
A Key or a list of Keys (corresponding to the argument's plurality).
Raises:
TransactionFailedError if the data could not be committed.
"""
rpc = datastore.GetRpcFromKwargs(kwargs)
models, multiple = datastore.NormalizeAndTypeCheck(models, Model)
entities = [model._populate_internal_entity() for model in models]
keys = datastore.Put(entities, rpc=rpc)
if multiple:
return keys
assert len(keys) == 1
return keys[0]
save = put
def delete(models, **kwargs):
"""Delete one or more Model instances.
Args:
    models: Model instance, Key, or a list of Model instances or Keys.
Raises:
TransactionFailedError if the data could not be committed.
"""
rpc = datastore.GetRpcFromKwargs(kwargs)
if not isinstance(models, (list, tuple)):
models = [models]
keys = [_coerce_to_key(v) for v in models]
datastore.Delete(keys, rpc=rpc)
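# Illustrative sketch of the batch helpers above: put(), get() and delete()
# accept either a single model/key or a list, and put()/get() preserve the
# argument's plurality in what they return.  'Note' and the helper are
# hypothetical names; the function is never invoked by library code.
def _example_batch_operations():
  class Note(Model):
    body = StringProperty()
  notes = [Note(body='a'), Note(body='b')]
  keys = put(notes)       # list in -> list of Keys out
  fetched = get(keys)     # list of Note instances (None for missing keys)
  delete(fetched)         # models and keys are both accepted
  return keys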
def allocate_ids(model, size, **kwargs):
"""Allocates a range of IDs of size for the model_key defined by model.
Allocates a range of IDs in the datastore such that those IDs will not
be automatically assigned to new entities. You can only allocate IDs
for model keys from your app. If there is an error, raises a subclass of
datastore_errors.Error.
Args:
model: Model instance, Key or string to serve as a template specifying the
ID sequence in which to allocate IDs. Returned ids should only be used
in entities with the same parent (if any) and kind as this key.
Returns:
(start, end) of the allocated range, inclusive.
"""
return datastore.AllocateIds(_coerce_to_key(model), size, **kwargs)
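# Illustrative sketch of allocate_ids(): reserve a contiguous id range so
# those ids are never auto-assigned, then build complete keys from the range
# by hand.  'Note' is a hypothetical kind and the template key's id (1) is
# arbitrary; the helper is never invoked by library code.
def _example_allocate_ids():
  start, end = allocate_ids(Key.from_path('Note', 1), 10)
  reserved_keys = [Key.from_path('Note', i) for i in range(start, end + 1)]
  return reserved_keys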
class Expando(Model):
"""Dynamically expandable model.
An Expando does not require (but can still benefit from) the definition
of any properties before it can be used to store information in the
datastore. Properties can be added to an expando object by simply
performing an assignment. The assignment of properties is done on
an instance by instance basis, so it is possible for one object of an
expando type to have different properties from another or even the same
properties with different types. It is still possible to define
properties on an expando, allowing those properties to behave the same
as on any other model.
Example:
import datetime
class Song(db.Expando):
title = db.StringProperty()
crazy = Song(title='Crazy like a diamond',
author='Lucy Sky',
publish_date='yesterday',
rating=5.0)
hoboken = Song(title='The man from Hoboken',
author=['Anthony', 'Lou'],
publish_date=datetime.datetime(1977, 5, 3))
crazy.last_minute_note=db.Text('Get a train to the station.')
Possible Uses:
One use of an expando is to create an object without any specific
    structure and later, when your application matures and is in the right
state, change it to a normal model object and define explicit properties.
Additional exceptions for expando:
Protected attributes (ones whose names begin with '_') cannot be used
as dynamic properties. These are names that are reserved for protected
transient (non-persisted) attributes.
Order of lookup:
When trying to set or access an attribute value, any other defined
properties, such as methods and other values in __dict__ take precedence
over values in the datastore.
1 - Because it is not possible for the datastore to know what kind of
property to store on an undefined expando value, setting a property to
None is the same as deleting it from the expando.
2 - Persistent variables on Expando must not begin with '_'. These
        variables are considered to be 'protected' in Python, and are used
internally.
3 - Expando's dynamic properties are not able to store empty lists.
Attempting to assign an empty list to a dynamic property will raise
ValueError. Static properties on Expando can still support empty
        lists but, like normal Model properties, are restricted from using
None.
"""
_dynamic_properties = None
def __init__(self, parent=None, key_name=None, _app=None, **kwds):
"""Creates a new instance of this expando model.
Args:
parent: Parent instance for this instance or None, indicating a top-
level instance.
key_name: Name for new model instance.
_app: Intentionally undocumented.
      kwds: Keyword arguments mapping to properties of model.
"""
super(Expando, self).__init__(parent, key_name, _app, **kwds)
self._dynamic_properties = {}
for prop, value in kwds.iteritems():
if prop not in self.properties() and prop != 'key':
setattr(self, prop, value)
def __setattr__(self, key, value):
"""Dynamically set field values that are not defined.
Tries to set the value on the object normally, but failing that
sets the value on the contained entity.
Args:
key: Name of attribute.
value: Value to set for attribute. Must be compatible with
datastore.
Raises:
ValueError on attempt to assign empty list.
"""
check_reserved_word(key)
if (key[:1] != '_' and
not hasattr(getattr(type(self), key, None), '__set__')):
if value == []:
raise ValueError('Cannot store empty list to dynamic property %s' %
key)
if type(value) not in _ALLOWED_EXPANDO_PROPERTY_TYPES:
raise TypeError("Expando cannot accept values of type '%s'." %
type(value).__name__)
if self._dynamic_properties is None:
self._dynamic_properties = {}
self._dynamic_properties[key] = value
else:
super(Expando, self).__setattr__(key, value)
def __getattribute__(self, key):
"""Get attribute from expando.
Must be overridden to allow dynamic properties to obscure class attributes.
Since all attributes are stored in self._dynamic_properties, the normal
__getattribute__ does not attempt to access it until __setattr__ is called.
By then, the static attribute being overwritten has already been located
and returned from the call.
This method short circuits the usual __getattribute__ call when finding a
dynamic property and returns it to the user via __getattr__. __getattr__
is called to preserve backward compatibility with older Expando models
that may have overridden the original __getattr__.
NOTE: Access to properties defined by Python descriptors are not obscured
because setting those attributes are done through the descriptor and does
not place those attributes in self._dynamic_properties.
"""
if not key.startswith('_'):
dynamic_properties = self._dynamic_properties
if dynamic_properties is not None and key in dynamic_properties:
return self.__getattr__(key)
return super(Expando, self).__getattribute__(key)
def __getattr__(self, key):
"""If no explicit attribute defined, retrieve value from entity.
Tries to get the value on the object normally, but failing that
retrieves value from contained entity.
Args:
key: Name of attribute.
Raises:
AttributeError when there is no attribute for key on object or
contained entity.
"""
_dynamic_properties = self._dynamic_properties
if _dynamic_properties is not None and key in _dynamic_properties:
return _dynamic_properties[key]
else:
return getattr(super(Expando, self), key)
def __delattr__(self, key):
"""Remove attribute from expando.
Expando is not like normal entities in that undefined fields
can be removed.
Args:
key: Dynamic property to be deleted.
"""
if self._dynamic_properties and key in self._dynamic_properties:
del self._dynamic_properties[key]
else:
object.__delattr__(self, key)
def dynamic_properties(self):
"""Determine which properties are particular to instance of entity.
Returns:
Set of names which correspond only to the dynamic properties.
"""
if self._dynamic_properties is None:
return []
return self._dynamic_properties.keys()
def _to_entity(self, entity):
"""Store to entity, deleting dynamic properties that no longer exist.
When the expando is saved, it is possible that a given property no longer
exists. In this case, the property will be removed from the saved instance.
Args:
entity: Entity which will receive dynamic properties.
"""
super(Expando, self)._to_entity(entity)
if self._dynamic_properties is None:
self._dynamic_properties = {}
for key, value in self._dynamic_properties.iteritems():
entity[key] = value
all_properties = set(self._dynamic_properties.iterkeys())
all_properties.update(self.properties().iterkeys())
for key in entity.keys():
if key not in all_properties:
del entity[key]
@classmethod
def _load_entity_values(cls, entity):
"""Load dynamic properties from entity.
Expando needs to do a second pass to add the entity values which were
    ignored by Model because they didn't have a corresponding predefined
property on the model.
Args:
      entity: Entity which contains values to search dynamic properties for.
"""
entity_values = super(Expando, cls)._load_entity_values(entity)
for key, value in entity.iteritems():
if key not in entity_values:
entity_values[str(key)] = value
return entity_values
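# Illustrative sketch of Expando's per-instance dynamic properties, as
# described in the class docstring: attributes set on one instance need not
# exist on another, while declared properties behave like those on any Model.
# 'Profile' and the helper are hypothetical names; never called by library
# code, and put() requires a configured datastore.
def _example_expando_usage():
  class Profile(Expando):
    nickname = StringProperty()   # declared property: normal Model behavior
  p = Profile(nickname='bob')
  p.favorite_color = 'green'      # dynamic property, stored with the entity
  p.lucky_numbers = [7, 13]       # non-empty lists are allowed
  assert 'favorite_color' in p.dynamic_properties()
  del p.favorite_color            # dynamic properties can simply be removed
  p.put()
  return p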
class _BaseQuery(object):
"""Base class for both Query and GqlQuery."""
_compile = False
def __init__(self, model_class=None, keys_only=False, compile=True,
cursor=None):
"""Constructor.
Args:
model_class: Model class from which entities are constructed.
keys_only: Whether the query should return full entities or only keys.
compile: Whether the query should also return a compiled query.
cursor: A compiled query from which to resume.
"""
self._model_class = model_class
self._keys_only = keys_only
self._compile = compile
self.with_cursor(cursor)
def is_keys_only(self):
"""Returns whether this query is keys only.
Returns:
True if this query returns keys, False if it returns entities.
"""
return self._keys_only
def _get_query(self):
"""Subclass must override (and not call their super method).
Returns:
A datastore.Query instance representing the query.
"""
raise NotImplementedError
def run(self, **kwargs):
"""Iterator for this query.
If you know the number of results you need, consider fetch() instead,
or use a GQL query with a LIMIT clause. It's more efficient.
Args:
rpc: datastore.DatastoreRPC to use for this request.
Returns:
Iterator for this query.
"""
rpc = datastore.GetRpcFromKwargs(kwargs)
raw_query = self._get_query()
iterator = raw_query.Run(rpc=rpc)
if self._compile:
self._last_raw_query = raw_query
if self._keys_only:
return iterator
else:
return _QueryIterator(self._model_class, iter(iterator))
def __iter__(self):
"""Iterator for this query.
If you know the number of results you need, consider fetch() instead,
or use a GQL query with a LIMIT clause. It's more efficient.
"""
return self.run()
def get(self, **kwargs):
"""Get first result from this.
Beware: get() ignores the LIMIT clause on GQL queries.
Returns:
First result from running the query if there are any, else None.
"""
rpc = datastore.GetRpcFromKwargs(kwargs)
results = self.fetch(1, rpc=rpc)
try:
return results[0]
except IndexError:
return None
def count(self, limit=None, **kwargs):
"""Number of entities this query fetches.
Beware: count() ignores the LIMIT clause on GQL queries.
Args:
      limit: a number. If there are more results than this, stop short and
just return this number. Providing this argument makes the count
operation more efficient.
Returns:
Number of entities this query fetches.
"""
rpc = datastore.GetRpcFromKwargs(kwargs)
raw_query = self._get_query()
result = raw_query.Count(limit=limit, rpc=rpc)
self._last_raw_query = None
return result
def fetch(self, limit, offset=0, **kwargs):
"""Return a list of items selected using SQL-like limit and offset.
Whenever possible, use fetch() instead of iterating over the query
    results with run() or __iter__(). fetch() is more efficient.
Beware: fetch() ignores the LIMIT clause on GQL queries.
Args:
limit: Maximum number of results to return.
offset: Optional number of results to skip first; default zero.
rpc: datastore.DatastoreRPC to use for this request.
Returns:
A list of db.Model instances. There may be fewer than 'limit'
results if there aren't enough results to satisfy the request.
"""
rpc = datastore.GetRpcFromKwargs(kwargs)
accepted = (int, long)
if not (isinstance(limit, accepted) and isinstance(offset, accepted)):
raise TypeError('Arguments to fetch() must be integers')
if limit < 0 or offset < 0:
raise ValueError('Arguments to fetch() must be >= 0')
if limit == 0:
return []
raw_query = self._get_query()
raw = raw_query.Get(limit, offset, rpc=rpc)
if self._compile:
self._last_raw_query = raw_query
if self._keys_only:
return raw
else:
if self._model_class is not None:
return [self._model_class.from_entity(e) for e in raw]
else:
return [class_for_kind(e.kind()).from_entity(e) for e in raw]
def cursor(self):
"""Get a serialized cursor for an already executed query.
The returned cursor effectively lets a future invocation of a similar
query to begin fetching results immediately after the last returned
result from this query invocation.
Returns:
A base64-encoded serialized cursor.
"""
if not self._compile:
raise AssertionError(
'Query must be created with compile=True to produce cursors')
try:
return base64.urlsafe_b64encode(
self._last_raw_query.GetCompiledCursor().Encode())
except AttributeError:
raise AssertionError('No cursor available.')
def with_cursor(self, cursor):
"""Set the start of this query to the given serialized cursor.
When executed, this query will start from the next result for a previous
invocation of a similar query.
Returns:
This Query instance, for chaining.
"""
if not cursor:
cursor = None
elif not isinstance(cursor, basestring):
raise BadValueError(
'Cursor must be a str or unicode instance, not a %s'
% type(cursor).__name__)
else:
cursor = str(cursor)
try:
decoded = base64.urlsafe_b64decode(cursor)
cursor = datastore_pb.CompiledCursor(decoded)
except (ValueError, TypeError), e:
raise datastore_errors.BadValueError(
'Invalid cursor %s. Details: %s' % (cursor, e))
except Exception, e:
if e.__class__.__name__ == 'ProtocolBufferDecodeError':
raise datastore_errors.BadValueError('Invalid cursor %s. '
'Details: %s' % (cursor, e))
else:
raise
self._cursor = cursor
return self
def __getitem__(self, arg):
"""Support for query[index] and query[start:stop].
Beware: this ignores the LIMIT clause on GQL queries.
Args:
arg: Either a single integer, corresponding to the query[index]
syntax, or a Python slice object, corresponding to the
query[start:stop] or query[start:stop:step] syntax.
Returns:
A single Model instance when the argument is a single integer.
A list of Model instances when the argument is a slice.
"""
if isinstance(arg, slice):
start, stop, step = arg.start, arg.stop, arg.step
if start is None:
start = 0
if stop is None:
raise ValueError('Open-ended slices are not supported')
if step is None:
step = 1
if start < 0 or stop < 0 or step != 1:
raise ValueError(
'Only slices with start>=0, stop>=0, step==1 are supported')
limit = stop - start
if limit < 0:
return []
return self.fetch(limit, start)
elif isinstance(arg, (int, long)):
if arg < 0:
raise ValueError('Only indices >= 0 are supported')
results = self.fetch(1, arg)
if results:
return results[0]
else:
raise IndexError('The query returned fewer than %d results' % (arg+1))
else:
raise TypeError('Only integer indices and slices are supported')
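# Illustrative sketch of cursor-based paging with cursor()/with_cursor()
# above: run a query, stash the serialized cursor (a base64 string, safe to
# embed in a URL), and later resume a fresh query from the same position.
# 'Note' and the helper are hypothetical; never invoked by library code.
def _example_cursor_paging():
  class Note(Model):
    body = StringProperty()
  query = Query(Note)            # compile=True by default, so cursors work
  first_page = query.fetch(20)
  bookmark = query.cursor()      # base64-encoded serialized cursor
  # Possibly in a later request: resume just after the last result above.
  next_page = Query(Note).with_cursor(bookmark).fetch(20)
  return first_page, next_page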
class _QueryIterator(object):
"""Wraps the datastore iterator to return Model instances.
The datastore returns entities. We wrap the datastore iterator to
return Model instances instead.
"""
def __init__(self, model_class, datastore_iterator):
"""Iterator constructor
Args:
model_class: Model class from which entities are constructed.
datastore_iterator: Underlying datastore iterator.
"""
self.__model_class = model_class
self.__iterator = datastore_iterator
def __iter__(self):
"""Iterator on self.
Returns:
Self.
"""
return self
def next(self):
"""Get next Model instance in query results.
Returns:
Next model instance.
Raises:
StopIteration when there are no more results in query.
"""
if self.__model_class is not None:
return self.__model_class.from_entity(self.__iterator.next())
else:
entity = self.__iterator.next()
return class_for_kind(entity.kind()).from_entity(entity)
def _normalize_query_parameter(value):
"""Make any necessary type conversions to a query parameter.
The following conversions are made:
- Model instances are converted to Key instances. This is necessary so
that querying reference properties will work.
- datetime.date objects are converted to datetime.datetime objects (see
_date_to_datetime for details on this conversion). This is necessary so
that querying date properties with date objects will work.
- datetime.time objects are converted to datetime.datetime objects (see
_time_to_datetime for details on this conversion). This is necessary so
that querying time properties with time objects will work.
Args:
value: The query parameter value.
Returns:
The input value, or a converted value if value matches one of the
conversions specified above.
"""
if isinstance(value, Model):
value = value.key()
if (isinstance(value, datetime.date) and
not isinstance(value, datetime.datetime)):
value = _date_to_datetime(value)
elif isinstance(value, datetime.time):
value = _time_to_datetime(value)
return value
class Query(_BaseQuery):
"""A Query instance queries over instances of Models.
You construct a query with a model class, like this:
class Story(db.Model):
title = db.StringProperty()
date = db.DateTimeProperty()
query = Query(Story)
You modify a query with filters and orders like this:
query.filter('title =', 'Foo')
query.order('-date')
query.ancestor(key_or_model_instance)
Every query can return an iterator, so you access the results of a query
by iterating over it:
for story in query:
print story.title
For convenience, all of the filtering and ordering methods return "self",
so the easiest way to use the query interface is to cascade all filters and
orders in the iterator line like this:
    for story in Query(Story).filter('title =', 'Foo').order('-date'):
print story.title
"""
def __init__(self, model_class=None, keys_only=False, cursor=None):
"""Constructs a query over instances of the given Model.
Args:
model_class: Model class to build query for.
keys_only: Whether the query should return full entities or only keys.
cursor: A compiled query from which to resume.
"""
super(Query, self).__init__(model_class, keys_only, cursor=cursor)
self.__query_sets = [{}]
self.__orderings = []
self.__ancestor = None
def _get_query(self,
_query_class=datastore.Query,
_multi_query_class=datastore.MultiQuery):
queries = []
for query_set in self.__query_sets:
if self._model_class is not None:
kind = self._model_class.kind()
else:
kind = None
query = _query_class(kind,
query_set,
keys_only=self._keys_only,
compile=self._compile,
cursor=self._cursor)
query.Order(*self.__orderings)
if self.__ancestor is not None:
query.Ancestor(self.__ancestor)
queries.append(query)
if (_query_class != datastore.Query and
_multi_query_class == datastore.MultiQuery):
warnings.warn(
'Custom _query_class specified without corresponding custom'
' _query_multi_class. Things will break if you use queries with'
' the "IN" or "!=" operators.', RuntimeWarning)
if len(queries) > 1:
raise datastore_errors.BadArgumentError(
'Query requires multiple subqueries to satisfy. If _query_class'
' is overridden, _multi_query_class must also be overridden.')
elif (_query_class == datastore.Query and
_multi_query_class != datastore.MultiQuery):
raise BadArgumentError('_query_class must also be overridden if'
' _multi_query_class is overridden.')
if len(queries) == 1:
return queries[0]
else:
return _multi_query_class(queries, self.__orderings)
def __filter_disjunction(self, operations, values):
"""Add a disjunction of several filters and several values to the query.
This is implemented by duplicating queries and combining the
results later.
Args:
operations: a string or list of strings. Each string contains a
property name and an operator to filter by. The operators
themselves must not require multiple queries to evaluate
(currently, this means that 'in' and '!=' are invalid).
values: a value or list of filter values, normalized by
_normalize_query_parameter.
"""
if not isinstance(operations, (list, tuple)):
operations = [operations]
if not isinstance(values, (list, tuple)):
values = [values]
new_query_sets = []
for operation in operations:
if operation.lower().endswith('in') or operation.endswith('!='):
raise BadQueryError('Cannot use "in" or "!=" in a disjunction.')
for query_set in self.__query_sets:
for value in values:
new_query_set = copy.deepcopy(query_set)
datastore._AddOrAppend(new_query_set, operation, value)
new_query_sets.append(new_query_set)
self.__query_sets = new_query_sets
def filter(self, property_operator, value):
"""Add filter to query.
Args:
property_operator: string with the property and operator to filter by.
value: the filter value.
Returns:
Self to support method chaining.
Raises:
PropertyError if invalid property is provided.
"""
match = _FILTER_REGEX.match(property_operator)
prop = match.group(1)
if match.group(3) is not None:
operator = match.group(3)
else:
operator = '=='
if self._model_class is None:
if prop != datastore_types._KEY_SPECIAL_PROPERTY:
raise BadQueryError(
'Only %s filters are allowed on kindless queries.' %
datastore_types._KEY_SPECIAL_PROPERTY)
elif prop in self._model_class._unindexed_properties:
raise PropertyError('Property \'%s\' is not indexed' % prop)
if operator.lower() == 'in':
if self._keys_only:
raise BadQueryError('Keys only queries do not support IN filters.')
elif not isinstance(value, (list, tuple)):
raise BadValueError('Argument to the "in" operator must be a list')
values = [_normalize_query_parameter(v) for v in value]
self.__filter_disjunction(prop + ' =', values)
else:
if isinstance(value, (list, tuple)):
raise BadValueError('Filtering on lists is not supported')
if operator == '!=':
if self._keys_only:
raise BadQueryError('Keys only queries do not support != filters.')
self.__filter_disjunction([prop + ' <', prop + ' >'],
_normalize_query_parameter(value))
else:
value = _normalize_query_parameter(value)
for query_set in self.__query_sets:
datastore._AddOrAppend(query_set, property_operator, value)
return self
def order(self, property):
"""Set order of query result.
To use descending order, prepend '-' (minus) to the property
name, e.g., '-date' rather than 'date'.
Args:
property: Property to sort on.
Returns:
Self to support method chaining.
Raises:
PropertyError if invalid property is provided.
"""
if property.startswith('-'):
property = property[1:]
order = datastore.Query.DESCENDING
else:
order = datastore.Query.ASCENDING
if self._model_class is None:
if (property != datastore_types._KEY_SPECIAL_PROPERTY or
order != datastore.Query.ASCENDING):
raise BadQueryError(
'Only %s ascending orders are supported on kindless queries' %
datastore_types._KEY_SPECIAL_PROPERTY)
else:
if not issubclass(self._model_class, Expando):
if (property not in self._model_class.properties() and
property not in datastore_types._SPECIAL_PROPERTIES):
raise PropertyError('Invalid property name \'%s\'' % property)
if property in self._model_class._unindexed_properties:
raise PropertyError('Property \'%s\' is not indexed' % property)
self.__orderings.append((property, order))
return self
def ancestor(self, ancestor):
"""Sets an ancestor for this query.
This restricts the query to only return results that descend from
a given model instance. In other words, all of the results will
have the ancestor as their parent, or parent's parent, etc. The
ancestor itself is also a possible result!
Args:
ancestor: Model or Key (that has already been saved)
Returns:
Self to support method chaining.
Raises:
TypeError if the argument isn't a Key or Model; NotSavedError
if it is, but isn't saved yet.
"""
if isinstance(ancestor, datastore.Key):
if ancestor.has_id_or_name():
self.__ancestor = ancestor
else:
raise NotSavedError()
elif isinstance(ancestor, Model):
if ancestor.has_key():
self.__ancestor = ancestor.key()
else:
raise NotSavedError()
else:
raise TypeError('ancestor should be Key or Model')
return self
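# Illustrative sketch of the chaining style shown in the Query docstring:
# filter(), order() and ancestor() each return self, so a query reads as one
# expression.  'Story' and the helper are hypothetical names; never invoked
# by library code.
def _example_query_chaining():
  class Story(Model):
    title = StringProperty()
    rating = IntegerProperty()
  root = Story(key_name='root', title='root')
  root.put()
  q = (Query(Story)
       .filter('rating >=', 4)
       .order('-rating')
       .ancestor(root))
  return q.fetch(10)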
class GqlQuery(_BaseQuery):
"""A Query class that uses GQL query syntax instead of .filter() etc."""
def __init__(self, query_string, *args, **kwds):
"""Constructor.
Args:
query_string: Properly formatted GQL query string.
*args: Positional arguments used to bind numeric references in the query.
**kwds: Dictionary-based arguments for named references.
Raises:
PropertyError if the query filters or sorts on a property that's not
indexed.
"""
from google.appengine.ext import gql
app = kwds.pop('_app', None)
self._proto_query = gql.GQL(query_string, _app=app)
if self._proto_query._entity is not None:
model_class = class_for_kind(self._proto_query._entity)
else:
model_class = None
super(GqlQuery, self).__init__(model_class,
keys_only=self._proto_query._keys_only)
if model_class is not None:
for property, unused in (self._proto_query.filters().keys() +
self._proto_query.orderings()):
if property in model_class._unindexed_properties:
raise PropertyError('Property \'%s\' is not indexed' % property)
self.bind(*args, **kwds)
def bind(self, *args, **kwds):
"""Bind arguments (positional or keyword) to the query.
Note that you can also pass arguments directly to the query
constructor. Each time you call bind() the previous set of
arguments is replaced with the new set. This is useful because
    the hard work is in parsing the query; so if you expect to be
using the same query with different sets of arguments, you should
hold on to the GqlQuery() object and call bind() on it each time.
Args:
*args: Positional arguments used to bind numeric references in the query.
**kwds: Dictionary-based arguments for named references.
"""
self._args = []
for arg in args:
self._args.append(_normalize_query_parameter(arg))
self._kwds = {}
for name, arg in kwds.iteritems():
self._kwds[name] = _normalize_query_parameter(arg)
def run(self, **kwargs):
"""Iterator for this query that handles the LIMIT clause property.
If the GQL query string contains a LIMIT clause, this function fetches
all results before returning an iterator. Otherwise results are retrieved
in batches by the iterator.
Args:
rpc: datastore.DatastoreRPC to use for this request.
Returns:
Iterator for this query.
"""
if self._proto_query.limit() >= 0:
return iter(self.fetch(limit=self._proto_query.limit(),
offset=self._proto_query.offset(),
**kwargs))
else:
results = _BaseQuery.run(self, **kwargs)
try:
for _ in xrange(self._proto_query.offset()):
results.next()
except StopIteration:
pass
return results
def _get_query(self):
return self._proto_query.Bind(self._args, self._kwds, self._cursor)
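# Illustrative sketch of reusing a GqlQuery, as bind()'s docstring
# recommends: the GQL string is parsed once, then rebound with new arguments
# per use.  'Story' and the helper are hypothetical names; never invoked by
# library code.
def _example_gql_rebinding():
  class Story(Model):
    title = StringProperty()
  query = GqlQuery("SELECT * FROM Story WHERE title = :1", 'Foo')
  foo_stories = query.fetch(10)
  query.bind('Bar')              # replaces the previous positional bindings
  bar_stories = query.fetch(10)
  return foo_stories, bar_stories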
class UnindexedProperty(Property):
"""A property that isn't indexed by either built-in or composite indices.
TextProperty and BlobProperty derive from this class.
"""
def __init__(self, *args, **kwds):
"""Construct property. See the Property class for details.
Raises:
ConfigurationError if indexed=True.
"""
self._require_parameter(kwds, 'indexed', False)
kwds['indexed'] = True
super(UnindexedProperty, self).__init__(*args, **kwds)
def validate(self, value):
"""Validate property.
Returns:
A valid value.
Raises:
BadValueError if property is not an instance of data_type.
"""
if value is not None and not isinstance(value, self.data_type):
try:
value = self.data_type(value)
except TypeError, err:
raise BadValueError('Property %s must be convertible '
'to a %s instance (%s)' %
(self.name, self.data_type.__name__, err))
value = super(UnindexedProperty, self).validate(value)
if value is not None and not isinstance(value, self.data_type):
raise BadValueError('Property %s must be a %s instance' %
(self.name, self.data_type.__name__))
return value
class TextProperty(UnindexedProperty):
"""A string that can be longer than 500 bytes."""
data_type = Text
class StringProperty(Property):
"""A textual property, which can be multi- or single-line."""
def __init__(self, verbose_name=None, multiline=False, **kwds):
"""Construct string property.
Args:
verbose_name: Verbose name is always first parameter.
      multiline: Carriage returns permitted in property.
"""
super(StringProperty, self).__init__(verbose_name, **kwds)
self.multiline = multiline
def validate(self, value):
"""Validate string property.
Returns:
A valid value.
Raises:
BadValueError if property is not multi-line but value is.
"""
value = super(StringProperty, self).validate(value)
if value is not None and not isinstance(value, basestring):
raise BadValueError(
'Property %s must be a str or unicode instance, not a %s'
% (self.name, type(value).__name__))
if not self.multiline and value and value.find('\n') != -1:
raise BadValueError('Property %s is not multi-line' % self.name)
return value
data_type = basestring
class _CoercingProperty(Property):
"""A Property subclass that extends validate() to coerce to self.data_type."""
def validate(self, value):
"""Coerce values (except None) to self.data_type.
Args:
value: The value to be validated and coerced.
Returns:
The coerced and validated value. It is guaranteed that this is
either None or an instance of self.data_type; otherwise an exception
is raised.
Raises:
BadValueError if the value could not be validated or coerced.
"""
value = super(_CoercingProperty, self).validate(value)
if value is not None and not isinstance(value, self.data_type):
value = self.data_type(value)
return value
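# Illustrative sketch of the coercion behavior above: assigning a plain value
# to a _CoercingProperty subclass stores a data_type instance, because
# validate() calls self.data_type(value) on anything that is not already that
# type.  'Bookmark' is a hypothetical kind using CategoryProperty (defined
# just below); the helper is never invoked by library code.
def _example_coercion():
  class Bookmark(Model):
    topic = CategoryProperty()
  b = Bookmark()
  b.topic = 'python'                     # a plain str is coerced...
  assert isinstance(b.topic, Category)   # ...into a Category instance
  return b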
class CategoryProperty(_CoercingProperty):
"""A property whose values are Category instances."""
data_type = Category
class LinkProperty(_CoercingProperty):
"""A property whose values are Link instances."""
def validate(self, value):
value = super(LinkProperty, self).validate(value)
if value is not None:
scheme, netloc, path, query, fragment = urlparse.urlsplit(value)
if not scheme or not netloc:
raise BadValueError('Property %s must be a full URL (\'%s\')' %
(self.name, value))
return value
data_type = Link
URLProperty = LinkProperty
class EmailProperty(_CoercingProperty):
"""A property whose values are Email instances."""
data_type = Email
class GeoPtProperty(_CoercingProperty):
"""A property whose values are GeoPt instances."""
data_type = GeoPt
class IMProperty(_CoercingProperty):
"""A property whose values are IM instances."""
data_type = IM
class PhoneNumberProperty(_CoercingProperty):
"""A property whose values are PhoneNumber instances."""
data_type = PhoneNumber
class PostalAddressProperty(_CoercingProperty):
"""A property whose values are PostalAddress instances."""
data_type = PostalAddress
class BlobProperty(UnindexedProperty):
"""A byte string that can be longer than 500 bytes."""
data_type = Blob
class ByteStringProperty(Property):
"""A short (<=500 bytes) byte string.
This type should be used for short binary values that need to be indexed. If
you do not require indexing (regardless of length), use BlobProperty instead.
"""
def validate(self, value):
"""Validate ByteString property.
Returns:
A valid value.
Raises:
BadValueError if property is not instance of 'ByteString'.
"""
if value is not None and not isinstance(value, ByteString):
try:
value = ByteString(value)
except TypeError, err:
raise BadValueError('Property %s must be convertible '
'to a ByteString instance (%s)' % (self.name, err))
value = super(ByteStringProperty, self).validate(value)
if value is not None and not isinstance(value, ByteString):
raise BadValueError('Property %s must be a ByteString instance'
% self.name)
return value
data_type = ByteString
class DateTimeProperty(Property):
"""The base class of all of our date/time properties.
We handle common operations, like converting between time tuples and
datetime instances.
"""
def __init__(self, verbose_name=None, auto_now=False, auto_now_add=False,
**kwds):
"""Construct a DateTimeProperty
Args:
verbose_name: Verbose name is always first parameter.
auto_now: Date/time property is updated with the current time every time
it is saved to the datastore. Useful for properties that want to track
the modification time of an instance.
      auto_now_add: Date/time is set to the current time when the instance is
        first created. Useful for properties that record the creation time of
        an entity.
"""
super(DateTimeProperty, self).__init__(verbose_name, **kwds)
self.auto_now = auto_now
self.auto_now_add = auto_now_add
def validate(self, value):
"""Validate datetime.
Returns:
A valid value.
Raises:
BadValueError if property is not instance of 'datetime'.
"""
value = super(DateTimeProperty, self).validate(value)
if value and not isinstance(value, self.data_type):
raise BadValueError('Property %s must be a %s' %
(self.name, self.data_type.__name__))
return value
def default_value(self):
"""Default value for datetime.
Returns:
      value of now() as appropriate to the date-time instance if auto_now
      or auto_now_add is set, else the user-configured default value.
"""
if self.auto_now or self.auto_now_add:
return self.now()
return Property.default_value(self)
def get_value_for_datastore(self, model_instance):
"""Get value from property to send to datastore.
Returns:
now() as appropriate to the date-time instance in the odd case where
auto_now is set to True, else the default implementation.
"""
if self.auto_now:
return self.now()
else:
return super(DateTimeProperty,
self).get_value_for_datastore(model_instance)
data_type = datetime.datetime
@staticmethod
def now():
"""Get now as a full datetime value.
Returns:
'now' as a whole timestamp, including both time and date.
"""
return datetime.datetime.now()
def _date_to_datetime(value):
"""Convert a date to a datetime for datastore storage.
Args:
value: A datetime.date object.
Returns:
A datetime object with time set to 0:00.
"""
assert isinstance(value, datetime.date)
return datetime.datetime(value.year, value.month, value.day)
def _time_to_datetime(value):
"""Convert a time to a datetime for datastore storage.
Args:
value: A datetime.time object.
Returns:
A datetime object with date set to 1970-01-01.
"""
assert isinstance(value, datetime.time)
return datetime.datetime(1970, 1, 1,
value.hour, value.minute, value.second,
value.microsecond)
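# Illustrative sketch of the padding the two helpers above perform: the
# datastore stores only full datetimes, so dates are padded with a midnight
# time and times with the fixed date 1970-01-01.  DateProperty and
# TimeProperty round-trip their values through these conversions.  The
# helper is never invoked by library code.
def _example_date_time_padding():
  assert (_date_to_datetime(datetime.date(2009, 7, 1)) ==
          datetime.datetime(2009, 7, 1, 0, 0))
  assert (_time_to_datetime(datetime.time(13, 30, 5)) ==
          datetime.datetime(1970, 1, 1, 13, 30, 5))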
class DateProperty(DateTimeProperty):
"""A date property, which stores a date without a time."""
@staticmethod
def now():
"""Get now as a date datetime value.
Returns:
'date' part of 'now' only.
"""
return datetime.datetime.now().date()
def validate(self, value):
"""Validate date.
Returns:
A valid value.
Raises:
BadValueError if property is not instance of 'date',
or if it is an instance of 'datetime' (which is a subclass
of 'date', but for all practical purposes a different type).
"""
value = super(DateProperty, self).validate(value)
if isinstance(value, datetime.datetime):
raise BadValueError('Property %s must be a %s, not a datetime' %
(self.name, self.data_type.__name__))
return value
def get_value_for_datastore(self, model_instance):
"""Get value from property to send to datastore.
We retrieve a datetime.date from the model instance and return a
datetime.datetime instance with the time set to zero.
See base class method documentation for details.
"""
value = super(DateProperty, self).get_value_for_datastore(model_instance)
if value is not None:
assert isinstance(value, datetime.date)
value = _date_to_datetime(value)
return value
def make_value_from_datastore(self, value):
"""Native representation of this property.
We receive a datetime.datetime retrieved from the entity and return
a datetime.date instance representing its date portion.
See base class method documentation for details.
"""
if value is not None:
assert isinstance(value, datetime.datetime)
value = value.date()
return value
data_type = datetime.date
class TimeProperty(DateTimeProperty):
"""A time property, which stores a time without a date."""
@staticmethod
def now():
"""Get now as a time datetime value.
Returns:
'time' part of 'now' only.
"""
return datetime.datetime.now().time()
def empty(self, value):
"""Is time property empty.
"0:0" (midnight) is not an empty value.
Returns:
True if value is None, else False.
"""
return value is None
def get_value_for_datastore(self, model_instance):
"""Get value from property to send to datastore.
We retrieve a datetime.time from the model instance and return a
datetime.datetime instance with the date set to 1/1/1970.
See base class method documentation for details.
"""
value = super(TimeProperty, self).get_value_for_datastore(model_instance)
if value is not None:
assert isinstance(value, datetime.time), repr(value)
value = _time_to_datetime(value)
return value
def make_value_from_datastore(self, value):
"""Native representation of this property.
We receive a datetime.datetime retrieved from the entity and return
    a datetime.time instance representing its time portion.
See base class method documentation for details.
"""
if value is not None:
assert isinstance(value, datetime.datetime)
value = value.time()
return value
data_type = datetime.time
class IntegerProperty(Property):
"""An integer property."""
def validate(self, value):
"""Validate integer property.
Returns:
A valid value.
Raises:
BadValueError if value is not an integer or long instance.
"""
value = super(IntegerProperty, self).validate(value)
if value is None:
return value
if not isinstance(value, (int, long)) or isinstance(value, bool):
raise BadValueError('Property %s must be an int or long, not a %s'
% (self.name, type(value).__name__))
if value < -0x8000000000000000 or value > 0x7fffffffffffffff:
raise BadValueError('Property %s must fit in 64 bits' % self.name)
return value
data_type = int
def empty(self, value):
"""Is integer property empty.
0 is not an empty value.
Returns:
True if value is None, else False.
"""
return value is None
class RatingProperty(_CoercingProperty, IntegerProperty):
"""A property whose values are Rating instances."""
data_type = Rating
class FloatProperty(Property):
"""A float property."""
def validate(self, value):
"""Validate float.
Returns:
A valid value.
Raises:
BadValueError if property is not instance of 'float'.
"""
value = super(FloatProperty, self).validate(value)
if value is not None and not isinstance(value, float):
raise BadValueError('Property %s must be a float' % self.name)
return value
data_type = float
def empty(self, value):
"""Is float property empty.
0.0 is not an empty value.
Returns:
True if value is None, else False.
"""
return value is None
class BooleanProperty(Property):
"""A boolean property."""
def validate(self, value):
"""Validate boolean.
Returns:
A valid value.
Raises:
BadValueError if property is not instance of 'bool'.
"""
value = super(BooleanProperty, self).validate(value)
if value is not None and not isinstance(value, bool):
raise BadValueError('Property %s must be a bool' % self.name)
return value
data_type = bool
def empty(self, value):
"""Is boolean property empty.
False is not an empty value.
Returns:
True if value is None, else False.
"""
return value is None
class UserProperty(Property):
"""A user property."""
def __init__(self,
verbose_name=None,
name=None,
required=False,
validator=None,
choices=None,
auto_current_user=False,
auto_current_user_add=False,
indexed=True):
"""Initializes this Property with the given options.
Note: this does *not* support the 'default' keyword argument.
Use auto_current_user_add=True instead.
Args:
verbose_name: User friendly name of property.
name: Storage name for property. By default, uses attribute name
as it is assigned in the Model sub-class.
required: Whether property is required.
validator: User provided method used for validation.
choices: User provided set of valid property values.
auto_current_user: If true, the value is set to the current user
each time the entity is written to the datastore.
auto_current_user_add: If true, the value is set to the current user
the first time the entity is written to the datastore.
indexed: Whether property is indexed.
"""
super(UserProperty, self).__init__(verbose_name, name,
required=required,
validator=validator,
choices=choices,
indexed=indexed)
self.auto_current_user = auto_current_user
self.auto_current_user_add = auto_current_user_add
def validate(self, value):
"""Validate user.
Returns:
A valid value.
Raises:
BadValueError if property is not instance of 'User'.
"""
value = super(UserProperty, self).validate(value)
if value is not None and not isinstance(value, users.User):
raise BadValueError('Property %s must be a User' % self.name)
return value
def default_value(self):
"""Default value for user.
Returns:
Value of users.get_current_user() if auto_current_user or
auto_current_user_add is set; else None. (But *not* the default
implementation, since we don't support the 'default' keyword
argument.)
"""
if self.auto_current_user or self.auto_current_user_add:
return users.get_current_user()
return None
def get_value_for_datastore(self, model_instance):
"""Get value from property to send to datastore.
Returns:
Value of users.get_current_user() if auto_current_user is set;
else the default implementation.
"""
if self.auto_current_user:
return users.get_current_user()
return super(UserProperty, self).get_value_for_datastore(model_instance)
data_type = users.User
class ListProperty(Property):
"""A property that stores a list of things.
This is a parameterized property; the parameter must be a valid
non-list data type, and all items must conform to this type.
"""
def __init__(self, item_type, verbose_name=None, default=None, **kwds):
"""Construct ListProperty.
Args:
item_type: Type for the list items; must be one of the allowed property
types.
verbose_name: Optional verbose name.
default: Optional default value; if omitted, an empty list is used.
**kwds: Optional additional keyword arguments, passed to base class.
Note that the only permissible value for 'required' is True.
"""
if item_type is str:
item_type = basestring
if not isinstance(item_type, type):
raise TypeError('Item type should be a type object')
if item_type not in _ALLOWED_PROPERTY_TYPES:
raise ValueError('Item type %s is not acceptable' % item_type.__name__)
if issubclass(item_type, (Blob, Text)):
self._require_parameter(kwds, 'indexed', False)
kwds['indexed'] = True
self._require_parameter(kwds, 'required', True)
if default is None:
default = []
self.item_type = item_type
super(ListProperty, self).__init__(verbose_name,
default=default,
**kwds)
def validate(self, value):
"""Validate list.
Returns:
A valid value.
Raises:
BadValueError if property is not a list whose items are instances of
the item_type given to the constructor.
"""
value = super(ListProperty, self).validate(value)
if value is not None:
if not isinstance(value, list):
raise BadValueError('Property %s must be a list' % self.name)
value = self.validate_list_contents(value)
return value
def validate_list_contents(self, value):
"""Validates that all items in the list are of the correct type.
Returns:
The validated list.
Raises:
      BadValueError if the list has items that are not instances of the
item_type given to the constructor.
"""
if self.item_type in (int, long):
item_type = (int, long)
else:
item_type = self.item_type
for item in value:
if not isinstance(item, item_type):
if item_type == (int, long):
raise BadValueError('Items in the %s list must all be integers.' %
self.name)
else:
raise BadValueError(
'Items in the %s list must all be %s instances' %
(self.name, self.item_type.__name__))
return value
def empty(self, value):
"""Is list property empty.
[] is not an empty value.
Returns:
      True if value is None, else False.
"""
return value is None
data_type = list
def default_value(self):
"""Default value for list.
Because the property supplied to 'default' is a static value,
that value must be shallow copied to prevent all fields with
default values from sharing the same instance.
Returns:
Copy of the default value.
"""
return list(super(ListProperty, self).default_value())
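# Without the shallow copy above, every entity of a model class would share
# the single default list instance, so mutating one entity's list would
# silently change all of the others.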
def get_value_for_datastore(self, model_instance):
"""Get value from property to send to datastore.
Returns:
validated list appropriate to save in the datastore.
"""
value = self.validate_list_contents(
super(ListProperty, self).get_value_for_datastore(model_instance))
if self.validator:
self.validator(value)
return value
class StringListProperty(ListProperty):
"""A property that stores a list of strings.
A shorthand for the most common type of ListProperty.
"""
def __init__(self, verbose_name=None, default=None, **kwds):
"""Construct StringListProperty.
Args:
verbose_name: Optional verbose name.
default: Optional default value; if omitted, an empty list is used.
**kwds: Optional additional keyword arguments, passed to ListProperty().
"""
super(StringListProperty, self).__init__(basestring,
verbose_name=verbose_name,
default=default,
**kwds)
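# A minimal usage sketch (illustrative only; the model below is an
# assumption, not part of this module):
#
#   class Recipe(Model):
#     ratings = ListProperty(int)      # every item must be an int/long
#     tags = StringListProperty()      # shorthand for ListProperty(basestring)
#
# Assigning a non-list, or a list with items of the wrong type, raises
# BadValueError via validate()/validate_list_contents() above.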
class ReferenceProperty(Property):
"""A property that represents a many-to-one reference to another model.
For example, a reference property in model A that refers to model B forms
a many-to-one relationship from A to B: every instance of A refers to a
single B instance, and every B instance can have many A instances refer
to it.
"""
def __init__(self,
reference_class=None,
verbose_name=None,
collection_name=None,
**attrs):
"""Construct ReferenceProperty.
Args:
reference_class: Which model class this property references.
verbose_name: User friendly name of property.
collection_name: If provided, alternate name of collection on
reference_class to store back references. Use this to allow
a Model to have multiple fields which refer to the same class.
"""
super(ReferenceProperty, self).__init__(verbose_name, **attrs)
self.collection_name = collection_name
if reference_class is None:
reference_class = Model
if not ((isinstance(reference_class, type) and
issubclass(reference_class, Model)) or
reference_class is _SELF_REFERENCE):
raise KindError('reference_class must be Model or _SELF_REFERENCE')
self.reference_class = self.data_type = reference_class
def __property_config__(self, model_class, property_name):
"""Loads all of the references that point to this model.
We need to do this to create the ReverseReferenceProperty properties for
this model and create the <reference>_set attributes on the referenced
model, e.g.:
class Story(db.Model):
title = db.StringProperty()
class Comment(db.Model):
story = db.ReferenceProperty(Story)
story = Story.get(id)
print [c for c in story.comment_set]
In this example, the comment_set property was created based on the reference
from Comment to Story (which is inherently one to many).
Args:
model_class: Model class which will have its reference properties
initialized.
property_name: Name of property being configured.
Raises:
DuplicatePropertyError if referenced class already has the provided
collection name as a property.
"""
super(ReferenceProperty, self).__property_config__(model_class,
property_name)
if self.reference_class is _SELF_REFERENCE:
self.reference_class = self.data_type = model_class
if self.collection_name is None:
self.collection_name = '%s_set' % (model_class.__name__.lower())
existing_prop = getattr(self.reference_class, self.collection_name, None)
if existing_prop is not None:
if not (isinstance(existing_prop, _ReverseReferenceProperty) and
existing_prop._prop_name == property_name and
existing_prop._model.__name__ == model_class.__name__ and
existing_prop._model.__module__ == model_class.__module__):
raise DuplicatePropertyError('Class %s already has property %s '
% (self.reference_class.__name__,
self.collection_name))
setattr(self.reference_class,
self.collection_name,
_ReverseReferenceProperty(model_class, property_name))
def __get__(self, model_instance, model_class):
"""Get reference object.
This method will fetch unresolved entities from the datastore if
they are not already loaded.
Returns:
ReferenceProperty to Model object if property is set, else None.
"""
if model_instance is None:
return self
if hasattr(model_instance, self.__id_attr_name()):
reference_id = getattr(model_instance, self.__id_attr_name())
else:
reference_id = None
if reference_id is not None:
resolved = getattr(model_instance, self.__resolved_attr_name())
if resolved is not None:
return resolved
else:
instance = get(reference_id)
if instance is None:
raise Error('ReferenceProperty failed to be resolved')
setattr(model_instance, self.__resolved_attr_name(), instance)
return instance
else:
return None
def __set__(self, model_instance, value):
"""Set reference."""
value = self.validate(value)
if value is not None:
if isinstance(value, datastore.Key):
setattr(model_instance, self.__id_attr_name(), value)
setattr(model_instance, self.__resolved_attr_name(), None)
else:
setattr(model_instance, self.__id_attr_name(), value.key())
setattr(model_instance, self.__resolved_attr_name(), value)
else:
setattr(model_instance, self.__id_attr_name(), None)
setattr(model_instance, self.__resolved_attr_name(), None)
def get_value_for_datastore(self, model_instance):
"""Get key of reference rather than reference itself."""
return getattr(model_instance, self.__id_attr_name())
def validate(self, value):
"""Validate reference.
Returns:
A valid value.
Raises:
BadValueError for the following reasons:
- Value is not saved.
- Object not of correct model type for reference.
"""
if isinstance(value, datastore.Key):
return value
if value is not None and not value.has_key():
raise BadValueError(
'%s instance must have a complete key before it can be stored as a '
'reference' % self.reference_class.kind())
value = super(ReferenceProperty, self).validate(value)
if value is not None and not isinstance(value, self.reference_class):
raise KindError('Property %s must be an instance of %s' %
(self.name, self.reference_class.kind()))
return value
def __id_attr_name(self):
"""Get attribute of referenced id.
Returns:
Attribute where to store id of referenced entity.
"""
return self._attr_name()
def __resolved_attr_name(self):
"""Get attribute of resolved attribute.
The resolved attribute is where the actual loaded reference instance is
stored on the referring model instance.
Returns:
Attribute name of where to store resolved reference model instance.
"""
return '_RESOLVED' + self._attr_name()
Reference = ReferenceProperty
def SelfReferenceProperty(verbose_name=None, collection_name=None, **attrs):
"""Create a self reference.
Function for declaring a self referencing property on a model.
Example:
class HtmlNode(db.Model):
parent = db.SelfReferenceProperty('Parent', 'children')
Args:
verbose_name: User friendly name of property.
collection_name: Name of collection on model.
Raises:
ConfigurationError if reference_class provided as parameter.
"""
if 'reference_class' in attrs:
raise ConfigurationError(
'Do not provide reference_class to self-reference.')
return ReferenceProperty(_SELF_REFERENCE,
verbose_name,
collection_name,
**attrs)
SelfReference = SelfReferenceProperty
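# A minimal usage sketch tying the two reference flavours together
# (illustrative only; the models are assumptions, not part of this module):
#
#   class Story(Model):
#     title = StringProperty()
#
#   class Comment(Model):
#     # Automatically creates Story.comment_set (see __property_config__).
#     story = ReferenceProperty(Story)
#
#   class HtmlNode(Model):
#     # Self reference; back-references appear as node.children.
#     parent = SelfReferenceProperty('Parent', 'children')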
class _ReverseReferenceProperty(Property):
"""The inverse of the Reference property above.
We construct reverse references automatically for the model to which
the Reference property is pointing to create the one-to-many property for
that model. For example, if you put a Reference property in model A that
refers to model B, we automatically create a _ReverseReference property in
B called a_set that can fetch all of the model A instances that refer to
that instance of model B.
"""
def __init__(self, model, prop):
"""Constructor for reverse reference.
Constructor does not take standard values of other property types.
Args:
model: Model class that this property is a collection of.
prop: Name of the foreign property on the referred model that points back
to this property's entity.
"""
self.__model = model
self.__property = prop
@property
def _model(self):
"""Internal helper to access the model class, read-only."""
return self.__model
@property
def _prop_name(self):
"""Internal helper to access the property name, read-only."""
return self.__property
def __get__(self, model_instance, model_class):
"""Fetches collection of model instances of this collection property."""
if model_instance is not None:
query = Query(self.__model)
return query.filter(self.__property + ' =', model_instance.key())
else:
return self
def __set__(self, model_instance, value):
"""Not possible to set a new collection."""
raise BadValueError('Virtual property is read-only')
run_in_transaction = datastore.RunInTransaction
run_in_transaction_custom_retries = datastore.RunInTransactionCustomRetries
RunInTransaction = run_in_transaction
RunInTransactionCustomRetries = run_in_transaction_custom_retries<|fim▁end|> | #!/usr/bin/env python
#
# Copyright 2007 Google Inc. |
<|file_name|>mecabsegmenter.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""MeCab based Segmenter.
Word segmenter module powered by `MeCab <https://github.com/taku910/mecab>`_.
You need to install MeCab to use this segmenter.
The easiest way to install MeCab is to run :code:`make install-mecab`. The
script will download the source code from GitHub and build the tool. It also sets up
`IPAdic <https://ja.osdn.net/projects/ipadic/>`_, a standard dictionary for
Japanese.
"""
import logging
import sys
import six
from .segmenter import Segmenter
from .chunk import Chunk, ChunkList
_DEPENDENT_POS_FORWARD = set()
_DEPENDENT_POS_BACKWARD = {u'助詞', u'助動詞'}
_DEPENDENT_LABEL_FORWARD = set()
_DEPENDENT_LABEL_BACKWARD = {u'非自立'}
class MecabSegmenter(Segmenter):
"""MeCab Segmenter.<|fim▁hole|> supported_languages (list of str): List of supported languages' codes.
"""
supported_languages = {'ja'}
def __init__(self):
try:
import MeCab
self.tagger = MeCab.Tagger('-Ochasen')
except ImportError:
logging.error(
('mecab-python3 is not installed. Install the module by running '
'`$ pip install mecab-python3`. If MeCab is not installed in your '
'system yet, run `$ make install-mecab` instead.'))
sys.exit(1)
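# A minimal usage sketch (hypothetical input; assumes MeCab and IPAdic are
# installed as described in the module docstring):
#
#   segmenter = MecabSegmenter()
#   chunks = segmenter.segment(u'今日はいい天気ですね。', language='ja')
#   # `chunks` is a ChunkList whose Chunk elements carry the surface word,
#   # part of speech, label, and a dependency flag resolved in segment().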
def segment(self, source, language=None):
"""Returns a chunk list from the given sentence.
Args:
source (str): Source string to segment.
language (str, optional): A language code.
Returns:
A chunk list. (:obj:`budou.chunk.ChunkList`)
Raises:
ValueError: If :code:`language` is given and it is not included in
:code:`supported_languages`.
"""
if language and language not in self.supported_languages:
raise ValueError(
'Language {} is not supported by MeCab segmenter'.format(language))
chunks = ChunkList()
seek = 0
source_str = source.encode('utf-8') if six.PY2 else source
results = self.tagger.parse(source_str).split('\n')[:-2]
for row in results:
if six.PY2:
row = row.decode('utf-8')
token = row.split('\t')
word = token[0]
labels = token[3].split('-')
pos = labels[0]
label = labels[1] if len(labels) > 1 else None
if source[seek: seek + len(word)] != word:
assert source[seek] == ' '
assert source[seek + 1: seek + len(word) + 1] == word
chunks.append(Chunk.space())
seek += 1
dependency = None
if pos in _DEPENDENT_POS_FORWARD:
dependency = True
elif pos in _DEPENDENT_POS_BACKWARD:
dependency = False
elif label in _DEPENDENT_LABEL_FORWARD:
dependency = True
elif label in _DEPENDENT_LABEL_BACKWARD:
dependency = False
chunk = Chunk(word, pos=pos, label=label, dependency=dependency)
if chunk.is_punct():
chunk.dependency = chunk.is_open_punct()
chunks.append(chunk)
seek += len(word)
chunks.resolve_dependencies()
return chunks<|fim▁end|> |
Attributes:
tagger (MeCab.Tagger): MeCab Tagger to parse the input sentence. |
<|file_name|>test_full_coinjoin.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import * # noqa: F401
'''Runs a full joinmarket pit (using `nirc` miniircd servers,
with `nirc` options specified as an option to pytest), in
bitcoin regtest mode with 3 maker bots and 1 taker bot,
and does 1 coinjoin. This is intended as an E2E sanity check
but certainly could be extended further.
'''
from common import make_wallets
import pytest
import sys
from jmclient import YieldGeneratorBasic, load_test_config, jm_single,\
sync_wallet, JMClientProtocolFactory, start_reactor, Taker, \
random_under_max_order_choose
from jmbase.support import get_log
from twisted.internet import reactor
from twisted.python.log import startLogging
log = get_log()
# Note that this parametrization is inherited (i.e. copied) from
# the previous 'ygrunner.py' script which is intended to be run
# manually to test out complex scenarios. Here, we only run one
# simple test with honest makers (and for simplicity malicious
# makers are not included in the code). Vars are left in in case
# we want to do more complex stuff in the automated tests later.
@pytest.mark.parametrize(
"num_ygs, wallet_structures, mean_amt, malicious, deterministic",
[
# 1sp 3yg, honest makers
(3, [[1, 3, 0, 0, 0]] * 4, 2, 0, False),
])
def test_cj(setup_full_coinjoin, num_ygs, wallet_structures, mean_amt,
malicious, deterministic):
"""Starts by setting up wallets for maker and taker bots; then,
instantiates a single taker with the final wallet.
The remaining wallets are used to set up YieldGenerators (basic form).
All the wallets are given coins according to the rules of make_wallets,
using the parameters for the values.
The final start_reactor call is the only one that actually starts the
reactor; the others only set up protocol instances.
Inline are custom callbacks for the Taker, and these are basically
copies of those in the `sendpayment.py` script for now, but they could
be customized later for testing.
The Taker's schedule is a single coinjoin, using basically random values,
again this could be easily edited or parametrized if we feel like it.
"""
# Set up some wallets, for the ygs and 1 sp.
wallets = make_wallets(num_ygs + 1,
wallet_structures=wallet_structures,
mean_amt=mean_amt)
# the sendpayment bot uses the last wallet in the list
wallet = wallets[num_ygs]['wallet']
sync_wallet(wallet, fast=True)
# grab a dest addr from the wallet
destaddr = wallet.get_external_addr(4)
coinjoin_amt = 20000000
schedule = [[1, coinjoin_amt, 2, destaddr,
0.0, False]]
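# Schedule entry layout (a rough reading of the values above, stated as an
# assumption rather than authoritative): [mixdepth, amount, number of maker
# counterparties, destination address, wait time, completion flag]; i.e.
# spend from mixdepth 1 with 2 makers to destaddr.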
""" The following two callback functions are as simple as possible
modifications of the same in scripts/sendpayment.py
"""
def filter_orders_callback(orders_fees, cjamount):
return True
def taker_finished(res, fromtx=False, waittime=0.0, txdetails=None):
def final_checks():
sync_wallet(wallet, fast=True)
newbal = wallet.get_balance_by_mixdepth()[4]
oldbal = wallet.get_balance_by_mixdepth()[1]
# These are our check that the coinjoin succeeded
assert newbal == coinjoin_amt
# TODO: parametrize these; cj fees = 38K (.001 x 20M x 2 makers)
# minus 1K tx fee contribution each; 600M is original balance
# in mixdepth 1
assert oldbal + newbal + (40000 - 2000) + taker.total_txfee == 600000000
if fromtx == "unconfirmed":
#If final entry, stop *here*, don't wait for confirmation
if taker.schedule_index + 1 == len(taker.schedule):
reactor.stop()
final_checks()
return
if fromtx:
# currently this test uses a schedule with only one entry
assert False, "taker_finished was called with fromtx=True"
reactor.stop()
return
else:
if not res:
assert False, "Did not complete successfully, shutting down"
# Note that this is required in both conditional branches,
# especially in testing, because it's possible to receive the
# confirmed callback before the unconfirmed.
reactor.stop()
final_checks()
# twisted logging is required for debugging:
startLogging(sys.stdout)
taker = Taker(wallet,
schedule,
order_chooser=random_under_max_order_choose,
max_cj_fee=(0.1, 200),
callbacks=(filter_orders_callback, None, taker_finished))
clientfactory = JMClientProtocolFactory(taker)
nodaemon = jm_single().config.getint("DAEMON", "no_daemon")
daemon = True if nodaemon == 1 else False
start_reactor(jm_single().config.get("DAEMON", "daemon_host"),
jm_single().config.getint("DAEMON", "daemon_port"),
clientfactory, daemon=daemon, rs=False)
txfee = 1000
cjfee_a = 4200
cjfee_r = '0.001'
ordertype = 'swreloffer'
minsize = 100000
ygclass = YieldGeneratorBasic
# As noted above, this is not currently used but can be in future:
if malicious or deterministic:
raise NotImplementedError
for i in range(num_ygs):
cfg = [txfee, cjfee_a, cjfee_r, ordertype, minsize]
sync_wallet(wallets[i]["wallet"], fast=True)
yg = ygclass(wallets[i]["wallet"], cfg)
if malicious:
yg.set_maliciousness(malicious, mtype="tx")
clientfactory = JMClientProtocolFactory(yg, proto_type="MAKER")
nodaemon = jm_single().config.getint("DAEMON", "no_daemon")
daemon = True if nodaemon == 1 else False<|fim▁hole|> jm_single().config.getint("DAEMON", "daemon_port"),
clientfactory, daemon=daemon, rs=rs)
@pytest.fixture(scope="module")
def setup_full_coinjoin():
load_test_config()
jm_single().bc_interface.tick_forward_chain_interval = 10
jm_single().bc_interface.simulate_blocks()<|fim▁end|> | # As noted above, only the final start_reactor() call will
# actually start it!
rs = True if i == num_ygs - 1 else False
start_reactor(jm_single().config.get("DAEMON", "daemon_host"), |
<|file_name|>StateMachine.java<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.hdodenhof.androidstatemachine;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.text.TextUtils;
import android.util.Log;
import java.io.FileDescriptor;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.HashMap;
import java.util.Vector;
/**
* <p>The state machine defined here is a hierarchical state machine which processes messages
* and can have states arranged hierarchically.</p>
*
* <p>A state is a <code>State</code> object and must implement
* <code>processMessage</code> and optionally <code>enter/exit/getName</code>.
* The enter/exit methods are equivalent to the construction and destruction
* in Object Oriented programming and are used to perform initialization and
* cleanup of the state respectively. The <code>getName</code> method returns the
 * name of the state; the default implementation returns the class name. It may be
 * desirable to have this return the name of the state instance instead,
 * in particular if a particular state class has multiple instances.</p>
*
* <p>When a state machine is created <code>addState</code> is used to build the
* hierarchy and <code>setInitialState</code> is used to identify which of these
* is the initial state. After construction the programmer calls <code>start</code>
 * which initializes and starts the state machine. The first action of the StateMachine
 * is to invoke <code>enter</code> for all of the initial state's hierarchy,
* starting at its eldest parent. The calls to enter will be done in the context
* of the StateMachines Handler not in the context of the call to start and they
* will be invoked before any messages are processed. For example, given the simple
* state machine below mP1.enter will be invoked and then mS1.enter. Finally,
* messages sent to the state machine will be processed by the current state,
* in our simple state machine below that would initially be mS1.processMessage.</p>
<code>
mP1
/ \
mS2 mS1 ----> initial state
</code>
* <p>After the state machine is created and started, messages are sent to a state
* machine using <code>sendMessage</code> and the messages are created using
* <code>obtainMessage</code>. When the state machine receives a message the
* current state's <code>processMessage</code> is invoked. In the above example
* mS1.processMessage will be invoked first. The state may use <code>transitionTo</code>
* to change the current state to a new state</p>
*
* <p>Each state in the state machine may have a zero or one parent states and if
* a child state is unable to handle a message it may have the message processed
* by its parent by returning false or NOT_HANDLED. If a message is never processed
* <code>unhandledMessage</code> will be invoked to give one last chance for the state machine
* to process the message.</p>
*
* <p>When all processing is completed a state machine may choose to call
 * <code>transitionToHaltingState</code>. When the current <code>processMessage</code>
* returns the state machine will transfer to an internal <code>HaltingState</code>
* and invoke <code>halting</code>. Any message subsequently received by the state
* machine will cause <code>haltedProcessMessage</code> to be invoked.</p>
*
* <p>If it is desirable to completely stop the state machine call <code>quit</code> or
* <code>quitNow</code>. These will call <code>exit</code> of the current state and its parents,
 * call <code>onQuitting</code> and then exit Thread/Loopers.</p>
*
* <p>In addition to <code>processMessage</code> each <code>State</code> has
* an <code>enter</code> method and <code>exit</code> method which may be overridden.</p>
*
* <p>Since the states are arranged in a hierarchy transitioning to a new state
* causes current states to be exited and new states to be entered. To determine
* the list of states to be entered/exited the common parent closest to
* the current state is found. We then exit from the current state and its
* parent's up to but not including the common parent state and then enter all
* of the new states below the common parent down to the destination state.
* If there is no common parent all states are exited and then the new states
* are entered.</p>
*
* <p>Two other methods that states can use are <code>deferMessage</code> and
* <code>sendMessageAtFrontOfQueue</code>. The <code>sendMessageAtFrontOfQueue</code> sends
* a message but places it on the front of the queue rather than the back. The
* <code>deferMessage</code> causes the message to be saved on a list until a
* transition is made to a new state. At which time all of the deferred messages
* will be put on the front of the state machine queue with the oldest message
* at the front. These will then be processed by the new current state before
* any other messages that are on the queue or might be added later. Both of
* these are protected and may only be invoked from within a state machine.</p>
*
* <p>To illustrate some of these properties we'll use state machine with an 8
* state hierarchy:</p>
<code>
mP0
/ \
mP1 mS0
/ \
mS2 mS1
/ \ \
mS3 mS4 mS5 ---> initial state
</code>
* <p>After starting mS5 the list of active states is mP0, mP1, mS1 and mS5.
* So the order of calling processMessage when a message is received is mS5,
* mS1, mP1, mP0 assuming each processMessage indicates it can't handle this
* message by returning false or NOT_HANDLED.</p>
*
* <p>Now assume mS5.processMessage receives a message it can handle, and during
* the handling determines the machine should change states. It could call
* transitionTo(mS4) and return true or HANDLED. Immediately after returning from
* processMessage the state machine runtime will find the common parent,
* which is mP1. It will then call mS5.exit, mS1.exit, mS2.enter and then
* mS4.enter. The new list of active states is mP0, mP1, mS2 and mS4. So
* when the next message is received mS4.processMessage will be invoked.</p>
*
* <p>Now for some concrete examples, here is the canonical HelloWorld as a state machine.
* It responds with "Hello World" being printed to the log for every message.</p>
<code>
class HelloWorld extends StateMachine {
HelloWorld(String name) {
super(name);
addState(mState1);
setInitialState(mState1);
}
public static HelloWorld makeHelloWorld() {
HelloWorld hw = new HelloWorld("hw");
hw.start();
return hw;
}
class State1 extends State {
@Override public boolean processMessage(Message message) {
log("Hello World");
return HANDLED;
}
}
State1 mState1 = new State1();
}
void testHelloWorld() {
HelloWorld hw = makeHelloWorld();
hw.sendMessage(hw.obtainMessage());
}
</code>
* <p>A more interesting state machine is one with four states
* with two independent parent states.</p>
<code>
mP1 mP2
/ \
mS2 mS1
</code>
* <p>Here is a description of this state machine using pseudo code.</p>
<code>
state mP1 {
enter { log("mP1.enter"); }
exit { log("mP1.exit"); }
on msg {
CMD_2 {
send(CMD_3);
defer(msg);
transitionTo(mS2);
return HANDLED;
}
return NOT_HANDLED;
}
}
INITIAL
state mS1 parent mP1 {
enter { log("mS1.enter"); }
exit { log("mS1.exit"); }
on msg {
CMD_1 {
transitionTo(mS1);
return HANDLED;
}
return NOT_HANDLED;
}
}
state mS2 parent mP1 {
enter { log("mS2.enter"); }
exit { log("mS2.exit"); }
on msg {
CMD_2 {
send(CMD_4);
return HANDLED;
}
CMD_3 {
defer(msg);
transitionTo(mP2);
return HANDLED;
}
return NOT_HANDLED;
}
}
state mP2 {
enter {
log("mP2.enter");
send(CMD_5);
}
exit { log("mP2.exit"); }
on msg {
CMD_3, CMD_4 { return HANDLED; }
CMD_5 {
transitionTo(HaltingState);
return HANDLED;
}
return NOT_HANDLED;
}
}
</code>
* <p>The implementation is below and also in StateMachineTest:</p>
<code>
class Hsm1 extends StateMachine {
public static final int CMD_1 = 1;
public static final int CMD_2 = 2;
public static final int CMD_3 = 3;
public static final int CMD_4 = 4;
public static final int CMD_5 = 5;
public static Hsm1 makeHsm1() {
log("makeHsm1 E");
Hsm1 sm = new Hsm1("hsm1");
sm.start();
log("makeHsm1 X");
return sm;
}
Hsm1(String name) {
super(name);
log("ctor E");
// Add states, use indentation to show hierarchy
addState(mP1);
addState(mS1, mP1);
addState(mS2, mP1);
addState(mP2);
// Set the initial state
setInitialState(mS1);
log("ctor X");
}
class P1 extends State {
@Override public void enter() {
log("mP1.enter");
}
@Override public boolean processMessage(Message message) {
boolean retVal;
log("mP1.processMessage what=" + message.what);
switch(message.what) {
case CMD_2:
// CMD_2 will arrive in mS2 before CMD_3
sendMessage(obtainMessage(CMD_3));
deferMessage(message);
transitionTo(mS2);
retVal = HANDLED;
break;
default:
// Any message we don't understand in this state invokes unhandledMessage
retVal = NOT_HANDLED;
break;
}
return retVal;
}
@Override public void exit() {
log("mP1.exit");
}
}
class S1 extends State {
@Override public void enter() {
log("mS1.enter");
}
@Override public boolean processMessage(Message message) {
log("S1.processMessage what=" + message.what);
if (message.what == CMD_1) {
// Transition to ourself to show that enter/exit is called
transitionTo(mS1);
return HANDLED;
} else {
// Let parent process all other messages
return NOT_HANDLED;
}
}
@Override public void exit() {
log("mS1.exit");
}
}
class S2 extends State {
@Override public void enter() {
log("mS2.enter");
}
@Override public boolean processMessage(Message message) {
boolean retVal;
log("mS2.processMessage what=" + message.what);
switch(message.what) {
case(CMD_2):
sendMessage(obtainMessage(CMD_4));
retVal = HANDLED;
break;
case(CMD_3):
deferMessage(message);
transitionTo(mP2);
retVal = HANDLED;
break;
default:
retVal = NOT_HANDLED;
break;
}
return retVal;
}
@Override public void exit() {
log("mS2.exit");
}
}
class P2 extends State {
@Override public void enter() {
log("mP2.enter");
sendMessage(obtainMessage(CMD_5));
}
@Override public boolean processMessage(Message message) {
log("P2.processMessage what=" + message.what);
switch(message.what) {
case(CMD_3):
break;
case(CMD_4):
break;
case(CMD_5):
transitionToHaltingState();
break;
}
return HANDLED;
}
@Override public void exit() {
log("mP2.exit");
}
}
@Override
void onHalting() {
log("halting");
synchronized (this) {
this.notifyAll();
}
}
P1 mP1 = new P1();
S1 mS1 = new S1();
S2 mS2 = new S2();
P2 mP2 = new P2();
}
</code>
* <p>If this is executed by sending two messages CMD_1 and CMD_2
 * (Note the synchronized block is only needed because we use hsm.wait())</p>
<code>
Hsm1 hsm = makeHsm1();
synchronized(hsm) {
hsm.sendMessage(obtainMessage(hsm.CMD_1));
hsm.sendMessage(obtainMessage(hsm.CMD_2));
try {
// wait for the messages to be handled
hsm.wait();
} catch (InterruptedException e) {
loge("exception while waiting " + e.getMessage());
}
}
</code>
* <p>The output is:</p>
<code>
D/hsm1 ( 1999): makeHsm1 E
D/hsm1 ( 1999): ctor E
D/hsm1 ( 1999): ctor X
D/hsm1 ( 1999): mP1.enter
D/hsm1 ( 1999): mS1.enter
D/hsm1 ( 1999): makeHsm1 X
D/hsm1 ( 1999): mS1.processMessage what=1
D/hsm1 ( 1999): mS1.exit
D/hsm1 ( 1999): mS1.enter
D/hsm1 ( 1999): mS1.processMessage what=2
D/hsm1 ( 1999): mP1.processMessage what=2
D/hsm1 ( 1999): mS1.exit
D/hsm1 ( 1999): mS2.enter
D/hsm1 ( 1999): mS2.processMessage what=2
D/hsm1 ( 1999): mS2.processMessage what=3
D/hsm1 ( 1999): mS2.exit
D/hsm1 ( 1999): mP1.exit
D/hsm1 ( 1999): mP2.enter
D/hsm1 ( 1999): mP2.processMessage what=3
D/hsm1 ( 1999): mP2.processMessage what=4
D/hsm1 ( 1999): mP2.processMessage what=5
D/hsm1 ( 1999): mP2.exit
D/hsm1 ( 1999): halting
</code>
*/
public class StateMachine {
// Name of the state machine and used as logging tag
private String mName;
/** Message.what value when quitting */
private static final int SM_QUIT_CMD = -1;
/** Message.what value when initializing */
private static final int SM_INIT_CMD = -2;
/**
 * Convenience constant that may be returned by processMessage
 * to indicate that the message was processed and is not to be
* processed by parent states
*/
public static final boolean HANDLED = true;
/**
 * Convenience constant that may be returned by processMessage
 * to indicate that the message was NOT processed and is to be
* processed by parent states
*/
public static final boolean NOT_HANDLED = false;
/**
* StateMachine logging record.
*/
public static class LogRec {
private StateMachine mSm;
private long mTime;
private int mWhat;
private String mInfo;
private IState mState;
private IState mOrgState;
private IState mDstState;
/**
* Constructor
*<|fim▁hole|> * @param state the state which handled the message
 * @param orgState is the first state that received the message but
 * did not process the message.
* @param transToState is the state that was transitioned to after the message was
* processed.
*/
LogRec(StateMachine sm, Message msg, String info, IState state, IState orgState,
IState transToState) {
update(sm, msg, info, state, orgState, transToState);
}
/**
* Update the information in the record.
* @param state that handled the message
 * @param orgState is the first state that received the message
* @param dstState is the state that was the transition target when logging
*/
public void update(StateMachine sm, Message msg, String info, IState state, IState orgState,
IState dstState) {
mSm = sm;
mTime = System.currentTimeMillis();
mWhat = (msg != null) ? msg.what : 0;
mInfo = info;
mState = state;
mOrgState = orgState;
mDstState = dstState;
}
/**
* @return time stamp
*/
public long getTime() {
return mTime;
}
/**
* @return msg.what
*/
public long getWhat() {
return mWhat;
}
/**
* @return the command that was executing
*/
public String getInfo() {
return mInfo;
}
/**
* @return the state that handled this message
*/
public IState getState() {
return mState;
}
/**
* @return the state destination state if a transition is occurring or null if none.
*/
public IState getDestState() {
return mDstState;
}
/**
* @return the original state that received the message.
*/
public IState getOriginalState() {
return mOrgState;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("time=");
Calendar c = Calendar.getInstance();
c.setTimeInMillis(mTime);
sb.append(String.format("%tm-%td %tH:%tM:%tS.%tL", c, c, c, c, c, c));
sb.append(" processed=");
sb.append(mState == null ? "<null>" : mState.getName());
sb.append(" org=");
sb.append(mOrgState == null ? "<null>" : mOrgState.getName());
sb.append(" dest=");
sb.append(mDstState == null ? "<null>" : mDstState.getName());
sb.append(" what=");
String what = mSm != null ? mSm.getWhatToString(mWhat) : "";
if (TextUtils.isEmpty(what)) {
sb.append(mWhat);
sb.append("(0x");
sb.append(Integer.toHexString(mWhat));
sb.append(")");
} else {
sb.append(what);
}
if (!TextUtils.isEmpty(mInfo)) {
sb.append(" ");
sb.append(mInfo);
}
return sb.toString();
}
}
/**
* A list of log records including messages recently processed by the state machine.
*
* The class maintains a list of log records including messages
* recently processed. The list is finite and may be set in the
* constructor or by calling setSize. The public interface also
* includes size which returns the number of recent records,
* count which is the number of records processed since the
 * last setSize, get which returns a record and
* add which adds a record.
*/
private static class LogRecords {
private static final int DEFAULT_SIZE = 20;
private Vector<LogRec> mLogRecVector = new Vector<LogRec>();
private int mMaxSize = DEFAULT_SIZE;
private int mOldestIndex = 0;
private int mCount = 0;
private boolean mLogOnlyTransitions = false;
/**
* private constructor use add
*/
private LogRecords() {
}
/**
 * Set the size of messages to maintain and clear all current records.
 *
 * @param maxSize number of records to maintain at any one time.
*/
synchronized void setSize(int maxSize) {
mMaxSize = maxSize;
mCount = 0;
mLogRecVector.clear();
}
synchronized void setLogOnlyTransitions(boolean enable) {
mLogOnlyTransitions = enable;
}
synchronized boolean logOnlyTransitions() {
return mLogOnlyTransitions;
}
/**
* @return the number of recent records.
*/
synchronized int size() {
return mLogRecVector.size();
}
/**
* @return the total number of records processed since size was set.
*/
synchronized int count() {
return mCount;
}
/**
* Clear the list of records.
*/
synchronized void cleanup() {
mLogRecVector.clear();
}
/**
* @return the information on a particular record. 0 is the oldest
 * record and size()-1 is the newest record. If the index is too
 * large, null is returned.
*/
synchronized LogRec get(int index) {
int nextIndex = mOldestIndex + index;
if (nextIndex >= mMaxSize) {
nextIndex -= mMaxSize;
}
if (nextIndex >= size()) {
return null;
} else {
return mLogRecVector.get(nextIndex);
}
}
/**
* Add a processed message.
*
* @param msg
* @param messageInfo to be stored
* @param state that handled the message
 * @param orgState is the first state that received the message but
 * did not process the message.
* @param transToState is the state that was transitioned to after the message was
* processed.
*
*/
synchronized void add(StateMachine sm, Message msg, String messageInfo, IState state,
IState orgState, IState transToState) {
mCount += 1;
if (mLogRecVector.size() < mMaxSize) {
mLogRecVector.add(new LogRec(sm, msg, messageInfo, state, orgState, transToState));
} else {
LogRec pmi = mLogRecVector.get(mOldestIndex);
mOldestIndex += 1;
if (mOldestIndex >= mMaxSize) {
mOldestIndex = 0;
}
pmi.update(sm, msg, messageInfo, state, orgState, transToState);
}
}
}
private static class SmHandler extends Handler {
/** true if StateMachine has quit */
private boolean mHasQuit = false;
/** The debug flag */
private boolean mDbg = false;
/** The SmHandler object, identifies that message is internal */
private static final Object mSmHandlerObj = new Object();
/** The current message */
private Message mMsg;
/** A list of log records including messages this state machine has processed */
private LogRecords mLogRecords = new LogRecords();
/** true if construction of the state machine has not been completed */
private boolean mIsConstructionCompleted;
/** Stack used to manage the current hierarchy of states */
private StateInfo mStateStack[];
/** Top of mStateStack */
private int mStateStackTopIndex = -1;
/** A temporary stack used to manage the state stack */
private StateInfo mTempStateStack[];
/** The top of the mTempStateStack */
private int mTempStateStackCount;
/** State used when state machine is halted */
private HaltingState mHaltingState = new HaltingState();
/** State used when state machine is quitting */
private QuittingState mQuittingState = new QuittingState();
/** Reference to the StateMachine */
private StateMachine mSm;
/**
* Information about a state.
* Used to maintain the hierarchy.
*/
private class StateInfo {
/** The state */
State state;
/** The parent of this state, null if there is no parent */
StateInfo parentStateInfo;
/** True when the state has been entered and on the stack */
boolean active;
/**
* Convert StateInfo to string
*/
@Override
public String toString() {
return "state=" + state.getName() + ",active=" + active + ",parent="
+ ((parentStateInfo == null) ? "null" : parentStateInfo.state.getName());
}
}
/** The map of all of the states in the state machine */
private HashMap<State, StateInfo> mStateInfo = new HashMap<State, StateInfo>();
/** The initial state that will process the first message */
private State mInitialState;
/** The destination state when transitionTo has been invoked */
private State mDestState;
/** The list of deferred messages */
private ArrayList<Message> mDeferredMessages = new ArrayList<Message>();
/**
* State entered when transitionToHaltingState is called.
*/
private class HaltingState extends State {
@Override
public boolean processMessage(Message msg) {
mSm.haltedProcessMessage(msg);
return true;
}
}
/**
* State entered when a valid quit message is handled.
*/
private class QuittingState extends State {
@Override
public boolean processMessage(Message msg) {
return NOT_HANDLED;
}
}
/**
* Handle messages sent to the state machine by calling
* the current state's processMessage. It also handles
* the enter/exit calls and placing any deferred messages
* back onto the queue when transitioning to a new state.
*/
@Override
public final void handleMessage(Message msg) {
if (!mHasQuit) {
if (mDbg) mSm.log("handleMessage: E msg.what=" + msg.what);
/** Save the current message */
mMsg = msg;
/** State that processed the message */
State msgProcessedState = null;
if (mIsConstructionCompleted) {
/** Normal path */
msgProcessedState = processMsg(msg);
} else if (!mIsConstructionCompleted && (mMsg.what == SM_INIT_CMD)
&& (mMsg.obj == mSmHandlerObj)) {
/** Initial one time path. */
mIsConstructionCompleted = true;
invokeEnterMethods(0);
} else {
throw new RuntimeException("StateMachine.handleMessage: "
+ "The start method not called, received msg: " + msg);
}
performTransitions(msgProcessedState, msg);
// We need to check if mSm == null here as we could be quitting.
if (mDbg && mSm != null) mSm.log("handleMessage: X");
}
}
/**
* Do any transitions
* @param msgProcessedState is the state that processed the message
*/
private void performTransitions(State msgProcessedState, Message msg) {
/**
* If transitionTo has been called, exit and then enter
* the appropriate states. We loop on this to allow
* enter and exit methods to use transitionTo.
*/
State orgState = mStateStack[mStateStackTopIndex].state;
/**
 * Record whether the message needs to be logged before we transition;
 * we won't log the special messages SM_INIT_CMD or SM_QUIT_CMD, which
* always set msg.obj to the handler.
*/
boolean recordLogMsg = mSm.recordLogRec(mMsg) && (msg.obj != mSmHandlerObj);
if (mLogRecords.logOnlyTransitions()) {
/** Record only if there is a transition */
if (mDestState != null) {
mLogRecords.add(mSm, mMsg, mSm.getLogRecString(mMsg), msgProcessedState,
orgState, mDestState);
}
} else if (recordLogMsg) {
/** Record message */
mLogRecords.add(mSm, mMsg, mSm.getLogRecString(mMsg), msgProcessedState, orgState,
mDestState);
}
State destState = mDestState;
if (destState != null) {
/**
* Process the transitions including transitions in the enter/exit methods
*/
while (true) {
if (mDbg) mSm.log("handleMessage: new destination call exit/enter");
/**
* Determine the states to exit and enter and return the
* common ancestor state of the enter/exit states. Then
* invoke the exit methods then the enter methods.
*/
StateInfo commonStateInfo = setupTempStateStackWithStatesToEnter(destState);
invokeExitMethods(commonStateInfo);
int stateStackEnteringIndex = moveTempStateStackToStateStack();
invokeEnterMethods(stateStackEnteringIndex);
/**
* Since we have transitioned to a new state we need to have
* any deferred messages moved to the front of the message queue
* so they will be processed before any other messages in the
* message queue.
*/
moveDeferredMessageAtFrontOfQueue();
if (destState != mDestState) {
// A new mDestState so continue looping
destState = mDestState;
} else {
// No change in mDestState so we're done
break;
}
}
mDestState = null;
}
/**
* After processing all transitions check and
* see if the last transition was to quit or halt.
*/
if (destState != null) {
if (destState == mQuittingState) {
/**
* Call onQuitting to let subclasses cleanup.
*/
mSm.onQuitting();
cleanupAfterQuitting();
} else if (destState == mHaltingState) {
/**
* Call onHalting() if we've transitioned to the halting
 * state. All subsequent messages will be processed in
 * the halting state, which invokes haltedProcessMessage(msg);
*/
mSm.onHalting();
}
}
}
/**
* Cleanup all the static variables and the looper after the SM has been quit.
*/
private final void cleanupAfterQuitting() {
if (mSm.mSmThread != null) {
// If we made the thread then quit looper which stops the thread.
getLooper().quit();
mSm.mSmThread = null;
}
mSm.mSmHandler = null;
mSm = null;
mMsg = null;
mLogRecords.cleanup();
mStateStack = null;
mTempStateStack = null;
mStateInfo.clear();
mInitialState = null;
mDestState = null;
mDeferredMessages.clear();
mHasQuit = true;
}
/**
* Complete the construction of the state machine.
*/
private final void completeConstruction() {
if (mDbg) mSm.log("completeConstruction: E");
/**
* Determine the maximum depth of the state hierarchy
* so we can allocate the state stacks.
*/
int maxDepth = 0;
for (StateInfo si : mStateInfo.values()) {
int depth = 0;
for (StateInfo i = si; i != null; depth++) {
i = i.parentStateInfo;
}
if (maxDepth < depth) {
maxDepth = depth;
}
}
if (mDbg) mSm.log("completeConstruction: maxDepth=" + maxDepth);
mStateStack = new StateInfo[maxDepth];
mTempStateStack = new StateInfo[maxDepth];
setupInitialStateStack();
/** Sending SM_INIT_CMD message to invoke enter methods asynchronously */
sendMessageAtFrontOfQueue(obtainMessage(SM_INIT_CMD, mSmHandlerObj));
if (mDbg) mSm.log("completeConstruction: X");
}
/**
* Process the message. If the current state doesn't handle
* it, call the states parent and so on. If it is never handled then
* call the state machines unhandledMessage method.
* @return the state that processed the message
*/
private final State processMsg(Message msg) {
StateInfo curStateInfo = mStateStack[mStateStackTopIndex];
if (mDbg) {
mSm.log("processMsg: " + curStateInfo.state.getName());
}
if (isQuit(msg)) {
transitionTo(mQuittingState);
} else {
while (!curStateInfo.state.processMessage(msg)) {
/**
* Not processed
*/
curStateInfo = curStateInfo.parentStateInfo;
if (curStateInfo == null) {
/**
* No parents left so it's not handled
*/
mSm.unhandledMessage(msg);
break;
}
if (mDbg) {
mSm.log("processMsg: " + curStateInfo.state.getName());
}
}
}
return (curStateInfo != null) ? curStateInfo.state : null;
}
/**
* Call the exit method for each state from the top of stack
* up to the common ancestor state.
*/
private final void invokeExitMethods(StateInfo commonStateInfo) {
while ((mStateStackTopIndex >= 0)
&& (mStateStack[mStateStackTopIndex] != commonStateInfo)) {
State curState = mStateStack[mStateStackTopIndex].state;
if (mDbg) mSm.log("invokeExitMethods: " + curState.getName());
curState.exit();
mStateStack[mStateStackTopIndex].active = false;
mStateStackTopIndex -= 1;
}
}
/**
* Invoke the enter method starting at the entering index to top of state stack
*/
private final void invokeEnterMethods(int stateStackEnteringIndex) {
for (int i = stateStackEnteringIndex; i <= mStateStackTopIndex; i++) {
if (mDbg) mSm.log("invokeEnterMethods: " + mStateStack[i].state.getName());
mStateStack[i].state.enter();
mStateStack[i].active = true;
}
}
/**
* Move the deferred message to the front of the message queue.
*/
private final void moveDeferredMessageAtFrontOfQueue() {
/**
 * The oldest messages on the deferred list must end up at
 * the front of the queue, so start at the back (which has the
 * most recent message) and end with the oldest
 * messages at the front of the queue.
*/
for (int i = mDeferredMessages.size() - 1; i >= 0; i--) {
Message curMsg = mDeferredMessages.get(i);
if (mDbg) mSm.log("moveDeferredMessageAtFrontOfQueue; what=" + curMsg.what);
sendMessageAtFrontOfQueue(curMsg);
}
mDeferredMessages.clear();
}
/**
* Move the contents of the temporary stack to the state stack
* reversing the order of the items on the temporary stack as
* they are moved.
*
* @return index into mStateStack where entering needs to start
*/
private final int moveTempStateStackToStateStack() {
int startingIndex = mStateStackTopIndex + 1;
int i = mTempStateStackCount - 1;
int j = startingIndex;
while (i >= 0) {
if (mDbg) mSm.log("moveTempStackToStateStack: i=" + i + ",j=" + j);
mStateStack[j] = mTempStateStack[i];
j += 1;
i -= 1;
}
mStateStackTopIndex = j - 1;
if (mDbg) {
mSm.log("moveTempStackToStateStack: X mStateStackTop=" + mStateStackTopIndex
+ ",startingIndex=" + startingIndex + ",Top="
+ mStateStack[mStateStackTopIndex].state.getName());
}
return startingIndex;
}
/**
* Setup the mTempStateStack with the states we are going to enter.
*
* This is found by searching up the destState's ancestors for a
* state that is already active i.e. StateInfo.active == true.
 * The destState and all of its inactive parents will be on the
* TempStateStack as the list of states to enter.
*
* @return StateInfo of the common ancestor for the destState and
* current state or null if there is no common parent.
*/
private final StateInfo setupTempStateStackWithStatesToEnter(State destState) {
/**
* Search up the parent list of the destination state for an active
* state. Use a do while() loop as the destState must always be entered
* even if it is active. This can happen if we are exiting/entering
* the current state.
*/
mTempStateStackCount = 0;
StateInfo curStateInfo = mStateInfo.get(destState);
do {
mTempStateStack[mTempStateStackCount++] = curStateInfo;
curStateInfo = curStateInfo.parentStateInfo;
} while ((curStateInfo != null) && !curStateInfo.active);
if (mDbg) {
mSm.log("setupTempStateStackWithStatesToEnter: X mTempStateStackCount="
+ mTempStateStackCount + ",curStateInfo: " + curStateInfo);
}
return curStateInfo;
}
/**
* Initialize StateStack to mInitialState.
*/
private final void setupInitialStateStack() {
if (mDbg) {
mSm.log("setupInitialStateStack: E mInitialState=" + mInitialState.getName());
}
StateInfo curStateInfo = mStateInfo.get(mInitialState);
for (mTempStateStackCount = 0; curStateInfo != null; mTempStateStackCount++) {
mTempStateStack[mTempStateStackCount] = curStateInfo;
curStateInfo = curStateInfo.parentStateInfo;
}
// Empty the StateStack
mStateStackTopIndex = -1;
moveTempStateStackToStateStack();
}
/**
* @return current message
*/
private final Message getCurrentMessage() {
return mMsg;
}
/**
* @return current state
*/
private final IState getCurrentState() {
return mStateStack[mStateStackTopIndex].state;
}
/**
* Add a new state to the state machine. Bottom up addition
* of states is allowed but the same state may only exist
* in one hierarchy.
*
* @param state the state to add
* @param parent the parent of state
* @return stateInfo for this state
*/
private final StateInfo addState(State state, State parent) {
if (mDbg) {
mSm.log("addStateInternal: E state=" + state.getName() + ",parent="
+ ((parent == null) ? "" : parent.getName()));
}
StateInfo parentStateInfo = null;
if (parent != null) {
parentStateInfo = mStateInfo.get(parent);
if (parentStateInfo == null) {
// Recursively add our parent as it's not been added yet.
parentStateInfo = addState(parent, null);
}
}
StateInfo stateInfo = mStateInfo.get(state);
if (stateInfo == null) {
stateInfo = new StateInfo();
mStateInfo.put(state, stateInfo);
}
// Validate that we aren't adding the same state in two different hierarchies.
if ((stateInfo.parentStateInfo != null)
&& (stateInfo.parentStateInfo != parentStateInfo)) {
throw new RuntimeException("state already added");
}
stateInfo.state = state;
stateInfo.parentStateInfo = parentStateInfo;
stateInfo.active = false;
if (mDbg) mSm.log("addStateInternal: X stateInfo: " + stateInfo);
return stateInfo;
}
/**
* Constructor
*
* @param looper for dispatching messages
* @param sm the hierarchical state machine
*/
private SmHandler(Looper looper, StateMachine sm) {
super(looper);
mSm = sm;
addState(mHaltingState, null);
addState(mQuittingState, null);
}
/** @see StateMachine#setInitialState(State) */
private final void setInitialState(State initialState) {
if (mDbg) mSm.log("setInitialState: initialState=" + initialState.getName());
mInitialState = initialState;
}
/** @see StateMachine#transitionTo(IState) */
private final void transitionTo(IState destState) {
mDestState = (State) destState;
if (mDbg) mSm.log("transitionTo: destState=" + mDestState.getName());
}
/** @see StateMachine#deferMessage(Message) */
private final void deferMessage(Message msg) {
if (mDbg) mSm.log("deferMessage: msg=" + msg.what);
/* Copy the "msg" to "newMsg" as "msg" will be recycled */
Message newMsg = obtainMessage();
newMsg.copyFrom(msg);
mDeferredMessages.add(newMsg);
}
/** @see StateMachine#quit() */
private final void quit() {
if (mDbg) mSm.log("quit:");
sendMessage(obtainMessage(SM_QUIT_CMD, mSmHandlerObj));
}
/** @see StateMachine#quitNow() */
private final void quitNow() {
if (mDbg) mSm.log("quitNow:");
sendMessageAtFrontOfQueue(obtainMessage(SM_QUIT_CMD, mSmHandlerObj));
}
/** Validate that the message was sent by quit or quitNow. */
private final boolean isQuit(Message msg) {
return (msg.what == SM_QUIT_CMD) && (msg.obj == mSmHandlerObj);
}
/** @see StateMachine#isDbg() */
private final boolean isDbg() {
return mDbg;
}
/** @see StateMachine#setDbg(boolean) */
private final void setDbg(boolean dbg) {
mDbg = dbg;
}
}
private SmHandler mSmHandler;
private HandlerThread mSmThread;
/**
* Initialize.
*
* @param looper for this state machine
* @param name of the state machine
*/
private void initStateMachine(String name, Looper looper) {
mName = name;
mSmHandler = new SmHandler(looper, this);
}
/**
* Constructor creates a StateMachine with its own thread.
*
* @param name of the state machine
*/
protected StateMachine(String name) {
mSmThread = new HandlerThread(name);
mSmThread.start();
Looper looper = mSmThread.getLooper();
initStateMachine(name, looper);
}
/**
* Constructor creates a StateMachine using the looper.
*
* @param name of the state machine
*/
protected StateMachine(String name, Looper looper) {
initStateMachine(name, looper);
}
/**
* Constructor creates a StateMachine using the handler.
*
* @param name of the state machine
*/
protected StateMachine(String name, Handler handler) {
initStateMachine(name, handler.getLooper());
}
/**
* Add a new state to the state machine
* @param state the state to add
* @param parent the parent of state
*/
protected final void addState(State state, State parent) {
mSmHandler.addState(state, parent);
}
/**
* Add a new state to the state machine, parent will be null
* @param state to add
*/
protected final void addState(State state) {
mSmHandler.addState(state, null);
}
/**
* Set the initial state. This must be invoked before
 * any messages are sent to the state machine.
*
* @param initialState is the state which will receive the first message.
*/
protected final void setInitialState(State initialState) {
mSmHandler.setInitialState(initialState);
}
/**
* @return current message
*/
protected final Message getCurrentMessage() {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return null;
return smh.getCurrentMessage();
}
/**
* @return current state
*/
protected final IState getCurrentState() {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return null;
return smh.getCurrentState();
}
/**
* transition to destination state. Upon returning
* from processMessage the current state's exit will
* be executed and upon the next message arriving
* destState.enter will be invoked.
*
* this function can also be called inside the enter function of the
* previous transition target, but the behavior is undefined when it is
* called mid-way through a previous transition (for example, calling this
 * in the enter() routine of an intermediate node when the current transition
 * target is one of the node's descendants).
*
* @param destState will be the state that receives the next message.
*/
protected final void transitionTo(IState destState) {
mSmHandler.transitionTo(destState);
}
/**
* transition to halt state. Upon returning
* from processMessage we will exit all current
* states, execute the onHalting() method and then
* for all subsequent messages haltedProcessMessage
* will be called.
*/
protected final void transitionToHaltingState() {
mSmHandler.transitionTo(mSmHandler.mHaltingState);
}
/**
* Defer this message until next state transition.
* Upon transitioning all deferred messages will be
* placed on the queue and reprocessed in the original
 * order (i.e. in the next state the oldest messages will
* be processed first)
*
* @param msg is deferred until the next transition.
*/
protected final void deferMessage(Message msg) {
mSmHandler.deferMessage(msg);
}
/**
* Called when message wasn't handled
*
* @param msg that couldn't be handled.
*/
protected void unhandledMessage(Message msg) {
if (mSmHandler.mDbg) loge(" - unhandledMessage: msg.what=" + msg.what);
}
/**
* Called for any message that is received after
* transitionToHalting is called.
*/
protected void haltedProcessMessage(Message msg) {
}
/**
* This will be called once after handling a message that called
* transitionToHalting. All subsequent messages will invoke
* {@link StateMachine#haltedProcessMessage(Message)}
*/
protected void onHalting() {
}
/**
* This will be called once after a quit message that was NOT handled by
* the derived StateMachine. The StateMachine will stop and any subsequent messages will be
* ignored. In addition, if this StateMachine created the thread, the thread will
* be stopped after this method returns.
*/
protected void onQuitting() {
}
/**
* @return the name
*/
public final String getName() {
return mName;
}
/**
 * Set the number of log records to maintain and clear all current records.
 *
 * @param maxSize number of messages to maintain at any one time.
*/
public final void setLogRecSize(int maxSize) {
mSmHandler.mLogRecords.setSize(maxSize);
}
/**
* Set to log only messages that cause a state transition
*
* @param enable {@code true} to enable, {@code false} to disable
*/
public final void setLogOnlyTransitions(boolean enable) {
mSmHandler.mLogRecords.setLogOnlyTransitions(enable);
}
/**
* @return number of log records
*/
public final int getLogRecSize() {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return 0;
return smh.mLogRecords.size();
}
/**
* @return the total number of records processed
*/
public final int getLogRecCount() {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return 0;
return smh.mLogRecords.count();
}
/**
* @return a log record, or null if index is out of range
*/
public final LogRec getLogRec(int index) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return null;
return smh.mLogRecords.get(index);
}
/**
* @return a copy of LogRecs as a collection
*/
public final Collection<LogRec> copyLogRecs() {
Vector<LogRec> vlr = new Vector<LogRec>();
SmHandler smh = mSmHandler;
if (smh != null) {
for (LogRec lr : smh.mLogRecords.mLogRecVector) {
vlr.add(lr);
}
}
return vlr;
}
/**
* Add the string to LogRecords.
*
     * @param string the string to add to the log records.
*/
protected void addLogRec(String string) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.mLogRecords.add(this, smh.getCurrentMessage(), string, smh.getCurrentState(),
smh.mStateStack[smh.mStateStackTopIndex].state, smh.mDestState);
}
/**
* @return true if msg should be saved in the log, default is true.
*/
protected boolean recordLogRec(Message msg) {
return true;
}
/**
* Return a string to be logged by LogRec, default
* is an empty string. Override if additional information is desired.
*
* @param msg that was processed
* @return information to be logged as a String
*/
protected String getLogRecString(Message msg) {
return "";
}
/**
* @return the string for msg.what
*/
protected String getWhatToString(int what) {
return null;
}
/**
* @return Handler, maybe null if state machine has quit.
*/
public final Handler getHandler() {
return mSmHandler;
}
/**
     * Get a message and set Message.target to the state machine handler.
*
* Note: The handler can be null if the state machine has quit,
     * which means target will be null and may cause an AndroidRuntimeException
     * in MessageQueue#enqueueMessage if sent directly; if sent using
     * StateMachine#sendMessage the message will just be ignored.
*
* @return A Message object from the global pool
*/
public final Message obtainMessage() {
return Message.obtain(mSmHandler);
}
/**
     * Get a message, set Message.target to the state machine handler, and set what.
*
* Note: The handler can be null if the state machine has quit,
     * which means target will be null and may cause an AndroidRuntimeException
     * in MessageQueue#enqueueMessage if sent directly; if sent using
     * StateMachine#sendMessage the message will just be ignored.
*
     * @param what is assigned to Message.what.
* @return A Message object from the global pool
*/
public final Message obtainMessage(int what) {
return Message.obtain(mSmHandler, what);
}
/**
     * Get a message, set Message.target to the state machine handler, and set
     * what and obj.
*
* Note: The handler can be null if the state machine has quit,
     * which means target will be null and may cause an AndroidRuntimeException
     * in MessageQueue#enqueueMessage if sent directly; if sent using
     * StateMachine#sendMessage the message will just be ignored.
*
     * @param what is assigned to Message.what.
* @param obj is assigned to Message.obj.
* @return A Message object from the global pool
*/
public final Message obtainMessage(int what, Object obj) {
return Message.obtain(mSmHandler, what, obj);
}
/**
     * Get a message, set Message.target to the state machine handler, and set
     * what and arg1.
*
* Note: The handler can be null if the state machine has quit,
     * which means target will be null and may cause an AndroidRuntimeException
     * in MessageQueue#enqueueMessage if sent directly; if sent using
     * StateMachine#sendMessage the message will just be ignored.
*
* @param what is assigned to Message.what
* @param arg1 is assigned to Message.arg1
* @return A Message object from the global pool
*/
public final Message obtainMessage(int what, int arg1) {
// use this obtain so we don't match the obtain(h, what, Object) method
return Message.obtain(mSmHandler, what, arg1, 0);
}
/**
     * Get a message, set Message.target to the state machine handler, and set
     * what, arg1 and arg2.
*
* Note: The handler can be null if the state machine has quit,
     * which means target will be null and may cause an AndroidRuntimeException
     * in MessageQueue#enqueueMessage if sent directly; if sent using
     * StateMachine#sendMessage the message will just be ignored.
*
* @param what is assigned to Message.what
* @param arg1 is assigned to Message.arg1
* @param arg2 is assigned to Message.arg2
* @return A Message object from the global pool
*/
public final Message obtainMessage(int what, int arg1, int arg2) {
return Message.obtain(mSmHandler, what, arg1, arg2);
}
/**
     * Get a message, set Message.target to the state machine handler, and set
     * what, arg1, arg2 and obj.
*
* Note: The handler can be null if the state machine has quit,
     * which means target will be null and may cause an AndroidRuntimeException
     * in MessageQueue#enqueueMessage if sent directly; if sent using
     * StateMachine#sendMessage the message will just be ignored.
*
* @param what is assigned to Message.what
* @param arg1 is assigned to Message.arg1
* @param arg2 is assigned to Message.arg2
* @param obj is assigned to Message.obj
* @return A Message object from the global pool
*/
public final Message obtainMessage(int what, int arg1, int arg2, Object obj) {
return Message.obtain(mSmHandler, what, arg1, arg2, obj);
}
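    /*
     * Usage sketch (illustrative only): each obtainMessage() overload pairs
     * with sendMessage(), so the two calls below queue equivalent messages.
     * CMD_CONNECT and remoteAddress are hypothetical names.
     *
     * sm.sendMessage(sm.obtainMessage(CMD_CONNECT, remoteAddress));
     * sm.sendMessage(CMD_CONNECT, remoteAddress); // shorthand for the above
     */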
/**
* Enqueue a message to this state machine.
*
* Message is ignored if state machine has quit.
*/
public final void sendMessage(int what) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.sendMessage(obtainMessage(what));
}
/**
* Enqueue a message to this state machine.
*
* Message is ignored if state machine has quit.
*/
public final void sendMessage(int what, Object obj) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.sendMessage(obtainMessage(what, obj));
}
/**
* Enqueue a message to this state machine.
*
* Message is ignored if state machine has quit.
*/
public final void sendMessage(int what, int arg1) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.sendMessage(obtainMessage(what, arg1));
}
/**
* Enqueue a message to this state machine.
*
* Message is ignored if state machine has quit.
*/
public final void sendMessage(int what, int arg1, int arg2) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.sendMessage(obtainMessage(what, arg1, arg2));
}
/**
* Enqueue a message to this state machine.
*
* Message is ignored if state machine has quit.
*/
public final void sendMessage(int what, int arg1, int arg2, Object obj) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.sendMessage(obtainMessage(what, arg1, arg2, obj));
}
/**
* Enqueue a message to this state machine.
*
* Message is ignored if state machine has quit.
*/
public final void sendMessage(Message msg) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.sendMessage(msg);
}
/**
* Enqueue a message to this state machine after a delay.
*
* Message is ignored if state machine has quit.
*/
public final void sendMessageDelayed(int what, long delayMillis) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.sendMessageDelayed(obtainMessage(what), delayMillis);
}
/**
* Enqueue a message to this state machine after a delay.
*
* Message is ignored if state machine has quit.
*/
public final void sendMessageDelayed(int what, Object obj, long delayMillis) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.sendMessageDelayed(obtainMessage(what, obj), delayMillis);
}
/**
* Enqueue a message to this state machine after a delay.
*
* Message is ignored if state machine has quit.
*/
public final void sendMessageDelayed(int what, int arg1, long delayMillis) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.sendMessageDelayed(obtainMessage(what, arg1), delayMillis);
}
/**
* Enqueue a message to this state machine after a delay.
*
* Message is ignored if state machine has quit.
*/
public final void sendMessageDelayed(int what, int arg1, int arg2, long delayMillis) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.sendMessageDelayed(obtainMessage(what, arg1, arg2), delayMillis);
}
/**
* Enqueue a message to this state machine after a delay.
*
* Message is ignored if state machine has quit.
*/
public final void sendMessageDelayed(int what, int arg1, int arg2, Object obj,
long delayMillis) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.sendMessageDelayed(obtainMessage(what, arg1, arg2, obj), delayMillis);
}
/**
* Enqueue a message to this state machine after a delay.
*
* Message is ignored if state machine has quit.
*/
public final void sendMessageDelayed(Message msg, long delayMillis) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.sendMessageDelayed(msg, delayMillis);
}
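    /*
     * Illustrative timeout pattern (assumed names, not part of the original
     * class): a state schedules a delayed timeout on entry and cancels it
     * on exit using removeMessages() defined below.
     *
     * public void enter() { sendMessageDelayed(CMD_TIMEOUT, TIMEOUT_MS); }
     * public void exit()  { removeMessages(CMD_TIMEOUT); }
     */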
/**
* Enqueue a message to the front of the queue for this state machine.
* Protected, may only be called by instances of StateMachine.
*
* Message is ignored if state machine has quit.
*/
protected final void sendMessageAtFrontOfQueue(int what) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.sendMessageAtFrontOfQueue(obtainMessage(what));
}
/**
* Enqueue a message to the front of the queue for this state machine.
* Protected, may only be called by instances of StateMachine.
*
* Message is ignored if state machine has quit.
*/
protected final void sendMessageAtFrontOfQueue(int what, Object obj) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.sendMessageAtFrontOfQueue(obtainMessage(what, obj));
}
/**
* Enqueue a message to the front of the queue for this state machine.
* Protected, may only be called by instances of StateMachine.
*
* Message is ignored if state machine has quit.
*/
protected final void sendMessageAtFrontOfQueue(int what, int arg1) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.sendMessageAtFrontOfQueue(obtainMessage(what, arg1));
}
/**
* Enqueue a message to the front of the queue for this state machine.
* Protected, may only be called by instances of StateMachine.
*
* Message is ignored if state machine has quit.
*/
protected final void sendMessageAtFrontOfQueue(int what, int arg1, int arg2) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.sendMessageAtFrontOfQueue(obtainMessage(what, arg1, arg2));
}
/**
* Enqueue a message to the front of the queue for this state machine.
* Protected, may only be called by instances of StateMachine.
*
* Message is ignored if state machine has quit.
*/
protected final void sendMessageAtFrontOfQueue(int what, int arg1, int arg2, Object obj) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.sendMessageAtFrontOfQueue(obtainMessage(what, arg1, arg2, obj));
}
/**
* Enqueue a message to the front of the queue for this state machine.
* Protected, may only be called by instances of StateMachine.
*
* Message is ignored if state machine has quit.
*/
protected final void sendMessageAtFrontOfQueue(Message msg) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.sendMessageAtFrontOfQueue(msg);
}
/**
* Removes a message from the message queue.
* Protected, may only be called by instances of StateMachine.
*/
protected final void removeMessages(int what) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.removeMessages(what);
}
/**
* Validate that the message was sent by
* {@link StateMachine#quit} or {@link StateMachine#quitNow}.
* */
protected final boolean isQuit(Message msg) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return msg.what == SM_QUIT_CMD;
return smh.isQuit(msg);
}
/**
* Quit the state machine after all currently queued up messages are processed.
*/
protected final void quit() {
// mSmHandler can be null if the state machine is already stopped.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.quit();
}
/**
     * Quit the state machine immediately; all currently queued messages will be discarded.
*/
protected final void quitNow() {
// mSmHandler can be null if the state machine is already stopped.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.quitNow();
}
/**
* @return if debugging is enabled
*/
public boolean isDbg() {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return false;
return smh.isDbg();
}
/**
* Set debug enable/disabled.
*
* @param dbg is true to enable debugging.
*/
public void setDbg(boolean dbg) {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
smh.setDbg(dbg);
}
/**
* Start the state machine.
*/
public void start() {
// mSmHandler can be null if the state machine has quit.
SmHandler smh = mSmHandler;
if (smh == null) return;
/** Send the complete construction message */
smh.completeConstruction();
}
/**
* Dump the current state.
*
* @param fd
* @param pw
* @param args
*/
public void dump(FileDescriptor fd, PrintWriter pw, String[] args) {
pw.println(getName() + ":");
pw.println(" total records=" + getLogRecCount());
for (int i = 0; i < getLogRecSize(); i++) {
pw.printf(" rec[%d]: %s\n", i, getLogRec(i).toString());
pw.flush();
}
pw.println("curState=" + getCurrentState().getName());
}
/**
* Log with debug and add to the LogRecords.
*
* @param s is string log
*/
protected void logAndAddLogRec(String s) {
addLogRec(s);
log(s);
}
/**
* Log with debug
*
* @param s is string log
*/
protected void log(String s) {
Log.d(mName, s);
}
/**
* Log with debug attribute
*
* @param s is string log
*/
protected void logd(String s) {
Log.d(mName, s);
}
/**
* Log with verbose attribute
*
* @param s is string log
*/
protected void logv(String s) {
Log.v(mName, s);
}
/**
* Log with info attribute
*
* @param s is string log
*/
protected void logi(String s) {
Log.i(mName, s);
}
/**
* Log with warning attribute
*
* @param s is string log
*/
protected void logw(String s) {
Log.w(mName, s);
}
/**
* Log with error attribute
*
* @param s is string log
*/
protected void loge(String s) {
Log.e(mName, s);
}
/**
* Log with error attribute
*
* @param s is string log
* @param e is a Throwable which logs additional information.
*/
protected void loge(String s, Throwable e) {
Log.e(mName, s, e);
}
}<|fim▁end|> | * @param msg |
<|file_name|>StyleResolverState.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 1999 Lars Knoll ([email protected])
* Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Apple Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this library; see the file COPYING.LIB. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#include "sky/engine/core/css/resolver/StyleResolverState.h"
#include "sky/engine/core/animation/css/CSSAnimations.h"
#include "sky/engine/core/dom/Node.h"
#include "sky/engine/core/dom/NodeRenderStyle.h"
#include "sky/engine/core/frame/FrameHost.h"
namespace blink {
StyleResolverState::StyleResolverState(Document& document, Element* element, RenderStyle* parentStyle)
: m_elementContext(element ? ElementResolveContext(*element) : ElementResolveContext(document))
, m_document(document)
, m_style(nullptr)
, m_cssToLengthConversionData(0, document.renderView())
, m_parentStyle(parentStyle)
, m_lineHeightValue(nullptr)
, m_styleMap(*this, m_elementStyleResources)
{<|fim▁hole|> if (!parentStyle && m_elementContext.parentNode())
m_parentStyle = m_elementContext.parentNode()->renderStyle();
ASSERT(document.isActive());
m_elementStyleResources.setDeviceScaleFactor(document.frameHost()->deviceScaleFactor());
}
StyleResolverState::~StyleResolverState()
{
}
void StyleResolverState::setAnimationUpdate(PassOwnPtr<CSSAnimationUpdate> update)
{
m_animationUpdate = update;
}
PassOwnPtr<CSSAnimationUpdate> StyleResolverState::takeAnimationUpdate()
{
return m_animationUpdate.release();
}
} // namespace blink<|fim▁end|> | |
<|file_name|>IncTemplate.js<|end_file_name|><|fim▁begin|>'use strict';
var util = require('util');
var GtReq = require('../GtReq');
var BaseTemplate = require('./BaseTemplate');
function IncTemplate(options) {
BaseTemplate.call(this, options);
options = util._extend({
transmissionContent: '',
incAppId: ''
}, options);
util._extend(this, options);
}
util.inherits(IncTemplate, BaseTemplate);
IncTemplate.prototype.getActionChain = function() {
var actionChain1 = new GtReq.ActionChain({
actionId: 1,
type: GtReq.ActionChain.Type.Goto,
next: 10030
});
var appStartUp = new GtReq.AppStartUp({
android: '',
symbia: '',
ios: ''
});
    // Start the app
var actionChain2 = new GtReq.ActionChain({
actionId: 10030,
type: GtReq.ActionChain.Type.startapp,
appid: this.incAppId,
autostart: 1 === this.transmissionType,
appstartupid: appStartUp,
failedAction: 100,
next: 100
});
    // Finish
var actionChain3 = new GtReq.ActionChain({
actionId: 100,
type: GtReq.ActionChain.Type.eoa
});
var actionChains = [actionChain1, actionChain2, actionChain3];
return actionChains;
};
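// Usage sketch (illustrative only): the chain above runs Goto(1) ->
// startapp(10030) -> eoa(100). The app id below is a made-up placeholder.
//
//   var template = new IncTemplate({ incAppId: 'your-app-id' });
//   template.setTransmissionType(1).setTransmissionContent('payload');
//   var chain = template.getActionChain();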
IncTemplate.prototype.getTransmissionContent = function() {
return this.transmissionContent;
};
IncTemplate.prototype.getPushType = function() {
return 'TransmissionMsg';
};
/**
<|fim▁hole|>
 * Set transmission message type. 1: Start the app when the notification is received. 2: Do not start the app when the notification is received.
* @param transmissionType
*/
IncTemplate.prototype.setTransmissionType = function(transmissionType) {
this.transmissionType = transmissionType;
return this;
};
IncTemplate.prototype.setTransmissionContent = function(transmissionContent) {
this.transmissionContent = transmissionContent;
return this;
};
IncTemplate.prototype.setIncAppId = function(incAppId) {
this.incAppId = incAppId;
return this;
};
module.exports = IncTemplate;<|fim▁end|> | * Set transmission (pass-through) message type. 1: Start the app immediately upon receiving the notification. 2: Do not start the app upon receiving the notification.
|
<|file_name|>http_cache.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![deny(missing_docs)]
//! A memory cache implementing the logic specified in http://tools.ietf.org/html/rfc7234
//! and <http://tools.ietf.org/html/rfc7232>.
use fetch::methods::DoneChannel;
use hyper::header;
use hyper::header::ContentType;
use hyper::header::Headers;
use hyper::method::Method;
use hyper::status::StatusCode;
use hyper_serde::Serde;
use net_traits::{Metadata, FetchMetadata};
use net_traits::request::Request;
use net_traits::response::{HttpsState, Response, ResponseBody};
use servo_config::prefs::PREFS;
use servo_url::ServoUrl;
use std::collections::HashMap;
use std::str;
use std::sync::{Arc, Mutex};
use time;
use time::{Duration, Tm};
/// The key used to differentiate requests in the cache.
#[derive(Clone, Eq, Hash, PartialEq)]
pub struct CacheKey {
url: ServoUrl
}
impl CacheKey {
fn new(request: Request) -> CacheKey {
CacheKey {
url: request.current_url().clone()
}
}
fn from_servo_url(servo_url: &ServoUrl) -> CacheKey {
CacheKey {
url: servo_url.clone()
}
}
/// Retrieve the URL associated with this key
pub fn url(&self) -> ServoUrl {
self.url.clone()
}
}
/// A complete cached resource.
#[derive(Clone)]
struct CachedResource {
metadata: CachedMetadata,
request_headers: Arc<Mutex<Headers>>,
body: Arc<Mutex<ResponseBody>>,
location_url: Option<Result<ServoUrl, String>>,
https_state: HttpsState,
status: Option<StatusCode>,
raw_status: Option<(u16, Vec<u8>)>,
url_list: Vec<ServoUrl>,
expires: Duration,
last_validated: Tm
}
/// Metadata about a loaded resource, such as is obtained from HTTP headers.
#[derive(Clone)]
struct CachedMetadata {
/// Final URL after redirects.
pub final_url: ServoUrl,
/// MIME type / subtype.
pub content_type: Option<Serde<ContentType>>,
/// Character set.
pub charset: Option<String>,
/// Headers
pub headers: Arc<Mutex<Headers>>,
/// HTTP Status
pub status: Option<(u16, Vec<u8>)>
}
/// Wrapper around a cached response, including information on re-validation needs
pub struct CachedResponse {
/// The response constructed from the cached resource
pub response: Response,
/// The revalidation flag for the stored response
pub needs_validation: bool
}
/// A memory cache.
pub struct HttpCache {
/// cached responses.
entries: HashMap<CacheKey, Vec<CachedResource>>,
}
/// Determine if a given response is cacheable based on the initial metadata received.
/// Based on <https://tools.ietf.org/html/rfc7234#section-3>
fn response_is_cacheable(metadata: &Metadata) -> bool {
// TODO: if we determine that this cache should be considered shared:
// 1. check for absence of private response directive <https://tools.ietf.org/html/rfc7234#section-5.2.2.6>
// 2. check for absence of the Authorization header field.
let mut is_cacheable = false;
let headers = metadata.headers.as_ref().unwrap();
if headers.has::<header::Expires>() ||
headers.has::<header::LastModified>() ||
headers.has::<header::ETag>() {
is_cacheable = true;
}
if let Some(&header::CacheControl(ref directive)) = headers.get::<header::CacheControl>() {
for directive in directive.iter() {
match *directive {
header::CacheDirective::NoStore => return false,
header::CacheDirective::Public | header::CacheDirective::SMaxAge(_)
| header::CacheDirective::MaxAge(_) | header::CacheDirective::NoCache => is_cacheable = true,
_ => {},
}
}
}
if let Some(&header::Pragma::NoCache) = headers.get::<header::Pragma>() {
return false;
}
is_cacheable
}
/// Calculating Age
/// <https://tools.ietf.org/html/rfc7234#section-4.2.3>
fn calculate_response_age(response: &Response) -> Duration {
// TODO: follow the spec more closely (Date headers, request/response lag, ...)
if let Some(secs) = response.headers.get_raw("Age") {
let seconds_string = String::from_utf8_lossy(&secs[0]);
if let Ok(secs) = seconds_string.parse::<i64>() {
return Duration::seconds(secs);
}
}
Duration::seconds(0i64)
}
/// Determine the expiry date from relevant headers,
/// or uses a heuristic if none are present.
fn get_response_expiry(response: &Response) -> Duration {
// Calculating Freshness Lifetime <https://tools.ietf.org/html/rfc7234#section-4.2.1>
let age = calculate_response_age(&response);
if let Some(&header::CacheControl(ref directives)) = response.headers.get::<header::CacheControl>() {
let has_no_cache_directive = directives.iter().any(|directive| {
header::CacheDirective::NoCache == *directive
});
if has_no_cache_directive {
// Requires validation on first use.
return Duration::seconds(0i64);
} else {
for directive in directives {
match *directive {
header::CacheDirective::SMaxAge(secs) | header::CacheDirective::MaxAge(secs) => {
let max_age = Duration::seconds(secs as i64);
if max_age < age {
return Duration::seconds(0i64);
}
return max_age - age;
},
_ => (),
}
}
}
}
if let Some(&header::Expires(header::HttpDate(t))) = response.headers.get::<header::Expires>() {
// store the period of time from now until expiry
let desired = t.to_timespec();
let current = time::now().to_timespec();
if desired > current {
return desired - current;
} else {
return Duration::seconds(0i64);
}
} else {
if let Some(_) = response.headers.get_raw("Expires") {
// Malformed Expires header, shouldn't be used to construct a valid response.
return Duration::seconds(0i64);
}
}
// Calculating Heuristic Freshness
// <https://tools.ietf.org/html/rfc7234#section-4.2.2>
if let Some((ref code, _)) = response.raw_status {
// <https://tools.ietf.org/html/rfc7234#section-5.5.4>
// Since presently we do not generate a Warning header field with a 113 warn-code,
// 24 hours minus response age is the max for heuristic calculation.
let max_heuristic = Duration::hours(24) - age;
let heuristic_freshness = if let Some(&header::LastModified(header::HttpDate(t))) =
// If the response has a Last-Modified header field,
// caches are encouraged to use a heuristic expiration value
// that is no more than some fraction of the interval since that time.
response.headers.get::<header::LastModified>() {
let last_modified = t.to_timespec();
let current = time::now().to_timespec();
// A typical setting of this fraction might be 10%.
let raw_heuristic_calc = (current - last_modified) / 10;
let result = if raw_heuristic_calc < max_heuristic {
raw_heuristic_calc
} else {
max_heuristic
};
result
} else {
max_heuristic
};
match *code {
200 | 203 | 204 | 206 | 300 | 301 | 404 | 405 | 410 | 414 | 501 => {
// Status codes that are cacheable by default <https://tools.ietf.org/html/rfc7231#section-6.1>
return heuristic_freshness
},
_ => {
// Other status codes can only use heuristic freshness if the public cache directive is present.
if let Some(&header::CacheControl(ref directives)) = response.headers.get::<header::CacheControl>() {
let has_public_directive = directives.iter().any(|directive| {
header::CacheDirective::Public == *directive
});
if has_public_directive {
return heuristic_freshness;
}
}
},
}
}
// Requires validation upon first use as default.
Duration::seconds(0i64)
}
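// Worked example of the heuristic above (assumed values, not from the
// original source): a 200 response with no explicit expiry whose
// Last-Modified is 10 hours old gets (10h / 10) = 1h of freshness,
// since 1h is below the 24h-minus-age cap.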
/// Request Cache-Control Directives
/// <https://tools.ietf.org/html/rfc7234#section-5.2.1>
fn get_expiry_adjustment_from_request_headers(request: &Request, expires: Duration) -> Duration {
let directive_data = match request.headers.get_raw("cache-control") {
Some(data) => data,
None => return expires,
};
let directives_string = String::from_utf8_lossy(&directive_data[0]);
for directive in directives_string.split(",") {
let mut directive_info = directive.split("=");
match (directive_info.next(), directive_info.next()) {
(Some("max-stale"), Some(sec_str)) => {
if let Ok(secs) = sec_str.parse::<i64>() {
return expires + Duration::seconds(secs);
}
},
(Some("max-age"), Some(sec_str)) => {
if let Ok(secs) = sec_str.parse::<i64>() {
let max_age = Duration::seconds(secs);
if expires > max_age {
return Duration::min_value();
}
return expires - max_age;
}
},
(Some("min-fresh"), Some(sec_str)) => {
if let Ok(secs) = sec_str.parse::<i64>() {
let min_fresh = Duration::seconds(secs);
if expires < min_fresh {
return Duration::min_value();
}
return expires - min_fresh;
}
},
(Some("no-cache"), _) | (Some("no-store"), _) => return Duration::min_value(),
_ => {}
}
}
expires
}
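// Illustrative effect of the adjustment above (assumed numbers), for a
// stored response with 60s of freshness left:
//   "max-stale=30"  -> 90s (client tolerates staleness, expiry extended);
//   "max-age=30"    -> Duration::min_value() (60s > 30s, treated as expired);
//   "min-fresh=120" -> Duration::min_value() (60s < 120s, treated as expired).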
/// Create a CachedResponse from a request and a CachedResource.
fn create_cached_response(request: &Request, cached_resource: &CachedResource, cached_headers: &Headers)
-> CachedResponse {
let mut response = Response::new(cached_resource.metadata.final_url.clone());
response.headers = cached_headers.clone();
response.body = cached_resource.body.clone();
response.location_url = cached_resource.location_url.clone();
response.status = cached_resource.status.clone();
response.raw_status = cached_resource.raw_status.clone();
response.url_list = cached_resource.url_list.clone();
response.https_state = cached_resource.https_state.clone();
response.referrer = request.referrer.to_url().cloned();
response.referrer_policy = request.referrer_policy.clone();
let expires = cached_resource.expires;
let adjusted_expires = get_expiry_adjustment_from_request_headers(request, expires);
let now = Duration::seconds(time::now().to_timespec().sec);
let last_validated = Duration::seconds(cached_resource.last_validated.to_timespec().sec);
let time_since_validated = now - last_validated;
// TODO: take must-revalidate into account <https://tools.ietf.org/html/rfc7234#section-5.2.2.1>
// TODO: if this cache is to be considered shared, take proxy-revalidate into account
// <https://tools.ietf.org/html/rfc7234#section-5.2.2.7>
    let has_expired = adjusted_expires <= time_since_validated;
CachedResponse { response: response, needs_validation: has_expired }
}
/// Create a new resource, based on the bytes requested, and an existing resource,
/// with a status-code of 206.
fn create_resource_with_bytes_from_resource(bytes: &[u8], resource: &CachedResource)
-> CachedResource {
CachedResource {
metadata: resource.metadata.clone(),
request_headers: resource.request_headers.clone(),
body: Arc::new(Mutex::new(ResponseBody::Done(bytes.to_owned()))),
location_url: resource.location_url.clone(),
https_state: resource.https_state.clone(),
status: Some(StatusCode::PartialContent),
raw_status: Some((206, b"Partial Content".to_vec())),
url_list: resource.url_list.clone(),
expires: resource.expires.clone(),
last_validated: resource.last_validated.clone()
}
}
/// Support for range requests <https://tools.ietf.org/html/rfc7233>.
fn handle_range_request(request: &Request, candidates: Vec<&CachedResource>, range_spec: &[header::ByteRangeSpec])
-> Option<CachedResponse> {
let mut complete_cached_resources = candidates.iter().filter(|resource| {
match resource.raw_status {
Some((ref code, _)) => *code == 200,
None => false
}
});
let partial_cached_resources = candidates.iter().filter(|resource| {
match resource.raw_status {
Some((ref code, _)) => *code == 206,
None => false
}
});
match (range_spec.first().unwrap(), complete_cached_resources.next()) {
// TODO: take the full range spec into account.
// If we have a complete resource, take the request range from the body.
// When there isn't a complete resource available, we loop over cached partials,
// and see if any individual partial response can fulfill the current request for a bytes range.
// TODO: combine partials that in combination could satisfy the requested range?
// see <https://tools.ietf.org/html/rfc7233#section-4.3>.
// TODO: add support for complete and partial resources,
// whose body is in the ResponseBody::Receiving state.
(&header::ByteRangeSpec::FromTo(beginning, end), Some(ref complete_resource)) => {
if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() {
let b = beginning as usize;
let e = end as usize + 1;
let requested = body.get(b..e);
if let Some(bytes) = requested {
let new_resource = create_resource_with_bytes_from_resource(bytes, complete_resource);
let cached_headers = new_resource.metadata.headers.lock().unwrap();
let cached_response = create_cached_response(request, &new_resource, &*cached_headers);
return Some(cached_response);
}
}
},
(&header::ByteRangeSpec::FromTo(beginning, end), None) => {
for partial_resource in partial_cached_resources {
let headers = partial_resource.metadata.headers.lock().unwrap();
let content_range = headers.get::<header::ContentRange>();
let (res_beginning, res_end) = match content_range {
Some(&header::ContentRange(
header::ContentRangeSpec::Bytes {
range: Some((res_beginning, res_end)), .. })) => (res_beginning, res_end),
_ => continue,
};
                // The cached partial must fully cover the requested range.
                if res_beginning <= beginning && res_end >= end {
let resource_body = &*partial_resource.body.lock().unwrap();
let requested = match resource_body {
&ResponseBody::Done(ref body) => {
let b = beginning as usize - res_beginning as usize;
let e = end as usize - res_beginning as usize + 1;
body.get(b..e)
},
_ => continue,
};
if let Some(bytes) = requested {
let new_resource = create_resource_with_bytes_from_resource(&bytes, partial_resource);
let cached_response = create_cached_response(request, &new_resource, &*headers);
return Some(cached_response);
}
}
}
},
(&header::ByteRangeSpec::AllFrom(beginning), Some(ref complete_resource)) => {
if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() {
let b = beginning as usize;
let requested = body.get(b..);
if let Some(bytes) = requested {
let new_resource = create_resource_with_bytes_from_resource(bytes, complete_resource);
let cached_headers = new_resource.metadata.headers.lock().unwrap();
let cached_response = create_cached_response(request, &new_resource, &*cached_headers);
return Some(cached_response);
}
}
},
(&header::ByteRangeSpec::AllFrom(beginning), None) => {
for partial_resource in partial_cached_resources {
let headers = partial_resource.metadata.headers.lock().unwrap();
let content_range = headers.get::<header::ContentRange>();
let (res_beginning, res_end, total) = match content_range {
Some(&header::ContentRange(
header::ContentRangeSpec::Bytes {
range: Some((res_beginning, res_end)),
instance_length: Some(total) })) => (res_beginning, res_end, total),
_ => continue,
};
if res_beginning < beginning && res_end == total - 1 {
let resource_body = &*partial_resource.body.lock().unwrap();
let requested = match resource_body {
&ResponseBody::Done(ref body) => {
let from_byte = beginning as usize - res_beginning as usize;
body.get(from_byte..)
},
_ => continue,
};
if let Some(bytes) = requested {
let new_resource = create_resource_with_bytes_from_resource(&bytes, partial_resource);
let cached_response = create_cached_response(request, &new_resource, &*headers);
return Some(cached_response);
}
}
}
},
(&header::ByteRangeSpec::Last(offset), Some(ref complete_resource)) => {
if let ResponseBody::Done(ref body) = *complete_resource.body.lock().unwrap() {
let from_byte = body.len() - offset as usize;
let requested = body.get(from_byte..);
if let Some(bytes) = requested {
let new_resource = create_resource_with_bytes_from_resource(bytes, complete_resource);
let cached_headers = new_resource.metadata.headers.lock().unwrap();
let cached_response = create_cached_response(request, &new_resource, &*cached_headers);
return Some(cached_response);
}<|fim▁hole|> (&header::ByteRangeSpec::Last(offset), None) => {
for partial_resource in partial_cached_resources {
let headers = partial_resource.metadata.headers.lock().unwrap();
let content_range = headers.get::<header::ContentRange>();
let (res_beginning, res_end, total) = match content_range {
Some(&header::ContentRange(
header::ContentRangeSpec::Bytes {
range: Some((res_beginning, res_end)),
instance_length: Some(total) })) => (res_beginning, res_end, total),
_ => continue,
};
                // The cached partial must cover the requested suffix length.
                if total - res_beginning >= offset && total - res_end <= offset {
let resource_body = &*partial_resource.body.lock().unwrap();
let requested = match resource_body {
&ResponseBody::Done(ref body) => {
let from_byte = body.len() - offset as usize;
body.get(from_byte..)
},
_ => continue,
};
if let Some(bytes) = requested {
let new_resource = create_resource_with_bytes_from_resource(&bytes, partial_resource);
let cached_response = create_cached_response(request, &new_resource, &*headers);
return Some(cached_response);
}
}
}
}
}
None
}
impl HttpCache {
/// Create a new memory cache instance.
pub fn new() -> HttpCache {
HttpCache {
entries: HashMap::new()
}
}
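    // Typical flow (illustrative sketch; `request` and `response` are assumed
    // to be a completed GET fetch pair):
    //
    //   let mut cache = HttpCache::new();
    //   cache.store(&request, &response);
    //   if let Some(cached) = cache.construct_response(&request) {
    //       if cached.needs_validation {
    //           // send a conditional request, then call cache.refresh(...)
    //       }
    //   }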
/// Constructing Responses from Caches.
/// <https://tools.ietf.org/html/rfc7234#section-4>
pub fn construct_response(&self, request: &Request) -> Option<CachedResponse> {
// TODO: generate warning headers as appropriate <https://tools.ietf.org/html/rfc7234#section-5.5>
if request.method != Method::Get {
// Only Get requests are cached, avoid a url based match for others.
return None;
}
let entry_key = CacheKey::new(request.clone());
let resources = self.entries.get(&entry_key)?.clone();
let mut candidates = vec![];
for cached_resource in resources.iter() {
let mut can_be_constructed = true;
let cached_headers = cached_resource.metadata.headers.lock().unwrap();
let original_request_headers = cached_resource.request_headers.lock().unwrap();
if let Some(vary_data) = cached_headers.get_raw("Vary") {
// Calculating Secondary Keys with Vary <https://tools.ietf.org/html/rfc7234#section-4.1>
let vary_data_string = String::from_utf8_lossy(&vary_data[0]);
let vary_values = vary_data_string.split(",").map(|val| val.trim());
for vary_val in vary_values {
// For every header name found in the Vary header of the stored response.
if vary_val == "*" {
// A Vary header field-value of "*" always fails to match.
can_be_constructed = false;
break;
}
match request.headers.get_raw(vary_val) {
Some(header_data) => {
// If the header is present in the request.
let request_header_data_string = String::from_utf8_lossy(&header_data[0]);
if let Some(original_header_data) = original_request_headers.get_raw(vary_val) {
// Check that the value of the nominated header field,
// in the original request, matches the value in the current request.
let original_request_header_data_string =
String::from_utf8_lossy(&original_header_data[0]);
if original_request_header_data_string != request_header_data_string {
can_be_constructed = false;
break;
}
}
},
None => {
// If a header field is absent from a request,
// it can only match a stored response if those headers,
// were also absent in the original request.
can_be_constructed = original_request_headers.get_raw(vary_val).is_none();
},
}
if !can_be_constructed {
break;
}
}
}
if can_be_constructed {
candidates.push(cached_resource);
}
}
// Support for range requests
if let Some(&header::Range::Bytes(ref range_spec)) = request.headers.get::<header::Range>() {
return handle_range_request(request, candidates, &range_spec);
} else {
// Not a Range request.
if let Some(ref cached_resource) = candidates.first() {
// Returning the first response that can be constructed
// TODO: select the most appropriate one, using a known mechanism from a selecting header field,
// or using the Date header to return the most recent one.
let cached_headers = cached_resource.metadata.headers.lock().unwrap();
let cached_response = create_cached_response(request, cached_resource, &*cached_headers);
return Some(cached_response);
}
}
None
}
/// Freshening Stored Responses upon Validation.
/// <https://tools.ietf.org/html/rfc7234#section-4.3.4>
pub fn refresh(&mut self, request: &Request, response: Response, done_chan: &mut DoneChannel) -> Option<Response> {
assert!(response.status == Some(StatusCode::NotModified));
let entry_key = CacheKey::new(request.clone());
if let Some(cached_resources) = self.entries.get_mut(&entry_key) {
for cached_resource in cached_resources.iter_mut() {
let mut stored_headers = cached_resource.metadata.headers.lock().unwrap();
// Received a response with 304 status code, in response to a request that matches a cached resource.
// 1. update the headers of the cached resource.
// 2. return a response, constructed from the cached resource.
stored_headers.extend(response.headers.iter());
let mut constructed_response = Response::new(cached_resource.metadata.final_url.clone());
constructed_response.headers = stored_headers.clone();
constructed_response.body = cached_resource.body.clone();
constructed_response.status = cached_resource.status.clone();
constructed_response.https_state = cached_resource.https_state.clone();
constructed_response.referrer = request.referrer.to_url().cloned();
constructed_response.referrer_policy = request.referrer_policy.clone();
constructed_response.raw_status = cached_resource.raw_status.clone();
constructed_response.url_list = cached_resource.url_list.clone();
// done_chan will have been set to Some by http_network_fetch,
// set it back to None since the response returned here replaces the 304 one from the network.
*done_chan = None;
cached_resource.expires = get_response_expiry(&constructed_response);
return Some(constructed_response);
}
}
None
}
fn invalidate_for_url(&mut self, url: &ServoUrl) {
let entry_key = CacheKey::from_servo_url(url);
if let Some(cached_resources) = self.entries.get_mut(&entry_key) {
for cached_resource in cached_resources.iter_mut() {
cached_resource.expires = Duration::seconds(0i64);
}
}
}
/// Invalidation.
/// <https://tools.ietf.org/html/rfc7234#section-4.4>
pub fn invalidate(&mut self, request: &Request, response: &Response) {
if let Some(&header::Location(ref location)) = response.headers.get::<header::Location>() {
if let Ok(url) = request.current_url().join(location) {
self.invalidate_for_url(&url);
}
}
// TODO: update hyper to use typed getter.
if let Some(url_data) = response.headers.get_raw("Content-Location") {
if let Ok(content_location) = str::from_utf8(&url_data[0]) {
if let Ok(url) = request.current_url().join(content_location) {
self.invalidate_for_url(&url);
}
}
}
self.invalidate_for_url(&request.url());
}
/// Storing Responses in Caches.
/// <https://tools.ietf.org/html/rfc7234#section-3>
pub fn store(&mut self, request: &Request, response: &Response) {
if PREFS.get("network.http-cache.disabled").as_boolean().unwrap_or(false) {
return
}
if request.method != Method::Get {
// Only Get requests are cached.
return
}
let entry_key = CacheKey::new(request.clone());
let metadata = match response.metadata() {
Ok(FetchMetadata::Filtered {
filtered: _,
unsafe_: metadata }) |
Ok(FetchMetadata::Unfiltered(metadata)) => metadata,
_ => return,
};
if !response_is_cacheable(&metadata) {
return;
}
let expiry = get_response_expiry(&response);
let cacheable_metadata = CachedMetadata {
final_url: metadata.final_url,
content_type: metadata.content_type,
charset: metadata.charset,
status: metadata.status,
headers: Arc::new(Mutex::new(response.headers.clone()))
};
let entry_resource = CachedResource {
metadata: cacheable_metadata,
request_headers: Arc::new(Mutex::new(request.headers.clone())),
body: response.body.clone(),
location_url: response.location_url.clone(),
https_state: response.https_state.clone(),
status: response.status.clone(),
raw_status: response.raw_status.clone(),
url_list: response.url_list.clone(),
expires: expiry,
last_validated: time::now()
};
let entry = self.entries.entry(entry_key).or_insert(vec![]);
entry.push(entry_resource);
}
}<|fim▁end|> | }
}, |
<|file_name|>dom_media_list.rs<|end_file_name|><|fim▁begin|>// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use DOMObject;
use glib;
use glib::GString;
use glib::object::Cast;
use glib::object::IsA;<|fim▁hole|>use glib_sys;
use libc;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
use std::ptr;
use webkit2_webextension_sys;
glib_wrapper! {
pub struct DOMMediaList(Object<webkit2_webextension_sys::WebKitDOMMediaList, webkit2_webextension_sys::WebKitDOMMediaListClass, DOMMediaListClass>) @extends DOMObject;
match fn {
get_type => || webkit2_webextension_sys::webkit_dom_media_list_get_type(),
}
}
pub const NONE_DOM_MEDIA_LIST: Option<&DOMMediaList> = None;
pub trait DOMMediaListExt: 'static {
#[cfg_attr(feature = "v2_22", deprecated)]
fn append_medium(&self, newMedium: &str) -> Result<(), glib::Error>;
#[cfg_attr(feature = "v2_22", deprecated)]
fn delete_medium(&self, oldMedium: &str) -> Result<(), glib::Error>;
#[cfg_attr(feature = "v2_22", deprecated)]
fn get_length(&self) -> libc::c_ulong;
#[cfg_attr(feature = "v2_22", deprecated)]
fn get_media_text(&self) -> Option<GString>;
#[cfg_attr(feature = "v2_22", deprecated)]
fn item(&self, index: libc::c_ulong) -> Option<GString>;
#[cfg_attr(feature = "v2_22", deprecated)]
fn set_media_text(&self, value: &str) -> Result<(), glib::Error>;
fn connect_property_length_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
fn connect_property_media_text_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
}
impl<O: IsA<DOMMediaList>> DOMMediaListExt for O {
fn append_medium(&self, newMedium: &str) -> Result<(), glib::Error> {
unsafe {
let mut error = ptr::null_mut();
let _ = webkit2_webextension_sys::webkit_dom_media_list_append_medium(self.as_ref().to_glib_none().0, newMedium.to_glib_none().0, &mut error);
if error.is_null() { Ok(()) } else { Err(from_glib_full(error)) }
}
}
fn delete_medium(&self, oldMedium: &str) -> Result<(), glib::Error> {
unsafe {
let mut error = ptr::null_mut();
let _ = webkit2_webextension_sys::webkit_dom_media_list_delete_medium(self.as_ref().to_glib_none().0, oldMedium.to_glib_none().0, &mut error);
if error.is_null() { Ok(()) } else { Err(from_glib_full(error)) }
}
}
fn get_length(&self) -> libc::c_ulong {
unsafe {
webkit2_webextension_sys::webkit_dom_media_list_get_length(self.as_ref().to_glib_none().0)
}
}
fn get_media_text(&self) -> Option<GString> {
unsafe {
from_glib_full(webkit2_webextension_sys::webkit_dom_media_list_get_media_text(self.as_ref().to_glib_none().0))
}
}
fn item(&self, index: libc::c_ulong) -> Option<GString> {
unsafe {
from_glib_full(webkit2_webextension_sys::webkit_dom_media_list_item(self.as_ref().to_glib_none().0, index))
}
}
fn set_media_text(&self, value: &str) -> Result<(), glib::Error> {
unsafe {
let mut error = ptr::null_mut();
let _ = webkit2_webextension_sys::webkit_dom_media_list_set_media_text(self.as_ref().to_glib_none().0, value.to_glib_none().0, &mut error);
if error.is_null() { Ok(()) } else { Err(from_glib_full(error)) }
}
}
fn connect_property_length_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_length_trampoline<P, F: Fn(&P) + 'static>(this: *mut webkit2_webextension_sys::WebKitDOMMediaList, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer)
where P: IsA<DOMMediaList>
{
let f: &F = &*(f as *const F);
f(&DOMMediaList::from_glib_borrow(this).unsafe_cast())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(self.as_ptr() as *mut _, b"notify::length\0".as_ptr() as *const _,
Some(transmute(notify_length_trampoline::<Self, F> as usize)), Box_::into_raw(f))
}
}
fn connect_property_media_text_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_media_text_trampoline<P, F: Fn(&P) + 'static>(this: *mut webkit2_webextension_sys::WebKitDOMMediaList, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer)
where P: IsA<DOMMediaList>
{
let f: &F = &*(f as *const F);
f(&DOMMediaList::from_glib_borrow(this).unsafe_cast())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(self.as_ptr() as *mut _, b"notify::media-text\0".as_ptr() as *const _,
Some(transmute(notify_media_text_trampoline::<Self, F> as usize)), Box_::into_raw(f))
}
}
}
impl fmt::Display for DOMMediaList {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "DOMMediaList")
}
}<|fim▁end|> | use glib::signal::SignalHandlerId;
use glib::signal::connect_raw;
use glib::translate::*; |
<|file_name|>debug.py<|end_file_name|><|fim▁begin|>import os, sys, commands
def print_debug( msg, verbose ):
data_dir_root = os.environ.get('DATADIR')
debug_level = int(os.environ.get('DEBUGLEVEL'))
#print the message to debug log if debug variable is set
#add 'from debug import *' to header
# call with print_debug("my message",5)<|fim▁hole|> commands.getoutput('echo '+msg+' >> '+data_dir_root+'/debug.log')
return 1
return 0<|fim▁end|> | # outputs to Datadir/debug.log if the number above is > than the number in Datadir/debug.level
if int(verbose) < debug_level: |
<|file_name|>docker_test.go<|end_file_name|><|fim▁begin|>// Copyright 2014 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package api
import (
"fmt"
"os"
"strconv"
"testing"
"time"
info "github.com/Clever/cadvisor/info/v1"
"github.com/Clever/cadvisor/integration/framework"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// Sanity check the container by:
// - Checking that the specified alias is a valid one for this container.
// - Verifying that stats are not empty.
func sanityCheck(alias string, containerInfo info.ContainerInfo, t *testing.T) {
assert.Contains(t, containerInfo.Aliases, alias, "Alias %q should be in list of aliases %v", alias, containerInfo.Aliases)
assert.NotEmpty(t, containerInfo.Stats, "Expected container to have stats")
}
// Waits up to 5s for a container with the specified alias to appear.
func waitForContainer(alias string, fm framework.Framework) {
err := framework.RetryForDuration(func() error {
ret, err := fm.Cadvisor().Client().DockerContainer(alias, &info.ContainerInfoRequest{
NumStats: 1,
})
if err != nil {
return err
}
if len(ret.Stats) != 1 {
return fmt.Errorf("no stats returned for container %q", alias)
}
return nil
}, 5*time.Second)
require.NoError(fm.T(), err, "Timed out waiting for container %q to be available in cAdvisor: %v", alias, err)
}
// A Docker container in /docker/<ID>
func TestDockerContainerById(t *testing.T) {
fm := framework.New(t)
defer fm.Cleanup()
containerId := fm.Docker().RunPause()
// Wait for the container to show up.
waitForContainer(containerId, fm)
request := &info.ContainerInfoRequest{
NumStats: 1,
}
containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerId, request)
require.NoError(t, err)
sanityCheck(containerId, containerInfo, t)
}
// A Docker container in /docker/<name>
func TestDockerContainerByName(t *testing.T) {
fm := framework.New(t)
defer fm.Cleanup()
containerName := fmt.Sprintf("test-docker-container-by-name-%d", os.Getpid())
fm.Docker().Run(framework.DockerRunArgs{
Image: "kubernetes/pause",
Args: []string{"--name", containerName},
})
// Wait for the container to show up.
waitForContainer(containerName, fm)
request := &info.ContainerInfoRequest{
NumStats: 1,
}
containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerName, request)
require.NoError(t, err)
sanityCheck(containerName, containerInfo, t)
}
// Find the first container with the specified alias in containers.
func findContainer(alias string, containers []info.ContainerInfo, t *testing.T) info.ContainerInfo {
for _, cont := range containers {
for _, a := range cont.Aliases {
if alias == a {
return cont
}
}
}
t.Fatalf("Failed to find container %q in %+v", alias, containers)
return info.ContainerInfo{}
}
// All Docker containers through /docker
func TestGetAllDockerContainers(t *testing.T) {
fm := framework.New(t)
defer fm.Cleanup()
// Wait for the containers to show up.
containerId1 := fm.Docker().RunPause()
containerId2 := fm.Docker().RunPause()
waitForContainer(containerId1, fm)
waitForContainer(containerId2, fm)
request := &info.ContainerInfoRequest{
NumStats: 1,
}
containersInfo, err := fm.Cadvisor().Client().AllDockerContainers(request)
require.NoError(t, err)
if len(containersInfo) < 2 {
t.Fatalf("At least 2 Docker containers should exist, received %d: %+v", len(containersInfo), containersInfo)
}
sanityCheck(containerId1, findContainer(containerId1, containersInfo, t), t)
sanityCheck(containerId2, findContainer(containerId2, containersInfo, t), t)
}
// Check expected properties of a Docker container.
func TestBasicDockerContainer(t *testing.T) {
fm := framework.New(t)
defer fm.Cleanup()
containerName := fmt.Sprintf("test-basic-docker-container-%d", os.Getpid())
containerId := fm.Docker().Run(framework.DockerRunArgs{
Image: "kubernetes/pause",
Args: []string{
"--name", containerName,
},
})
// Wait for the container to show up.
waitForContainer(containerId, fm)
request := &info.ContainerInfoRequest{
NumStats: 1,
}
containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerId, request)
require.NoError(t, err)
	// Check that the container is known by both its name and ID.
sanityCheck(containerId, containerInfo, t)
sanityCheck(containerName, containerInfo, t)
assert.Empty(t, containerInfo.Subcontainers, "Should not have subcontainers")
assert.Len(t, containerInfo.Stats, 1, "Should have exactly one stat")
}
// TODO(vmarmol): Handle if CPU or memory is not isolated on this system.
// Check the ContainerSpec.
func TestDockerContainerSpec(t *testing.T) {
fm := framework.New(t)
defer fm.Cleanup()<|fim▁hole|> memoryLimit := uint64(1 << 30) // 1GB
containerId := fm.Docker().Run(framework.DockerRunArgs{
Image: "kubernetes/pause",
Args: []string{
"--cpu-shares", strconv.FormatUint(cpuShares, 10),
"--cpuset", cpuMask,
"--memory", strconv.FormatUint(memoryLimit, 10),
},
})
// Wait for the container to show up.
waitForContainer(containerId, fm)
request := &info.ContainerInfoRequest{
NumStats: 1,
}
containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerId, request)
require.NoError(t, err)
sanityCheck(containerId, containerInfo, t)
assert := assert.New(t)
assert.True(containerInfo.Spec.HasCpu, "CPU should be isolated")
assert.Equal(containerInfo.Spec.Cpu.Limit, cpuShares, "Container should have %d shares, has %d", cpuShares, containerInfo.Spec.Cpu.Limit)
assert.Equal(containerInfo.Spec.Cpu.Mask, cpuMask, "Cpu mask should be %q, but is %q", cpuMask, containerInfo.Spec.Cpu.Mask)
assert.True(containerInfo.Spec.HasMemory, "Memory should be isolated")
assert.Equal(containerInfo.Spec.Memory.Limit, memoryLimit, "Container should have memory limit of %d, has %d", memoryLimit, containerInfo.Spec.Memory.Limit)
assert.True(containerInfo.Spec.HasNetwork, "Network should be isolated")
assert.True(containerInfo.Spec.HasDiskIo, "Blkio should be isolated")
}
// Check the CPU ContainerStats.
func TestDockerContainerCpuStats(t *testing.T) {
fm := framework.New(t)
defer fm.Cleanup()
// Wait for the container to show up.
containerId := fm.Docker().RunBusybox("ping", "www.google.com")
waitForContainer(containerId, fm)
request := &info.ContainerInfoRequest{
NumStats: 1,
}
containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerId, request)
if err != nil {
t.Fatal(err)
}
sanityCheck(containerId, containerInfo, t)
// Checks for CpuStats.
checkCpuStats(t, containerInfo.Stats[0].Cpu)
}
// Check the memory ContainerStats.
func TestDockerContainerMemoryStats(t *testing.T) {
fm := framework.New(t)
defer fm.Cleanup()
// Wait for the container to show up.
containerId := fm.Docker().RunBusybox("ping", "www.google.com")
waitForContainer(containerId, fm)
request := &info.ContainerInfoRequest{
NumStats: 1,
}
containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerId, request)
require.NoError(t, err)
sanityCheck(containerId, containerInfo, t)
// Checks for MemoryStats.
checkMemoryStats(t, containerInfo.Stats[0].Memory)
}
// Check the network ContainerStats.
func TestDockerContainerNetworkStats(t *testing.T) {
fm := framework.New(t)
defer fm.Cleanup()
// Wait for the container to show up.
containerId := fm.Docker().RunBusybox("ping", "www.google.com")
waitForContainer(containerId, fm)
request := &info.ContainerInfoRequest{
NumStats: 1,
}
containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerId, request)
require.NoError(t, err)
sanityCheck(containerId, containerInfo, t)
// Checks for NetworkStats.
stat := containerInfo.Stats[0]
assert.NotEqual(t, 0, stat.Network.TxBytes, "Network tx bytes should not be zero")
assert.NotEqual(t, 0, stat.Network.TxPackets, "Network tx packets should not be zero")
// TODO(vmarmol): Can probably do a better test with two containers pinging each other.
}<|fim▁end|> |
cpuShares := uint64(2048)
cpuMask := "0" |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from .models import File, Folder
class ItemAdmin(admin.ModelAdmin):
def get_form(self, request, obj=None, **kwargs):
self.exclude = ("slug", )<|fim▁hole|>admin.site.register(File, ItemAdmin)
admin.site.register(Folder, ItemAdmin)<|fim▁end|> | form = super(ItemAdmin, self).get_form(request, obj, **kwargs)
return form
|
<|file_name|>metadata.test.js<|end_file_name|><|fim▁begin|>import test from "ava";
import fs from "fs";
import path from "path";
import rimraf from "rimraf";
import webpack from "webpack";
import PnpWebpackPlugin from "pnp-webpack-plugin";
import createTestDirectory from "./helpers/createTestDirectory";
const ReactIntlPlugin = require("react-intl-webpack-plugin");
const cacheDir = path.join(__dirname, "output/cache/cachefiles");
const outputDir = path.join(__dirname, "output/metadata");
const babelLoader = path.join(__dirname, "../lib");
const globalConfig = {
mode: "development",
entry: "./test/fixtures/metadata.js",
output: {
path: outputDir,
filename: "[id].metadata.js",
},
plugins: [new ReactIntlPlugin()],
resolve: {
plugins: [PnpWebpackPlugin],
},
module: {
rules: [
{
test: /\.jsx?/,
loader: babelLoader,
options: {
metadataSubscribers: [ReactIntlPlugin.metadataContextFunctionName],
plugins: ["react-intl"],
presets: [],
},
exclude: /node_modules/,
},
],
},
};
// Create a separate directory for each test so that the tests
// can run in parallel
test.beforeEach.cb(t => {
createTestDirectory(outputDir, t.title, (err, directory) => {
if (err) return t.end(err);
t.context.directory = directory;
t.end();
});
});
test.afterEach.cb(t => rimraf(t.context.directory, t.end));<|fim▁hole|> const config = Object.assign({}, globalConfig, {
output: {
path: t.context.directory,
filename: "[id].metadata.js",
},
});
webpack(config, (err, stats) => {
t.is(err, null);
t.deepEqual(stats.compilation.errors, []);
t.deepEqual(stats.compilation.warnings, []);
fs.readdir(t.context.directory, (err, files) => {
t.is(err, null);
t.true(files.length > 0);
fs.readFile(
path.resolve(t.context.directory, "reactIntlMessages.json"),
function (err, data) {
t.is(err, null);
const text = data.toString();
const jsonText = JSON.parse(text);
t.true(jsonText.length == 1);
t.true(jsonText[0].id == "greetingId");
t.true(jsonText[0].defaultMessage == "Hello World!");
t.end();
},
);
});
});
});
test.cb("should not throw error", t => {
const config = Object.assign({}, globalConfig, {
output: {
path: t.context.directory,
filename: "[id].metadata.js",
},
});
webpack(config, (err, stats) => {
t.is(err, null);
t.deepEqual(stats.compilation.errors, []);
t.deepEqual(stats.compilation.warnings, []);
t.end();
});
});
test.cb("should throw error", t => {
const config = Object.assign({}, globalConfig, {
output: {
path: t.context.directory,
filename: "[id].metadata.js",
},
entry: "./test/fixtures/metadataErr.js",
});
webpack(config, (err, stats) => {
t.is(err, null);
t.true(stats.compilation.errors.length > 0);
t.deepEqual(stats.compilation.warnings, []);
t.end();
});
});
test.cb("should pass metadata code snippet ( cache version )", t => {
const config = Object.assign({}, globalConfig, {
output: {
path: t.context.directory,
filename: "[id].metadata.js",
},
module: {
rules: [
{
test: /\.jsx?/,
loader: babelLoader,
options: {
metadataSubscribers: [ReactIntlPlugin.metadataContextFunctionName],
plugins: ["react-intl"],
cacheDirectory: cacheDir,
presets: [],
},
exclude: /node_modules/,
},
],
},
});
webpack(config, (err, stats) => {
t.is(err, null);
t.deepEqual(stats.compilation.errors, []);
t.deepEqual(stats.compilation.warnings, []);
fs.readdir(t.context.directory, (err, files) => {
t.is(err, null);
t.true(files.length > 0);
fs.readFile(
path.resolve(t.context.directory, "reactIntlMessages.json"),
function (err, data) {
t.is(err, null);
const text = data.toString();
const jsonText = JSON.parse(text);
t.true(jsonText.length == 1);
t.true(jsonText[0].id == "greetingId");
t.true(jsonText[0].defaultMessage == "Hello World!");
t.end();
},
);
});
});
});<|fim▁end|> |
test.cb("should pass metadata code snippet", t => { |
<|file_name|>tmp_animts.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from sqlalchemy import select, desc
import const
from ..sqlalchemy_table import SqlTable
class TmpAnimts(SqlTable):
def __init__(self, engine, table):
self.engine = engine
self.table = table
SqlTable.__init__(self, self.engine, self.table, (const.NAME_TAN,))
def add(self, name, start_date, over_date, state, provider_id):
values = {
const.NAME_TAN: name,
const.ST_DATE_TAN: start_date,<|fim▁hole|> const.OV_DATE_TAN: over_date,
const.STATE_TAN: state,
const.PROVIDER_ID_TAN: provider_id
}
return self.insert(values)[0]
# vim: set ts=4 sw=4 sts=4 et:<|fim▁end|> | |
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>import os
from django import forms
from pontoon.base.models import (
Locale,
ProjectLocale,
User,
UserProfile
)
from pontoon.sync.formats import SUPPORTED_FORMAT_PARSERS
class NoTabStopCharField(forms.CharField):
widget = forms.TextInput(attrs={'tabindex': '-1'})
class NoTabStopFileField(forms.FileField):
widget = forms.FileInput(attrs={'tabindex': '-1'})
class DownloadFileForm(forms.Form):
slug = NoTabStopCharField()
code = NoTabStopCharField()
part = NoTabStopCharField()
class UserPermissionGroupForm(object):
def assign_users_to_groups(self, group_name, users):
"""
        Clear the group's membership and assign the given set of users to it.
"""
group = getattr(self.instance, '{}_group'.format(group_name))
group.user_set.clear()
if users:
group.user_set.add(*users)
class LocalePermsForm(forms.ModelForm, UserPermissionGroupForm):
translators = forms.ModelMultipleChoiceField(queryset=User.objects.all(), required=False)
managers = forms.ModelMultipleChoiceField(queryset=User.objects.all(), required=False)
class Meta:
model = Locale
fields = ('translators', 'managers')
def save(self, *args, **kwargs):
self.assign_users_to_groups('translators', self.cleaned_data.get('translators', []))
self.assign_users_to_groups('managers', self.cleaned_data.get('managers', []))
class ProjectLocalePermsForm(forms.ModelForm, UserPermissionGroupForm):
translators = forms.ModelMultipleChoiceField(queryset=User.objects.all(), required=False)
class Meta:
model = ProjectLocale
fields = ('translators', 'has_custom_translators')
def save(self, *args, **kwargs):
super(ProjectLocalePermsForm, self).save(*args, **kwargs)
self.assign_users_to_groups('translators', self.cleaned_data.get('translators', []))
class ProjectLocaleFormSet(forms.models.BaseModelFormSet):
"""
    Formset updates only existing objects and doesn't allow creating new ones.
"""
@property
def errors_dict(self):
errors = {}
for form in self:
if form.errors:
errors[form.instance.pk] = form.errors
return errors
def save(self, commit=True):
self.new_objects = []
if commit:
for form in self:
if form.instance.pk and form.cleaned_data.get('has_custom_translators'):
form.save()
# We have to cleanup projects from translators
        # A list (not a generator) is required here: it is iterated twice in
        # the queries below and checked for emptiness first.
        without_translators = [
            form.instance.pk for form in self
            if form.instance.pk and not form.cleaned_data.get('has_custom_translators')
        ]
if not without_translators:
return
ProjectLocale.objects.filter(
pk__in=without_translators
).update(has_custom_translators=False)
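        # Bulk-delete the stale per-project translator group memberships in a
        # single query, rather than clearing each group object in a loop.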
User.groups.through.objects.filter(
group__projectlocales__pk__in=without_translators
).delete()
ProjectLocalePermsFormsSet = forms.modelformset_factory(
ProjectLocale,
ProjectLocalePermsForm,
formset=ProjectLocaleFormSet,
)
class UploadFileForm(DownloadFileForm):
uploadfile = NoTabStopFileField()
def clean(self):
cleaned_data = super(UploadFileForm, self).clean()
part = cleaned_data.get("part")
uploadfile = cleaned_data.get("uploadfile")
if uploadfile:
limit = 5000
# File size validation
if uploadfile.size > limit * 1000:
current = round(uploadfile.size/1000)
message = (
'Upload failed. Keep filesize under {limit} kB. Your upload: {current} kB.'
.format(limit=limit, current=current)
)
raise forms.ValidationError(message)
# File format validation
if part:
file_extension = os.path.splitext(uploadfile.name)[1].lower()
part_extension = os.path.splitext(part)[1].lower()
# For now, skip if uploading file while using subpages
if part_extension in SUPPORTED_FORMAT_PARSERS.keys() and part_extension != file_extension:
message = (
'Upload failed. File format not supported. Use {supported}.'
.format(supported=part_extension)
)
raise forms.ValidationError(message)
class UserProfileForm(forms.ModelForm):
first_name = forms.RegexField(regex='^[^<>"\'&]+$', max_length=30, strip=True)
<|fim▁hole|> class Meta:
model = User
fields = ('first_name',)
class UserLocalesSettings(forms.ModelForm):
"""
    Form responsible for saving a contributor's preferred locales.
"""
class Meta:
model = UserProfile
fields = ('locales_order',)<|fim▁end|> | |
<|file_name|>configparse.py<|end_file_name|><|fim▁begin|># Copyright (c) 2011 Tencent Inc.
# All rights reserved.
#
# Author: Michaelpeng <[email protected]>
# Date: January 09, 2012
"""
This is the configuration parse module which parses
the BLADE_ROOT as a configuration file.
"""<|fim▁hole|>from blade_util import var_to_list
from cc_targets import HEAP_CHECK_VALUES
from proto_library_target import ProtocPlugin
# Global config object
blade_config = None
def config_items(**kwargs):
"""Used in config functions for config file, to construct a appended
items dict, and then make syntax more pretty
"""
return kwargs
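# A minimal usage sketch for a BLADE_ROOT (the flag values are hypothetical,
# shown only to illustrate the config_items() helper):
#   cc_config(append=config_items(cppflags=['-DVERSION="1.0"']))
#   global_config(duplicated_source_action='error')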
class BladeConfig(object):
"""BladeConfig. A configuration parser class. """
def __init__(self, current_source_dir):
self.current_source_dir = current_source_dir
self.current_file_name = ''
self.configs = {
'global_config' : {
'build_path_template': 'build${m}_${profile}',
'duplicated_source_action': 'warning', # Can be 'warning', 'error', 'none'
'test_timeout': None,
},
'cc_test_config': {
'dynamic_link': False,
'heap_check': '',
'gperftools_libs': [],
'gperftools_debug_libs': [],
'gtest_libs': [],
'gtest_main_libs': [],
'pprof_path': '',
},
'cc_binary_config': {
'extra_libs': [],
'run_lib_paths' : [],
},
'distcc_config': {
'enabled': False
},
'link_config': {
'link_on_tmp': False,
'enable_dccc': False
},
'java_config': {
'version': '1.6',
'source_version': '',
'target_version': '',
'maven': 'mvn',
'maven_central': '',
'warnings':['-Werror', '-Xlint:all'],
'source_encoding': None,
'java_home':''
},
'java_binary_config': {
'one_jar_boot_jar' : '',
},
'java_test_config': {
'junit_libs' : [],
'jacoco_home' : '',
'coverage_reporter' : '',
},
'scala_config': {
'scala_home' : '',
'target_platform' : '',
'warnings' : '',
'source_encoding' : None,
},
'scala_test_config': {
'scalatest_libs' : '',
},
'go_config' : {
'go' : '',
'go_home' : '', # GOPATH
},
'thrift_config': {
'thrift': 'thrift',
'thrift_libs': [],
'thrift_incs': [],
},
'fbthrift_config': {
'fbthrift1': 'thrift1',
'fbthrift2': 'thrift2',
'fbthrift_libs': [],
'fbthrift_incs': [],
},
'proto_library_config': {
'protoc': 'thirdparty/protobuf/bin/protoc',
'protoc_java': '',
'protobuf_libs': [],
'protobuf_path': '',
'protobuf_incs': [],
'protobuf_php_path': '',
'protoc_php_plugin': '',
'protobuf_java_libs' : [],
'protoc_go_plugin': '',
# All the generated go source files will be placed
# into $GOPATH/src/protobuf_go_path
'protobuf_go_path': '',
},
'protoc_plugin_config' : {
},
'cc_config': {
'extra_incs': [],
'cppflags': [],
'cflags': [],
'cxxflags': [],
'linkflags': [],
'c_warnings': [],
'cxx_warnings': [],
'warnings': [],
'cpplint': 'cpplint.py',
'optimize': [],
'benchmark_libs': [],
'benchmark_main_libs': [],
'securecc' : None,
},
'cc_library_config': {
'generate_dynamic' : None,
# Options passed to ar/ranlib to control how
# the archive is created, such as, let ar operate
# in deterministic mode discarding timestamps
'arflags': [],
'ranlibflags': [],
}
}
def _try_parse_file(self, filename):
"""load the configuration file and parse. """
try:
self.current_file_name = filename
if os.path.exists(filename):
execfile(filename)
except SystemExit:
console.error_exit('Parse error in config file %s, exit...' % filename)
def parse(self):
"""load the configuration file and parse. """
self._try_parse_file(os.path.join(os.path.dirname(sys.argv[0]), 'blade.conf'))
self._try_parse_file(os.path.expanduser('~/.bladerc'))
self._try_parse_file(os.path.join(self.current_source_dir, 'BLADE_ROOT'))
def update_config(self, section_name, append, user_config):
"""update config section by name. """
config = self.configs.get(section_name, {})
if config:
if append:
self._append_config(section_name, config, append)
self._replace_config(section_name, config, user_config)
else:
console.error('%s: %s: unknown config section name' % (
self.current_file_name, section_name))
def _append_config(self, section_name, config, append):
"""Append config section items"""
if not isinstance(append, dict):
console.error('%s: %s: append must be a dict' %
(self.current_file_name, section_name))
else:
for k in append:
if k in config:
if isinstance(config[k], list):
config[k] += var_to_list(append[k])
else:
console.warning('%s: %s: config item %s is not a list' %
(self.current_file_name, section_name, k))
else:
console.warning('%s: %s: unknown config item name: %s' %
(self.current_file_name, section_name, k))
def _replace_config(self, section_name, config, user_config):
"""Replace config section items"""
unknown_keys = []
for k in user_config:
if k in config:
if isinstance(config[k], list):
user_config[k] = var_to_list(user_config[k])
else:
console.warning('%s: %s: unknown config item name: %s' %
(self.current_file_name, section_name, k))
unknown_keys.append(k)
for k in unknown_keys:
del user_config[k]
config.update(user_config)
def get_config(self, section_name):
"""get config section, returns default values if not set """
return self.configs.get(section_name, {})
def cc_test_config(append=None, **kwargs):
"""cc_test_config section. """
heap_check = kwargs.get('heap_check')
if heap_check is not None and heap_check not in HEAP_CHECK_VALUES:
console.error_exit('cc_test_config: heap_check can only be in %s' %
HEAP_CHECK_VALUES)
blade_config.update_config('cc_test_config', append, kwargs)
def cc_binary_config(append=None, **kwargs):
"""cc_binary_config section. """
blade_config.update_config('cc_binary_config', append, kwargs)
def cc_library_config(append=None, **kwargs):
"""cc_library_config section. """
blade_config.update_config('cc_library_config', append, kwargs)
__DUPLICATED_SOURCE_ACTION_VALUES = set(['warning', 'error', 'none', None])
def global_config(append=None, **kwargs):
"""global_config section. """
duplicated_source_action = kwargs.get('duplicated_source_action')
if duplicated_source_action not in __DUPLICATED_SOURCE_ACTION_VALUES:
console.error_exit('Invalid global_config.duplicated_source_action '
'value, can only be in %s' % __DUPLICATED_SOURCE_ACTION_VALUES)
blade_config.update_config('global_config', append, kwargs)
def distcc_config(append=None, **kwargs):
"""distcc_config. """
blade_config.update_config('distcc_config', append, kwargs)
def link_config(append=None, **kwargs):
"""link_config. """
blade_config.update_config('link_config', append, kwargs)
def java_config(append=None, **kwargs):
"""java_config. """
blade_config.update_config('java_config', append, kwargs)
def java_binary_config(append=None, **kwargs):
"""java_test_config. """
blade_config.update_config('java_binary_config', append, kwargs)
def java_test_config(append=None, **kwargs):
"""java_test_config. """
blade_config.update_config('java_test_config', append, kwargs)
def scala_config(append=None, **kwargs):
"""scala_config. """
blade_config.update_config('scala_config', append, kwargs)
def scala_test_config(append=None, **kwargs):
"""scala_test_config. """
blade_config.update_config('scala_test_config', append, kwargs)
def go_config(append=None, **kwargs):
"""go_config. """
blade_config.update_config('go_config', append, kwargs)
def proto_library_config(append=None, **kwargs):
"""protoc config. """
path = kwargs.get('protobuf_include_path')
if path:
console.warning(('%s: proto_library_config: protobuf_include_path has '
                        'been renamed to protobuf_incs, and becomes a list') %
blade_config.current_file_name)
del kwargs['protobuf_include_path']
if isinstance(path, basestring) and ' ' in path:
kwargs['protobuf_incs'] = path.split()
else:
kwargs['protobuf_incs'] = [path]
blade_config.update_config('proto_library_config', append, kwargs)
def protoc_plugin(**kwargs):
"""protoc_plugin. """
if 'name' not in kwargs:
console.error_exit("Missing 'name' in protoc_plugin parameters: %s" % kwargs)
config = blade_config.get_config('protoc_plugin_config')
config[kwargs['name']] = ProtocPlugin(**kwargs)
def thrift_library_config(append=None, **kwargs):
"""thrift config. """
blade_config.update_config('thrift_config', append, kwargs)
def fbthrift_library_config(append=None, **kwargs):
"""fbthrift config. """
blade_config.update_config('fbthrift_config', append, kwargs)
def cc_config(append=None, **kwargs):
"""extra cc config, like extra cpp include path splited by space. """
if 'extra_incs' in kwargs:
extra_incs = kwargs['extra_incs']
if isinstance(extra_incs, basestring) and ' ' in extra_incs:
console.warning('%s: cc_config: extra_incs has been changed to list' %
blade_config.current_file_name)
kwargs['extra_incs'] = extra_incs.split()
blade_config.update_config('cc_config', append, kwargs)<|fim▁end|> | import os
import sys
import console |
<|file_name|>test_rfc2314.py<|end_file_name|><|fim▁begin|>#
# This file is part of pyasn1-modules software.
#
# Copyright (c) 2005-2017, Ilya Etingof <[email protected]>
# License: http://pyasn1.sf.net/license.html
#
import sys
from pyasn1.codec.der import decoder as der_decoder
from pyasn1.codec.der import encoder as der_encoder
from pyasn1_modules import pem
from pyasn1_modules import rfc2314
try:
import unittest2 as unittest
except ImportError:
import unittest
class CertificationRequestTestCase(unittest.TestCase):
pem_text = """\
MIIDATCCAekCAQAwgZkxCzAJBgNVBAYTAlJVMRYwFAYDVQQIEw1Nb3Njb3cgUmVn
aW9uMQ8wDQYDVQQHEwZNb3Njb3cxGjAYBgNVBAoTEVNOTVAgTGFib3JhdG9yaWVz
MQwwCgYDVQQLFANSJkQxFTATBgNVBAMTDHNubXBsYWJzLmNvbTEgMB4GCSqGSIb3
DQEJARYRaW5mb0Bzbm1wbGFicy5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
ggEKAoIBAQC9n2NfGS98JDBmAXQn+vNUyPB3QPYC1cwpX8UMYh9MdAmBZJCnvXrQ
Pp14gNAv6AQKxefmGES1b+Yd+1we9HB8AKm1/8xvRDUjAvy4iO0sqFCPvIfSujUy
pBcfnR7QE2itvyrMxCDSEVnMhKdCNb23L2TptUmpvLcb8wfAMLFsSu2yaOtJysep
oH/mvGqlRv2ti2+E2YA0M7Pf83wyV1XmuEsc9tQ225rprDk2uyshUglkDD2235rf
0QyONq3Aw3BMrO9ss1qj7vdDhVHVsxHnTVbEgrxEWkq2GkVKh9QReMZ2AKxe40j4
og+OjKXguOCggCZHJyXKxccwqCaeCztbAgMBAAGgIjAgBgkqhkiG9w0BCQIxExMR
U05NUCBMYWJvcmF0b3JpZXMwDQYJKoZIhvcNAQEFBQADggEBAAihbwmN9M2bsNNm
9KfxqiGMqqcGCtzIlpDz/2NVwY93cEZsbz3Qscc0QpknRmyTSoDwIG+1nUH0vzkT
Nv8sBmp9I1GdhGg52DIaWwL4t9O5WUHgfHSJpPxZ/zMP2qIsdPJ+8o19BbXRlufc
73c03H1piGeb9VcePIaulSHI622xukI6f4Sis49vkDaoi+jadbEEb6TYkJQ3AMRD
WdApGGm0BePdLqboW1Yv70WRRFFD8sxeT7Yw4qrJojdnq0xMHPGfKpf6dJsqWkHk
b5DRbjil1Zt9pJuF680S9wtBzSi0hsMHXR9TzS7HpMjykL2nmCVY6A78MZapsCzn
GGbx7DI=
"""
def setUp(self):
self.asn1Spec = rfc2314.CertificationRequest()
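    # Round-trip check: decode the PEM-embedded DER substrate, then re-encode
    # and compare byte-for-byte; equality shows the ASN.1 spec models the
    # whole structure without loss.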
def testDerCodec(self):
substrate = pem.readBase64fromText(self.pem_text)
asn1Object, rest = der_decoder.decode(substrate, asn1Spec=self.asn1Spec)
assert not rest
assert asn1Object.prettyPrint()
assert der_encoder.encode(asn1Object) == substrate<|fim▁hole|>
suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__])
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite)<|fim▁end|> | |
<|file_name|>SurfaceCircle2.java<|end_file_name|><|fim▁begin|>/* Copyright (C) 2001, 2008 United States Government as represented by
the Administrator of the National Aeronautics and Space Administration.
All Rights Reserved.
*/
package gov.nasa.worldwind.render;
import gov.nasa.worldwind.geom.*;
import gov.nasa.worldwind.render.airspaces.*;
import javax.media.opengl.*;
/**
* @author brownrigg
* @version $Id: SurfaceCircle2.java 9230 2009-03-06 05:36:26Z dcollins $
*/
public class SurfaceCircle2 extends CappedCylinder
{
public SurfaceCircle2(LatLon location, double radius)
{
super(location, radius);
}
public SurfaceCircle2(AirspaceAttributes shapeAttributes)
{
super(shapeAttributes);
}
public SurfaceCircle2()
{
super();<|fim▁hole|> protected void doRenderGeometry(DrawContext dc, String drawStyle)
{
beginDrawShape(dc);
super.doRenderGeometry(dc, drawStyle);
endDrawShape(dc);
}
protected void beginDrawShape(DrawContext dc)
{
// Modify the projection transform to shift the depth values slightly toward the camera in order to
// ensure the shape is selected during depth buffering.
GL gl = dc.getGL();
float[] pm = new float[16];
gl.glGetFloatv(GL.GL_PROJECTION_MATRIX, pm, 0);
pm[10] *= .8; // TODO: See Lengyel 2 ed. Section 9.1.2 to compute optimal/minimal offset
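        // pm[10] is the depth-scale term of the column-major GL projection
        // matrix; shrinking it pulls projected depth values toward the viewer
        // so the flattened shape wins the depth test against coplanar terrain.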
gl.glPushAttrib(GL.GL_TRANSFORM_BIT);
gl.glMatrixMode(GL.GL_PROJECTION);
gl.glPushMatrix();
gl.glLoadMatrixf(pm, 0);
}
protected void endDrawShape(DrawContext dc)
{
GL gl = dc.getGL();
gl.glMatrixMode(GL.GL_PROJECTION);
gl.glPopMatrix();
gl.glPopAttrib();
}
}<|fim▁end|> | }
|
<|file_name|>dictionary_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from .. import models
class DictionaryOperations(object):
"""DictionaryOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.config = config
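    # Hypothetical usage sketch (the client class and attribute names below
    # are illustrative, not taken from this module):
    #   client = AutoRestSwaggerBATDictionaryService(base_url)
    #   tfft = client.dictionary.get_boolean_tfft()
    #   assert tfft == {'0': True, '1': False, '2': False, '3': True}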
def get_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get null dictionary value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/null'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{int}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_empty(
self, custom_headers=None, raw=False, **operation_config):
"""Get empty dictionary value {}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/empty'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{int}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_empty(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value empty {}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/empty'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{str}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_null_value(
self, custom_headers=None, raw=False, **operation_config):
"""Get Dictionary with null value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/nullvalue'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{str}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_null_key(
self, custom_headers=None, raw=False, **operation_config):
"""Get Dictionary with null key.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/nullkey'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{str}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_empty_string_key(
self, custom_headers=None, raw=False, **operation_config):
"""Get Dictionary with key as empty string.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/keyemptystring'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{str}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_invalid(
self, custom_headers=None, raw=False, **operation_config):
"""Get invalid Dictionary value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/invalid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{str}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_boolean_tfft(
self, custom_headers=None, raw=False, **operation_config):
"""Get boolean dictionary value {"0": true, "1": false, "2": false, "3":
true }.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/boolean/tfft'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{bool}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_boolean_tfft(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value empty {"0": true, "1": false, "2": false, "3":
true }.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/boolean/tfft'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{bool}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_boolean_invalid_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get boolean dictionary value {"0": true, "1": null, "2": false }.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/boolean/true.null.false'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{bool}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_boolean_invalid_string(
self, custom_headers=None, raw=False, **operation_config):
"""Get boolean dictionary value '{"0": true, "1": "boolean", "2": false}'.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/boolean/true.boolean.false'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{bool}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_integer_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get integer dictionary value {"0": 1, "1": -1, "2": 3, "3": 300}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/integer/1.-1.3.300'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{int}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_integer_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value empty {"0": 1, "1": -1, "2": 3, "3": 300}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/integer/1.-1.3.300'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{int}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_int_invalid_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get integer dictionary value {"0": 1, "1": null, "2": 0}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/integer/1.null.zero'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{int}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_int_invalid_string(
self, custom_headers=None, raw=False, **operation_config):
"""Get integer dictionary value {"0": 1, "1": "integer", "2": 0}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/integer/1.integer.0'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{int}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_long_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get integer dictionary value {"0": 1, "1": -1, "2": 3, "3": 300}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/long/1.-1.3.300'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{long}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_long_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value empty {"0": 1, "1": -1, "2": 3, "3": 300}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/long/1.-1.3.300'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{long}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_long_invalid_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get long dictionary value {"0": 1, "1": null, "2": 0}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/long/1.null.zero'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{long}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_long_invalid_string(
self, custom_headers=None, raw=False, **operation_config):
"""Get long dictionary value {"0": 1, "1": "integer", "2": 0}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/long/1.integer.0'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{long}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_float_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get float dictionary value {"0": 0, "1": -0.01, "2": 1.2e20}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/float/0--0.01-1.2e20'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{float}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_float_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value {"0": 0, "1": -0.01, "2": 1.2e20}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/float/0--0.01-1.2e20'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{float}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_float_invalid_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get float dictionary value {"0": 0.0, "1": null, "2": 1.2e20}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/float/0.0-null-1.2e20'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{float}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_float_invalid_string(
self, custom_headers=None, raw=False, **operation_config):
"""Get boolean dictionary value {"0": 1.0, "1": "number", "2": 0.0}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/float/1.number.0'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{float}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_double_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get float dictionary value {"0": 0, "1": -0.01, "2": 1.2e20}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/double/0--0.01-1.2e20'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{float}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_double_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value {"0": 0, "1": -0.01, "2": 1.2e20}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/double/0--0.01-1.2e20'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{float}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_double_invalid_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get float dictionary value {"0": 0.0, "1": null, "2": 1.2e20}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/double/0.0-null-1.2e20'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{float}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_double_invalid_string(
self, custom_headers=None, raw=False, **operation_config):
"""Get boolean dictionary value {"0": 1.0, "1": "number", "2": 0.0}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/double/1.number.0'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{float}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_string_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get string dictionary value {"0": "foo1", "1": "foo2", "2": "foo3"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/string/foo1.foo2.foo3'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{str}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
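    # Hedged usage sketch (not part of the generated client; the "dictionary"
    # operations-group attribute is assumed): round-trip the string endpoints.
    #   client.dictionary.put_string_valid(
    #       {"0": "foo1", "1": "foo2", "2": "foo3"})
    #   assert client.dictionary.get_string_valid() == {
    #       "0": "foo1", "1": "foo2", "2": "foo3"}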
def put_string_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value {"0": "foo1", "1": "foo2", "2": "foo3"}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/string/foo1.foo2.foo3'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{str}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_string_with_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get string dictionary value {"0": "foo", "1": null, "2": "foo2"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/string/foo.null.foo2'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{str}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_string_with_invalid(
self, custom_headers=None, raw=False, **operation_config):
"""Get string dictionary value {"0": "foo", "1": 123, "2": "foo2"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/string/foo.123.foo2'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{str}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_date_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get integer dictionary value {"0": "2000-12-01", "1": "1980-01-02",
"2": "1492-10-12"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/date/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{date}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_date_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value {"0": "2000-12-01", "1": "1980-01-02", "2":
"1492-10-12"}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the<|fim▁hole|> deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/date/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{date}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
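    # Hedged example body for put_date_valid (operations-group name assumed);
    # msrest serializes datetime.date values to ISO-8601 dates:
    #   import datetime
    #   client.dictionary.put_date_valid({
    #       "0": datetime.date(2000, 12, 1),
    #       "1": datetime.date(1980, 1, 2),
    #       "2": datetime.date(1492, 10, 12),
    #   })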
def get_date_invalid_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get date dictionary value {"0": "2012-01-01", "1": null, "2":
"1776-07-04"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/date/invalidnull'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{date}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_date_invalid_chars(
self, custom_headers=None, raw=False, **operation_config):
"""Get date dictionary value {"0": "2011-03-22", "1": "date"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/date/invalidchars'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{date}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_date_time_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get date-time dictionary value {"0": "2000-12-01t00:00:01z", "1":
"1980-01-02T00:11:35+01:00", "2": "1492-10-12T10:15:01-08:00"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/date-time/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{iso-8601}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_date_time_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value {"0": "2000-12-01t00:00:01z", "1":
"1980-01-02T00:11:35+01:00", "2": "1492-10-12T10:15:01-08:00"}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/date-time/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{iso-8601}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_date_time_invalid_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get date dictionary value {"0": "2000-12-01t00:00:01z", "1": null}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/date-time/invalidnull'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{iso-8601}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_date_time_invalid_chars(
self, custom_headers=None, raw=False, **operation_config):
"""Get date dictionary value {"0": "2000-12-01t00:00:01z", "1":
"date-time"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/date-time/invalidchars'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{iso-8601}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_date_time_rfc1123_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get date-time-rfc1123 dictionary value {"0": "Fri, 01 Dec 2000
00:00:01 GMT", "1": "Wed, 02 Jan 1980 00:11:35 GMT", "2": "Wed, 12
Oct 1492 10:15:01 GMT"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/date-time-rfc1123/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{rfc-1123}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_date_time_rfc1123_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value empty {"0": "Fri, 01 Dec 2000 00:00:01 GMT", "1":
"Wed, 02 Jan 1980 00:11:35 GMT", "2": "Wed, 12 Oct 1492 10:15:01
GMT"}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/date-time-rfc1123/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{rfc-1123}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_duration_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get duration dictionary value {"0": "P123DT22H14M12.011S", "1":
"P5DT1H0M0S"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/duration/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{duration}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_duration_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Set dictionary value {"0": "P123DT22H14M12.011S", "1": "P5DT1H0M0S"}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/duration/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{duration}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
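    # Hedged example body for put_duration_valid (operations-group name
    # assumed); msrest serializes datetime.timedelta to ISO-8601 durations
    # such as "P123DT22H14M12.011S":
    #   from datetime import timedelta
    #   client.dictionary.put_duration_valid({
    #       "0": timedelta(days=123, hours=22, minutes=14,
    #                      seconds=12, milliseconds=11),
    #       "1": timedelta(days=5, hours=1),
    #   })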
def get_byte_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get byte dictionary value {"0": hex(FF FF FF FA), "1": hex(01 02 03),
"2": hex (25, 29, 43)} with each item encoded in base64.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/byte/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{bytearray}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_byte_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Put the dictionary value {"0": hex(FF FF FF FA), "1": hex(01 02 03),
"2": hex (25, 29, 43)} with each elementencoded in base 64.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/byte/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{bytearray}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
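    # Hedged example body for put_byte_valid (operations-group name assumed);
    # the '{bytearray}' serializer base64-encodes each value, matching the hex
    # payloads in the docstring:
    #   client.dictionary.put_byte_valid({
    #       "0": bytearray([0xFF, 0xFF, 0xFF, 0xFA]),
    #       "1": bytearray([0x01, 0x02, 0x03]),
    #       "2": bytearray([0x25, 0x29, 0x43]),
    #   })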
def get_byte_invalid_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get byte dictionary value {"0": hex(FF FF FF FA), "1": null} with the
first item base64 encoded.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/byte/invalidnull'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{bytearray}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_base64_url(
self, custom_headers=None, raw=False, **operation_config):
"""Get base64url dictionary value {"0": "a string that gets encoded with
base64url", "1": "test string", "2": "Lorem ipsum"}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/prim/base64url/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{base64}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_complex_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get dictionary of complex type null value.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/complex/null'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{Widget}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_complex_empty(
self, custom_headers=None, raw=False, **operation_config):
"""Get empty dictionary of complex type {}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/complex/empty'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{Widget}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_complex_item_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get dictionary of complex type with null item {"0": {"integer": 1,
"string": "2"}, "1": null, "2": {"integer": 5, "string": "6"}}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/complex/itemnull'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{Widget}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_complex_item_empty(
self, custom_headers=None, raw=False, **operation_config):
"""Get dictionary of complex type with empty item {"0": {"integer": 1,
"string": "2"}, "1:" {}, "2": {"integer": 5, "string": "6"}}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/complex/itemempty'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{Widget}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_complex_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get dictionary of complex type with {"0": {"integer": 1, "string":
"2"}, "1": {"integer": 3, "string": "4"}, "2": {"integer": 5,
"string": "6"}}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/complex/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{Widget}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_complex_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Put an dictionary of complex type with values {"0": {"integer": 1,
"string": "2"}, "1": {"integer": 3, "string": "4"}, "2": {"integer":
5, "string": "6"}}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/complex/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{Widget}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
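    # Hedged example body for put_complex_valid, assuming the fixture's Widget
    # model exposes 'integer' and 'string' properties as the docstring implies:
    #   client.dictionary.put_complex_valid({
    #       "0": Widget(integer=1, string="2"),
    #       "1": Widget(integer=3, string="4"),
    #       "2": Widget(integer=5, string="6"),
    #   })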
def get_array_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get a null array.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/array/null'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{[str]}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_array_empty(
self, custom_headers=None, raw=False, **operation_config):
"""Get an empty dictionary {}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/array/empty'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{[str]}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_array_item_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get an dictionary of array of strings {"0": ["1", "2", "3"], "1":
null, "2": ["7", "8", "9"]}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/array/itemnull'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{[str]}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_array_item_empty(
self, custom_headers=None, raw=False, **operation_config):
"""Get an array of array of strings [{"0": ["1", "2", "3"], "1": [], "2":
["7", "8", "9"]}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/array/itemempty'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{[str]}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_array_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get an array of array of strings {"0": ["1", "2", "3"], "1": ["4",
"5", "6"], "2": ["7", "8", "9"]}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/array/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{[str]}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_array_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Put An array of array of strings {"0": ["1", "2", "3"], "1": ["4",
"5", "6"], "2": ["7", "8", "9"]}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/array/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{[str]}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def get_dictionary_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get an dictionaries of dictionaries with value null.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/dictionary/null'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{{str}}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_dictionary_empty(
self, custom_headers=None, raw=False, **operation_config):
"""Get an dictionaries of dictionaries of type <string, string> with
value {}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/dictionary/empty'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{{str}}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_dictionary_item_null(
self, custom_headers=None, raw=False, **operation_config):
"""Get an dictionaries of dictionaries of type <string, string> with
value {"0": {"1": "one", "2": "two", "3": "three"}, "1": null, "2":
{"7": "seven", "8": "eight", "9": "nine"}}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/dictionary/itemnull'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{{str}}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_dictionary_item_empty(
self, custom_headers=None, raw=False, **operation_config):
"""Get an dictionaries of dictionaries of type <string, string> with
value {"0": {"1": "one", "2": "two", "3": "three"}, "1": {}, "2":
{"7": "seven", "8": "eight", "9": "nine"}}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/dictionary/itemempty'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{{str}}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_dictionary_valid(
self, custom_headers=None, raw=False, **operation_config):
"""Get an dictionaries of dictionaries of type <string, string> with
value {"0": {"1": "one", "2": "two", "3": "three"}, "1": {"4":
"four", "5": "five", "6": "six"}, "2": {"7": "seven", "8": "eight",
"9": "nine"}}.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: dict
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/dictionary/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('{{str}}', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def put_dictionary_valid(
self, array_body, custom_headers=None, raw=False, **operation_config):
"""Get an dictionaries of dictionaries of type <string, string> with
value {"0": {"1": "one", "2": "two", "3": "three"}, "1": {"4":
"four", "5": "five", "6": "six"}, "2": {"7": "seven", "8": "eight",
"9": "nine"}}.
:param array_body:
:type array_body: dict
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<Fixtures.AcceptanceTestsBodyDictionary.models.ErrorException>`
"""
# Construct URL
url = '/dictionary/dictionary/valid'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct body
body_content = self._serialize.body(array_body, '{{str}}')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response<|fim▁end|> | |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright 2021 Anapaya Systems
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import pathlib
import subprocess
import time
from typing import List
import sys
from http import client
from plumbum import cli
from acceptance.common import base
from acceptance.common import docker
from acceptance.common import scion
from python.lib import scion_addr
import toml
logger = logging.getLogger(__name__)
class Test(base.TestBase):
"""
Test that in a topology with multiple ASes, every AS is capable of
requesting renewed certificates. The test verifies that each AS has loaded
the renewed certificate.
The test is split into multiple steps:
1. Start the topology.
2. For each AS in the topology, create a new private key and request
certificate chain renewal. The renewed chain is verified against the
TRC.
3. Remove the previous private key from the control servers.
4. Ensure that the new private key and certificate are loaded by observing
the http endpoint.
5. Check connectivity with an end to end test.
6. Stop all control servers and purge the state. This includes deleting
all databases with cached data, including the path and trust database.
7. Restart control servers and check connectivity again.
"""
end2end = cli.SwitchAttr(
"end2end_integration",
str,
default="./bin/end2end_integration",
help="The end2end_integration binary " +
"(default: ./bin/end2end_integration)",
)
def main(self):
if not self.nested_command:
try:
self.setup()
# Give some time for the topology to start.
time.sleep(10)
self._run()
finally:
self.teardown()
def _run(self):<|fim▁hole|> isd_ases = scion.ASList.load("%s/gen/as_list.yml" %
self.test_state.artifacts).all
cs_configs = self._cs_configs()
logger.info("==> Start renewal process")
for isd_as in isd_ases:
logging.info("===> Start renewal: %s" % isd_as)
self._renewal_request(isd_as)
logger.info("==> Check key and certificate reloads")
self._check_key_cert(cs_configs)
logger.info("==> Check connectivity")
subprocess.run(
[self.end2end, "-d", "-outDir", self.test_state.artifacts],
check=True)
logger.info("==> Shutting down control servers and purging caches")
for container in self.list_containers("scion_sd.*"):
self.test_state.dc("rm", container)
for container in self.list_containers("scion_cs.*"):
self.stop_container(container)
for cs_config in cs_configs:
files = list((pathlib.Path(self.test_state.artifacts) /
"gen-cache").glob("%s*" % cs_config.stem))
for db_file in files:
db_file.unlink()
logger.info("Deleted files: %s" % [file.name for file in files])
logger.info("==> Restart containers")
self.setup_start()
time.sleep(5)
logger.info("==> Check connectivity")
subprocess.run(
[self.end2end, "-d", "-outDir", self.test_state.artifacts],
check=True)
logger.info("==> Backup mode")
for isd_as in isd_ases:
logging.info("===> Start renewal: %s" % isd_as)
self._renewal_request(isd_as, mode="--backup")
def _renewal_request(
self,
isd_as: scion_addr.ISD_AS,
mode: str = "--force",
):
as_dir = self._to_as_dir(isd_as)
docker_dir = pathlib.Path("/share") / self._rel(as_dir)
def read_file(filename: str) -> str:
with open(as_dir / "crypto/as" / filename) as f:
return f.read()
chain_name = "ISD%s-AS%s.pem" % (isd_as.isd_str(),
isd_as.as_file_fmt())
old_chain = read_file(chain_name)
old_key = read_file("cp-as.key")
chain = docker_dir / "crypto/as" / chain_name
args = [
chain,
docker_dir / "crypto/as/cp-as.key",
mode,
"--trc",
docker_dir / "certs/ISD1-B1-S1.trc",
"--sciond",
self.execute("tester_%s" % isd_as.file_fmt(), "sh", "-c",
"echo $SCION_DAEMON").strip(),
*self._local_flags(isd_as),
]
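        # The assembled command resembles the following sketch (paths
        # illustrative; flags taken from the args list above):
        #   scion-pki certificate renew <chain.pem> <cp-as.key> --force \
        #       --trc <ISD1-B1-S1.trc> --sciond <addr> --local <addr>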
logger.info("Requesting certificate chain renewal: %s" %
chain.relative_to(docker_dir))
logger.info(
self.execute("tester_%s" % isd_as.file_fmt(), "./bin/scion-pki",
"certificate", "renew", *args))
logger.info("Verify renewed certificate chain")
verify_out = self.execute("tester_%s" % isd_as.file_fmt(),
"./bin/scion-pki", "certificate", "verify",
chain, "--trc",
"/share/gen/trcs/ISD1-B1-S1.trc")
logger.info(str(verify_out).rstrip("\n"))
renewed_chain = read_file(chain_name)
renewed_key = read_file("cp-as.key")
if renewed_chain == old_chain:
raise Exception(
"renewed chain does not differ from previous chain")
if renewed_key == old_key:
raise Exception("renewed key does not differ from previous key")
def _check_key_cert(self, cs_configs: List[pathlib.Path]):
not_ready = [*cs_configs]
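        # Poll up to 5 times, 3 seconds apart. The for/else below reaches the
        # error exit only if all attempts complete without breaking out.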
for _ in range(5):
logger.info(
"Checking if all control servers have reloaded the key and certificate..."
)
            # Iterate over a copy: entries are removed from not_ready inside
            # the loop body as servers become ready.
            for cs_config in list(not_ready):
conn = client.HTTPConnection(self._http_endpoint(cs_config))
conn.request("GET", "/signer")
resp = conn.getresponse()
if resp.status != 200:
logger.info("Unexpected response: %d %s", resp.status,
resp.reason)
continue
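                # Config stems look like "cs<isd>-<as>-<n>"; strip the "cs"
                # prefix and the "-<n>" suffix (assuming a single-digit
                # instance number) to recover the ISD-AS.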
isd_as = scion_addr.ISD_AS(cs_config.stem[2:-2])
as_dir = self._to_as_dir(isd_as)
chain_name = "ISD%s-AS%s.pem" % (isd_as.isd_str(),
isd_as.as_file_fmt())
pld = json.loads(resp.read().decode("utf-8"))
if pld["subject_key_id"] != self._extract_skid(
as_dir / "crypto/as" / chain_name):
continue
logger.info(
"Control server successfully loaded new key and certificate: %s"
% self._rel(cs_config))
not_ready.remove(cs_config)
if not not_ready:
break
time.sleep(3)
else:
logger.error(
"Control servers without reloaded key and certificate: %s" %
[cs_config.name for cs_config in not_ready])
sys.exit(1)
def _http_endpoint(self, cs_config: pathlib.Path):
with open(cs_config, "r") as f:
cfg = toml.load(f)
return cfg["metrics"]["prometheus"]
def _extract_skid(self, file: pathlib.Path):
out = subprocess.check_output(
['openssl', 'x509', '-in', file, '-noout', '-text'])
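        # Scan the openssl text output for a section of the form (abridged):
        #     X509v3 Subject Key Identifier:
        #         AB:CD:EF:...
        # and return the identifier bytes space-separated and upper-cased.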
lines = out.splitlines()
for i, v in enumerate(lines):
if v.decode("utf-8").find("Subject Key Identifier") > -1:
skid = lines[i + 1].decode("utf-8").split()[-1].replace(
":", " ").upper()
break
        else:
            raise ValueError("no Subject Key Identifier found in %s" % file)
        return skid
def _rel(self, path: pathlib.Path):
return path.relative_to(pathlib.Path(self.test_state.artifacts))
def _to_as_dir(self, isd_as: scion_addr.ISD_AS) -> pathlib.Path:
return pathlib.Path("%s/gen/AS%s" %
(self.test_state.artifacts, isd_as.as_file_fmt()))
def _cs_configs(self) -> List[pathlib.Path]:
return list(
pathlib.Path("%s/gen" %
self.test_state.artifacts).glob("AS*/cs*.toml"))
def _local_flags(self, isd_as: scion_addr.ISD_AS) -> List[str]:
return [
"--local",
self.execute("tester_%s" % isd_as.file_fmt(), "sh", "-c",
"echo $SCION_LOCAL_ADDR").strip(),
]
if __name__ == "__main__":
base.register_commands(Test)
base.TestBase.test_state = base.TestState(scion.SCIONDocker(),
docker.Compose())
Test.run()<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from . import series
from . import images
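# Configure a package-level console logger; invoked once at import time below.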
def _setup():
import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
formatter = logging.Formatter('[%(name)s] %(levelname)s %(message)s')
ch = logging.StreamHandler()
ch.setFormatter(formatter)<|fim▁hole|>__version__ = '1.1.1'<|fim▁end|> | logger.addHandler(ch)
_setup()
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
from argparse import ArgumentParser
from ansible_playbook_wrapper.command.play import PlayCommand
def main():
parser = ArgumentParser()
sub_parsers = parser.add_subparsers(help='commands')
play_parser = sub_parsers.add_parser('play', help='play playbook')
for arg_info in PlayCommand.ARGUMENT_INFO:
play_parser.add_argument(*(arg_info[0]), **(arg_info[1]))
play_parser.set_defaults(command_class=PlayCommand)
parsed_args = parser.parse_args()
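    # Dispatch to the selected subcommand: e.g. `<prog> play <args>`
    # instantiates PlayCommand with the parsed args and runs it (the exact
    # flag set comes from PlayCommand.ARGUMENT_INFO, which is not shown here).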
parsed_args.command_class(parsed_args).run()<|fim▁end|> | # -*- coding: utf-8 -*- |
<|file_name|>wcst.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Programa baseado no Teste Wisconsin
Autor: Neno Henrique Albernaz
Criado em Junho de 2008"""
########################################################################
class Carta:
    # Definition of the items on a card.
_numeros = [(u"Um", u"Uma"),
(u"Dois", u"Duas"),
(u"Três", u"Três"),
(u"Quatro", u"Quatro")]
_formas = [(u"Triângulo", u"Triângulos", "triangulo"),
(u"Estrela", u"Estrelas", "estrela"),
(u"Cruz", u"Cruzes", "cruz"),
(u"Círculo", u"Círculos", "circulo")]
_cores = [((u"Vermelho", u"Vermelha"), (u"Vermelhos", u"Vermelhas"), "vermelho"),
((u"Verde", u"Verde"), (u"Verdes", u"Verdes"), "verde"),
((u"Amarelo", u"Amarela"), (u"Amarelos", u"Amarelas"), "amarelo"),
((u"Azul", u"Azul"), (u"Azuis", u"Azuis"), "azul")]
_atributos = dict(numero='num', forma='img', cor='color')
def __init__(self, numero, forma, cor):
        # Portuguese agreement: formas 1 (estrela) and 2 (cruz) are feminine.
        genero = 1 if forma in (1, 2) else 0
        # Use the singular wording only when there is exactly one symbol.
        num = 0 if numero == 1 else 1
self.numero = self._numeros[numero-1][genero]
self.forma = self._formas[forma][num]
self.cor = self._cores[cor][num][genero]
self.num = numero
self.img = u"/static/plugins/wisconsin/images/%s.png" % self._formas[forma][2]
self.color = self._cores[cor][2]
def pegaAtributosCarta(self):
return u"%s %s %s" % (self.numero, self.forma, self.cor)
def testaMesmaCategoria(self, outra_carta, categoria):
"""Testa se as cartas são iguais na categoria.
Categoria pode ter atribuido três valores: numero, forma ou cor."""
return (getattr(self, self._atributos[categoria]) ==
getattr(outra_carta, self._atributos[categoria]))
def testaTudoDiferente(self, outra_carta):
"""Testa se as cartas são diferentes em todas as categorias."""
for categoria in self._atributos:
if self.testaMesmaCategoria(outra_carta, categoria):
return False
return True
########################################################################
def criaListaEstimulo():
"""Cria a lista de cartas estimulo. O conteudo de cada item da lista é uma carta."""
return [Carta(1, 0, 0),
Carta(2, 1, 1),
Carta(3, 2, 2),
Carta(4, 3, 3)]
########################################################################
def criaListaResposta():
"""Cria a lista de cartas resposta. O conteudo de cada item da lista é uma carta."""
lista = ['101', '420', '203', '130', '411', '122', '403', '330', '421', '232',
'113', '300', '223', '112', '301', '433', '210', '332', '400', '132',
'213', '321', '212', '303', '410', '202', '323', '430', '211', '120',
'431', '110', '333', '422', '111', '402', '233', '312', '131', '423',
'100', '313', '432', '201', '310', '222', '133', '302', '221', '412',
'103', '311', '230', '401', '123', '331', '220', '102', '320', '231',
'423', '322', '200', '122']
for indice in range(len(lista)):
lista[indice] = Carta(int(lista[indice][0]),
int(lista[indice][1]),
int(lista[indice][2]))
return 2 * lista
########################################################################
def criaListaCategorias():
"""Cria a lista de categorias.
Cria a lista com as três categorias: cor, forma e numero. Repete
devido ao teste passar duas vezes nas categorias."""
return ["cor", "forma", "numero", "cor", "forma", "numero"]
########################################################################
def instrucoes_teste():
"""Imprimi na tela as instruções do teste. """
return u"""Este é um teste um pouco diferente, porque eu não posso lhe
dizer muito a respeito do que fazer. Você vai ser solicitado a associar
cada uma das cartas que vou te dar com uma dessas quatro cartas-chave
mostradas na tela. Sempre selecione o link da carta-chave que você achar
que combine com a carta que vou te dar. Eu não posso lhe dizer como
associar as cartas, mas lhe direi, cada vez, se você está certo ou errado.
Não há limite de tempo neste teste. Está Pronto? Vamos começar."""
<|fim▁hole|>########################################################################
# Initialize the variables.
listaCartasResposta = criaListaResposta()
listaCartasEstimulo = criaListaEstimulo()
listaCategorias = criaListaCategorias()
numCartasResposta = 64<|fim▁end|> | |
<|file_name|>datepicker-zh-TW.js<|end_file_name|><|fim▁begin|>/* Chinese initialisation for the jQuery UI date picker plugin. */
/* Written by Ressol ([email protected]). */
(function (factory) {
// AMD. Register as an anonymous module.
<|fim▁hole|> datepicker.regional['zh-TW'] = {
closeText: '\u95DC\u9589',
prevText: '<\u4E0A\u6708',
nextText: '\u4E0B\u6708>',
currentText: '\u4ECA\u5929',
monthNames: [
'\u4E00\u6708',
'\u4E8C\u6708',
'\u4E09\u6708',
'\u56DB\u6708',
'\u4E94\u6708',
'\u516D\u6708',
'\u4E03\u6708',
'\u516B\u6708',
'\u4E5D\u6708',
'\u5341\u6708',
'\u5341\u4E00\u6708',
'\u5341\u4E8C\u6708'
],
monthNamesShort: [
'\u4E00\u6708',
'\u4E8C\u6708',
'\u4E09\u6708',
'\u56DB\u6708',
'\u4E94\u6708',
'\u516D\u6708',
'\u4E03\u6708',
'\u516B\u6708',
'\u4E5D\u6708',
'\u5341\u6708',
'\u5341\u4E00\u6708',
'\u5341\u4E8C\u6708'
],
dayNames: [
'\u661F\u671F\u65E5',
'\u661F\u671F\u4E00',
'\u661F\u671F\u4E8C',
'\u661F\u671F\u4E09',
'\u661F\u671F\u56DB',
'\u661F\u671F\u4E94',
'\u661F\u671F\u516D'
],
dayNamesShort: [
'\u5468\u65E5',
'\u5468\u4E00',
'\u5468\u4E8C',
'\u5468\u4E09',
'\u5468\u56DB',
'\u5468\u4E94',
'\u5468\u516D'
],
dayNamesMin: [
'\u65E5',
'\u4E00',
'\u4E8C',
'\u4E09',
'\u56DB',
'\u4E94',
'\u516D'
],
weekHeader: '\u5468',
dateFormat: 'yy/mm/dd',
firstDay: 1,
isRTL: false,
showMonthAfterYear: true,
yearSuffix: '\u5E74'
};
datepicker.setDefaults(datepicker.regional['zh-TW']);
return datepicker.regional['zh-TW'];
}));<|fim▁end|> | module.exports = factory(require('../datepicker'));;
}(function (datepicker) { |